diff --git a/.github/workflows/eval.yml b/.github/workflows/eval.yml deleted file mode 100644 index b5da9e7b7c8e293fb565f4de269a1ae266c19692..0000000000000000000000000000000000000000 --- a/.github/workflows/eval.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Run Agent Eval - -on: - schedule: - - cron: "0 0 * * *" - - pull_request: - branches: - - "**" - types: [synchronize, reopened, labeled] - - workflow_dispatch: - -concurrency: - # Allow only one workflow per any non-`main` branch. - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUST_BACKTRACE: 1 - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_EVAL_TELEMETRY: 1 - -jobs: - run_eval: - timeout-minutes: 60 - name: Run Agent Eval - if: > - github.repository_owner == 'zed-industries' && - (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - run: ./script/linux - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Compile eval - run: cargo build --package=eval - - - name: Run eval - run: cargo run --package=eval -- --repetitions=8 --concurrency=1 - - # Even the Linux runner is not stateful, in theory there is no need to do this cleanup. 
- # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code - # to clean up the config file, I’ve included the cleanup code here as a precaution. - # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution. - - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 56ea875d5cefec240d554984a8c0e1c1992a574c..f047f3064a6828d6121169f7f9cca189d5dbe73f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,10 +3,7 @@ name: release env: CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} on: push: tags: @@ -66,6 +63,10 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: steps::setup_node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: @@ -77,7 +78,7 @@ jobs: run: cargo install cargo-nextest --locked shell: bash -euxo pipefail {0} - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 100 + run: ./script/clear-target-dir-if-larger-than 250 shell: bash -euxo pipefail {0} - name: steps::cargo_nextest run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final @@ -177,11 +178,15 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} timeout-minutes: 60 - bundle_linux_arm64: + bundle_linux_aarch64: needs: - run_tests_linux - check_scripts runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 + env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} 
steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -200,27 +205,28 @@ jobs: - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact zed-linux-aarch64.tar.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - path: target/release/zed-*.tar.gz + name: zed-linux-aarch64.tar.gz + path: target/release/zed-linux-aarch64.tar.gz if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - path: target/zed-remote-server-*.gz + name: zed-remote-server-linux-aarch64.gz + path: target/zed-remote-server-linux-aarch64.gz if-no-files-found: error - outputs: - zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz timeout-minutes: 60 bundle_linux_x86_64: needs: - run_tests_linux - check_scripts runs-on: namespace-profile-32x64-ubuntu-2004 + env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -239,28 +245,28 @@ jobs: - name: ./script/bundle-linux run: 
./script/bundle-linux shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact zed-linux-x86_64.tar.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - path: target/release/zed-*.tar.gz + name: zed-linux-x86_64.tar.gz + path: target/release/zed-linux-x86_64.tar.gz if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - path: target/zed-remote-server-*.gz + name: zed-remote-server-linux-x86_64.gz + path: target/zed-remote-server-linux-x86_64.gz if-no-files-found: error - outputs: - zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz timeout-minutes: 60 - bundle_mac_arm64: + bundle_mac_aarch64: needs: - run_tests_mac - check_scripts runs-on: self-mini-macos env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} @@ -282,24 +288,21 @@ jobs: - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - - name: 
run_bundling::bundle_mac + - name: run_bundling::bundle_mac::bundle_mac run: ./script/bundle-mac aarch64-apple-darwin shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg' + - name: '@actions/upload-artifact Zed-aarch64.dmg' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg - path: target/aarch64-apple-darwin/release/Zed.dmg + name: Zed-aarch64.dmg + path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz' + - name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz + name: zed-remote-server-macos-aarch64.gz path: target/zed-remote-server-macos-aarch64.gz if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz timeout-minutes: 60 bundle_mac_x86_64: needs: @@ -307,6 +310,9 @@ jobs: - check_scripts runs-on: self-mini-macos env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} @@ -328,31 +334,31 @@ jobs: - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - - name: run_bundling::bundle_mac + - name: 
run_bundling::bundle_mac::bundle_mac run: ./script/bundle-mac x86_64-apple-darwin shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg' + - name: '@actions/upload-artifact Zed-x86_64.dmg' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg - path: target/x86_64-apple-darwin/release/Zed.dmg + name: Zed-x86_64.dmg + path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz' + - name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz + name: zed-remote-server-macos-x86_64.gz path: target/zed-remote-server-macos-x86_64.gz if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz timeout-minutes: 60 - bundle_windows_arm64: + bundle_windows_aarch64: needs: - run_tests_windows - check_scripts runs-on: self-32vcpu-windows-2022 env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} @@ -371,18 +377,16 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: run_bundling::bundle_windows + - name: run_bundling::bundle_windows::bundle_windows 
run: script/bundle-windows.ps1 -Architecture aarch64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe' + - name: '@actions/upload-artifact Zed-aarch64.exe' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe - path: ${{ env.SETUP_PATH }} + name: Zed-aarch64.exe + path: target/Zed-aarch64.exe if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe timeout-minutes: 60 bundle_windows_x86_64: needs: @@ -390,6 +394,9 @@ jobs: - check_scripts runs-on: self-32vcpu-windows-2022 env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} @@ -408,51 +415,49 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: run_bundling::bundle_windows + - name: run_bundling::bundle_windows::bundle_windows run: script/bundle-windows.ps1 -Architecture x86_64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe' + - name: '@actions/upload-artifact Zed-x86_64.exe' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe - path: ${{ env.SETUP_PATH }} + name: Zed-x86_64.exe + path: target/Zed-x86_64.exe if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe timeout-minutes: 60 upload_release_assets: 
needs: - create_draft_release - - bundle_linux_arm64 + - bundle_linux_aarch64 - bundle_linux_x86_64 - - bundle_mac_arm64 + - bundle_mac_aarch64 - bundle_mac_x86_64 - - bundle_windows_arm64 + - bundle_windows_aarch64 - bundle_windows_x86_64 runs-on: namespace-profile-4x8-ubuntu-2204 steps: - - name: release::upload_release_assets::download_workflow_artifacts + - name: release::download_workflow_artifacts uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 with: path: ./artifacts/ - name: ls -lR ./artifacts run: ls -lR ./artifacts shell: bash -euxo pipefail {0} - - name: release::upload_release_assets::prep_release_artifacts + - name: release::prep_release_artifacts run: |- mkdir -p release-artifacts/ - mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.dmg - mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.dmg - mv ./artifacts/${{ needs.bundle_windows_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.exe - mv ./artifacts/${{ needs.bundle_windows_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.exe - mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.zed }}/* release-artifacts/zed-linux-aarch64.tar.gz - mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.zed }}/* release-artifacts/zed-linux-x86_64.tar.gz - mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-x86_64.gz - mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-aarch64.gz - mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-x86_64.gz - mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-aarch64.gz + mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg + mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg + mv 
./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz + mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz + mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe + mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe + mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz + mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz + mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz + mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz shell: bash -euxo pipefail {0} - name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/* run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/* @@ -473,7 +478,7 @@ jobs: shell: bash -euxo pipefail {0} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: release::auto_release_preview::create_sentry_release + - name: release::create_sentry_release uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c with: environment: production diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 20230fb499ea9fa892a316bd1762424869004262..f3efe70a498e5718740adca572358c8b7bb81609 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -3,12 +3,7 @@ name: release_nightly env: CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - 
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} - DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} on: push: tags: @@ -32,41 +27,6 @@ jobs: run: ./script/clippy shell: bash -euxo pipefail {0} timeout-minutes: 60 - run_tests_mac: - if: github.repository_owner == 'zed-industries' - runs-on: self-mini-macos - steps: - - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - with: - clean: false - - name: steps::setup_cargo_config - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - shell: bash -euxo pipefail {0} - - name: steps::setup_node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 - with: - node-version: '20' - - name: steps::clippy - run: ./script/clippy - shell: bash -euxo pipefail {0} - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: bash -euxo pipefail {0} - - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 300 - shell: bash -euxo pipefail {0} - - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final - shell: bash -euxo pipefail {0} - - name: steps::cleanup_cargo_config - if: always() - run: | - rm -rf ./../.cargo - shell: bash -euxo pipefail {0} - timeout-minutes: 60 run_tests_windows: if: github.repository_owner == 'zed-industries' runs-on: self-32vcpu-windows-2022 @@ -102,55 +62,109 @@ jobs: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue shell: pwsh timeout-minutes: 60 - bundle_mac_nightly_x86_64: + bundle_linux_aarch64: needs: - check_style - - run_tests_mac - if: github.repository_owner == 'zed-industries' - runs-on: self-mini-macos + - run_tests_windows + runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 env: - MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} - MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} - 
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} - APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} - APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::setup_node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 - with: - node-version: '20' + - name: run_bundling::set_release_channel_to_nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + shell: bash -euxo pipefail {0} - name: steps::setup_sentry uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 300 + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold shell: bash -euxo pipefail {0} - - name: release_nightly::set_release_channel_to_nightly + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-linux-aarch64.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-linux-aarch64.tar.gz + path: target/release/zed-linux-aarch64.tar.gz + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-linux-aarch64.gz + path: target/zed-remote-server-linux-aarch64.gz + if-no-files-found: error + timeout-minutes: 60 + 
bundle_linux_x86_64: + needs: + - check_style + - run_tests_windows + runs-on: namespace-profile-32x64-ubuntu-2004 + env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: run_bundling::set_release_channel_to_nightly run: | set -eu version=$(git rev-parse --short HEAD) echo "Publishing version: ${version} on release channel nightly" echo "nightly" > crates/zed/RELEASE_CHANNEL shell: bash -euxo pipefail {0} - - name: run_bundling::bundle_mac - run: ./script/bundle-mac x86_64-apple-darwin + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::setup_linux + run: ./script/linux shell: bash -euxo pipefail {0} - - name: release_nightly::upload_zed_nightly - run: script/upload-nightly macos x86_64 + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-linux-x86_64.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-linux-x86_64.tar.gz + path: target/release/zed-linux-x86_64.tar.gz + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-linux-x86_64.gz + path: target/zed-remote-server-linux-x86_64.gz + if-no-files-found: error timeout-minutes: 60 - bundle_mac_nightly_aarch64: + bundle_mac_aarch64: needs: - check_style - - run_tests_mac - if: github.repository_owner == 'zed-industries' + - run_tests_windows runs-on: self-mini-macos env: + 
CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} @@ -161,6 +175,13 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: run_bundling::set_release_channel_to_nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + shell: bash -euxo pipefail {0} - name: steps::setup_node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: @@ -172,100 +193,84 @@ jobs: - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - - name: release_nightly::set_release_channel_to_nightly - run: | - set -eu - version=$(git rev-parse --short HEAD) - echo "Publishing version: ${version} on release channel nightly" - echo "nightly" > crates/zed/RELEASE_CHANNEL - shell: bash -euxo pipefail {0} - - name: run_bundling::bundle_mac + - name: run_bundling::bundle_mac::bundle_mac run: ./script/bundle-mac aarch64-apple-darwin shell: bash -euxo pipefail {0} - - name: release_nightly::upload_zed_nightly - run: script/upload-nightly macos aarch64 - shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact Zed-aarch64.dmg' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed-aarch64.dmg + path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-macos-aarch64.gz + path: target/zed-remote-server-macos-aarch64.gz + 
if-no-files-found: error timeout-minutes: 60 - bundle_linux_nightly_x86_64: + bundle_mac_x86_64: needs: - check_style - - run_tests_mac - if: github.repository_owner == 'zed-industries' - runs-on: namespace-profile-32x64-ubuntu-2004 + - run_tests_windows + runs-on: self-mini-macos + env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::setup_sentry - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b - with: - token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: ./script/linux - run: ./script/linux - shell: bash -euxo pipefail {0} - - name: ./script/install-mold - run: ./script/install-mold - shell: bash -euxo pipefail {0} - - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 100 - shell: bash -euxo pipefail {0} - - name: release_nightly::set_release_channel_to_nightly + - name: run_bundling::set_release_channel_to_nightly run: | set -eu version=$(git rev-parse --short HEAD) echo "Publishing version: ${version} on release channel nightly" echo "nightly" > crates/zed/RELEASE_CHANNEL shell: bash -euxo pipefail {0} - - name: ./script/bundle-linux - run: ./script/bundle-linux - shell: bash -euxo pipefail {0} - - name: release_nightly::upload_zed_nightly - run: script/upload-nightly linux-targz x86_64 - shell: bash -euxo pipefail {0} - timeout-minutes: 60 - bundle_linux_nightly_aarch64: - needs: - - check_style - - run_tests_mac - if: 
github.repository_owner == 'zed-industries' - runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 - steps: - - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: - clean: false + node-version: '20' - name: steps::setup_sentry uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: ./script/linux - run: ./script/linux - shell: bash -euxo pipefail {0} - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 100 - shell: bash -euxo pipefail {0} - - name: release_nightly::set_release_channel_to_nightly - run: | - set -eu - version=$(git rev-parse --short HEAD) - echo "Publishing version: ${version} on release channel nightly" - echo "nightly" > crates/zed/RELEASE_CHANNEL - shell: bash -euxo pipefail {0} - - name: ./script/bundle-linux - run: ./script/bundle-linux + run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - - name: release_nightly::upload_zed_nightly - run: script/upload-nightly linux-targz aarch64 + - name: run_bundling::bundle_mac::bundle_mac + run: ./script/bundle-mac x86_64-apple-darwin shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact Zed-x86_64.dmg' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed-x86_64.dmg + path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-macos-x86_64.gz + path: target/zed-remote-server-macos-x86_64.gz + if-no-files-found: error timeout-minutes: 60 - bundle_windows_nightly_x86_64: + bundle_windows_aarch64: needs: - check_style - run_tests_windows - if: github.repository_owner == 
'zed-industries' runs-on: self-32vcpu-windows-2022 env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} @@ -280,11 +285,7 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::setup_sentry - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b - with: - token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: release_nightly::set_release_channel_to_nightly + - name: run_bundling::set_release_channel_to_nightly run: | $ErrorActionPreference = "Stop" $version = git rev-parse --short HEAD @@ -292,22 +293,30 @@ jobs: "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: run_bundling::bundle_windows - run: script/bundle-windows.ps1 -Architecture x86_64 - shell: pwsh - working-directory: ${{ env.ZED_WORKSPACE }} - - name: release_nightly::upload_zed_nightly - run: script/upload-nightly.ps1 -Architecture x86_64 + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: run_bundling::bundle_windows::bundle_windows + run: script/bundle-windows.ps1 -Architecture aarch64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} + - name: '@actions/upload-artifact Zed-aarch64.exe' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed-aarch64.exe + path: target/Zed-aarch64.exe + if-no-files-found: error timeout-minutes: 60 - bundle_windows_nightly_aarch64: + bundle_windows_x86_64: needs: - check_style - run_tests_windows - if: github.repository_owner == 'zed-industries' runs-on: self-32vcpu-windows-2022 env: + 
CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} @@ -322,11 +331,7 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::setup_sentry - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b - with: - token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: release_nightly::set_release_channel_to_nightly + - name: run_bundling::set_release_channel_to_nightly run: | $ErrorActionPreference = "Stop" $version = git rev-parse --short HEAD @@ -334,19 +339,25 @@ jobs: "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: run_bundling::bundle_windows - run: script/bundle-windows.ps1 -Architecture aarch64 - shell: pwsh - working-directory: ${{ env.ZED_WORKSPACE }} - - name: release_nightly::upload_zed_nightly - run: script/upload-nightly.ps1 -Architecture aarch64 + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: run_bundling::bundle_windows::bundle_windows + run: script/bundle-windows.ps1 -Architecture x86_64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} + - name: '@actions/upload-artifact Zed-x86_64.exe' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed-x86_64.exe + path: target/Zed-x86_64.exe + if-no-files-found: error timeout-minutes: 60 build_nix_linux_x86_64: needs: - check_style - - run_tests_mac + - run_tests_windows if: github.repository_owner == 'zed-industries' runs-on: namespace-profile-32x64-ubuntu-2004 env: @@ -377,7 +388,7 @@ jobs: build_nix_mac_aarch64: needs: - check_style - - 
run_tests_mac + - run_tests_windows if: github.repository_owner == 'zed-industries' runs-on: self-mini-macos env: @@ -414,20 +425,48 @@ jobs: continue-on-error: true update_nightly_tag: needs: - - bundle_mac_nightly_x86_64 - - bundle_mac_nightly_aarch64 - - bundle_linux_nightly_x86_64 - - bundle_linux_nightly_aarch64 - - bundle_windows_nightly_x86_64 - - bundle_windows_nightly_aarch64 + - bundle_linux_aarch64 + - bundle_linux_x86_64 + - bundle_mac_aarch64 + - bundle_mac_x86_64 + - bundle_windows_aarch64 + - bundle_windows_x86_64 if: github.repository_owner == 'zed-industries' - runs-on: namespace-profile-2x4-ubuntu-2404 + runs-on: namespace-profile-4x8-ubuntu-2204 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false fetch-depth: 0 + - name: release::download_workflow_artifacts + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 + with: + path: ./artifacts/ + - name: ls -lR ./artifacts + run: ls -lR ./artifacts + shell: bash -euxo pipefail {0} + - name: release::prep_release_artifacts + run: |- + mkdir -p release-artifacts/ + + mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg + mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg + mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz + mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz + mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe + mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe + mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz + mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz + mv 
./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz + mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz + shell: bash -euxo pipefail {0} + - name: ./script/upload-nightly + run: ./script/upload-nightly + shell: bash -euxo pipefail {0} + env: + DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} + DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} - name: release_nightly::update_nightly_tag_job::update_nightly_tag run: | if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then @@ -439,7 +478,7 @@ jobs: git tag -f nightly git push origin nightly --force shell: bash -euxo pipefail {0} - - name: release_nightly::update_nightly_tag_job::create_sentry_release + - name: release::create_sentry_release uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c with: environment: production diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml new file mode 100644 index 0000000000000000000000000000000000000000..fa686148590785f1ba93501ecd873d19af6bcb2b --- /dev/null +++ b/.github/workflows/run_agent_evals.yml @@ -0,0 +1,62 @@ +# Generated from xtask::workflows::run_agent_evals +# Rebuild with `cargo xtask workflows`. 
+name: run_agent_evals +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_EVAL_TELEMETRY: '1' +on: + pull_request: + types: + - synchronize + - reopened + - labeled + branches: + - '**' + schedule: + - cron: 0 0 * * * + workflow_dispatch: {} +jobs: + agent_evals: + if: | + github.repository_owner == 'zed-industries' && + (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: cargo build --package=eval + run: cargo build --package=eval + shell: bash -euxo pipefail {0} + - name: run_agent_evals::agent_evals::run_eval + run: cargo run --package=eval -- --repetitions=8 --concurrency=1 + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index a6d563b5b12faa2d5f2cf03b644cfcacbdd17400..5cf10d11f3f9444c9b57f594897dbfa0e435f0b4 100644 --- a/.github/workflows/run_bundling.yml +++ 
b/.github/workflows/run_bundling.yml @@ -3,192 +3,194 @@ name: run_bundling env: CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} on: pull_request: types: - labeled - synchronize jobs: - bundle_mac_x86_64: + bundle_linux_aarch64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) - runs-on: self-mini-macos + runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 env: - MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} - MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} - APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} - APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} - APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::setup_node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 - with: - node-version: '20' - name: steps::setup_sentry uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 300 + - name: steps::setup_linux + run: ./script/linux shell: bash -euxo pipefail {0} - - name: run_bundling::bundle_mac - run: ./script/bundle-mac x86_64-apple-darwin + - name: steps::install_mold + run: ./script/install-mold shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha 
}}-x86_64.dmg' + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-linux-aarch64.tar.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg - path: target/x86_64-apple-darwin/release/Zed.dmg + name: zed-linux-aarch64.tar.gz + path: target/release/zed-linux-aarch64.tar.gz if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz' + - name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz - path: target/zed-remote-server-macos-x86_64.gz + name: zed-remote-server-linux-aarch64.gz + path: target/zed-remote-server-linux-aarch64.gz if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz timeout-minutes: 60 - bundle_mac_arm64: + bundle_linux_x86_64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) - runs-on: self-mini-macos + runs-on: namespace-profile-32x64-ubuntu-2004 env: - MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} - MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} - APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} - APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} - APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + 
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::setup_node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 - with: - node-version: '20' - name: steps::setup_sentry uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 300 + - name: steps::setup_linux + run: ./script/linux shell: bash -euxo pipefail {0} - - name: run_bundling::bundle_mac - run: ./script/bundle-mac aarch64-apple-darwin + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg' + - name: '@actions/upload-artifact zed-linux-x86_64.tar.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg - path: target/aarch64-apple-darwin/release/Zed.dmg + name: zed-linux-x86_64.tar.gz + path: target/release/zed-linux-x86_64.tar.gz if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz' + - name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz - path: target/zed-remote-server-macos-aarch64.gz + name: zed-remote-server-linux-x86_64.gz + path: target/zed-remote-server-linux-x86_64.gz if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha 
}}-aarch64.dmg - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz timeout-minutes: 60 - bundle_linux_x86_64: + bundle_mac_aarch64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) - runs-on: namespace-profile-32x64-ubuntu-2004 + runs-on: self-mini-macos + env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' - name: steps::setup_sentry uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: steps::setup_linux - run: ./script/linux - shell: bash -euxo pipefail {0} - - name: steps::install_mold - run: ./script/install-mold + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - - name: ./script/bundle-linux - run: ./script/bundle-linux + - name: run_bundling::bundle_mac::bundle_mac + run: ./script/bundle-mac aarch64-apple-darwin shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact Zed-aarch64.dmg' uses: 
actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - path: target/release/zed-*.tar.gz + name: Zed-aarch64.dmg + path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - path: target/zed-remote-server-*.gz + name: zed-remote-server-macos-aarch64.gz + path: target/zed-remote-server-macos-aarch64.gz if-no-files-found: error - outputs: - zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz timeout-minutes: 60 - bundle_linux_arm64: + bundle_mac_x86_64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) - runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 + runs-on: self-mini-macos + env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} steps: - name: steps::checkout_repo uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' - name: steps::setup_sentry uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: steps::setup_linux - run: ./script/linux - shell: bash -euxo pipefail {0} - - name: steps::install_mold - run: ./script/install-mold + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - - name: ./script/bundle-linux - run: ./script/bundle-linux + - name: run_bundling::bundle_mac::bundle_mac + run: ./script/bundle-mac x86_64-apple-darwin shell: bash -euxo pipefail {0} - - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact Zed-x86_64.dmg' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - path: target/release/zed-*.tar.gz + name: Zed-x86_64.dmg + path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg if-no-files-found: error - - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + - name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - path: target/zed-remote-server-*.gz + name: zed-remote-server-macos-x86_64.gz + path: target/zed-remote-server-macos-x86_64.gz if-no-files-found: error - outputs: - zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - remote-server: 
zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz timeout-minutes: 60 - bundle_windows_x86_64: + bundle_windows_aarch64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: self-32vcpu-windows-2022 env: + CARGO_INCREMENTAL: 0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} @@ -207,25 +209,26 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: run_bundling::bundle_windows - run: script/bundle-windows.ps1 -Architecture x86_64 + - name: run_bundling::bundle_windows::bundle_windows + run: script/bundle-windows.ps1 -Architecture aarch64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe' + - name: '@actions/upload-artifact Zed-aarch64.exe' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe - path: ${{ env.SETUP_PATH }} + name: Zed-aarch64.exe + path: target/Zed-aarch64.exe if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe timeout-minutes: 60 - bundle_windows_arm64: + bundle_windows_x86_64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: self-32vcpu-windows-2022 env: + CARGO_INCREMENTAL: 
0 + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} @@ -244,18 +247,16 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: run_bundling::bundle_windows - run: script/bundle-windows.ps1 -Architecture aarch64 + - name: run_bundling::bundle_windows::bundle_windows + run: script/bundle-windows.ps1 -Architecture x86_64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe' + - name: '@actions/upload-artifact Zed-x86_64.exe' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe - path: ${{ env.SETUP_PATH }} + name: Zed-x86_64.exe + path: target/Zed-x86_64.exe if-no-files-found: error - outputs: - zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe timeout-minutes: 60 concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 51ff9ee331ca69b251bb00905e22213527cbf118..002044580a89bb9d894237fa490cca2b8d9d438f 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -66,6 +66,10 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: steps::setup_pnpm uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 with: @@ -145,6 +149,10 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: 
bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: steps::setup_node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: @@ -156,7 +164,7 @@ jobs: run: cargo install cargo-nextest --locked shell: bash -euxo pipefail {0} - name: steps::clear_target_dir_if_large - run: ./script/clear-target-dir-if-larger-than 100 + run: ./script/clear-target-dir-if-larger-than 250 shell: bash -euxo pipefail {0} - name: steps::cargo_nextest run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final @@ -214,10 +222,10 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: steps::cache_rust_dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 with: - save-if: ${{ github.ref == 'refs/heads/main' }} + cache: rust - name: steps::setup_linux run: ./script/linux shell: bash -euxo pipefail {0} @@ -261,6 +269,10 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: cargo build -p collab run: cargo build -p collab shell: bash -euxo pipefail {0} @@ -317,6 +329,10 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: run_tests::check_dependencies::install_cargo_machete uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 with: @@ -350,10 +366,10 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml shell: bash -euxo pipefail {0} - - name: steps::cache_rust_dependencies - uses: 
swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 with: - save-if: ${{ github.ref == 'refs/heads/main' }} + cache: rust - name: run_tests::check_docs::lychee_link_check uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 with: @@ -392,6 +408,10 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: ./script/check-licenses run: ./script/check-licenses shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml new file mode 100644 index 0000000000000000000000000000000000000000..e4a22c3f164b78699e36ea55854731f4657c3c79 --- /dev/null +++ b/.github/workflows/run_unit_evals.yml @@ -0,0 +1,63 @@ +# Generated from xtask::workflows::run_agent_evals +# Rebuild with `cargo xtask workflows`. 
+name: run_unit_evals +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} +on: + schedule: + - cron: 47 1 * * 2 + workflow_dispatch: {} +jobs: + unit_evals: + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 250 + shell: bash -euxo pipefail {0} + - name: ./script/run-unit-evals + run: ./script/run-unit-evals + shell: bash -euxo pipefail {0} + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + - name: run_agent_evals::unit_evals::send_failure_to_slack + if: ${{ failure() }} + uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 + with: + method: chat.postMessage + token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} + payload: | + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/unit_evals.yml 
b/.github/workflows/unit_evals.yml deleted file mode 100644 index 53ed33a1af300d6b641b3b9430de0bb6846b27cc..0000000000000000000000000000000000000000 --- a/.github/workflows/unit_evals.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Run Unit Evals - -on: - schedule: - # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. - - cron: "47 1 * * 2" - workflow_dispatch: - -concurrency: - # Allow only one workflow per any non-`main` branch. - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUST_BACKTRACE: 1 - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - -jobs: - unit_evals: - if: github.repository_owner == 'zed-industries' - timeout-minutes: 60 - name: Run unit evals - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - run: ./script/linux - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Install Rust - shell: bash -euxo pipefail {0} - run: | - cargo install cargo-nextest --locked - - - name: Install Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: "18" - - - name: Limit target directory size - shell: bash -euxo pipefail {0} - run: script/clear-target-dir-if-larger-than 100 - - - name: Run unit evals - shell: bash -euxo pipefail {0} - run: cargo nextest run --workspace --no-fail-fast --features unit-eval 
--no-capture -E 'test(::eval_)' - env: - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - - - name: Send failure message to Slack channel if needed - if: ${{ failure() }} - uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 - with: - method: chat.postMessage - token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} - payload: | - channel: C04UDRNNJFQ - text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" - - # Even the Linux runner is not stateful, in theory there is no need to do this cleanup. - # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code - # to clean up the config file, I’ve included the cleanup code here as a precaution. - # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution. - - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo diff --git a/Cargo.lock b/Cargo.lock index 71bc1406166d7468b9f8ad684d68befb5c5ac06b..0ae2b1697a9f6ddbb76e0b26a60199b6af538610 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1339,6 +1339,7 @@ dependencies = [ "settings", "smol", "tempfile", + "util", "which 6.0.3", "workspace", ] @@ -4935,6 +4936,7 @@ dependencies = [ "editor", "gpui", "indoc", + "itertools 0.14.0", "language", "log", "lsp", @@ -7077,6 +7079,7 @@ dependencies = [ "serde_json", "settings", "url", + "urlencoding", "util", ] @@ -12714,12 +12717,6 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" -[[package]] -name = "pollster" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3" - [[package]] name = "portable-atomic" version = "1.11.1" @@ -12768,7 +12765,7 @@ dependencies = [ "log", "parking_lot", "pin-project", - "pollster 0.2.5", + 
"pollster", "static_assertions", "thiserror 1.0.69", ] @@ -14320,6 +14317,7 @@ dependencies = [ "gpui", "log", "rand 0.9.2", + "rayon", "sum_tree", "unicode-segmentation", "util", @@ -16245,7 +16243,6 @@ checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520" name = "streaming_diff" version = "0.1.0" dependencies = [ - "gpui", "ordered-float 2.10.1", "rand 0.9.2", "rope", @@ -16364,11 +16361,9 @@ version = "0.1.0" dependencies = [ "arrayvec", "ctor", - "futures 0.3.31", - "itertools 0.14.0", "log", - "pollster 0.4.0", "rand 0.9.2", + "rayon", "zlog", ] @@ -18053,7 +18048,7 @@ dependencies = [ [[package]] name = "tree-sitter-gomod" version = "1.1.1" -source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c#6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c" +source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=2e886870578eeba1927a2dc4bd2e2b3f598c5f9a#2e886870578eeba1927a2dc4bd2e2b3f598c5f9a" dependencies = [ "cc", "tree-sitter-language", @@ -21231,6 +21226,7 @@ dependencies = [ "project_symbols", "prompt_store", "proto", + "rayon", "recent_projects", "release_channel", "remote", diff --git a/Cargo.toml b/Cargo.toml index 369082ff16736f9f682ad8c5bd09634c03434609..7674b0bacc12e9f9ae78a3f299dc0f538e26bd35 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -680,7 +680,7 @@ tree-sitter-elixir = "0.3" tree-sitter-embedded-template = "0.23.0" tree-sitter-gitcommit = { git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9" } tree-sitter-go = "0.23" -tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" } +tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "2e886870578eeba1927a2dc4bd2e2b3f598c5f9a", package = "tree-sitter-gomod" } tree-sitter-gowork = { git = 
"https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" } tree-sitter-heex = { git = "https://github.com/zed-industries/tree-sitter-heex", rev = "1dd45142fbb05562e35b2040c6129c9bca346592" } tree-sitter-html = "0.23" diff --git a/assets/icons/chevron_down_up.svg b/assets/icons/chevron_down_up.svg new file mode 100644 index 0000000000000000000000000000000000000000..340b8d1ad93113a1affe5c723c9b5f5e12a228a8 --- /dev/null +++ b/assets/icons/chevron_down_up.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 979e5a6ccc1d4520db65981fb3b8a01094f9c625..6f57e6f689a30543ee0b7d6b95d451af885d502a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -407,6 +407,7 @@ "bindings": { "escape": "project_search::ToggleFocus", "shift-find": "search::FocusSearch", + "shift-enter": "project_search::ToggleAllSearchResults", "ctrl-shift-f": "search::FocusSearch", "ctrl-shift-h": "search::ToggleReplace", "alt-ctrl-g": "search::ToggleRegex", @@ -479,6 +480,7 @@ "alt-w": "search::ToggleWholeWord", "alt-find": "project_search::ToggleFilters", "alt-ctrl-f": "project_search::ToggleFilters", + "shift-enter": "project_search::ToggleAllSearchResults", "ctrl-alt-shift-r": "search::ToggleRegex", "ctrl-alt-shift-x": "search::ToggleRegex", "alt-r": "search::ToggleRegex", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 4f9b85ff03790a8c9a59a657a3e0ca0710d41e25..a1d38b6028f8ec7690f7133b765ffdbb8d261f17 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -468,6 +468,7 @@ "bindings": { "escape": "project_search::ToggleFocus", "cmd-shift-j": "project_search::ToggleFilters", + "shift-enter": "project_search::ToggleAllSearchResults", "cmd-shift-f": "search::FocusSearch", "cmd-shift-h": "search::ToggleReplace", "alt-cmd-g": "search::ToggleRegex", @@ -496,6 +497,7 @@ "bindings": { 
"escape": "project_search::ToggleFocus", "cmd-shift-j": "project_search::ToggleFilters", + "shift-enter": "project_search::ToggleAllSearchResults", "cmd-shift-h": "search::ToggleReplace", "alt-cmd-g": "search::ToggleRegex", "alt-cmd-x": "search::ToggleRegex" diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 29146f3080d6ecad75bb9754503bb93c6710ff30..2dd72845b196c029bb2c575bcdd07b5ef07ae970 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -488,6 +488,7 @@ "alt-c": "search::ToggleCaseSensitive", "alt-w": "search::ToggleWholeWord", "alt-f": "project_search::ToggleFilters", + "shift-enter": "project_search::ToggleAllSearchResults", "alt-r": "search::ToggleRegex", // "ctrl-shift-alt-x": "search::ToggleRegex", "ctrl-k shift-enter": "pane::TogglePinTab" diff --git a/assets/settings/default.json b/assets/settings/default.json index f62cc1844732db2a49dc835a155e861f4268632f..1852ace708c53f6a651420b77d7e83f9afc978c3 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -255,6 +255,19 @@ // Whether to display inline and alongside documentation for items in the // completions menu "show_completion_documentation": true, + // When to show the scrollbar in the completion menu. + // This setting can take four values: + // + // 1. Show the scrollbar if there's important information or + // follow the system's configured behavior + // "auto" + // 2. Match the system's configured behavior: + // "system" + // 3. Always show the scrollbar: + // "always" + // 4. Never show the scrollbar: + // "never" (default) + "completion_menu_scrollbar": "never", // Show method signatures in the editor, when inside parentheses. "auto_signature_help": false, // Whether to show the signature help after completion or a bracket pair inserted. 
@@ -602,7 +615,9 @@ "whole_word": false, "case_sensitive": false, "include_ignored": false, - "regex": false + "regex": false, + // Whether to center the cursor on each search match when navigating. + "center_on_match": false }, // When to populate a new search's query based on the text under the cursor. // This setting can take the following three values: @@ -1719,6 +1734,9 @@ "allowed": true } }, + "HTML+ERB": { + "language_servers": ["herb", "!ruby-lsp", "..."] + }, "Java": { "prettier": { "allowed": true, @@ -1741,6 +1759,9 @@ "allowed": true } }, + "JS+ERB": { + "language_servers": ["!ruby-lsp", "..."] + }, "Kotlin": { "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."] }, @@ -1755,6 +1776,7 @@ "Markdown": { "format_on_save": "off", "use_on_type_format": false, + "remove_trailing_whitespace_on_save": false, "allow_rewrap": "anywhere", "soft_wrap": "editor_width", "prettier": { @@ -1845,6 +1867,9 @@ "allowed": true } }, + "YAML+ERB": { + "language_servers": ["!ruby-lsp", "..."] + }, "Zig": { "language_servers": ["zls", "..."] } diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 5ecf2be445ecf8afc6a93e2961302758ea0037ae..37622d004a2e9cd27a3686263ffd1aa98979104f 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -3,7 +3,6 @@ mod diff; mod mention; mod terminal; -use ::terminal::terminal_settings::TerminalSettings; use agent_settings::AgentSettings; use collections::HashSet; pub use connection::*; @@ -12,7 +11,7 @@ use language::language_settings::FormatOnSave; pub use mention::*; use project::lsp_store::{FormatTrigger, LspFormatTarget}; use serde::{Deserialize, Serialize}; -use settings::{Settings as _, SettingsLocation}; +use settings::Settings as _; use task::{Shell, ShellBuilder}; pub use terminal::*; @@ -2141,17 +2140,9 @@ impl AcpThread { ) -> Task>> { let env = match &cwd { Some(dir) => self.project.update(cx, |project, cx| { - let worktree = 
project.find_worktree(dir.as_path(), cx); - let shell = TerminalSettings::get( - worktree.as_ref().map(|(worktree, path)| SettingsLocation { - worktree_id: worktree.read(cx).id(), - path: &path, - }), - cx, - ) - .shell - .clone(); - project.directory_environment(&shell, dir.as_path().into(), cx) + project.environment().update(cx, |env, cx| { + env.directory_environment(dir.as_path().into(), cx) + }) }), None => Task::ready(None).shared(), }; diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 39cd8ad38e5bf223987dc8efe771614b3ed2172b..055b2f7fb86ffe9d7f12459b6b16405ce77815a0 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -361,12 +361,10 @@ async fn build_buffer_diff( ) -> Result> { let buffer = cx.update(|cx| buffer.read(cx).snapshot())?; - let executor = cx.background_executor().clone(); let old_text_rope = cx .background_spawn({ let old_text = old_text.clone(); - let executor = executor.clone(); - async move { Rope::from_str(old_text.as_str(), &executor) } + async move { Rope::from(old_text.as_str()) } }) .await; let base_buffer = cx diff --git a/crates/acp_thread/src/terminal.rs b/crates/acp_thread/src/terminal.rs index 9ca6d4021b316231930ab7803957dab3a0139f1e..8b08868616e19b0d1855558a057af8eebc314e4a 100644 --- a/crates/acp_thread/src/terminal.rs +++ b/crates/acp_thread/src/terminal.rs @@ -5,10 +5,8 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, Task}; use language::LanguageRegistry; use markdown::Markdown; use project::Project; -use settings::{Settings as _, SettingsLocation}; use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant}; use task::Shell; -use terminal::terminal_settings::TerminalSettings; use util::get_default_system_shell_preferring_bash; pub struct Terminal { @@ -187,17 +185,9 @@ pub async fn create_terminal_entity( let mut env = if let Some(dir) = &cwd { project .update(cx, |project, cx| { - let worktree = project.find_worktree(dir.as_path(), cx); - let 
shell = TerminalSettings::get( - worktree.as_ref().map(|(worktree, path)| SettingsLocation { - worktree_id: worktree.read(cx).id(), - path: &path, - }), - cx, - ) - .shell - .clone(); - project.directory_environment(&shell, dir.clone().into(), cx) + project.environment().update(cx, |env, cx| { + env.directory_environment(dir.clone().into(), cx) + }) })? .await .unwrap_or_default() diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index a40bcbd93c878a85c85d7edd312e713988234966..7615784676c7d9ff1782a6e9537e608cb927154d 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -19,7 +19,7 @@ use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; use project::Project; use settings::Settings; use theme::ThemeSettings; -use ui::{Tooltip, prelude::*}; +use ui::{Tooltip, WithScrollbar, prelude::*}; use util::ResultExt as _; use workspace::{ Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, @@ -291,17 +291,19 @@ impl AcpTools { let expanded = self.expanded.contains(&index); v_flex() + .id(index) + .group("message") + .cursor_pointer() + .font_buffer(cx) .w_full() - .px_4() .py_3() - .border_color(colors.border) - .border_b_1() + .pl_4() + .pr_5() .gap_2() .items_start() - .font_buffer(cx) .text_size(base_size) - .id(index) - .group("message") + .border_color(colors.border) + .border_b_1() .hover(|this| this.bg(colors.element_background.opacity(0.5))) .on_click(cx.listener(move |this, _, _, cx| { if this.expanded.contains(&index) { @@ -323,15 +325,14 @@ impl AcpTools { h_flex() .w_full() .gap_2() - .items_center() .flex_shrink_0() .child(match message.direction { - acp::StreamMessageDirection::Incoming => { - ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error) - } - acp::StreamMessageDirection::Outgoing => { - ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success) - } + acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown) + 
.color(Color::Error) + .size(IconSize::Small), + acp::StreamMessageDirection::Outgoing => Icon::new(IconName::ArrowUp) + .color(Color::Success) + .size(IconSize::Small), }) .child( Label::new(message.name.clone()) @@ -501,7 +502,7 @@ impl Focusable for AcpTools { } impl Render for AcpTools { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { v_flex() .track_focus(&self.focus_handle) .size_full() @@ -516,13 +517,19 @@ impl Render for AcpTools { .child("No messages recorded yet") .into_any() } else { - list( - connection.list_state.clone(), - cx.processor(Self::render_message), - ) - .with_sizing_behavior(gpui::ListSizingBehavior::Auto) - .flex_grow() - .into_any() + div() + .size_full() + .flex_grow() + .child( + list( + connection.list_state.clone(), + cx.processor(Self::render_message), + ) + .with_sizing_behavior(gpui::ListSizingBehavior::Auto) + .size_full(), + ) + .vertical_scrollbar_for(connection.list_state.clone(), window, cx) + .into_any() } } None => h_flex() diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 1730163a4ce7b53aa051a6af87da8ab10ad4320f..b7722f211afda3a77bc96292a50acf869e7424d6 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -3,9 +3,7 @@ use buffer_diff::BufferDiff; use clock; use collections::BTreeMap; use futures::{FutureExt, StreamExt, channel::mpsc}; -use gpui::{ - App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, Subscription, Task, WeakEntity, -}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity}; use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint}; use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; use std::{cmp, ops::Range, sync::Arc}; @@ -323,7 +321,6 @@ impl ActionLog { let unreviewed_edits = tracked_buffer.unreviewed_edits.clone(); let edits = 
diff_snapshots(&old_snapshot, &new_snapshot); let mut has_user_changes = false; - let executor = cx.background_executor().clone(); async move { if let ChangeAuthor::User = author { has_user_changes = apply_non_conflicting_edits( @@ -331,7 +328,6 @@ impl ActionLog { edits, &mut base_text, new_snapshot.as_rope(), - &executor, ); } @@ -386,7 +382,6 @@ impl ActionLog { let agent_diff_base = tracked_buffer.diff_base.clone(); let git_diff_base = git_diff.read(cx).base_text().as_rope().clone(); let buffer_text = tracked_buffer.snapshot.as_rope().clone(); - let executor = cx.background_executor().clone(); anyhow::Ok(cx.background_spawn(async move { let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable(); let committed_edits = language::line_diff( @@ -421,11 +416,8 @@ impl ActionLog { ), new_agent_diff_base.max_point(), )); - new_agent_diff_base.replace( - old_byte_start..old_byte_end, - &unreviewed_new, - &executor, - ); + new_agent_diff_base + .replace(old_byte_start..old_byte_end, &unreviewed_new); row_delta += unreviewed.new_len() as i32 - unreviewed.old_len() as i32; } @@ -619,7 +611,6 @@ impl ActionLog { .snapshot .text_for_range(new_range) .collect::(), - cx.background_executor(), ); delta += edit.new_len() as i32 - edit.old_len() as i32; false @@ -833,7 +824,6 @@ fn apply_non_conflicting_edits( edits: Vec>, old_text: &mut Rope, new_text: &Rope, - executor: &BackgroundExecutor, ) -> bool { let mut old_edits = patch.edits().iter().cloned().peekable(); let mut new_edits = edits.into_iter().peekable(); @@ -887,7 +877,6 @@ fn apply_non_conflicting_edits( old_text.replace( old_bytes, &new_text.chunks_in_range(new_bytes).collect::(), - executor, ); applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; has_made_changes = true; @@ -2293,7 +2282,6 @@ mod tests { old_text.replace( old_start..old_end, &new_text.slice_rows(edit.new.clone()).to_string(), - cx.background_executor(), ); } pretty_assertions::assert_eq!(old_text.to_string(), 
new_text.to_string()); diff --git a/crates/agent/src/edit_agent/edit_parser.rs b/crates/agent/src/edit_agent/edit_parser.rs index 8411171ba4ea491d2603014a0715ce471b34e36f..425bf93efff115d4daef380e3f82abcdb8c0746f 100644 --- a/crates/agent/src/edit_agent/edit_parser.rs +++ b/crates/agent/src/edit_agent/edit_parser.rs @@ -13,7 +13,15 @@ const EDITS_END_TAG: &str = ""; const SEARCH_MARKER: &str = "<<<<<<< SEARCH"; const SEPARATOR_MARKER: &str = "======="; const REPLACE_MARKER: &str = ">>>>>>> REPLACE"; -const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG]; +const SONNET_PARAMETER_INVOKE_1: &str = "\n"; +const SONNET_PARAMETER_INVOKE_2: &str = ""; +const END_TAGS: [&str; 5] = [ + OLD_TEXT_END_TAG, + NEW_TEXT_END_TAG, + EDITS_END_TAG, + SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call + SONNET_PARAMETER_INVOKE_2, +]; #[derive(Debug)] pub enum EditParserEvent { @@ -547,6 +555,37 @@ mod tests { ); } + #[gpui::test(iterations = 1000)] + fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) { + // This case is a regression with Claude Sonnet 4.5. + // Sometimes Sonnet thinks that it's doing a tool call + // and closes its response with '' + // instead of properly closing + + let mut parser = EditParser::new(EditFormat::XmlTags); + assert_eq!( + parse_random_chunks( + indoc! 
{" + some textupdated text + "}, + &mut parser, + &mut rng + ), + vec![Edit { + old_text: "some text".to_string(), + new_text: "updated text".to_string(), + line_hint: None, + },] + ); + assert_eq!( + parser.finish(), + EditParserMetrics { + tags: 2, + mismatched_tags: 1 + } + ); + } + #[gpui::test(iterations = 1000)] fn test_xml_nested_tags(mut rng: StdRng) { let mut parser = EditParser::new(EditFormat::XmlTags); @@ -1035,6 +1074,11 @@ mod tests { last_ix = chunk_ix; } + if new_text.is_some() { + pending_edit.new_text = new_text.take().unwrap(); + edits.push(pending_edit); + } + edits } } diff --git a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs index 021892e738eed229568c909f72f327d93199cdc0..904ec05a8c7565d5052cd546fc0bf6d723ffa375 100644 --- a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs +++ b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs @@ -305,20 +305,18 @@ impl SearchMatrix { #[cfg(test)] mod tests { use super::*; - use gpui::TestAppContext; use indoc::indoc; use language::{BufferId, TextBuffer}; use rand::prelude::*; use text::ReplicaId; use util::test::{generate_marked_text, marked_text_ranges}; - #[gpui::test] - fn test_empty_query(cx: &mut gpui::TestAppContext) { + #[test] + fn test_empty_query() { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Hello world\nThis is a test\nFoo bar baz", - cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -327,13 +325,12 @@ mod tests { assert_eq!(finish(finder), None); } - #[gpui::test] - fn test_streaming_exact_match(cx: &mut gpui::TestAppContext) { + #[test] + fn test_streaming_exact_match() { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Hello world\nThis is a test\nFoo bar baz", - cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -352,8 +349,8 @@ mod tests { assert_eq!(finish(finder), Some("This is a test".to_string())); } - #[gpui::test] - fn 
test_streaming_fuzzy_match(cx: &mut gpui::TestAppContext) { + #[test] + fn test_streaming_fuzzy_match() { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), @@ -366,7 +363,6 @@ mod tests { return x * y; } "}, - cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -387,13 +383,12 @@ mod tests { ); } - #[gpui::test] - fn test_incremental_improvement(cx: &mut gpui::TestAppContext) { + #[test] + fn test_incremental_improvement() { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Line 1\nLine 2\nLine 3\nLine 4\nLine 5", - cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -413,8 +408,8 @@ mod tests { assert_eq!(finish(finder), Some("Line 3\nLine 4".to_string())); } - #[gpui::test] - fn test_incomplete_lines_buffering(cx: &mut gpui::TestAppContext) { + #[test] + fn test_incomplete_lines_buffering() { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), @@ -423,7 +418,6 @@ mod tests { jumps over the lazy dog Pack my box with five dozen liquor jugs "}, - cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -441,8 +435,8 @@ mod tests { ); } - #[gpui::test] - fn test_multiline_fuzzy_match(cx: &mut gpui::TestAppContext) { + #[test] + fn test_multiline_fuzzy_match() { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), @@ -462,7 +456,6 @@ mod tests { } } "#}, - cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -516,7 +509,7 @@ mod tests { } #[gpui::test(iterations = 100)] - fn test_resolve_location_single_line(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_single_line(mut rng: StdRng) { assert_location_resolution( concat!( " Lorem\n", @@ -526,12 +519,11 @@ mod tests { ), "ipsum", &mut rng, - cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_multiline(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_multiline(mut rng: StdRng) { assert_location_resolution( concat!( 
" Lorem\n", @@ -541,12 +533,11 @@ mod tests { ), "ipsum\ndolor sit amet", &mut rng, - cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_function_with_typo(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_function_with_typo(mut rng: StdRng) { assert_location_resolution( indoc! {" «fn foo1(a: usize) -> usize { @@ -559,12 +550,11 @@ mod tests { "}, "fn foo1(a: usize) -> u32 {\n40\n}", &mut rng, - cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_class_methods(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_class_methods(mut rng: StdRng) { assert_location_resolution( indoc! {" class Something { @@ -585,12 +575,11 @@ mod tests { six() { return 6666; } "}, &mut rng, - cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_imports_no_match(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_imports_no_match(mut rng: StdRng) { assert_location_resolution( indoc! {" use std::ops::Range; @@ -620,12 +609,11 @@ mod tests { use std::sync::Arc; "}, &mut rng, - cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_nested_closure(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_nested_closure(mut rng: StdRng) { assert_location_resolution( indoc! {" impl Foo { @@ -653,12 +641,11 @@ mod tests { " });", ), &mut rng, - cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_tool_invocation(mut rng: StdRng, cx: &mut TestAppContext) { + fn test_resolve_location_tool_invocation(mut rng: StdRng) { assert_location_resolution( indoc! {r#" let tool = cx @@ -686,12 +673,11 @@ mod tests { " .output;", ), &mut rng, - cx, ); } #[gpui::test] - fn test_line_hint_selection(cx: &mut TestAppContext) { + fn test_line_hint_selection() { let text = indoc! 
{r#" fn first_function() { return 42; @@ -710,7 +696,6 @@ mod tests { ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.to_string(), - cx.background_executor(), ); let snapshot = buffer.snapshot(); let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); @@ -742,19 +727,9 @@ mod tests { } #[track_caller] - fn assert_location_resolution( - text_with_expected_range: &str, - query: &str, - rng: &mut StdRng, - cx: &mut TestAppContext, - ) { + fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) { let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false); - let buffer = TextBuffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - text.clone(), - cx.background_executor(), - ); + let buffer = TextBuffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.clone()); let snapshot = buffer.snapshot(); let mut matcher = StreamingFuzzyMatcher::new(snapshot); diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 078273dbb8a4399e1770ca08daeb1f7f44491e2a..0adff2dee3571f09b40ee69896c05e50c56b51b9 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -569,7 +569,6 @@ mod tests { use prompt_store::ProjectContext; use serde_json::json; use settings::SettingsStore; - use text::Rope; use util::{path, rel_path::rel_path}; #[gpui::test] @@ -742,7 +741,7 @@ mod tests { // Create the file fs.save( path!("/root/src/main.rs").as_ref(), - &Rope::from_str_small("initial content"), + &"initial content".into(), language::LineEnding::Unix, ) .await @@ -909,7 +908,7 @@ mod tests { // Create a simple file with trailing whitespace fs.save( path!("/root/src/main.rs").as_ref(), - &Rope::from_str_small("initial content"), + &"initial content".into(), language::LineEnding::Unix, ) .await diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/acp/entry_view_state.rs index 
4c058b984f4fa24074ea9e9d81e43c1d73d87d1f..382d0ee3c96ef002372046ee6a6111f8e814f892 100644 --- a/crates/agent_ui/src/acp/entry_view_state.rs +++ b/crates/agent_ui/src/acp/entry_view_state.rs @@ -4,7 +4,7 @@ use acp_thread::{AcpThread, AgentThreadEntry}; use agent::HistoryStore; use agent_client_protocol::{self as acp, ToolCallId}; use collections::HashMap; -use editor::{Editor, EditorMode, MinimapVisibility}; +use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior}; use gpui::{ AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable, ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window, @@ -357,7 +357,7 @@ fn create_editor_diff( EditorMode::Full { scale_ui_elements_with_buffer_font_size: false, show_active_line_background: false, - sized_by_content: true, + sizing_behavior: SizingBehavior::SizeByContent, }, diff.read(cx).multibuffer().clone(), None, diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index a4b3106fa9d9ded053ff2f33b720ec3b10512d01..b19e0434be054b63676e2c5623c6f305b7a7c472 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -17,7 +17,9 @@ use client::zed_urls; use cloud_llm_client::PlanV1; use collections::{HashMap, HashSet}; use editor::scroll::Autoscroll; -use editor::{Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects}; +use editor::{ + Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects, SizingBehavior, +}; use file_icons::FileIcons; use fs::Fs; use futures::FutureExt as _; @@ -881,6 +883,7 @@ impl AcpThreadView { cx: &mut Context, ) { self.set_editor_is_expanded(!self.editor_expanded, cx); + cx.stop_propagation(); cx.notify(); } @@ -892,7 +895,7 @@ impl AcpThreadView { EditorMode::Full { scale_ui_elements_with_buffer_font_size: false, show_active_line_background: false, - sized_by_content: false, + sizing_behavior: SizingBehavior::ExcludeOverscrollMargin, }, 
cx, ) @@ -3631,6 +3634,7 @@ impl AcpThreadView { .child( h_flex() .id("edits-container") + .cursor_pointer() .gap_1() .child(Disclosure::new("edits-disclosure", expanded)) .map(|this| { @@ -3770,6 +3774,7 @@ impl AcpThreadView { Label::new(name.to_string()) .size(LabelSize::XSmall) .buffer_font(cx) + .ml_1p5() }); let file_icon = FileIcons::get_icon(path.as_std_path(), cx) @@ -3801,14 +3806,30 @@ impl AcpThreadView { }) .child( h_flex() + .id(("file-name-row", index)) .relative() - .id(("file-name", index)) .pr_8() - .gap_1p5() .w_full() .overflow_x_scroll() - .child(file_icon) - .child(h_flex().gap_0p5().children(file_name).children(file_path)) + .child( + h_flex() + .id(("file-name-path", index)) + .cursor_pointer() + .pr_0p5() + .gap_0p5() + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .rounded_xs() + .child(file_icon) + .children(file_name) + .children(file_path) + .tooltip(Tooltip::text("Go to File")) + .on_click({ + let buffer = buffer.clone(); + cx.listener(move |this, _, window, cx| { + this.open_edited_buffer(&buffer, window, cx); + }) + }), + ) .child( div() .absolute() @@ -3818,13 +3839,7 @@ impl AcpThreadView { .bottom_0() .right_0() .bg(overlay_gradient), - ) - .on_click({ - let buffer = buffer.clone(); - cx.listener(move |this, _, window, cx| { - this.open_edited_buffer(&buffer, window, cx); - }) - }), + ), ) .child( h_flex() @@ -3966,8 +3981,12 @@ impl AcpThreadView { ) } }) - .on_click(cx.listener(|_, _, window, cx| { - window.dispatch_action(Box::new(ExpandMessageEditor), cx); + .on_click(cx.listener(|this, _, window, cx| { + this.expand_message_editor( + &ExpandMessageEditor, + window, + cx, + ); })), ), ), diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 61f8ee60a794cbd6622759a89efb6f40c8f1503d..cf154f3e95e25e5058e43f693e14a032ac04826a 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -23,16 +23,17 @@ use 
language::LanguageRegistry; use language_model::{ LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID, }; +use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME}, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; -use rope::Rope; -use settings::{SettingsStore, update_settings_file}; +use settings::{Settings, SettingsStore, update_settings_file}; use ui::{ - Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, - Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*, + Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, + ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch, + SwitchColor, Tooltip, WithScrollbar, prelude::*, }; use util::ResultExt as _; use workspace::{Workspace, create_and_open_local_file}; @@ -304,10 +305,76 @@ impl AgentConfiguration { } })), ) - }), + }) + .when( + is_expanded && is_removable_provider(&provider.id(), cx), + |this| { + this.child( + Button::new( + SharedString::from(format!("delete-provider-{provider_id}")), + "Remove Provider", + ) + .full_width() + .style(ButtonStyle::Outlined) + .icon_position(IconPosition::Start) + .icon(IconName::Trash) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .label_size(LabelSize::Small) + .on_click(cx.listener({ + let provider = provider.clone(); + move |this, _event, window, cx| { + this.delete_provider(provider.clone(), window, cx); + } + })), + ) + }, + ), ) } + fn delete_provider( + &mut self, + provider: Arc, + window: &mut Window, + cx: &mut Context, + ) { + let fs = self.fs.clone(); + let provider_id = provider.id(); + + cx.spawn_in(window, async move |_, cx| { + cx.update(|_window, cx| { + 
update_settings_file(fs.clone(), cx, { + let provider_id = provider_id.clone(); + move |settings, _| { + if let Some(ref mut openai_compatible) = settings + .language_models + .as_mut() + .and_then(|lm| lm.openai_compatible.as_mut()) + { + let key_to_remove: Arc = Arc::from(provider_id.0.as_ref()); + openai_compatible.remove(&key_to_remove); + } + } + }); + }) + .log_err(); + + cx.update(|_window, cx| { + LanguageModelRegistry::global(cx).update(cx, { + let provider_id = provider_id.clone(); + move |registry, cx| { + registry.unregister_provider(provider_id, cx); + } + }) + }) + .log_err(); + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + fn render_provider_configuration_section( &mut self, cx: &mut Context, @@ -1115,11 +1182,8 @@ async fn open_new_agent_servers_entry_in_settings_editor( ) -> Result<()> { let settings_editor = workspace .update_in(cx, |_, window, cx| { - create_and_open_local_file(paths::settings_file(), window, cx, |cx| { - Rope::from_str( - &settings::initial_user_settings_content(), - cx.background_executor(), - ) + create_and_open_local_file(paths::settings_file(), window, cx, || { + settings::initial_user_settings_content().as_ref().into() }) })? .await? @@ -1225,3 +1289,14 @@ fn find_text_in_buffer( None } } + +// OpenAI-compatible providers are user-configured and can be removed, +// whereas built-in providers (like Anthropic, OpenAI, Google, etc.) can't. +// +// If in the future we have more "API-compatible-type" of providers, +// they should be included here as removable providers. 
+fn is_removable_provider(provider_id: &LanguageModelProviderId, cx: &App) -> bool { + AllLanguageModelSettings::get_global(cx) + .openai_compatible + .contains_key(provider_id.0.as_ref()) +} diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index a0f117b0bf30abee9d2182cf8c3fadd10099b1f0..63eb2ac49731a5e57b4eae5bf33b821b2e223c25 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -70,14 +70,6 @@ impl AgentDiffThread { } } - fn is_generating(&self, cx: &App) -> bool { - match self { - AgentDiffThread::AcpThread(thread) => { - thread.read(cx).status() == acp_thread::ThreadStatus::Generating - } - } - } - fn has_pending_edit_tool_uses(&self, cx: &App) -> bool { match self { AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(), @@ -970,9 +962,7 @@ impl AgentDiffToolbar { None => ToolbarItemLocation::Hidden, Some(AgentDiffToolbarItem::Pane(_)) => ToolbarItemLocation::PrimaryRight, Some(AgentDiffToolbarItem::Editor { state, .. 
}) => match state { - EditorState::Generating | EditorState::Reviewing => { - ToolbarItemLocation::PrimaryRight - } + EditorState::Reviewing => ToolbarItemLocation::PrimaryRight, EditorState::Idle => ToolbarItemLocation::Hidden, }, } @@ -1050,7 +1040,6 @@ impl Render for AgentDiffToolbar { let content = match state { EditorState::Idle => return Empty.into_any(), - EditorState::Generating => vec![spinner_icon], EditorState::Reviewing => vec![ h_flex() .child( @@ -1222,7 +1211,6 @@ pub struct AgentDiff { pub enum EditorState { Idle, Reviewing, - Generating, } struct WorkspaceThread { @@ -1545,15 +1533,11 @@ impl AgentDiff { multibuffer.add_diff(diff_handle.clone(), cx); }); - let new_state = if thread.is_generating(cx) { - EditorState::Generating - } else { - EditorState::Reviewing - }; + let reviewing_state = EditorState::Reviewing; let previous_state = self .reviewing_editors - .insert(weak_editor.clone(), new_state.clone()); + .insert(weak_editor.clone(), reviewing_state.clone()); if previous_state.is_none() { editor.update(cx, |editor, cx| { @@ -1566,7 +1550,9 @@ impl AgentDiff { unaffected.remove(weak_editor); } - if new_state == EditorState::Reviewing && previous_state != Some(new_state) { + if reviewing_state == EditorState::Reviewing + && previous_state != Some(reviewing_state) + { // Jump to first hunk when we enter review mode editor.update(cx, |editor, cx| { let snapshot = multibuffer.read(cx).snapshot(cx); diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 173059ee535d4417cd0ff493842d889559b85ef4..b9aff018dbb520ad524e182800ffe057cbf9305a 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -19,7 +19,6 @@ use settings::{ use zed_actions::OpenBrowser; use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent}; -use crate::acp::{AcpThreadHistory, ThreadHistoryEvent}; use crate::context_store::ContextStore; use crate::ui::{AcpOnboardingModal, 
ClaudeCodeOnboardingModal}; use crate::{ @@ -33,6 +32,10 @@ use crate::{ text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate}, ui::{AgentOnboardingModal, EndTrialUpsell}, }; +use crate::{ + ExpandMessageEditor, + acp::{AcpThreadHistory, ThreadHistoryEvent}, +}; use crate::{ ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command, }; @@ -106,6 +109,12 @@ pub fn init(cx: &mut App) { } }, ) + .register_action(|workspace, _: &ExpandMessageEditor, window, cx| { + if let Some(panel) = workspace.panel::(cx) { + workspace.focus_panel::(window, cx); + panel.update(cx, |panel, cx| panel.expand_message_editor(window, cx)); + } + }) .register_action(|workspace, _: &OpenHistory, window, cx| { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); @@ -944,6 +953,15 @@ impl AgentPanel { .detach_and_log_err(cx); } + fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context) { + if let Some(thread_view) = self.active_thread_view() { + thread_view.update(cx, |view, cx| { + view.expand_message_editor(&ExpandMessageEditor, window, cx); + view.focus_handle(cx).focus(window); + }); + } + } + fn open_history(&mut self, window: &mut Window, cx: &mut Context) { if matches!(self.active_view, ActiveView::History) { if let Some(previous_view) = self.previous_view.take() { diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index f9269e0bb62160633dc991b147d1d779a517e2e8..215e2a74d7be9cbcb18442dcefa1581d08eec7b2 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -487,10 +487,9 @@ impl CodegenAlternative { ) { let start_time = Instant::now(); let snapshot = self.snapshot.clone(); - let selected_text = Rope::from_iter( - snapshot.text_for_range(self.range.start..self.range.end), - cx.background_executor(), - ); + let selected_text = snapshot + .text_for_range(self.range.start..self.range.end) + 
.collect::(); let selection_start = self.range.start.to_point(&snapshot); diff --git a/crates/agent_ui/src/context.rs b/crates/agent_ui/src/context.rs index 2a1ff4a1d9d3e0bb6c8b128cf7f944e9ed3ff657..022f4e4d2ff4ce79aa17efce241b84f1a0640ae3 100644 --- a/crates/agent_ui/src/context.rs +++ b/crates/agent_ui/src/context.rs @@ -620,8 +620,18 @@ impl TextThreadContextHandle { impl Display for TextThreadContext { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - // TODO: escape title? - writeln!(f, "", self.title)?; + write!(f, " write!(f, "&")?, + '<' => write!(f, "<")?, + '>' => write!(f, ">")?, + '"' => write!(f, """)?, + '\'' => write!(f, "'")?, + _ => write!(f, "{}", c)?, + } + } + writeln!(f, "\">")?; write!(f, "{}", self.text.trim())?; write!(f, "\n") } diff --git a/crates/assistant_text_thread/src/text_thread.rs b/crates/assistant_text_thread/src/text_thread.rs index ddc8912aef5c08ecb9406cc27fbcdf5418ec48e2..9ad383cdfd43eed236268349e2ff97c34a0178c0 100644 --- a/crates/assistant_text_thread/src/text_thread.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -744,13 +744,12 @@ impl TextThread { telemetry: Option>, cx: &mut Context, ) -> Self { - let buffer = cx.new(|cx| { + let buffer = cx.new(|_cx| { let buffer = Buffer::remote( language::BufferId::new(1).unwrap(), replica_id, capability, "", - cx.background_executor(), ); buffer.set_language_registry(language_registry.clone()); buffer diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 08db9f8a97bb0783da987f84991ad1aaa62c2141..630be043dca120ca76b2552f0a729a03a684f934 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -26,6 +26,7 @@ serde_json.workspace = true settings.workspace = true smol.workspace = true tempfile.workspace = true +util.workspace = true workspace.workspace = true [target.'cfg(not(target_os = "windows"))'.dependencies] diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 
9f93dd27900e4b90de8c6d61d41b3b6c287eaaf0..331a58414958a48feaad70babee2dc2ea3b730e0 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -962,7 +962,7 @@ pub async fn finalize_auto_update_on_quit() { .parent() .map(|p| p.join("tools").join("auto_update_helper.exe")) { - let mut command = smol::process::Command::new(helper); + let mut command = util::command::new_smol_command(helper); command.arg("--launch"); command.arg("false"); if let Ok(mut cmd) = command.spawn() { diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index 08c0915c58ae50741238574cec5b6f2474d06eb8..7664de3c87673a405118911526cb6606a2fecacf 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -100,13 +100,21 @@ impl Render for Breadcrumbs { let breadcrumbs_stack = h_flex().gap_1().children(breadcrumbs); + let prefix_element = active_item.breadcrumb_prefix(window, cx); + + let breadcrumbs = if let Some(prefix) = prefix_element { + h_flex().gap_1p5().child(prefix).child(breadcrumbs_stack) + } else { + breadcrumbs_stack + }; + match active_item .downcast::() .map(|editor| editor.downgrade()) { Some(editor) => element.child( ButtonLike::new("toggle outline view") - .child(breadcrumbs_stack) + .child(breadcrumbs) .style(ButtonStyle::Transparent) .on_click({ let editor = editor.clone(); @@ -141,7 +149,7 @@ impl Render for Breadcrumbs { // Match the height and padding of the `ButtonLike` in the other arm. 
.h(rems_from_px(22.)) .pl_1() - .child(breadcrumbs_stack), + .child(breadcrumbs), } } } diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index b8ce85b6db25fdcad21245b41e4979ef61220485..d6ae5545200bb47976554814e346be3039fa276e 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1,9 +1,6 @@ use futures::channel::oneshot; use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; -use gpui::{ - App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, - TaskLabel, -}; +use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel}; use language::{Language, LanguageRegistry}; use rope::Rope; use std::{ @@ -194,7 +191,7 @@ impl BufferDiffSnapshot { let base_text_exists; let base_text_snapshot; if let Some(text) = &base_text { - let base_text_rope = Rope::from_str(text.as_str(), cx.background_executor()); + let base_text_rope = Rope::from(text.as_str()); base_text_pair = Some((text.clone(), base_text_rope.clone())); let snapshot = language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx); @@ -314,7 +311,6 @@ impl BufferDiffInner { hunks: &[DiffHunk], buffer: &text::BufferSnapshot, file_exists: bool, - cx: &BackgroundExecutor, ) -> Option { let head_text = self .base_text_exists @@ -509,7 +505,7 @@ impl BufferDiffInner { for (old_range, replacement_text) in edits { new_index_text.append(index_cursor.slice(old_range.start)); index_cursor.seek_forward(old_range.end); - new_index_text.push(&replacement_text, cx); + new_index_text.push(&replacement_text); } new_index_text.append(index_cursor.suffix()); Some(new_index_text) @@ -966,7 +962,6 @@ impl BufferDiff { hunks, buffer, file_exists, - cx.background_executor(), ); cx.emit(BufferDiffEvent::HunksStagedOrUnstaged( @@ -1390,12 +1385,7 @@ mod tests { " .unindent(); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - 
BufferId::new(1).unwrap(), - buffer_text, - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), @@ -1404,7 +1394,7 @@ mod tests { &[(1..2, "two\n", "HELLO\n", DiffHunkStatus::modified_none())], ); - buffer.edit([(0..0, "point five\n")], cx.background_executor()); + buffer.edit([(0..0, "point five\n")]); diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), @@ -1469,12 +1459,7 @@ mod tests { " .unindent(); - let buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - buffer_text, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let mut uncommitted_diff = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); @@ -1543,12 +1528,7 @@ mod tests { " .unindent(); - let buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - buffer_text, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let diff = cx .update(|cx| { BufferDiffSnapshot::new_with_base_text( @@ -1811,12 +1791,7 @@ mod tests { for example in table { let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false); - let buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - buffer_text, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let hunk_range = buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end); @@ -1893,7 +1868,6 @@ mod tests { ReplicaId::LOCAL, BufferId::new(1).unwrap(), 
buffer_text.clone(), - cx.background_executor(), ); let unstaged = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); @@ -1967,12 +1941,7 @@ mod tests { " .unindent(); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - buffer_text_1, - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text_1); let empty_diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx)); let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); @@ -1992,7 +1961,6 @@ mod tests { NINE " .unindent(), - cx.background_executor(), ); let diff_2 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); assert_eq!(None, diff_2.inner.compare(&diff_1.inner, &buffer)); @@ -2010,7 +1978,6 @@ mod tests { NINE " .unindent(), - cx.background_executor(), ); let diff_3 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); let range = diff_3.inner.compare(&diff_2.inner, &buffer).unwrap(); @@ -2028,7 +1995,6 @@ mod tests { NINE " .unindent(), - cx.background_executor(), ); let diff_4 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); let range = diff_4.inner.compare(&diff_3.inner, &buffer).unwrap(); @@ -2047,7 +2013,6 @@ mod tests { NINE " .unindent(), - cx.background_executor(), ); let diff_5 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text.clone(), cx); let range = diff_5.inner.compare(&diff_4.inner, &buffer).unwrap(); @@ -2066,7 +2031,6 @@ mod tests { «nine» " .unindent(), - cx.background_executor(), ); let diff_6 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text, cx); let range = diff_6.inner.compare(&diff_5.inner, &buffer).unwrap(); @@ -2176,14 +2140,14 @@ mod tests { let working_copy = gen_working_copy(rng, &head_text); let working_copy = cx.new(|cx| { language::Buffer::local_normalized( - 
Rope::from_str(working_copy.as_str(), cx.background_executor()), + Rope::from(working_copy.as_str()), text::LineEnding::default(), cx, ) }); let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot()); let mut index_text = if rng.random() { - Rope::from_str(head_text.as_str(), cx.background_executor()) + Rope::from(head_text.as_str()) } else { working_copy.as_rope().clone() }; diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 0e59ccedf5e8e0767eb9be56608eb433d63d1bf4..efa0850753887c2116ee7916727a870a3528b627 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -70,7 +70,6 @@ impl ChannelBuffer { ReplicaId::new(response.replica_id as u16), capability, base_text, - cx.background_executor(), ) })?; buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?; diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index fb457abcd46cf32b4a34d87637011b307bbacf9d..6c4cd58d132bdeaaa791f4da8406e0e6d9052981 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -701,12 +701,12 @@ impl Database { return Ok(()); } - let mut text_buffer = text::Buffer::new_slow( + let mut text_buffer = text::Buffer::new( clock::ReplicaId::LOCAL, text::BufferId::new(1).unwrap(), base_text, ); - text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire), None); + text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); let base_text = text_buffer.text(); let epoch = buffer.epoch + 1; diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 82310331ffc864d4bba942f3924dcc644427891b..4eae7a54cba4a906351f05e5945cff5691fd1126 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -74,21 +74,11 @@ async fn test_channel_buffers(db: &Arc) { ReplicaId::new(0), 
text::BufferId::new(1).unwrap(), "".to_string(), - &db.test_options.as_ref().unwrap().executor, ); let operations = vec![ - buffer_a.edit( - [(0..0, "hello world")], - &db.test_options.as_ref().unwrap().executor, - ), - buffer_a.edit( - [(5..5, ", cruel")], - &db.test_options.as_ref().unwrap().executor, - ), - buffer_a.edit( - [(0..5, "goodbye")], - &db.test_options.as_ref().unwrap().executor, - ), + buffer_a.edit([(0..0, "hello world")]), + buffer_a.edit([(5..5, ", cruel")]), + buffer_a.edit([(0..5, "goodbye")]), buffer_a.undo().unwrap().1, ]; assert_eq!(buffer_a.text(), "hello, cruel world"); @@ -112,19 +102,15 @@ async fn test_channel_buffers(db: &Arc) { ReplicaId::new(0), text::BufferId::new(1).unwrap(), buffer_response_b.base_text, - &db.test_options.as_ref().unwrap().executor, - ); - buffer_b.apply_ops( - buffer_response_b.operations.into_iter().map(|operation| { - let operation = proto::deserialize_operation(operation).unwrap(); - if let language::Operation::Buffer(operation) = operation { - operation - } else { - unreachable!() - } - }), - None, ); + buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })); assert_eq!(buffer_b.text(), "hello, cruel world"); @@ -261,7 +247,6 @@ async fn test_channel_buffers_last_operations(db: &Database) { ReplicaId::new(res.replica_id as u16), text::BufferId::new(1).unwrap(), "".to_string(), - &db.test_options.as_ref().unwrap().executor, )); } @@ -270,9 +255,9 @@ async fn test_channel_buffers_last_operations(db: &Database) { user_id, db, vec![ - text_buffers[0].edit([(0..0, "a")], &db.test_options.as_ref().unwrap().executor), - text_buffers[0].edit([(0..0, "b")], &db.test_options.as_ref().unwrap().executor), - text_buffers[0].edit([(0..0, "c")], &db.test_options.as_ref().unwrap().executor), + text_buffers[0].edit([(0..0, 
"a")]), + text_buffers[0].edit([(0..0, "b")]), + text_buffers[0].edit([(0..0, "c")]), ], ) .await; @@ -282,9 +267,9 @@ async fn test_channel_buffers_last_operations(db: &Database) { user_id, db, vec![ - text_buffers[1].edit([(0..0, "d")], &db.test_options.as_ref().unwrap().executor), - text_buffers[1].edit([(1..1, "e")], &db.test_options.as_ref().unwrap().executor), - text_buffers[1].edit([(2..2, "f")], &db.test_options.as_ref().unwrap().executor), + text_buffers[1].edit([(0..0, "d")]), + text_buffers[1].edit([(1..1, "e")]), + text_buffers[1].edit([(2..2, "f")]), ], ) .await; @@ -301,15 +286,14 @@ async fn test_channel_buffers_last_operations(db: &Database) { replica_id, text::BufferId::new(1).unwrap(), "def".to_string(), - &db.test_options.as_ref().unwrap().executor, ); update_buffer( buffers[1].channel_id, user_id, db, vec![ - text_buffers[1].edit([(0..0, "g")], &db.test_options.as_ref().unwrap().executor), - text_buffers[1].edit([(0..0, "h")], &db.test_options.as_ref().unwrap().executor), + text_buffers[1].edit([(0..0, "g")]), + text_buffers[1].edit([(0..0, "h")]), ], ) .await; @@ -318,7 +302,7 @@ async fn test_channel_buffers_last_operations(db: &Database) { buffers[2].channel_id, user_id, db, - vec![text_buffers[2].edit([(0..0, "i")], &db.test_options.as_ref().unwrap().executor)], + vec![text_buffers[2].edit([(0..0, "i")])], ) .await; diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 73fdd8da8890d62f7da39f944edfe333d2c983aa..bdc024aaca7242ab0fe261e3b673bf4d0efe23b1 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -39,6 +39,7 @@ use std::{ Arc, atomic::{self, AtomicBool, AtomicUsize}, }, + time::Duration, }; use text::Point; use util::{path, rel_path::rel_path, uri}; @@ -1817,14 +1818,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( settings.project.all_languages.defaults.inlay_hints = Some(InlayHintSettingsContent { enabled: Some(true), - 
show_value_hints: Some(true), - edit_debounce_ms: Some(0), - scroll_debounce_ms: Some(0), - show_type_hints: Some(true), - show_parameter_hints: Some(false), - show_other_hints: Some(true), - show_background: Some(false), - toggle_on_modifiers_press: None, + ..InlayHintSettingsContent::default() }) }); }); @@ -1834,15 +1828,8 @@ async fn test_mutual_editor_inlay_hint_cache_update( store.update_user_settings(cx, |settings| { settings.project.all_languages.defaults.inlay_hints = Some(InlayHintSettingsContent { - show_value_hints: Some(true), enabled: Some(true), - edit_debounce_ms: Some(0), - scroll_debounce_ms: Some(0), - show_type_hints: Some(true), - show_parameter_hints: Some(false), - show_other_hints: Some(true), - show_background: Some(false), - toggle_on_modifiers_press: None, + ..InlayHintSettingsContent::default() }) }); }); @@ -1935,6 +1922,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( }); let fake_language_server = fake_language_servers.next().await.unwrap(); let editor_a = file_a.await.unwrap().downcast::().unwrap(); + executor.advance_clock(Duration::from_millis(100)); executor.run_until_parked(); let initial_edit = edits_made.load(atomic::Ordering::Acquire); @@ -1955,6 +1943,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .downcast::() .unwrap(); + executor.advance_clock(Duration::from_millis(100)); executor.run_until_parked(); editor_b.update(cx_b, |editor, cx| { assert_eq!( @@ -1973,6 +1962,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( }); cx_b.focus(&editor_b); + executor.advance_clock(Duration::from_secs(1)); executor.run_until_parked(); editor_a.update(cx_a, |editor, cx| { assert_eq!( @@ -1996,6 +1986,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( }); cx_a.focus(&editor_a); + executor.advance_clock(Duration::from_secs(1)); executor.run_until_parked(); editor_a.update(cx_a, |editor, cx| { assert_eq!( @@ -2017,6 +2008,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .into_response() 
.expect("inlay refresh request failed"); + executor.advance_clock(Duration::from_secs(1)); executor.run_until_parked(); editor_a.update(cx_a, |editor, cx| { assert_eq!( diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 37e6622b0343bca9ae6b9179c830071999bf51df..4fa32b6c9ba55e6962547510f52251f16fc9be81 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -3694,7 +3694,7 @@ async fn test_buffer_reloading( assert_eq!(buf.line_ending(), LineEnding::Unix); }); - let new_contents = Rope::from_str_small("d\ne\nf"); + let new_contents = Rope::from("d\ne\nf"); client_a .fs() .save( @@ -4479,7 +4479,7 @@ async fn test_reloading_buffer_manually( .fs() .save( path!("/a/a.rs").as_ref(), - &Rope::from_str_small("let seven = 7;"), + &Rope::from("let seven = 7;"), LineEnding::Unix, ) .await diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 399f1a663fe72798a4269804955dcfd3678c5cca..7e9b84c0571ed6dff19702ce3532c45d56f6413f 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -27,7 +27,6 @@ use std::{ rc::Rc, sync::Arc, }; -use text::Rope; use util::{ ResultExt, path, paths::PathStyle, @@ -939,11 +938,7 @@ impl RandomizedTest for ProjectCollaborationTest { client .fs() - .save( - &path, - &Rope::from_str_small(content.as_str()), - text::LineEnding::Unix, - ) + .save(&path, &content.as_str().into(), text::LineEnding::Unix) .await .unwrap(); } diff --git a/crates/dap_adapters/src/dap_adapters.rs b/crates/dap_adapters/src/dap_adapters.rs index d8a706ba414af2c9e0beb1cffe8357bcece1dc52..2ab9cabc198c4b036301cb92e1f544ae640b898d 100644 --- a/crates/dap_adapters/src/dap_adapters.rs +++ b/crates/dap_adapters/src/dap_adapters.rs @@ -42,61 +42,63 @@ pub fn init(cx: &mut App) { } #[cfg(test)] 
-struct MockDelegate { - worktree_root: PathBuf, -} +mod test_mocks { + use super::*; -#[cfg(test)] -impl MockDelegate { - fn new() -> Arc { - Arc::new(Self { - worktree_root: PathBuf::from("/tmp/test"), - }) + pub(crate) struct MockDelegate { + worktree_root: PathBuf, } -} -#[cfg(test)] -#[async_trait::async_trait] -impl adapters::DapDelegate for MockDelegate { - fn worktree_id(&self) -> settings::WorktreeId { - settings::WorktreeId::from_usize(0) + impl MockDelegate { + pub(crate) fn new() -> Arc { + Arc::new(Self { + worktree_root: PathBuf::from("/tmp/test"), + }) + } } - fn worktree_root_path(&self) -> &std::path::Path { - &self.worktree_root - } + #[async_trait::async_trait] + impl adapters::DapDelegate for MockDelegate { + fn worktree_id(&self) -> settings::WorktreeId { + settings::WorktreeId::from_usize(0) + } - fn http_client(&self) -> Arc { - unimplemented!("Not needed for tests") - } + fn worktree_root_path(&self) -> &std::path::Path { + &self.worktree_root + } - fn node_runtime(&self) -> node_runtime::NodeRuntime { - unimplemented!("Not needed for tests") - } + fn http_client(&self) -> Arc { + unimplemented!("Not needed for tests") + } - fn toolchain_store(&self) -> Arc { - unimplemented!("Not needed for tests") - } + fn node_runtime(&self) -> node_runtime::NodeRuntime { + unimplemented!("Not needed for tests") + } - fn fs(&self) -> Arc { - unimplemented!("Not needed for tests") - } + fn toolchain_store(&self) -> Arc { + unimplemented!("Not needed for tests") + } - fn output_to_console(&self, _msg: String) {} + fn fs(&self) -> Arc { + unimplemented!("Not needed for tests") + } - async fn which(&self, _command: &std::ffi::OsStr) -> Option { - None - } + fn output_to_console(&self, _msg: String) {} - async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result { - Ok(String::new()) - } + async fn which(&self, _command: &std::ffi::OsStr) -> Option { + None + } - async fn shell_env(&self) -> collections::HashMap { - 
collections::HashMap::default() - } + async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result { + Ok(String::new()) + } - fn is_headless(&self) -> bool { - false + async fn shell_env(&self) -> collections::HashMap { + collections::HashMap::default() + } + + fn is_headless(&self) -> bool { + false + } } } diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index e718f66c78099044baed837da0ddc7bfa96ffa1c..4d81e5ba851305ae3adc2ee0a6ab6a29f43edd62 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -824,29 +824,58 @@ impl DebugAdapter for PythonDebugAdapter { .await; } - let base_path = config - .config - .get("cwd") - .and_then(|cwd| { - RelPath::new( - cwd.as_str() - .map(Path::new)? - .strip_prefix(delegate.worktree_root_path()) - .ok()?, - PathStyle::local(), - ) - .ok() + let base_paths = ["cwd", "program", "module"] + .into_iter() + .filter_map(|key| { + config.config.get(key).and_then(|cwd| { + RelPath::new( + cwd.as_str() + .map(Path::new)? + .strip_prefix(delegate.worktree_root_path()) + .ok()?, + PathStyle::local(), + ) + .ok() + }) }) - .unwrap_or_else(|| RelPath::empty().into()); - let toolchain = delegate - .toolchain_store() - .active_toolchain( - delegate.worktree_id(), - base_path.into_arc(), - language::LanguageName::new(Self::LANGUAGE_NAME), - cx, + .chain( + // While Debugpy's wiki saids absolute paths are required, but it actually supports relative paths when cwd is passed in. + // (Which should always be the case because Zed defaults to the cwd worktree root) + // So we want to check that these relative paths find toolchains as well. 
Otherwise, they won't be checked + // because the strip prefix in the iteration above will return an error + config + .config + .get("cwd") + .map(|_| { + ["program", "module"].into_iter().filter_map(|key| { + config.config.get(key).and_then(|value| { + let path = Path::new(value.as_str()?); + RelPath::new(path, PathStyle::local()).ok() + }) + }) + }) + .into_iter() + .flatten(), ) - .await; + .chain([RelPath::empty().into()]); + + let mut toolchain = None; + + for base_path in base_paths { + if let Some(found_toolchain) = delegate + .toolchain_store() + .active_toolchain( + delegate.worktree_id(), + base_path.into_arc(), + language::LanguageName::new(Self::LANGUAGE_NAME), + cx, + ) + .await + { + toolchain = Some(found_toolchain); + break; + } + } self.fetch_debugpy_whl(toolchain.clone(), delegate) .await @@ -914,7 +943,7 @@ mod tests { let result = adapter .get_installed_binary( - &MockDelegate::new(), + &test_mocks::MockDelegate::new(), &task_def, None, None, @@ -955,7 +984,7 @@ mod tests { let result_host = adapter .get_installed_binary( - &MockDelegate::new(), + &test_mocks::MockDelegate::new(), &task_def_host, None, None, diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index 2d01a325a2b0056bfbf42e519a79a4ec199c4a9d..e157d832b440b8016f152c88b376a9418ee3c843 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -6,7 +6,10 @@ use alacritty_terminal::vte::ansi; use anyhow::Result; use collections::HashMap; use dap::{CompletionItem, CompletionItemType, OutputEvent}; -use editor::{Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId}; +use editor::{ + Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId, + SizingBehavior, +}; use fuzzy::StringMatchCandidate; use gpui::{ Action as _, AppContext, Context, Corner, Entity, FocusHandle, Focusable, HighlightStyle, Hsla, @@ -59,6 +62,11 @@ 
impl Console { ) -> Self { let console = cx.new(|cx| { let mut editor = Editor::multi_line(window, cx); + editor.set_mode(EditorMode::Full { + scale_ui_elements_with_buffer_font_size: true, + show_active_line_background: true, + sizing_behavior: SizingBehavior::ExcludeOverscrollMargin, + }); editor.move_to_end(&editor::actions::MoveToEnd, window, cx); editor.set_read_only(true); editor.disable_scrollbars_and_minimap(window, cx); diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index 5bb6892f0cea9500fd66671f8e8e86ab9a6d901a..0eccf44c357125e5e11fcdccda8280c22006c6fa 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -34,6 +34,7 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true +itertools.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index 1205cef385fdd91af8e3f986b432b9fff4ad3ac6..8fe503a706027fb6ed2f0b9114450eb79c2aa027 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -1,5 +1,5 @@ use crate::{ - DIAGNOSTICS_UPDATE_DELAY, IncludeWarnings, ToggleWarnings, context_range_for_entry, + DIAGNOSTICS_UPDATE_DEBOUNCE, IncludeWarnings, ToggleWarnings, context_range_for_entry, diagnostic_renderer::{DiagnosticBlock, DiagnosticRenderer}, toolbar_controls::DiagnosticsToolbarEditor, }; @@ -283,7 +283,7 @@ impl BufferDiagnosticsEditor { self.update_excerpts_task = Some(cx.spawn_in(window, async move |editor, cx| { cx.background_executor() - .timer(DIAGNOSTICS_UPDATE_DELAY) + .timer(DIAGNOSTICS_UPDATE_DEBOUNCE) .await; if let Some(buffer) = buffer { @@ -938,10 +938,6 @@ impl DiagnosticsToolbarEditor for WeakEntity { .unwrap_or(false) } - fn has_stale_excerpts(&self, _cx: &App) -> bool { - false - } - fn is_updating(&self, cx: &App) -> bool { self.read_with(cx, 
|buffer_diagnostics_editor, cx| { buffer_diagnostics_editor.update_excerpts_task.is_some() diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 5a43fd135391a5e3d97d5c65e6d3be826210f102..5506cdba9ae4aaa7fbf1246aaa7b07e653ad0efc 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -9,7 +9,7 @@ mod diagnostics_tests; use anyhow::Result; use buffer_diagnostics::BufferDiagnosticsEditor; -use collections::{BTreeSet, HashMap}; +use collections::{BTreeSet, HashMap, HashSet}; use diagnostic_renderer::DiagnosticBlock; use editor::{ Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, @@ -17,10 +17,11 @@ use editor::{ multibuffer_context_lines, }; use gpui::{ - AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable, - Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, - Subscription, Task, WeakEntity, Window, actions, div, + AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, FocusOutEvent, + Focusable, Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString, + Styled, Subscription, Task, WeakEntity, Window, actions, div, }; +use itertools::Itertools as _; use language::{ Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, DiagnosticEntryRef, Point, ToTreeSitterPoint, @@ -32,7 +33,7 @@ use project::{ use settings::Settings; use std::{ any::{Any, TypeId}, - cmp::{self, Ordering}, + cmp, ops::{Range, RangeInclusive}, sync::Arc, time::Duration, @@ -89,8 +90,8 @@ pub(crate) struct ProjectDiagnosticsEditor { impl EventEmitter for ProjectDiagnosticsEditor {} -const DIAGNOSTICS_UPDATE_DELAY: Duration = Duration::from_millis(50); -const DIAGNOSTICS_SUMMARY_UPDATE_DELAY: Duration = Duration::from_millis(30); +const DIAGNOSTICS_UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); +const DIAGNOSTICS_SUMMARY_UPDATE_DEBOUNCE: Duration = Duration::from_millis(30); impl 
Render for ProjectDiagnosticsEditor { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { @@ -149,6 +150,12 @@ impl Render for ProjectDiagnosticsEditor { } } +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +enum RetainExcerpts { + Yes, + No, +} + impl ProjectDiagnosticsEditor { pub fn register( workspace: &mut Workspace, @@ -165,14 +172,21 @@ impl ProjectDiagnosticsEditor { window: &mut Window, cx: &mut Context, ) -> Self { - let project_event_subscription = - cx.subscribe_in(&project_handle, window, |this, _project, event, window, cx| match event { + let project_event_subscription = cx.subscribe_in( + &project_handle, + window, + |this, _project, event, window, cx| match event { project::Event::DiskBasedDiagnosticsStarted { .. } => { cx.notify(); } project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { log::debug!("disk based diagnostics finished for server {language_server_id}"); - this.update_stale_excerpts(window, cx); + this.close_diagnosticless_buffers( + window, + cx, + this.editor.focus_handle(cx).contains_focused(window, cx) + || this.focus_handle.contains_focused(window, cx), + ); } project::Event::DiagnosticsUpdated { language_server_id, @@ -181,34 +195,39 @@ impl ProjectDiagnosticsEditor { this.paths_to_update.extend(paths.clone()); this.diagnostic_summary_update = cx.spawn(async move |this, cx| { cx.background_executor() - .timer(DIAGNOSTICS_SUMMARY_UPDATE_DELAY) + .timer(DIAGNOSTICS_SUMMARY_UPDATE_DEBOUNCE) .await; this.update(cx, |this, cx| { this.update_diagnostic_summary(cx); }) .log_err(); }); - cx.emit(EditorEvent::TitleChanged); - if this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx) { - log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. recording change"); - } else { - log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. 
updating excerpts"); - this.update_stale_excerpts(window, cx); - } + log::debug!( + "diagnostics updated for server {language_server_id}, \ + paths {paths:?}. updating excerpts" + ); + let focused = this.editor.focus_handle(cx).contains_focused(window, cx) + || this.focus_handle.contains_focused(window, cx); + this.update_stale_excerpts( + if focused { + RetainExcerpts::Yes + } else { + RetainExcerpts::No + }, + window, + cx, + ); } _ => {} - }); + }, + ); let focus_handle = cx.focus_handle(); - cx.on_focus_in(&focus_handle, window, |this, window, cx| { - this.focus_in(window, cx) - }) - .detach(); - cx.on_focus_out(&focus_handle, window, |this, _event, window, cx| { - this.focus_out(window, cx) - }) - .detach(); + cx.on_focus_in(&focus_handle, window, Self::focus_in) + .detach(); + cx.on_focus_out(&focus_handle, window, Self::focus_out) + .detach(); let excerpts = cx.new(|cx| MultiBuffer::new(project_handle.read(cx).capability())); let editor = cx.new(|cx| { @@ -238,8 +257,11 @@ impl ProjectDiagnosticsEditor { window.focus(&this.focus_handle); } } - EditorEvent::Blurred => this.update_stale_excerpts(window, cx), - EditorEvent::Saved => this.update_stale_excerpts(window, cx), + EditorEvent::Blurred => this.close_diagnosticless_buffers(window, cx, false), + EditorEvent::Saved => this.close_diagnosticless_buffers(window, cx, true), + EditorEvent::SelectionsChanged { .. 
} => { + this.close_diagnosticless_buffers(window, cx, true) + } _ => {} } }, @@ -283,15 +305,67 @@ impl ProjectDiagnosticsEditor { this } - fn update_stale_excerpts(&mut self, window: &mut Window, cx: &mut Context) { - if self.update_excerpts_task.is_some() || self.multibuffer.read(cx).is_dirty(cx) { + /// Closes all excerpts of buffers that: + /// - have no diagnostics anymore + /// - are saved (not dirty) + /// - and, if `reatin_selections` is true, do not have selections within them + fn close_diagnosticless_buffers( + &mut self, + _window: &mut Window, + cx: &mut Context, + retain_selections: bool, + ) { + let buffer_ids = self.multibuffer.read(cx).all_buffer_ids(); + let selected_buffers = self.editor.update(cx, |editor, cx| { + editor + .selections + .all_anchors(cx) + .iter() + .filter_map(|anchor| anchor.start.buffer_id) + .collect::>() + }); + for buffer_id in buffer_ids { + if retain_selections && selected_buffers.contains(&buffer_id) { + continue; + } + let has_blocks = self + .blocks + .get(&buffer_id) + .is_none_or(|blocks| blocks.is_empty()); + if !has_blocks { + continue; + } + let is_dirty = self + .multibuffer + .read(cx) + .buffer(buffer_id) + .is_some_and(|buffer| buffer.read(cx).is_dirty()); + if !is_dirty { + continue; + } + self.multibuffer.update(cx, |b, cx| { + b.remove_excerpts_for_buffer(buffer_id, cx); + }); + } + } + + fn update_stale_excerpts( + &mut self, + mut retain_excerpts: RetainExcerpts, + window: &mut Window, + cx: &mut Context, + ) { + if self.update_excerpts_task.is_some() { return; } + if self.multibuffer.read(cx).is_dirty(cx) { + retain_excerpts = RetainExcerpts::Yes; + } let project_handle = self.project.clone(); self.update_excerpts_task = Some(cx.spawn_in(window, async move |this, cx| { cx.background_executor() - .timer(DIAGNOSTICS_UPDATE_DELAY) + .timer(DIAGNOSTICS_UPDATE_DEBOUNCE) .await; loop { let Some(path) = this.update(cx, |this, cx| { @@ -312,7 +386,7 @@ impl ProjectDiagnosticsEditor { .log_err() { 
this.update_in(cx, |this, window, cx| { - this.update_excerpts(buffer, window, cx) + this.update_excerpts(buffer, retain_excerpts, window, cx) })? .await?; } @@ -378,10 +452,10 @@ impl ProjectDiagnosticsEditor { } } - fn focus_out(&mut self, window: &mut Window, cx: &mut Context) { + fn focus_out(&mut self, _: FocusOutEvent, window: &mut Window, cx: &mut Context) { if !self.focus_handle.is_focused(window) && !self.editor.focus_handle(cx).is_focused(window) { - self.update_stale_excerpts(window, cx); + self.close_diagnosticless_buffers(window, cx, false); } } @@ -403,12 +477,13 @@ impl ProjectDiagnosticsEditor { }); } } + multibuffer.clear(cx); }); self.paths_to_update = project_paths; }); - self.update_stale_excerpts(window, cx); + self.update_stale_excerpts(RetainExcerpts::No, window, cx); } fn diagnostics_are_unchanged( @@ -431,6 +506,7 @@ impl ProjectDiagnosticsEditor { fn update_excerpts( &mut self, buffer: Entity, + retain_excerpts: RetainExcerpts, window: &mut Window, cx: &mut Context, ) -> Task> { @@ -497,24 +573,27 @@ impl ProjectDiagnosticsEditor { ) })?; - for item in more { - let i = blocks - .binary_search_by(|probe| { - probe - .initial_range - .start - .cmp(&item.initial_range.start) - .then(probe.initial_range.end.cmp(&item.initial_range.end)) - .then(Ordering::Greater) - }) - .unwrap_or_else(|i| i); - blocks.insert(i, item); - } + blocks.extend(more); } - let mut excerpt_ranges: Vec> = Vec::new(); + let mut excerpt_ranges: Vec> = match retain_excerpts { + RetainExcerpts::No => Vec::new(), + RetainExcerpts::Yes => this.update(cx, |this, cx| { + this.multibuffer.update(cx, |multi_buffer, cx| { + multi_buffer + .excerpts_for_buffer(buffer_id, cx) + .into_iter() + .map(|(_, range)| ExcerptRange { + context: range.context.to_point(&buffer_snapshot), + primary: range.primary.to_point(&buffer_snapshot), + }) + .collect() + }) + })?, + }; + let mut result_blocks = vec![None; excerpt_ranges.len()]; let context_lines = cx.update(|_, cx| 
multibuffer_context_lines(cx))?; - for b in blocks.iter() { + for b in blocks { let excerpt_range = context_range_for_entry( b.initial_range.clone(), context_lines, @@ -541,7 +620,8 @@ impl ProjectDiagnosticsEditor { context: excerpt_range, primary: b.initial_range.clone(), }, - ) + ); + result_blocks.insert(i, Some(b)); } this.update_in(cx, |this, window, cx| { @@ -562,7 +642,7 @@ impl ProjectDiagnosticsEditor { ) }); #[cfg(test)] - let cloned_blocks = blocks.clone(); + let cloned_blocks = result_blocks.clone(); if was_empty && let Some(anchor_range) = anchor_ranges.first() { let range_to_select = anchor_range.start..anchor_range.start; @@ -576,22 +656,20 @@ impl ProjectDiagnosticsEditor { } } - let editor_blocks = - anchor_ranges - .into_iter() - .zip(blocks.into_iter()) - .map(|(anchor, block)| { - let editor = this.editor.downgrade(); - BlockProperties { - placement: BlockPlacement::Near(anchor.start), - height: Some(1), - style: BlockStyle::Flex, - render: Arc::new(move |bcx| { - block.render_block(editor.clone(), bcx) - }), - priority: 1, - } - }); + let editor_blocks = anchor_ranges + .into_iter() + .zip_eq(result_blocks.into_iter()) + .filter_map(|(anchor, block)| { + let block = block?; + let editor = this.editor.downgrade(); + Some(BlockProperties { + placement: BlockPlacement::Near(anchor.start), + height: Some(1), + style: BlockStyle::Flex, + render: Arc::new(move |bcx| block.render_block(editor.clone(), bcx)), + priority: 1, + }) + }); let block_ids = this.editor.update(cx, |editor, cx| { editor.display_map.update(cx, |display_map, cx| { @@ -601,7 +679,9 @@ impl ProjectDiagnosticsEditor { #[cfg(test)] { - for (block_id, block) in block_ids.iter().zip(cloned_blocks.iter()) { + for (block_id, block) in + block_ids.iter().zip(cloned_blocks.into_iter().flatten()) + { let markdown = block.markdown.clone(); editor::test::set_block_content_for_tests( &this.editor, @@ -626,6 +706,7 @@ impl ProjectDiagnosticsEditor { fn update_diagnostic_summary(&mut self, cx: 
&mut Context) { self.summary = self.project.read(cx).diagnostic_summary(false, cx); + cx.emit(EditorEvent::TitleChanged); } } @@ -843,13 +924,6 @@ impl DiagnosticsToolbarEditor for WeakEntity { .unwrap_or(false) } - fn has_stale_excerpts(&self, cx: &App) -> bool { - self.read_with(cx, |project_diagnostics_editor, _cx| { - !project_diagnostics_editor.paths_to_update.is_empty() - }) - .unwrap_or(false) - } - fn is_updating(&self, cx: &App) -> bool { self.read_with(cx, |project_diagnostics_editor, cx| { project_diagnostics_editor.update_excerpts_task.is_some() @@ -1010,12 +1084,6 @@ async fn heuristic_syntactic_expand( return; } } - - log::info!( - "Expanding to ancestor started on {} node\ - exceeding row limit of {max_row_count}.", - node.grammar_name() - ); *ancestor_range = Some(None); } }) diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 824d4db6a58c06db5df4c04ac79ee1e509d55d4d..ad7c675fa82aaa95379a9e06dfc2561880d5da7f 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -119,7 +119,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { let editor = diagnostics.update(cx, |diagnostics, _| diagnostics.editor.clone()); diagnostics - .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx) + .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx) .await; pretty_assertions::assert_eq!( @@ -190,7 +190,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { }); diagnostics - .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx) + .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx) .await; pretty_assertions::assert_eq!( @@ -277,7 +277,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { }); diagnostics - .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx) + .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + 
Duration::from_millis(10), cx) .await; pretty_assertions::assert_eq!( @@ -391,7 +391,7 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) { // Only the first language server's diagnostics are shown. cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.executor().run_until_parked(); editor.update_in(cx, |editor, window, cx| { editor.fold_ranges(vec![Point::new(0, 0)..Point::new(3, 0)], false, window, cx); @@ -490,7 +490,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { // Only the first language server's diagnostics are shown. cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.executor().run_until_parked(); pretty_assertions::assert_eq!( @@ -530,7 +530,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { // Both language server's diagnostics are shown. cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.executor().run_until_parked(); pretty_assertions::assert_eq!( @@ -587,7 +587,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { // Only the first language server's diagnostics are updated. cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.executor().run_until_parked(); pretty_assertions::assert_eq!( @@ -629,7 +629,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { // Both language servers' diagnostics are updated. 
cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.executor().run_until_parked(); pretty_assertions::assert_eq!( @@ -760,7 +760,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng .unwrap() }); cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.run_until_parked(); } @@ -769,7 +769,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng log::info!("updating mutated diagnostics view"); mutated_diagnostics.update_in(cx, |diagnostics, window, cx| { - diagnostics.update_stale_excerpts(window, cx) + diagnostics.update_stale_excerpts(RetainExcerpts::No, window, cx) }); log::info!("constructing reference diagnostics view"); @@ -777,7 +777,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng ProjectDiagnosticsEditor::new(true, project.clone(), workspace.downgrade(), window, cx) }); cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.run_until_parked(); let mutated_excerpts = @@ -789,7 +789,12 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng // The mutated view may contain more than the reference view as // we don't currently shrink excerpts when diagnostics were removed. 
- let mut ref_iter = reference_excerpts.lines().filter(|line| *line != "§ -----"); + let mut ref_iter = reference_excerpts.lines().filter(|line| { + // ignore $ ---- and $ .rs + !line.starts_with('§') + || line.starts_with("§ diagnostic") + || line.starts_with("§ related info") + }); let mut next_ref_line = ref_iter.next(); let mut skipped_block = false; @@ -797,7 +802,12 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng if let Some(ref_line) = next_ref_line { if mut_line == ref_line { next_ref_line = ref_iter.next(); - } else if mut_line.contains('§') && mut_line != "§ -----" { + } else if mut_line.contains('§') + // ignore $ ---- and $ .rs + && (!mut_line.starts_with('§') + || mut_line.starts_with("§ diagnostic") + || mut_line.starts_with("§ related info")) + { skipped_block = true; } } @@ -877,7 +887,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S vec![Inlay::edit_prediction( post_inc(&mut next_inlay_id), snapshot.buffer_snapshot().anchor_before(position), - Rope::from_iter_small(["Test inlay ", "next_inlay_id"]), + Rope::from_iter(["Test inlay ", "next_inlay_id"]), )], cx, ); @@ -949,7 +959,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S .unwrap() }); cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.run_until_parked(); } @@ -958,11 +968,11 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S log::info!("updating mutated diagnostics view"); mutated_diagnostics.update_in(cx, |diagnostics, window, cx| { - diagnostics.update_stale_excerpts(window, cx) + diagnostics.update_stale_excerpts(RetainExcerpts::No, window, cx) }); cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); cx.run_until_parked(); } @@ 
-1427,7 +1437,7 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) { let editor = diagnostics.update(cx, |diagnostics, _| diagnostics.editor.clone()); diagnostics - .next_notification(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10), cx) + .next_notification(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10), cx) .await; // Verify that the diagnostic codes are displayed correctly @@ -1704,7 +1714,7 @@ async fn test_buffer_diagnostics(cx: &mut TestAppContext) { // wait a little bit to ensure that the buffer diagnostic's editor content // is rendered. cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); pretty_assertions::assert_eq!( editor_content_with_blocks(&editor, cx), @@ -1837,7 +1847,7 @@ async fn test_buffer_diagnostics_without_warnings(cx: &mut TestAppContext) { // wait a little bit to ensure that the buffer diagnostic's editor content // is rendered. cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); pretty_assertions::assert_eq!( editor_content_with_blocks(&editor, cx), @@ -1971,7 +1981,7 @@ async fn test_buffer_diagnostics_multiple_servers(cx: &mut TestAppContext) { // wait a little bit to ensure that the buffer diagnostic's editor content // is rendered. 
cx.executor() - .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10)); + .advance_clock(DIAGNOSTICS_UPDATE_DEBOUNCE + Duration::from_millis(10)); pretty_assertions::assert_eq!( editor_content_with_blocks(&editor, cx), @@ -2070,7 +2080,7 @@ fn random_lsp_diagnostic( const ERROR_MARGIN: usize = 10; let file_content = fs.read_file_sync(path).unwrap(); - let file_text = Rope::from_str_small(String::from_utf8_lossy(&file_content).as_ref()); + let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref()); let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN)); let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN)); diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index b55fa5783dc96965a7d1ce7f52c5e4336b674ed2..2ba64d39dfd63fc246bf3dedf5974909c0d67a6f 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -16,9 +16,6 @@ pub(crate) trait DiagnosticsToolbarEditor: Send + Sync { /// Toggles whether warning diagnostics should be displayed by the /// diagnostics editor. fn toggle_warnings(&self, window: &mut Window, cx: &mut App); - /// Indicates whether any of the excerpts displayed by the diagnostics - /// editor are stale. - fn has_stale_excerpts(&self, cx: &App) -> bool; /// Indicates whether the diagnostics editor is currently updating the /// diagnostics. 
fn is_updating(&self, cx: &App) -> bool; @@ -37,14 +34,12 @@ pub(crate) trait DiagnosticsToolbarEditor: Send + Sync { impl Render for ToolbarControls { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let mut has_stale_excerpts = false; let mut include_warnings = false; let mut is_updating = false; match &self.editor { Some(editor) => { include_warnings = editor.include_warnings(cx); - has_stale_excerpts = editor.has_stale_excerpts(cx); is_updating = editor.is_updating(cx); } None => {} @@ -86,7 +81,6 @@ impl Render for ToolbarControls { IconButton::new("refresh-diagnostics", IconName::ArrowCircle) .icon_color(Color::Info) .shape(IconButtonShape::Square) - .disabled(!has_stale_excerpts) .tooltip(Tooltip::for_action_title( "Refresh diagnostics", &ToggleDiagnosticsRefresh, diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 594c290730d5c734430e747ac6d09d6cbbbd4d0e..70c861ab1112630c2e3293cb54a4e96c6754b3bd 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -13,7 +13,7 @@ use gpui::{ }; use indoc::indoc; use language::{ - EditPredictionsMode, File, Language, Rope, + EditPredictionsMode, File, Language, language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings}, }; use project::DisableAiSettings; @@ -1056,11 +1056,8 @@ async fn open_disabled_globs_setting_in_editor( ) -> Result<()> { let settings_editor = workspace .update_in(cx, |_, window, cx| { - create_and_open_local_file(paths::settings_file(), window, cx, |cx| { - Rope::from_str( - settings::initial_user_settings_content().as_ref(), - cx.background_executor(), - ) + create_and_open_local_file(paths::settings_file(), window, cx, || { + settings::initial_user_settings_content().as_ref().into() }) })? .await? 
diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 359c985ee9208a1a83e3458635df883c2cf991a8..b7f3d57870a9504b7e6f9f736a0951b9b4b733e5 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -28,10 +28,12 @@ use std::{ rc::Rc, }; use task::ResolvedTask; -use ui::{Color, IntoElement, ListItem, Pixels, Popover, Styled, prelude::*}; +use ui::{ + Color, IntoElement, ListItem, Pixels, Popover, ScrollAxes, Scrollbars, Styled, WithScrollbar, + prelude::*, +}; use util::ResultExt; -use crate::CodeActionSource; use crate::hover_popover::{hover_markdown_style, open_markdown_url}; use crate::{ CodeActionProvider, CompletionId, CompletionItemKind, CompletionProvider, DisplayRow, Editor, @@ -39,7 +41,8 @@ use crate::{ actions::{ConfirmCodeAction, ConfirmCompletion}, split_words, styled_runs_for_code_label, }; -use settings::SnippetSortOrder; +use crate::{CodeActionSource, EditorSettings}; +use settings::{Settings, SnippetSortOrder}; pub const MENU_GAP: Pixels = px(4.); pub const MENU_ASIDE_X_PADDING: Pixels = px(16.); @@ -261,6 +264,20 @@ impl Drop for CompletionsMenu { } } +struct CompletionMenuScrollBarSetting; + +impl ui::scrollbars::GlobalSetting for CompletionMenuScrollBarSetting { + fn get_value(_cx: &App) -> &Self { + &Self + } +} + +impl ui::scrollbars::ScrollbarVisibility for CompletionMenuScrollBarSetting { + fn visibility(&self, cx: &App) -> ui::scrollbars::ShowScrollbar { + EditorSettings::get_global(cx).completion_menu_scrollbar + } +} + impl CompletionsMenu { pub fn new( id: CompletionId, @@ -898,7 +915,17 @@ impl CompletionsMenu { } }); - Popover::new().child(list).into_any_element() + Popover::new() + .child( + div().child(list).custom_scrollbars( + Scrollbars::for_settings::() + .show_along(ScrollAxes::Vertical) + .tracked_scroll_handle(self.scroll_handle.clone()), + window, + cx, + ), + ) + .into_any_element() } fn render_aside( diff --git 
a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index a269d22d71a95eef1ca1485437863091e3505439..7a225d6019edf8f09b1758d62e8181917649cc2b 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1569,7 +1569,6 @@ pub mod tests { use lsp::LanguageServerId; use project::Project; use rand::{Rng, prelude::*}; - use rope::Rope; use settings::{SettingsContent, SettingsStore}; use smol::stream::StreamExt; use std::{env, sync::Arc}; @@ -2075,7 +2074,7 @@ pub mod tests { vec![Inlay::edit_prediction( 0, buffer_snapshot.anchor_after(0), - Rope::from_str_small("\n"), + "\n", )], cx, ); diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 3c7cedb6574d02bcf6b06075b8db79cc3a6080db..486676f1120bc2e9d85effd4c328a2b7a547e06b 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -700,20 +700,16 @@ impl InlayMap { .collect::(); let next_inlay = if i % 2 == 0 { - use rope::Rope; - Inlay::mock_hint( post_inc(next_inlay_id), snapshot.buffer.anchor_at(position, bias), - Rope::from_str_small(&text), + &text, ) } else { - use rope::Rope; - Inlay::edit_prediction( post_inc(next_inlay_id), snapshot.buffer.anchor_at(position, bias), - Rope::from_str_small(&text), + &text, ) }; let inlay_id = next_inlay.id; @@ -1305,7 +1301,7 @@ mod tests { vec![Inlay::mock_hint( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_after(3), - Rope::from_str_small("|123|"), + "|123|", )], ); assert_eq!(inlay_snapshot.text(), "abc|123|defghi"); @@ -1382,12 +1378,12 @@ mod tests { Inlay::mock_hint( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(3), - Rope::from_str_small("|123|"), + "|123|", ), Inlay::edit_prediction( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_after(3), - Rope::from_str_small("|456|"), + "|456|", ), ], ); @@ -1597,17 +1593,17 @@ mod tests { Inlay::mock_hint( post_inc(&mut 
next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(0), - Rope::from_str_small("|123|\n"), + "|123|\n", ), Inlay::mock_hint( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(4), - Rope::from_str_small("|456|"), + "|456|", ), Inlay::edit_prediction( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(7), - Rope::from_str_small("\n|567|\n"), + "\n|567|\n", ), ], ); @@ -1681,14 +1677,9 @@ mod tests { (offset, inlay.clone()) }) .collect::>(); - let mut expected_text = - Rope::from_str(&buffer_snapshot.text(), cx.background_executor()); + let mut expected_text = Rope::from(&buffer_snapshot.text()); for (offset, inlay) in inlays.iter().rev() { - expected_text.replace( - *offset..*offset, - &inlay.text().to_string(), - cx.background_executor(), - ); + expected_text.replace(*offset..*offset, &inlay.text().to_string()); } assert_eq!(inlay_snapshot.text(), expected_text.to_string()); @@ -2076,7 +2067,7 @@ mod tests { let inlay = Inlay { id: InlayId::Hint(0), position, - content: InlayContent::Text(text::Rope::from_str(inlay_text, cx.background_executor())), + content: InlayContent::Text(text::Rope::from(inlay_text)), }; let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]); @@ -2190,10 +2181,7 @@ mod tests { let inlay = Inlay { id: InlayId::Hint(0), position, - content: InlayContent::Text(text::Rope::from_str( - test_case.inlay_text, - cx.background_executor(), - )), + content: InlayContent::Text(text::Rope::from(test_case.inlay_text)), }; let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]); diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 084ced82b3aa311f90f905077e2d18dd831e0bd6..7a63723f53a49483eaa728373a5ae8530aa6f4d6 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1042,7 +1042,7 @@ mod tests { let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size); let tabs_snapshot = 
tab_map.set_max_expansion_column(32); - let text = text::Rope::from_str(tabs_snapshot.text().as_str(), cx.background_executor()); + let text = text::Rope::from(tabs_snapshot.text().as_str()); log::info!( "TabMap text (tab size: {}): {:?}", tab_size, diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 1f50ff28daff51e9e16da683053104ea4800977b..7371eb678538dbc12abe43bde4073ffd9d2bdb21 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -863,7 +863,7 @@ impl WrapSnapshot { } } - let text = language::Rope::from_str_small(self.text().as_str()); + let text = language::Rope::from(self.text().as_str()); let mut input_buffer_rows = self.tab_snapshot.rows(0); let mut expected_buffer_rows = Vec::new(); let mut prev_tab_row = 0; @@ -1413,10 +1413,9 @@ mod tests { } } - let mut initial_text = - Rope::from_str(initial_snapshot.text().as_str(), cx.background_executor()); + let mut initial_text = Rope::from(initial_snapshot.text().as_str()); for (snapshot, patch) in edits { - let snapshot_text = Rope::from_str(snapshot.text().as_str(), cx.background_executor()); + let snapshot_text = Rope::from(snapshot.text().as_str()); for edit in &patch { let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0)); let old_end = initial_text.point_to_offset(cmp::min( @@ -1432,7 +1431,7 @@ mod tests { .chunks_in_range(new_start..new_end) .collect::(); - initial_text.replace(old_start..old_end, &new_text, cx.background_executor()); + initial_text.replace(old_start..old_end, &new_text); } assert_eq!(initial_text.to_string(), snapshot_text.to_string()); } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ed6b8ec2eca4dcb558bc832ac56b92af8791712c..50102a685f93abac91341539ef51445cb80c6403 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -452,6 +452,20 @@ pub enum SelectMode { All, } +#[derive(Copy, Clone, Default, PartialEq, 
Eq, Debug)] +pub enum SizingBehavior { + /// The editor will layout itself using `size_full` and will include the vertical + /// scroll margin as requested by user settings. + #[default] + Default, + /// The editor will layout itself using `size_full`, but will not have any + /// vertical overscroll. + ExcludeOverscrollMargin, + /// The editor will request a vertical size according to its content and will be + /// layouted without a vertical scroll margin. + SizeByContent, +} + #[derive(Clone, PartialEq, Eq, Debug)] pub enum EditorMode { SingleLine, @@ -464,8 +478,8 @@ pub enum EditorMode { scale_ui_elements_with_buffer_font_size: bool, /// When set to `true`, the editor will render a background for the active line. show_active_line_background: bool, - /// When set to `true`, the editor's height will be determined by its content. - sized_by_content: bool, + /// Determines the sizing behavior for this editor + sizing_behavior: SizingBehavior, }, Minimap { parent: WeakEntity, @@ -477,7 +491,7 @@ impl EditorMode { Self::Full { scale_ui_elements_with_buffer_font_size: true, show_active_line_background: true, - sized_by_content: false, + sizing_behavior: SizingBehavior::Default, } } @@ -1832,9 +1846,15 @@ impl Editor { project::Event::RefreshCodeLens => { // we always query lens with actions, without storing them, always refreshing them } - project::Event::RefreshInlayHints(server_id) => { + project::Event::RefreshInlayHints { + server_id, + request_id, + } => { editor.refresh_inlay_hints( - InlayHintRefreshReason::RefreshRequested(*server_id), + InlayHintRefreshReason::RefreshRequested { + server_id: *server_id, + request_id: *request_id, + }, cx, ); } @@ -7852,7 +7872,7 @@ impl Editor { let inlay = Inlay::edit_prediction( post_inc(&mut self.next_inlay_id), range.start, - Rope::from_str_small(new_text.as_str()), + new_text.as_str(), ); inlay_ids.push(inlay.id); inlays.push(inlay); diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs 
index dc67ab3ed6c8cfdbe88809e32d615789c01eef60..77c9558eaf4ea49df981b8eb32ee075d069da08f 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -55,6 +55,7 @@ pub struct EditorSettings { pub drag_and_drop_selection: DragAndDropSelection, pub lsp_document_colors: DocumentColorsRenderMode, pub minimum_contrast_for_highlights: f32, + pub completion_menu_scrollbar: ShowScrollbar, } #[derive(Debug, Clone)] pub struct Jupyter { @@ -159,6 +160,7 @@ pub struct SearchSettings { pub case_sensitive: bool, pub include_ignored: bool, pub regex: bool, + pub center_on_match: bool, } impl EditorSettings { @@ -249,6 +251,7 @@ impl Settings for EditorSettings { case_sensitive: search.case_sensitive.unwrap(), include_ignored: search.include_ignored.unwrap(), regex: search.regex.unwrap(), + center_on_match: search.center_on_match.unwrap(), }, auto_signature_help: editor.auto_signature_help.unwrap(), show_signature_help_after_edits: editor.show_signature_help_after_edits.unwrap(), @@ -266,6 +269,7 @@ impl Settings for EditorSettings { }, lsp_document_colors: editor.lsp_document_colors.unwrap(), minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0, + completion_menu_scrollbar: editor.completion_menu_scrollbar.map(Into::into).unwrap(), } } } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 06fbd9d3381f70955049ddde1c7a395945d67c66..903ca3b9c1d947094f3b79419ae00c499ec9fd0c 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -14217,7 +14217,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte EditorMode::Full { scale_ui_elements_with_buffer_font_size: false, show_active_line_background: false, - sized_by_content: false, + sizing_behavior: SizingBehavior::Default, }, multi_buffer.clone(), Some(project.clone()), diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 
7579441595c5d774e8d96439d0e03a21f3e624b8..761d71d43b24ddf54f8410f64c7357fd2fb6dca4 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8,8 +8,8 @@ use crate::{ HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, SelectPhase, - SelectedTextHighlight, Selection, SelectionDragState, SoftWrap, StickyHeaderExcerpt, ToPoint, - ToggleFold, ToggleFoldAll, + SelectedTextHighlight, Selection, SelectionDragState, SizingBehavior, SoftWrap, + StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP}, display_map::{ Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins, @@ -8441,11 +8441,11 @@ impl Element for EditorElement { window.request_layout(style, None, cx) } EditorMode::Full { - sized_by_content, .. + sizing_behavior, .. } => { let mut style = Style::default(); style.size.width = relative(1.).into(); - if sized_by_content { + if sizing_behavior == SizingBehavior::SizeByContent { let snapshot = editor.snapshot(window, cx); let line_height = self.style.text.line_height_in_pixels(window.rem_size()); @@ -8609,7 +8609,8 @@ impl Element for EditorElement { EditorMode::SingleLine | EditorMode::AutoHeight { .. } | EditorMode::Full { - sized_by_content: true, + sizing_behavior: SizingBehavior::ExcludeOverscrollMargin + | SizingBehavior::SizeByContent, .. 
} ) { diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 94bc67e684cd512942d42527d0adb802500ed49f..b36a57a7e47bf148fff4201ec87ac7c868658a04 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -1115,19 +1115,18 @@ mod tests { let fs = FakeFs::new(cx.executor()); let buffer_initial_text_len = rng.random_range(5..15); - let mut buffer_initial_text = Rope::from_str( + let mut buffer_initial_text = Rope::from( RandomCharIter::new(&mut rng) .take(buffer_initial_text_len) .collect::() .as_str(), - cx.background_executor(), ); let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5); newline_ixs.sort_unstable(); for newline_ix in newline_ixs.into_iter().rev() { let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right); - buffer_initial_text.replace(newline_ix..newline_ix, "\n", cx.background_executor()); + buffer_initial_text.replace(newline_ix..newline_ix, "\n"); } log::info!("initial buffer text: {:?}", buffer_initial_text); diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs index 1d411fef5617c00ef4d34b521f2321ac9baac934..f07bf0b315161f0ce9cdf3ef7e2f6db6d60abfb5 100644 --- a/crates/editor/src/inlays.rs +++ b/crates/editor/src/inlays.rs @@ -59,10 +59,10 @@ impl Inlay { pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self { let mut text = hint.text(); if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { - text.push_small(" "); + text.push(" "); } if hint.padding_left && text.chars_at(0).next() != Some(' ') { - text.push_front_small(" "); + text.push_front(" "); } Self { id, @@ -72,11 +72,11 @@ impl Inlay { } #[cfg(any(test, feature = "test-support"))] - pub fn mock_hint(id: usize, position: Anchor, text: Rope) -> Self { + pub fn mock_hint(id: usize, position: Anchor, text: impl Into) -> Self { Self { id: InlayId::Hint(id), position, - content: InlayContent::Text(text), + content: 
InlayContent::Text(text.into()), } } @@ -88,19 +88,19 @@ impl Inlay { } } - pub fn edit_prediction(id: usize, position: Anchor, text: Rope) -> Self { + pub fn edit_prediction>(id: usize, position: Anchor, text: T) -> Self { Self { id: InlayId::EditPrediction(id), position, - content: InlayContent::Text(text), + content: InlayContent::Text(text.into()), } } - pub fn debugger(id: usize, position: Anchor, text: Rope) -> Self { + pub fn debugger>(id: usize, position: Anchor, text: T) -> Self { Self { id: InlayId::DebuggerValue(id), position, - content: InlayContent::Text(text), + content: InlayContent::Text(text.into()), } } @@ -108,7 +108,7 @@ impl Inlay { static COLOR_TEXT: OnceLock = OnceLock::new(); match &self.content { InlayContent::Text(text) => text, - InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from_str_small("◼")), + InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("◼")), } } diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 74fe9988763b976f315624b8e1ab36110e2137ee..4fd673a8d2f049da94f90adfcdfbd8cd3263d12d 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -1,5 +1,4 @@ use std::{ - collections::hash_map, ops::{ControlFlow, Range}, time::Duration, }; @@ -49,8 +48,8 @@ pub struct LspInlayHintData { allowed_hint_kinds: HashSet>, invalidate_debounce: Option, append_debounce: Option, - hint_refresh_tasks: HashMap>, Vec>>>, - hint_chunk_fetched: HashMap>)>, + hint_refresh_tasks: HashMap>>, + hint_chunk_fetching: HashMap>)>, invalidate_hints_for_buffers: HashSet, pub added_hints: HashMap>, } @@ -63,7 +62,7 @@ impl LspInlayHintData { enabled_in_settings: settings.enabled, hint_refresh_tasks: HashMap::default(), added_hints: HashMap::default(), - hint_chunk_fetched: HashMap::default(), + hint_chunk_fetching: HashMap::default(), invalidate_hints_for_buffers: HashSet::default(), invalidate_debounce: debounce_value(settings.edit_debounce_ms), 
append_debounce: debounce_value(settings.scroll_debounce_ms), @@ -99,9 +98,8 @@ impl LspInlayHintData { pub fn clear(&mut self) { self.hint_refresh_tasks.clear(); - self.hint_chunk_fetched.clear(); + self.hint_chunk_fetching.clear(); self.added_hints.clear(); - self.invalidate_hints_for_buffers.clear(); } /// Checks inlay hint settings for enabled hint kinds and general enabled state. @@ -199,7 +197,7 @@ impl LspInlayHintData { ) { for buffer_id in removed_buffer_ids { self.hint_refresh_tasks.remove(buffer_id); - self.hint_chunk_fetched.remove(buffer_id); + self.hint_chunk_fetching.remove(buffer_id); } } } @@ -211,7 +209,10 @@ pub enum InlayHintRefreshReason { SettingsChange(InlayHintSettings), NewLinesShown, BufferEdited(BufferId), - RefreshRequested(LanguageServerId), + RefreshRequested { + server_id: LanguageServerId, + request_id: Option, + }, ExcerptsRemoved(Vec), } @@ -296,7 +297,7 @@ impl Editor { | InlayHintRefreshReason::Toggle(_) | InlayHintRefreshReason::SettingsChange(_) => true, InlayHintRefreshReason::NewLinesShown - | InlayHintRefreshReason::RefreshRequested(_) + | InlayHintRefreshReason::RefreshRequested { .. 
} | InlayHintRefreshReason::ExcerptsRemoved(_) => false, InlayHintRefreshReason::BufferEdited(buffer_id) => { let Some(affected_language) = self @@ -370,48 +371,45 @@ impl Editor { let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else { continue; }; - let fetched_tasks = inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default(); + + let (fetched_for_version, fetched_chunks) = inlay_hints + .hint_chunk_fetching + .entry(buffer_id) + .or_default(); if visible_excerpts .buffer_version - .changed_since(&fetched_tasks.0) + .changed_since(fetched_for_version) { - fetched_tasks.1.clear(); - fetched_tasks.0 = visible_excerpts.buffer_version.clone(); + *fetched_for_version = visible_excerpts.buffer_version.clone(); + fetched_chunks.clear(); inlay_hints.hint_refresh_tasks.remove(&buffer_id); } - let applicable_chunks = - semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx); + let known_chunks = if ignore_previous_fetches { + None + } else { + Some((fetched_for_version.clone(), fetched_chunks.clone())) + }; - match inlay_hints + let mut applicable_chunks = + semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx); + applicable_chunks.retain(|chunk| fetched_chunks.insert(chunk.clone())); + if applicable_chunks.is_empty() && !ignore_previous_fetches { + continue; + } + inlay_hints .hint_refresh_tasks .entry(buffer_id) .or_default() - .entry(applicable_chunks) - { - hash_map::Entry::Occupied(mut o) => { - if invalidate_cache.should_invalidate() || ignore_previous_fetches { - o.get_mut().push(spawn_editor_hints_refresh( - buffer_id, - invalidate_cache, - ignore_previous_fetches, - debounce, - visible_excerpts, - cx, - )); - } - } - hash_map::Entry::Vacant(v) => { - v.insert(Vec::new()).push(spawn_editor_hints_refresh( - buffer_id, - invalidate_cache, - ignore_previous_fetches, - debounce, - visible_excerpts, - cx, - )); - } - } + .push(spawn_editor_hints_refresh( + buffer_id, + invalidate_cache, + debounce, + 
visible_excerpts, + known_chunks, + applicable_chunks, + cx, + )); } } @@ -506,9 +504,13 @@ impl Editor { } InlayHintRefreshReason::NewLinesShown => InvalidationStrategy::None, InlayHintRefreshReason::BufferEdited(_) => InvalidationStrategy::BufferEdited, - InlayHintRefreshReason::RefreshRequested(server_id) => { - InvalidationStrategy::RefreshRequested(*server_id) - } + InlayHintRefreshReason::RefreshRequested { + server_id, + request_id, + } => InvalidationStrategy::RefreshRequested { + server_id: *server_id, + request_id: *request_id, + }, }; match &mut self.inlay_hints { @@ -718,44 +720,29 @@ impl Editor { fn inlay_hints_for_buffer( &mut self, invalidate_cache: InvalidationStrategy, - ignore_previous_fetches: bool, buffer_excerpts: VisibleExcerpts, + known_chunks: Option<(Global, HashSet>)>, cx: &mut Context, ) -> Option, anyhow::Result)>>> { let semantics_provider = self.semantics_provider()?; - let inlay_hints = self.inlay_hints.as_mut()?; - let buffer_id = buffer_excerpts.buffer.read(cx).remote_id(); let new_hint_tasks = semantics_provider .inlay_hints( invalidate_cache, buffer_excerpts.buffer, buffer_excerpts.ranges, - inlay_hints - .hint_chunk_fetched - .get(&buffer_id) - .filter(|_| !ignore_previous_fetches && !invalidate_cache.should_invalidate()) - .cloned(), + known_chunks, cx, ) .unwrap_or_default(); - let (known_version, known_chunks) = - inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default(); - if buffer_excerpts.buffer_version.changed_since(known_version) { - known_chunks.clear(); - *known_version = buffer_excerpts.buffer_version; - } - - let mut hint_tasks = Vec::new(); + let mut hint_tasks = None; for (row_range, new_hints_task) in new_hint_tasks { - let inserted = known_chunks.insert(row_range.clone()); - if inserted || ignore_previous_fetches || invalidate_cache.should_invalidate() { - hint_tasks.push(cx.spawn(async move |_, _| (row_range, new_hints_task.await))); - } + hint_tasks + .get_or_insert_with(Vec::new) + .push(cx.spawn(async 
move |_, _| (row_range, new_hints_task.await))); } - - Some(hint_tasks) + hint_tasks } fn apply_fetched_hints( @@ -793,20 +780,28 @@ impl Editor { let excerpts = self.buffer.read(cx).excerpt_ids(); let hints_to_insert = new_hints .into_iter() - .filter_map(|(chunk_range, hints_result)| match hints_result { - Ok(new_hints) => Some(new_hints), - Err(e) => { - log::error!( - "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}" - ); - if let Some((for_version, chunks_fetched)) = - inlay_hints.hint_chunk_fetched.get_mut(&buffer_id) - { - if for_version == &query_version { - chunks_fetched.remove(&chunk_range); + .filter_map(|(chunk_range, hints_result)| { + let chunks_fetched = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id); + match hints_result { + Ok(new_hints) => { + if new_hints.is_empty() { + if let Some((_, chunks_fetched)) = chunks_fetched { + chunks_fetched.remove(&chunk_range); + } } + Some(new_hints) + } + Err(e) => { + log::error!( + "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}" + ); + if let Some((for_version, chunks_fetched)) = chunks_fetched { + if for_version == &query_version { + chunks_fetched.remove(&chunk_range); + } + } + None } - None } }) .flat_map(|hints| hints.into_values()) @@ -856,9 +851,10 @@ struct VisibleExcerpts { fn spawn_editor_hints_refresh( buffer_id: BufferId, invalidate_cache: InvalidationStrategy, - ignore_previous_fetches: bool, debounce: Option, buffer_excerpts: VisibleExcerpts, + known_chunks: Option<(Global, HashSet>)>, + applicable_chunks: Vec>, cx: &mut Context<'_, Editor>, ) -> Task<()> { cx.spawn(async move |editor, cx| { @@ -869,12 +865,7 @@ fn spawn_editor_hints_refresh( let query_version = buffer_excerpts.buffer_version.clone(); let Some(hint_tasks) = editor .update(cx, |editor, cx| { - editor.inlay_hints_for_buffer( - invalidate_cache, - ignore_previous_fetches, - buffer_excerpts, - cx, - ) + editor.inlay_hints_for_buffer(invalidate_cache, buffer_excerpts, known_chunks, cx) }) 
.ok() else { @@ -882,6 +873,19 @@ fn spawn_editor_hints_refresh( }; let hint_tasks = hint_tasks.unwrap_or_default(); if hint_tasks.is_empty() { + editor + .update(cx, |editor, _| { + if let Some((_, hint_chunk_fetching)) = editor + .inlay_hints + .as_mut() + .and_then(|inlay_hints| inlay_hints.hint_chunk_fetching.get_mut(&buffer_id)) + { + for applicable_chunks in &applicable_chunks { + hint_chunk_fetching.remove(applicable_chunks); + } + } + }) + .ok(); return; } let new_hints = join_all(hint_tasks).await; @@ -1102,7 +1106,10 @@ pub mod tests { editor .update(cx, |editor, _window, cx| { editor.refresh_inlay_hints( - InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()), + InlayHintRefreshReason::RefreshRequested { + server_id: fake_server.server.server_id(), + request_id: Some(1), + }, cx, ); }) @@ -1958,15 +1965,8 @@ pub mod tests { async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) { init_test(cx, |settings| { settings.defaults.inlay_hints = Some(InlayHintSettingsContent { - show_value_hints: Some(true), enabled: Some(true), - edit_debounce_ms: Some(0), - scroll_debounce_ms: Some(0), - show_type_hints: Some(true), - show_parameter_hints: Some(true), - show_other_hints: Some(true), - show_background: Some(false), - toggle_on_modifiers_press: None, + ..InlayHintSettingsContent::default() }) }); @@ -2044,6 +2044,7 @@ pub mod tests { cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); cx.executor().run_until_parked(); let _fake_server = fake_servers.next().await.unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); let ranges = lsp_request_ranges @@ -2129,6 +2130,7 @@ pub mod tests { ); }) .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); editor.update(cx, |_, _, _| { let ranges = lsp_request_ranges @@ -2145,6 +2147,7 @@ pub mod tests { editor.handle_input("++++more text++++", window, 
cx); }) .unwrap(); + cx.executor().advance_clock(Duration::from_secs(1)); cx.executor().run_until_parked(); editor.update(cx, |editor, _window, cx| { let mut ranges = lsp_request_ranges.lock().drain(..).collect::>(); @@ -3887,7 +3890,10 @@ let c = 3;"# editor .update(cx, |editor, _, cx| { editor.refresh_inlay_hints( - InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()), + InlayHintRefreshReason::RefreshRequested { + server_id: fake_server.server.server_id(), + request_id: Some(1), + }, cx, ); }) @@ -4022,7 +4028,7 @@ let c = 3;"# let mut all_fetched_hints = Vec::new(); for buffer in editor.buffer.read(cx).all_buffers() { lsp_store.update(cx, |lsp_store, cx| { - let hints = &lsp_store.latest_lsp_data(&buffer, cx).inlay_hints(); + let hints = lsp_store.latest_lsp_data(&buffer, cx).inlay_hints(); all_cached_labels.extend(hints.all_cached_hints().into_iter().map(|hint| { let mut label = hint.text().to_string(); if hint.padding_left { diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index c064e3dbaf2873fef03d65dbd5794e6453599cec..726ac800d601f8d98055d4e577b3af4f9ed436e2 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1593,7 +1593,12 @@ impl SearchableItem for Editor { ) { self.unfold_ranges(&[matches[index].clone()], false, true, cx); let range = self.range_for_match(&matches[index], collapse); - self.change_selections(Default::default(), window, cx, |s| { + let autoscroll = if EditorSettings::get_global(cx).search.center_on_match { + Autoscroll::center() + } else { + Autoscroll::fit() + }; + self.change_selections(SelectionEffects::scroll(autoscroll), window, cx, |s| { s.select_ranges([range]); }) } diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index 0c4760f5684acf450b793a1deac54be983dcafd0..36353e8d42527cd59043ab3cf2b6105c534412d9 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -60,8 +60,10 @@ async fn lsp_task_context( buffer: &Entity, cx: 
&mut AsyncApp, ) -> Option { - let worktree_store = project - .read_with(cx, |project, _| project.worktree_store()) + let (worktree_store, environment) = project + .read_with(cx, |project, _| { + (project.worktree_store(), project.environment().clone()) + }) .ok()?; let worktree_abs_path = cx @@ -74,9 +76,9 @@ async fn lsp_task_context( }) .ok()?; - let project_env = project - .update(cx, |project, cx| { - project.buffer_environment(buffer, &worktree_store, cx) + let project_env = environment + .update(cx, |environment, cx| { + environment.buffer_environment(buffer, &worktree_store, cx) }) .ok()? .await; diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 1c15990b13ea99db269d21bcdcd591e50ebf4d69..418fa4fcb442b1de133972457497c0e592e77d15 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -878,7 +878,6 @@ mod tests { use gpui::{AppContext as _, font, px}; use language::Capability; use project::{Project, project_settings::DiagnosticSeverity}; - use rope::Rope; use settings::SettingsStore; use util::post_inc; @@ -1025,22 +1024,22 @@ mod tests { Inlay::edit_prediction( post_inc(&mut id), buffer_snapshot.anchor_before(offset), - Rope::from_str_small("test"), + "test", ), Inlay::edit_prediction( post_inc(&mut id), buffer_snapshot.anchor_after(offset), - Rope::from_str_small("test"), + "test", ), Inlay::mock_hint( post_inc(&mut id), buffer_snapshot.anchor_before(offset), - Rope::from_str_small("test"), + "test", ), Inlay::mock_hint( post_inc(&mut id), buffer_snapshot.anchor_after(offset), - Rope::from_str_small("test"), + "test", ), ] }) diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 3ef8ca09ab0af2714c353b1ad3c31556b0783c3d..8d74638e4c2aaf356ffabdeef717b9b105487ee3 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -193,7 +193,7 @@ impl Editor { if let Some(language) = language { for signature in &mut signature_help.signatures 
{ - let text = Rope::from_str_small(signature.label.as_ref()); + let text = Rope::from(signature.label.as_ref()); let highlights = language .highlight_text(&text, 0..signature.label.len()) .into_iter() diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 50b5169f7ad1196a3628c59d4fda6162126b2190..04b03352d83fd3323770a00a13c4377dc111535a 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -1468,7 +1468,6 @@ impl ExtensionStore { let extensions_dir = self.installed_dir.clone(); let index_path = self.index_path.clone(); let proxy = self.proxy.clone(); - let executor = cx.background_executor().clone(); cx.background_spawn(async move { let start_time = Instant::now(); let mut index = ExtensionIndex::default(); @@ -1502,14 +1501,10 @@ impl ExtensionStore { } if let Ok(index_json) = serde_json::to_string_pretty(&index) { - fs.save( - &index_path, - &Rope::from_str(&index_json, &executor), - Default::default(), - ) - .await - .context("failed to save extension index") - .log_err(); + fs.save(&index_path, &index_json.as_str().into(), Default::default()) + .await + .context("failed to save extension index") + .log_err(); } log::info!("rebuilt extension index in {:?}", start_time.elapsed()); @@ -1676,7 +1671,7 @@ impl ExtensionStore { let manifest_toml = toml::to_string(&loaded_extension.manifest)?; fs.save( &tmp_dir.join(EXTENSION_TOML), - &Rope::from_str_small(&manifest_toml), + &Rope::from(manifest_toml), language::LineEnding::Unix, ) .await?; diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 539f2d8864134effdf0a3edcdefa4ca213b7eff3..3a7e1a80dd348d97a54f1dce21794760a2399740 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -805,25 +805,22 @@ impl ExtensionsPage { ) .child( h_flex() - .gap_2() + .gap_1() .justify_between() .child( - h_flex() - .gap_1() - 
.child( - Icon::new(IconName::Person) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child( - Label::new(extension.manifest.authors.join(", ")) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate(), - ), + Icon::new(IconName::Person) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(extension.manifest.authors.join(", ")) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate(), ) .child( h_flex() + .ml_auto() .gap_1() .child( IconButton::new( diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 694ef1eaceb720c3b63d4ca9d243ab73e9442970..f29c0e6cd20f423dd9073abced0182f272b588c9 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -711,7 +711,9 @@ impl PickerDelegate for OpenPathDelegate { match &self.directory_state { DirectoryState::List { parent_path, .. } => { - let (label, indices) = if *parent_path == self.prompt_root { + let (label, indices) = if is_current_dir_candidate { + ("open this directory".to_string(), vec![]) + } else if *parent_path == self.prompt_root { match_positions.iter_mut().for_each(|position| { *position += self.prompt_root.len(); }); @@ -719,8 +721,6 @@ impl PickerDelegate for OpenPathDelegate { format!("{}{}", self.prompt_root, candidate.path.string), match_positions, ) - } else if is_current_dir_candidate { - ("open this directory".to_string(), vec![]) } else { (candidate.path.string, match_positions) }; diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index c794303ef71232d5a162b51ec8db7d472328b767..0202b2134f4fd0d3f983b2c67e97414a44457143 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -377,7 +377,7 @@ impl Fs for RealFs { #[cfg(windows)] if smol::fs::metadata(&target).await?.is_dir() { - let status = smol::process::Command::new("cmd") + let status = new_smol_command("cmd") .args(["/C", "mklink", "/J"]) .args([path, target.as_path()]) .status() diff --git 
a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index 2b3e8f235ff6e5f351c1875107443f51838c6da9..851556151e285975cb1eb7d3d33244d7e11b5663 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -23,6 +23,7 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true url.workspace = true +urlencoding.workspace = true util.workspace = true [dev-dependencies] diff --git a/crates/git_hosting_providers/src/providers/gitee.rs b/crates/git_hosting_providers/src/providers/gitee.rs index e2bcb6668240fa43120555f9b3c11a10dd1418d7..120a360cb19615e11e0ea4829a6fcd68665e4fcc 100644 --- a/crates/git_hosting_providers/src/providers/gitee.rs +++ b/crates/git_hosting_providers/src/providers/gitee.rs @@ -1,5 +1,11 @@ -use std::str::FromStr; - +use std::{str::FromStr, sync::Arc}; + +use anyhow::{Context as _, Result, bail}; +use async_trait::async_trait; +use futures::AsyncReadExt; +use gpui::SharedString; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; +use serde::Deserialize; use url::Url; use git::{ @@ -9,6 +15,55 @@ use git::{ pub struct Gitee; +#[derive(Debug, Deserialize)] +struct CommitDetails { + author: Option, +} + +#[derive(Debug, Deserialize)] +struct Author { + avatar_url: String, +} + +impl Gitee { + async fn fetch_gitee_commit_author( + &self, + repo_owner: &str, + repo: &str, + commit: &str, + client: &Arc, + ) -> Result> { + let url = format!("https://gitee.com/api/v5/repos/{repo_owner}/{repo}/commits/{commit}"); + + let request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) 
+ .await + .with_context(|| format!("error fetching Gitee commit details at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + serde_json::from_str::(body_str) + .map(|commit| commit.author) + .context("failed to deserialize Gitee commit details") + } +} + +#[async_trait] impl GitHostingProvider for Gitee { fn name(&self) -> String { "Gitee".to_string() @@ -19,7 +74,7 @@ impl GitHostingProvider for Gitee { } fn supports_avatars(&self) -> bool { - false + true } fn format_line_number(&self, line: u32) -> String { @@ -80,6 +135,26 @@ impl GitHostingProvider for Gitee { ); permalink } + + async fn commit_author_avatar_url( + &self, + repo_owner: &str, + repo: &str, + commit: SharedString, + http_client: Arc, + ) -> Result> { + let commit = commit.to_string(); + let avatar_url = self + .fetch_gitee_commit_author(repo_owner, repo, &commit, &http_client) + .await? 
+ .map(|author| -> Result { + let mut url = Url::parse(&author.avatar_url)?; + url.set_query(Some("width=128")); + Ok(url) + }) + .transpose()?; + Ok(avatar_url) + } } #[cfg(test)] diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index d18af7cccae058a7b9746f7dfe86beef8d6fda94..af3bb17494a79056db0fd4c531f67b77a31e0954 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -1,6 +1,11 @@ -use std::str::FromStr; - -use anyhow::{Result, bail}; +use std::{str::FromStr, sync::Arc}; + +use anyhow::{Context as _, Result, bail}; +use async_trait::async_trait; +use futures::AsyncReadExt; +use gpui::SharedString; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; +use serde::Deserialize; use url::Url; use git::{ @@ -10,6 +15,16 @@ use git::{ use crate::get_host_from_git_remote_url; +#[derive(Debug, Deserialize)] +struct CommitDetails { + author_email: String, +} + +#[derive(Debug, Deserialize)] +struct AvatarInfo { + avatar_url: String, +} + #[derive(Debug)] pub struct Gitlab { name: String, @@ -46,8 +61,79 @@ impl Gitlab { Url::parse(&format!("https://{}", host))?, )) } + + async fn fetch_gitlab_commit_author( + &self, + repo_owner: &str, + repo: &str, + commit: &str, + client: &Arc, + ) -> Result> { + let Some(host) = self.base_url.host_str() else { + bail!("failed to get host from gitlab base url"); + }; + let project_path = format!("{}/{}", repo_owner, repo); + let project_path_encoded = urlencoding::encode(&project_path); + let url = format!( + "https://{host}/api/v4/projects/{project_path_encoded}/repository/commits/{commit}" + ); + + let request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) 
+ .await + .with_context(|| format!("error fetching GitLab commit details at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + let author_email = serde_json::from_str::(body_str) + .map(|commit| commit.author_email) + .context("failed to deserialize GitLab commit details")?; + + let avatar_info_url = format!("https://{host}/api/v4/avatar?email={author_email}"); + + let request = Request::get(&avatar_info_url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) + .await + .with_context(|| format!("error fetching GitLab avatar info at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + serde_json::from_str::>(body_str) + .context("failed to deserialize GitLab avatar info") + } } +#[async_trait] impl GitHostingProvider for Gitlab { fn name(&self) -> String { self.name.clone() @@ -58,7 +144,7 @@ impl GitHostingProvider for Gitlab { } fn supports_avatars(&self) -> bool { - false + true } fn format_line_number(&self, line: u32) -> String { @@ -122,6 +208,39 @@ impl GitHostingProvider for Gitlab { ); permalink } + + async fn commit_author_avatar_url( + &self, + repo_owner: &str, + repo: &str, + commit: SharedString, + http_client: Arc, + ) -> Result> { + let commit = commit.to_string(); + let avatar_url = self + .fetch_gitlab_commit_author(repo_owner, repo, &commit, &http_client) 
+ .await? + .map(|author| -> Result { + let mut url = Url::parse(&author.avatar_url)?; + if let Some(host) = url.host_str() { + let size_query = if host.contains("gravatar") || host.contains("libravatar") { + Some("s=128") + } else if self + .base_url + .host_str() + .is_some_and(|base_host| host.contains(base_host)) + { + Some("width=128") + } else { + None + }; + url.set_query(size_query); + } + Ok(url) + }) + .transpose()?; + Ok(avatar_url) + } } #[cfg(test)] @@ -134,8 +253,8 @@ mod tests { #[test] fn test_invalid_self_hosted_remote_url() { let remote_url = "https://gitlab.com/zed-industries/zed.git"; - let github = Gitlab::from_remote_url(remote_url); - assert!(github.is_err()); + let gitlab = Gitlab::from_remote_url(remote_url); + assert!(gitlab.is_err()); } #[test] diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index b302d551ddccd17a757b81452f0ed597dde88c57..0a0c4c18e1f528a9ebaad9a8d9862982632dd04f 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -170,10 +170,7 @@ impl CommitView { ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), LineEnding::default(), - Rope::from_str( - &format_commit(&commit, stash.is_some()), - cx.background_executor(), - ), + format_commit(&commit, stash.is_some()).into(), ); metadata_buffer_id = Some(buffer.remote_id()); Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite) @@ -339,7 +336,7 @@ async fn build_buffer( ) -> Result> { let line_ending = LineEnding::detect(&text); LineEnding::normalize(&mut text); - let text = Rope::from_str(&text, cx.background_executor()); + let text = Rope::from(text); let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?; let language = if let Some(language) = language { language_registry @@ -379,7 +376,7 @@ async fn build_buffer_diff( let base_buffer = cx .update(|cx| { Buffer::build_snapshot( - Rope::from_str(old_text.as_deref().unwrap_or(""), cx.background_executor()), + 
old_text.as_deref().unwrap_or("").into(), buffer.language().cloned(), Some(language_registry.clone()), cx, diff --git a/crates/git_ui/src/file_diff_view.rs b/crates/git_ui/src/file_diff_view.rs index a99b7f8e2428ca0bcf726f2ac7661df171bef34a..387bda808708cf38beded2fe17edd92466885672 100644 --- a/crates/git_ui/src/file_diff_view.rs +++ b/crates/git_ui/src/file_diff_view.rs @@ -359,7 +359,6 @@ mod tests { use super::*; use editor::test::editor_test_context::assert_state_with_diff; use gpui::TestAppContext; - use language::Rope; use project::{FakeFs, Fs, Project}; use settings::SettingsStore; use std::path::PathBuf; @@ -430,7 +429,7 @@ mod tests { // Modify the new file on disk fs.save( path!("/test/new_file.txt").as_ref(), - &Rope::from_str_small(&unindent( + &unindent( " new line 1 line 2 @@ -438,7 +437,8 @@ mod tests { line 4 new line 5 ", - )), + ) + .into(), Default::default(), ) .await @@ -465,14 +465,15 @@ mod tests { // Modify the old file on disk fs.save( path!("/test/old_file.txt").as_ref(), - &Rope::from_str_small(&unindent( + &unindent( " new line 1 line 2 old line 3 line 4 ", - )), + ) + .into(), Default::default(), ) .await diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index 260a07cc3ba6805b91207e000b02d23e57f2be4e..381541d4b11377b988dd30e03155855c7ba25aed 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -260,19 +260,6 @@ impl AsyncApp { } } -impl sum_tree::BackgroundSpawn for BackgroundExecutor { - type Task - = Task - where - R: Send + Sync; - fn background_spawn(&self, future: impl Future + Send + 'static) -> Self::Task - where - R: Send + Sync + 'static, - { - self.spawn(future) - } -} - /// A cloneable, owned handle to the application context, /// composed with the window associated with the current task. 
#[derive(Clone, Deref, DerefMut)] diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 40397f9d9d359d2ac914b6006b0ae883fa151fc2..d974823396d9f0d546a6b035f47b569145eb021b 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -393,11 +393,6 @@ impl TestAppContext { } } - /// Returns the background executor for this context. - pub fn background_executor(&self) -> &BackgroundExecutor { - &self.background_executor - } - /// Wait until there are no more pending tasks. pub fn run_until_parked(&mut self) { self.background_executor.run_until_parked() diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 739fa1c5e25eb62378fbe57eea1b62c833780d9d..93082563c02f4168b1d73e2929a6bf9dbd153237 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -251,8 +251,6 @@ impl Element for UniformList { None } - // self.max_found_width = 0.0 - // fn request_layout( &mut self, global_id: Option<&GlobalElementId>, diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index c1e5c066b43604f5e7d47588ef3c2ebc33cd524e..b6d3a407f5dbbab07e0273e668e9b5710824edda 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -342,7 +342,7 @@ impl BackgroundExecutor { /// for all of them to complete before returning. 
pub async fn scoped<'scope, F>(&self, scheduler: F) where - F: for<'a> FnOnce(&'a mut Scope<'scope>), + F: FnOnce(&mut Scope<'scope>), { let mut scope = Scope::new(self.clone()); (scheduler)(&mut scope); diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index 6759a573e6c04ecf943f6cc17616743bcab4ef28..8d3e6305f6b4bb60f6c282280bafa7f76f59eecb 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -80,27 +80,15 @@ impl PlatformDispatcher for WindowsDispatcher { } fn dispatch_on_main_thread(&self, runnable: Runnable) { - let was_empty = self.main_sender.is_empty(); match self.main_sender.send(runnable) { Ok(_) => unsafe { - // Only send a `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` to the - // queue if we have no runnables queued up yet, otherwise we - // risk filling the message queue with gpui messages causing us - // to starve the message loop of system messages, resulting in a - // process hang. - // - // When the message loop receives a - // `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` message we drain the - // runnable queue entirely. - if was_empty { - PostMessageW( - Some(self.platform_window_handle.as_raw()), - WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, - WPARAM(self.validation_number), - LPARAM(0), - ) - .log_err(); - } + PostMessageW( + Some(self.platform_window_handle.as_raw()), + WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, + WPARAM(self.validation_number), + LPARAM(0), + ) + .log_err(); }, Err(runnable) => { // NOTE: Runnable may wrap a Future that is !Send. 
diff --git a/crates/gpui/src/platform/windows/keyboard.rs b/crates/gpui/src/platform/windows/keyboard.rs index 7a8478d5910d35fb98a913ed799f2fa1447e9a65..cd0c1da10561d7bfafafbc70989344826e8e5b16 100644 --- a/crates/gpui/src/platform/windows/keyboard.rs +++ b/crates/gpui/src/platform/windows/keyboard.rs @@ -9,7 +9,6 @@ use windows::Win32::UI::{ }, WindowsAndMessaging::KL_NAMELENGTH, }; -use windows_core::HSTRING; use crate::{ KeybindingKeystroke, Keystroke, Modifiers, PlatformKeyboardLayout, PlatformKeyboardMapper, @@ -93,14 +92,13 @@ impl PlatformKeyboardMapper for WindowsKeyboardMapper { impl WindowsKeyboardLayout { pub(crate) fn new() -> Result { - let mut buffer = [0u16; KL_NAMELENGTH as usize]; + let mut buffer = [0u16; KL_NAMELENGTH as usize]; // KL_NAMELENGTH includes the null terminator unsafe { GetKeyboardLayoutNameW(&mut buffer)? }; - let id = HSTRING::from_wide(&buffer).to_string(); + let id = String::from_utf16_lossy(&buffer[..buffer.len() - 1]); // Remove the null terminator let entry = windows_registry::LOCAL_MACHINE.open(format!( - "System\\CurrentControlSet\\Control\\Keyboard Layouts\\{}", - id + "System\\CurrentControlSet\\Control\\Keyboard Layouts\\{id}" ))?; - let name = entry.get_hstring("Layout Text")?.to_string(); + let name = entry.get_string("Layout Text")?; Ok(Self { id, name }) } @@ -135,6 +133,7 @@ impl WindowsKeyboardLayout { b"0405" | // Czech b"040E" | // Hungarian b"0424" | // Slovenian + b"041A" | // Croatian b"041B" | // Slovak b"0418" // Romanian ) diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 1442c482d89f0c46e45ccd280e678021e6ba63c7..a4da8c6ccdf04f453a368b902af8543625100436 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -53,6 +53,7 @@ pub enum IconName { Check, CheckDouble, ChevronDown, + ChevronDownUp, ChevronLeft, ChevronRight, ChevronUp, diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 
70e58de3d14403440a0cd291754e0a4593290d01..3d840de64d67f5bad7646339d66229ff47831028 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -11,7 +11,7 @@ mod ui_components; use anyhow::{Context as _, anyhow}; use collections::{HashMap, HashSet}; -use editor::{CompletionProvider, Editor, EditorEvent}; +use editor::{CompletionProvider, Editor, EditorEvent, EditorMode, SizingBehavior}; use fs::Fs; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ @@ -22,7 +22,7 @@ use gpui::{ ScrollWheelEvent, Stateful, StyledText, Subscription, Task, TextStyleRefinement, WeakEntity, actions, anchored, deferred, div, }; -use language::{Language, LanguageConfig, Rope, ToOffset as _}; +use language::{Language, LanguageConfig, ToOffset as _}; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{CompletionDisplayOptions, Project}; use settings::{ @@ -2119,7 +2119,7 @@ impl RenderOnce for SyntaxHighlightedText { let highlights = self .language - .highlight_text(&Rope::from_str_small(text.as_ref()), 0..text.len()); + .highlight_text(&text.as_ref().into(), 0..text.len()); let mut runs = Vec::with_capacity(highlights.len()); let mut offset = 0; @@ -2788,10 +2788,10 @@ impl ActionArgumentsEditor { let editor = cx.new_window_entity(|window, cx| { let multi_buffer = cx.new(|cx| editor::MultiBuffer::singleton(buffer, cx)); let mut editor = Editor::new( - editor::EditorMode::Full { + EditorMode::Full { scale_ui_elements_with_buffer_font_size: true, show_active_line_background: false, - sized_by_content: true, + sizing_behavior: SizingBehavior::SizeByContent, }, multi_buffer, project.upgrade(), diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d67434741032ae7f42dc5e95ec34a57b7c84ebb4..c72350f38561e7aea62b7d3402eaa24bbdb08044 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -24,8 +24,8 @@ use collections::HashMap; use fs::MTime; use 
futures::channel::oneshot; use gpui::{ - App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle, - SharedString, StyledText, Task, TaskLabel, TextStyle, + App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText, + Task, TaskLabel, TextStyle, }; use lsp::{LanguageServerId, NumberOrString}; @@ -832,7 +832,6 @@ impl Buffer { ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), base_text.into(), - &cx.background_executor(), ), None, Capability::ReadWrite, @@ -863,10 +862,9 @@ impl Buffer { replica_id: ReplicaId, capability: Capability, base_text: impl Into, - cx: &BackgroundExecutor, ) -> Self { Self::build( - TextBuffer::new(replica_id, remote_id, base_text.into(), cx), + TextBuffer::new(replica_id, remote_id, base_text.into()), None, capability, ) @@ -879,10 +877,9 @@ impl Buffer { capability: Capability, message: proto::BufferState, file: Option>, - cx: &BackgroundExecutor, ) -> Result { let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?; - let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx); + let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text); let mut this = Self::build(buffer, file, capability); this.text.set_line_ending(proto::deserialize_line_ending( rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?, @@ -1141,14 +1138,13 @@ impl Buffer { let old_snapshot = self.text.snapshot(); let mut branch_buffer = self.text.branch(); let mut syntax_snapshot = self.syntax_map.lock().snapshot(); - let executor = cx.background_executor().clone(); cx.background_spawn(async move { if !edits.is_empty() { if let Some(language) = language.clone() { syntax_snapshot.reparse(&old_snapshot, registry.clone(), language); } - branch_buffer.edit(edits.iter().cloned(), &executor); + branch_buffer.edit(edits.iter().cloned()); let snapshot = branch_buffer.snapshot(); 
syntax_snapshot.interpolate(&snapshot); @@ -2365,9 +2361,7 @@ impl Buffer { let autoindent_request = autoindent_mode .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode))); - let edit_operation = self - .text - .edit(edits.iter().cloned(), cx.background_executor()); + let edit_operation = self.text.edit(edits.iter().cloned()); let edit_id = edit_operation.timestamp(); if let Some((before_edit, mode)) = autoindent_request { @@ -2598,8 +2592,7 @@ impl Buffer { for operation in buffer_ops.iter() { self.send_operation(Operation::Buffer(operation.clone()), false, cx); } - self.text - .apply_ops(buffer_ops, Some(cx.background_executor())); + self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 6b6d85c3790123acd6e95dd1d196f6c5845f5ede..f824639ad762191f4168586551af51fb4e37c8dc 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -75,7 +75,6 @@ fn test_set_line_ending(cx: &mut TestAppContext) { Capability::ReadWrite, base.read(cx).to_proto(cx), None, - cx.background_executor(), ) .unwrap() }); @@ -256,18 +255,14 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) { .is_none() ); assert!( - cx.read(|cx| languages.language_for_file( - &file("the/script"), - Some(&Rope::from_str("nothing", cx.background_executor())), - cx - )) - .is_none() + cx.read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx)) + .is_none() ); assert_eq!( cx.read(|cx| languages.language_for_file( &file("the/script"), - Some(&Rope::from_str("#!/bin/env node", cx.background_executor())), + Some(&"#!/bin/env node".into()), cx )) .unwrap() @@ -411,7 +406,6 @@ fn test_edit_events(cx: &mut gpui::App) { ReplicaId::new(1), Capability::ReadWrite, "abcdef", - cx.background_executor(), ) }); let buffer1_ops = 
Arc::new(Mutex::new(Vec::new())); @@ -2787,14 +2781,8 @@ fn test_serialization(cx: &mut gpui::App) { .background_executor() .block(buffer1.read(cx).serialize_ops(None, cx)); let buffer2 = cx.new(|cx| { - let mut buffer = Buffer::from_proto( - ReplicaId::new(1), - Capability::ReadWrite, - state, - None, - cx.background_executor(), - ) - .unwrap(); + let mut buffer = + Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| proto::deserialize_operation(op).unwrap()), @@ -2818,7 +2806,6 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { Capability::ReadWrite, base.read(cx).to_proto(cx), None, - cx.background_executor(), ) .unwrap() }); @@ -3133,14 +3120,9 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { let ops = cx .background_executor() .block(base_buffer.read(cx).serialize_ops(None, cx)); - let mut buffer = Buffer::from_proto( - ReplicaId::new(i as u16), - Capability::ReadWrite, - state, - None, - cx.background_executor(), - ) - .unwrap(); + let mut buffer = + Buffer::from_proto(ReplicaId::new(i as u16), Capability::ReadWrite, state, None) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| proto::deserialize_operation(op).unwrap()), @@ -3269,7 +3251,6 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { Capability::ReadWrite, old_buffer_state, None, - cx.background_executor(), ) .unwrap(); new_buffer.apply_ops( @@ -3433,7 +3414,7 @@ fn test_contiguous_ranges() { } #[gpui::test(iterations = 500)] -fn test_trailing_whitespace_ranges(mut rng: StdRng, cx: &mut TestAppContext) { +fn test_trailing_whitespace_ranges(mut rng: StdRng) { // Generate a random multi-line string containing // some lines with trailing whitespace. 
let mut text = String::new(); @@ -3457,7 +3438,7 @@ fn test_trailing_whitespace_ranges(mut rng: StdRng, cx: &mut TestAppContext) { _ => {} } - let rope = Rope::from_str(text.as_str(), cx.background_executor()); + let rope = Rope::from(text.as_str()); let actual_ranges = trailing_whitespace_ranges(&rope); let expected_ranges = TRAILING_WHITESPACE_REGEX .find_iter(&text) diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index 99fd365b50f5c93b965b7193365b49b2bc636a2e..9c4eecad363de386cddc6e943e20e5762634d713 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -100,7 +100,6 @@ fn test_syntax_map_layers_for_range(cx: &mut App) { } "# .unindent(), - cx.background_executor(), ); let mut syntax_map = SyntaxMap::new(&buffer); @@ -148,7 +147,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) { // Replace a vec! macro invocation with a plain slice, removing a syntactic layer. let macro_name_range = range_for_text(&buffer, "vec!"); - buffer.edit([(macro_name_range, "&")], cx.background_executor()); + buffer.edit([(macro_name_range, "&")]); syntax_map.interpolate(&buffer); syntax_map.reparse(language.clone(), &buffer); @@ -200,7 +199,6 @@ fn test_dynamic_language_injection(cx: &mut App) { ``` "# .unindent(), - cx.background_executor(), ); let mut syntax_map = SyntaxMap::new(&buffer); @@ -220,10 +218,7 @@ fn test_dynamic_language_injection(cx: &mut App) { // Replace `rs` with a path to ending in `.rb` in code block. 
let macro_name_range = range_for_text(&buffer, "rs"); - buffer.edit( - [(macro_name_range, "foo/bar/baz.rb")], - cx.background_executor(), - ); + buffer.edit([(macro_name_range, "foo/bar/baz.rb")]); syntax_map.interpolate(&buffer); syntax_map.reparse(markdown.clone(), &buffer); syntax_map.reparse(markdown_inline.clone(), &buffer); @@ -240,7 +235,7 @@ fn test_dynamic_language_injection(cx: &mut App) { // Replace Ruby with a language that hasn't been loaded yet. let macro_name_range = range_for_text(&buffer, "foo/bar/baz.rb"); - buffer.edit([(macro_name_range, "html")], cx.background_executor()); + buffer.edit([(macro_name_range, "html")]); syntax_map.interpolate(&buffer); syntax_map.reparse(markdown.clone(), &buffer); syntax_map.reparse(markdown_inline.clone(), &buffer); @@ -816,12 +811,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) { .unindent(); let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - text, - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); @@ -869,7 +859,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) { .unindent(); log::info!("editing"); - buffer.edit_via_marked_text(&text, cx.background_executor()); + buffer.edit_via_marked_text(&text); syntax_map.interpolate(&buffer); syntax_map.reparse(language, &buffer); @@ -913,7 +903,7 @@ fn test_random_syntax_map_edits_rust_macros(rng: StdRng, cx: &mut App) { let language = Arc::new(rust_lang()); registry.add(language.clone()); - test_random_edits(text, registry, language, rng, cx); + test_random_edits(text, registry, language, rng); } #[gpui::test(iterations = 50)] @@ -942,7 +932,7 @@ fn test_random_syntax_map_edits_with_erb(rng: StdRng, cx: &mut App) { registry.add(Arc::new(ruby_lang())); 
registry.add(Arc::new(html_lang())); - test_random_edits(text, registry, language, rng, cx); + test_random_edits(text, registry, language, rng); } #[gpui::test(iterations = 50)] @@ -975,7 +965,7 @@ fn test_random_syntax_map_edits_with_heex(rng: StdRng, cx: &mut App) { registry.add(Arc::new(heex_lang())); registry.add(Arc::new(html_lang())); - test_random_edits(text, registry, language, rng, cx); + test_random_edits(text, registry, language, rng); } fn test_random_edits( @@ -983,18 +973,12 @@ fn test_random_edits( registry: Arc, language: Arc, mut rng: StdRng, - cx: &mut App, ) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - text, - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); @@ -1009,7 +993,7 @@ fn test_random_edits( let prev_buffer = buffer.snapshot(); let prev_syntax_map = syntax_map.snapshot(); - buffer.randomly_edit(&mut rng, 3, cx.background_executor()); + buffer.randomly_edit(&mut rng, 3); log::info!("text:\n{}", buffer.text()); syntax_map.interpolate(&buffer); @@ -1175,12 +1159,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf .now_or_never() .unwrap() .unwrap(); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "", - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); let mut mutated_syntax_map = SyntaxMap::new(&buffer); mutated_syntax_map.set_language_registry(registry.clone()); @@ -1189,7 +1168,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf for (i, marked_string) in steps.iter().enumerate() { let marked_string = marked_string.unindent(); log::info!("incremental parse {i}: 
{marked_string:?}"); - buffer.edit_via_marked_text(&marked_string, cx.background_executor()); + buffer.edit_via_marked_text(&marked_string); // Reparse the syntax map mutated_syntax_map.interpolate(&buffer); diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index cc9fb639f228ea7af42238296ae88c95ae439881..01b726748649e29b4fe69ce26df5564819894985 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -11,7 +11,7 @@ use futures::{Future, FutureExt, future::join_all}; use gpui::{App, AppContext, AsyncApp, Task}; use language::{ BinaryStatus, CodeLabel, DynLspInstaller, HighlightId, Language, LanguageName, LspAdapter, - LspAdapterDelegate, Rope, Toolchain, + LspAdapterDelegate, Toolchain, }; use lsp::{ CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName, @@ -403,10 +403,7 @@ fn labels_from_extension( let runs = if label.code.is_empty() { Vec::new() } else { - language.highlight_text( - &Rope::from_str_small(label.code.as_str()), - 0..label.code.len(), - ) + language.highlight_text(&label.code.as_str().into(), 0..label.code.len()) }; build_code_label(&label, &runs, language) }) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index bbf4cc3240f0f33ee73fed10d96edc36467e51f4..8e90cf821368c0c88781b2d10e82ad9eaa05989c 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -189,7 +189,7 @@ impl super::LspAdapter for CLspAdapter { Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => { let detail = completion.detail.as_ref().unwrap(); let text = format!("{} {}", detail, label); - let source = Rope::from_str_small(format!("struct S {{ {} }}", text).as_str()); + let source = Rope::from(format!("struct S {{ {} }}", text).as_str()); let runs = language.highlight_text(&source, 11..11 + text.len()); let filter_range = completion .filter_text @@ 
-206,8 +206,7 @@ impl super::LspAdapter for CLspAdapter { { let detail = completion.detail.as_ref().unwrap(); let text = format!("{} {}", detail, label); - let runs = - language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len()); + let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len()); let filter_range = completion .filter_text .as_deref() @@ -223,8 +222,7 @@ impl super::LspAdapter for CLspAdapter { { let detail = completion.detail.as_ref().unwrap(); let text = format!("{} {}", detail, label); - let runs = - language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len()); + let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len()); let filter_range = completion .filter_text .as_deref() @@ -328,7 +326,7 @@ impl super::LspAdapter for CLspAdapter { Some(CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), + language.highlight_text(&text.as_str().into(), display_range), )) } diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 55acc64f3e1b5592a55c551aa6c0b255cae3834a..6c75abf123af62b3f4ab43a6e94d3b040e2f010a 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -221,7 +221,7 @@ impl LspAdapter for GoLspAdapter { match completion.kind.zip(completion.detail.as_ref()) { Some((lsp::CompletionItemKind::MODULE, detail)) => { let text = format!("{label} {detail}"); - let source = Rope::from_str_small(format!("import {text}").as_str()); + let source = Rope::from(format!("import {text}").as_str()); let runs = language.highlight_text(&source, 7..7 + text[name_offset..].len()); let filter_range = completion .filter_text @@ -238,9 +238,8 @@ impl LspAdapter for GoLspAdapter { detail, )) => { let text = format!("{label} {detail}"); - let source = Rope::from_str_small( - format!("var {} {}", &text[name_offset..], detail).as_str(), - ); + let source = + 
Rope::from(format!("var {} {}", &text[name_offset..], detail).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 4..4 + text[name_offset..].len()), @@ -257,8 +256,7 @@ impl LspAdapter for GoLspAdapter { } Some((lsp::CompletionItemKind::STRUCT, _)) => { let text = format!("{label} struct {{}}"); - let source = - Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str()); + let source = Rope::from(format!("type {}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 5..5 + text[name_offset..].len()), @@ -275,8 +273,7 @@ impl LspAdapter for GoLspAdapter { } Some((lsp::CompletionItemKind::INTERFACE, _)) => { let text = format!("{label} interface {{}}"); - let source = - Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str()); + let source = Rope::from(format!("type {}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 5..5 + text[name_offset..].len()), @@ -293,9 +290,8 @@ impl LspAdapter for GoLspAdapter { } Some((lsp::CompletionItemKind::FIELD, detail)) => { let text = format!("{label} {detail}"); - let source = Rope::from_str_small( - format!("type T struct {{ {} }}", &text[name_offset..]).as_str(), - ); + let source = + Rope::from(format!("type T struct {{ {} }}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 16..16 + text[name_offset..].len()), @@ -313,9 +309,7 @@ impl LspAdapter for GoLspAdapter { Some((lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD, detail)) => { if let Some(signature) = detail.strip_prefix("func") { let text = format!("{label}{signature}"); - let source = Rope::from_str_small( - format!("func {} {{}}", &text[name_offset..]).as_str(), - ); + let source = Rope::from(format!("func {} {{}}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 5..5 + 
text[name_offset..].len()), @@ -391,7 +385,7 @@ impl LspAdapter for GoLspAdapter { Some(CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), + language.highlight_text(&text.as_str().into(), display_range), )) } diff --git a/crates/languages/src/gomod/highlights.scm b/crates/languages/src/gomod/highlights.scm index bfcb6fcabbda446fc4849427ce499b4f4d7102c9..03be1b5957160820033d93b35b39d4329b7890a6 100644 --- a/crates/languages/src/gomod/highlights.scm +++ b/crates/languages/src/gomod/highlights.scm @@ -7,6 +7,7 @@ "exclude" "retract" "module" + "ignore" ] @keyword "=>" @operator diff --git a/crates/languages/src/gomod/structure.scm b/crates/languages/src/gomod/structure.scm index 0df01ea2557c8bac518c8ebcd865500b67e9fb19..ce1bc9aa3ee0b1f77086103bad91825b5927005f 100644 --- a/crates/languages/src/gomod/structure.scm +++ b/crates/languages/src/gomod/structure.scm @@ -27,3 +27,9 @@ ("(") @structure.open (")") @structure.close ) + +(ignore_directive + "ignore" @structure.anchor + ("(") @structure.open + (")") @structure.close +) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index a87f17795f5b6a1d69368d826688a6ed48309d23..b8956b55873b27b42ce94e12dd9239f33359420d 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -19,7 +19,6 @@ use pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind}; use pet_virtualenv::is_virtualenv_dir; use project::Fs; use project::lsp_store::language_server_settings; -use rope::Rope; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use smol::lock::OnceCell; @@ -467,7 +466,7 @@ impl LspAdapter for PyrightLspAdapter { Some(language::CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), + language.highlight_text(&text.as_str().into(), display_range), )) } @@ 
-1211,7 +1210,7 @@ impl ToolchainLister for PythonToolchainProvider { activation_script.extend(match shell { ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")), ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")), - ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")), + ShellKind::Nushell => Some(format!("^\"{pyenv}\" shell - nu {version}")), ShellKind::PowerShell => None, ShellKind::Csh => None, ShellKind::Tcsh => None, @@ -1512,7 +1511,7 @@ impl LspAdapter for PyLspAdapter { Some(language::CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), + language.highlight_text(&text.as_str().into(), display_range), )) } @@ -1801,7 +1800,7 @@ impl LspAdapter for BasedPyrightLspAdapter { Some(language::CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), + language.highlight_text(&text.as_str().into(), display_range), )) } diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index b6f7b10da69f7f3f8d8551a88fa8409f05c2fed8..4b56a617735ab1a5932a56a4f6e51397721d8a86 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -252,7 +252,7 @@ impl LspAdapter for RustLspAdapter { let name = &completion.label; let text = format!("{name}: {signature}"); let prefix = "struct S { "; - let source = Rope::from_iter_small([prefix, &text, " }"]); + let source = Rope::from_iter([prefix, &text, " }"]); let runs = language.highlight_text(&source, prefix.len()..prefix.len() + text.len()); mk_label(text, &|| 0..completion.label.len(), runs) @@ -264,7 +264,7 @@ impl LspAdapter for RustLspAdapter { let name = &completion.label; let text = format!("{name}: {signature}",); let prefix = "let "; - let source = Rope::from_iter_small([prefix, &text, " = ();"]); + let source = Rope::from_iter([prefix, &text, " = 
();"]); let runs = language.highlight_text(&source, prefix.len()..prefix.len() + text.len()); mk_label(text, &|| 0..completion.label.len(), runs) @@ -302,7 +302,7 @@ impl LspAdapter for RustLspAdapter { .filter(|it| it.contains(&label)) .and_then(|it| Some((it, FULL_SIGNATURE_REGEX.find(it)?))) { - let source = Rope::from_str_small(function_signature); + let source = Rope::from(function_signature); let runs = language.highlight_text(&source, 0..function_signature.len()); mk_label( function_signature.to_owned(), @@ -311,7 +311,7 @@ impl LspAdapter for RustLspAdapter { ) } else if let Some((prefix, suffix)) = fn_prefixed { let text = format!("{label}{suffix}"); - let source = Rope::from_iter_small([prefix, " ", &text, " {}"]); + let source = Rope::from_iter([prefix, " ", &text, " {}"]); let run_start = prefix.len() + 1; let runs = language.highlight_text(&source, run_start..run_start + text.len()); mk_label(text, &|| 0..label.len(), runs) @@ -322,7 +322,7 @@ impl LspAdapter for RustLspAdapter { { let text = completion.label.clone(); let len = text.len(); - let source = Rope::from_str_small(text.as_str()); + let source = Rope::from(text.as_str()); let runs = language.highlight_text(&source, 0..len); mk_label(text, &|| 0..completion.label.len(), runs) } else if detail_left.is_none() { @@ -399,10 +399,7 @@ impl LspAdapter for RustLspAdapter { Some(CodeLabel::new( format!("{prefix}{name}"), filter_range, - language.highlight_text( - &Rope::from_iter_small([prefix, name, suffix]), - display_range, - ), + language.highlight_text(&Rope::from_iter([prefix, name, suffix]), display_range), )) } diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index 8a85dfea07fe4f50cb271f65ec1bdeeaf2ea150c..6474ba2a05af330b1a7bd2da8ed3411b9132fe22 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -121,6 +121,15 @@ ; Tokens +[ + ";" + "?." + "." + "," + ":" + "?" 
+] @punctuation.delimiter + [ "..." "-" @@ -179,15 +188,6 @@ ] @operator ) -[ - ";" - "?." - "." - "," - ":" - "?" -] @punctuation.delimiter - [ "(" ")" diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index eb239fd46fe8c0a6cfcfb6ea4a7610ddb6dabf47..c34ed69288e39c26d105877d76ee76c01c864c72 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -1558,9 +1558,7 @@ impl MarkdownElementBuilder { if let Some(Some(language)) = self.code_block_stack.last() { let mut offset = 0; - for (range, highlight_id) in - language.highlight_text(&Rope::from_str_small(text), 0..text.len()) - { + for (range, highlight_id) in language.highlight_text(&Rope::from(text), 0..text.len()) { if range.start > offset { self.pending_line .runs diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index d46224a736dfd7e2a57c88d9512774562e10dab8..8f2203c25b9a7193759668a35016c2d3203310b6 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -779,7 +779,7 @@ impl<'a> MarkdownParser<'a> { let highlights = if let Some(language) = &language { if let Some(registry) = &self.language_registry { - let rope = language::Rope::from_str_small(code.as_str()); + let rope: language::Rope = code.as_str().into(); registry .language_for_name_or_extension(language) .await diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 947d6be1199ca73be910c5cc606147ef75bd9376..a9121b9104400d88d5f22801db1bfebaeeb060d6 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -1,6 +1,6 @@ use super::*; use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind}; -use gpui::{App, BackgroundExecutor, TestAppContext}; +use gpui::{App, TestAppContext}; use indoc::indoc; use language::{Buffer, Rope}; use parking_lot::RwLock; @@ -79,14 +79,9 @@ fn 
test_remote(cx: &mut App) { let ops = cx .background_executor() .block(host_buffer.read(cx).serialize_ops(None, cx)); - let mut buffer = Buffer::from_proto( - ReplicaId::REMOTE_SERVER, - Capability::ReadWrite, - state, - None, - cx.background_executor(), - ) - .unwrap(); + let mut buffer = + Buffer::from_proto(ReplicaId::REMOTE_SERVER, Capability::ReadWrite, state, None) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| language::proto::deserialize_operation(op).unwrap()), @@ -1229,7 +1224,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot, cx.background_executor()); + assert_line_indents(&snapshot); multibuffer.update(cx, |multibuffer, cx| { multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) @@ -1253,7 +1248,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot, cx.background_executor()); + assert_line_indents(&snapshot); // Expand the first diff hunk multibuffer.update(cx, |multibuffer, cx| { @@ -1305,7 +1300,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot, cx.background_executor()); + assert_line_indents(&snapshot); // Edit the buffer before the first hunk buffer.update(cx, |buffer, cx| { @@ -1347,7 +1342,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot, cx.background_executor()); + assert_line_indents(&snapshot); // Recalculate the diff, changing the first diff hunk. 
diff.update(cx, |diff, cx| { @@ -2072,7 +2067,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { } assert_position_translation(&snapshot); - assert_line_indents(&snapshot, cx.background_executor()); + assert_line_indents(&snapshot); assert_eq!( snapshot @@ -2123,7 +2118,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { ), ); - assert_line_indents(&snapshot, cx.background_executor()); + assert_line_indents(&snapshot); } /// A naive implementation of a multi-buffer that does not maintain @@ -2893,7 +2888,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { ); } - let text_rope = Rope::from_str(expected_text.as_str(), cx.background_executor()); + let text_rope = Rope::from(expected_text.as_str()); for _ in 0..10 { let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); let start_ix = text_rope.clip_offset(rng.random_range(0..=end_ix), Bias::Left); @@ -3517,7 +3512,7 @@ fn assert_consistent_line_numbers(snapshot: &MultiBufferSnapshot) { #[track_caller] fn assert_position_translation(snapshot: &MultiBufferSnapshot) { - let text = Rope::from_str_small(&snapshot.text()); + let text = Rope::from(snapshot.text()); let mut left_anchors = Vec::new(); let mut right_anchors = Vec::new(); @@ -3641,10 +3636,10 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) { } } -fn assert_line_indents(snapshot: &MultiBufferSnapshot, executor: &BackgroundExecutor) { +fn assert_line_indents(snapshot: &MultiBufferSnapshot) { let max_row = snapshot.max_point().row; let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id(); - let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text(), executor); + let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text()); let mut line_indents = text .line_indents_in_row_range(0..max_row + 1) .collect::>(); diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 
568d1ac8671fc3e10fb7656dfdffa7211accd1cd..c750bb912f0c2767e4c56890d0ab75046c094e71 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -5,7 +5,7 @@ use gpui::{App, AppContext, Context, Entity}; use itertools::Itertools; use language::{Buffer, BufferSnapshot}; use rope::Point; -use text::{Bias, OffsetRangeExt, locator::Locator}; +use text::{Bias, BufferId, OffsetRangeExt, locator::Locator}; use util::{post_inc, rel_path::RelPath}; use crate::{ @@ -152,6 +152,15 @@ impl MultiBuffer { } } + pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context) { + self.remove_excerpts( + self.excerpts_for_buffer(buffer, cx) + .into_iter() + .map(|(excerpt, _)| excerpt), + cx, + ); + } + pub(super) fn expand_excerpts_with_paths( &mut self, ids: impl IntoIterator, diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index f9b1afe34e5ebf51576b07164f5ccfa23428ca56..69f0857df517724c70359b5043125765b83c29b1 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -465,14 +465,8 @@ impl SearchData { let match_offset_range = match_range.to_offset(multi_buffer_snapshot); let mut search_match_indices = vec![ - multi_buffer_snapshot.clip_offset( - match_offset_range.start - context_offset_range.start, - Bias::Left, - ) - ..multi_buffer_snapshot.clip_offset( - match_offset_range.end - context_offset_range.start, - Bias::Right, - ), + match_offset_range.start - context_offset_range.start + ..match_offset_range.end - context_offset_range.start, ]; let entire_context_text = multi_buffer_snapshot @@ -509,14 +503,8 @@ impl SearchData { .next() .is_some_and(|c| !c.is_whitespace()); search_match_indices.iter_mut().for_each(|range| { - range.start = multi_buffer_snapshot.clip_offset( - range.start.saturating_sub(left_whitespaces_offset), - Bias::Left, - ); - range.end = multi_buffer_snapshot.clip_offset( - 
range.end.saturating_sub(left_whitespaces_offset), - Bias::Right, - ); + range.start = range.start.saturating_sub(left_whitespaces_offset); + range.end = range.end.saturating_sub(left_whitespaces_offset); }); let trimmed_row_offset_range = @@ -5256,10 +5244,13 @@ mod tests { use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; use pretty_assertions::assert_eq; use project::FakeFs; - use search::project_search::{self, perform_project_search}; + use search::{ + buffer_search, + project_search::{self, perform_project_search}, + }; use serde_json::json; use util::path; - use workspace::{OpenOptions, OpenVisible}; + use workspace::{OpenOptions, OpenVisible, ToolbarItemView}; use super::*; @@ -5322,25 +5313,28 @@ mod tests { ide/src/ inlay_hints/ fn_lifetime_fn.rs - search: match config.param_names_for_lifetime_elision_hints { - search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { - search: Some(it) if config.param_names_for_lifetime_elision_hints => { - search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + search: match config.«param_names_for_lifetime_elision_hints» { + search: allocated_lifetimes.push(if config.«param_names_for_lifetime_elision_hints» { + search: Some(it) if config.«param_names_for_lifetime_elision_hints» => { + search: InlayHintsConfig { «param_names_for_lifetime_elision_hints»: true, ..TEST_CONFIG }, inlay_hints.rs - search: pub param_names_for_lifetime_elision_hints: bool, - search: param_names_for_lifetime_elision_hints: self + search: pub «param_names_for_lifetime_elision_hints»: bool, + search: «param_names_for_lifetime_elision_hints»: self static_index.rs - search: param_names_for_lifetime_elision_hints: false, + search: «param_names_for_lifetime_elision_hints»: false, rust-analyzer/src/ cli/ analysis_stats.rs - search: param_names_for_lifetime_elision_hints: true, + search: «param_names_for_lifetime_elision_hints»: true, config.rs - search: 
param_names_for_lifetime_elision_hints: self"# + search: «param_names_for_lifetime_elision_hints»: self"# .to_string(); let select_first_in_all_matches = |line_to_select: &str| { - assert!(all_matches.contains(line_to_select)); + assert!( + all_matches.contains(line_to_select), + "`{line_to_select}` was not found in all matches `{all_matches}`" + ); all_matches.replacen( line_to_select, &format!("{line_to_select}{SELECTED_MARKER}"), @@ -5361,7 +5355,7 @@ mod tests { cx, ), select_first_in_all_matches( - "search: match config.param_names_for_lifetime_elision_hints {" + "search: match config.«param_names_for_lifetime_elision_hints» {" ) ); }); @@ -5401,16 +5395,16 @@ mod tests { inlay_hints/ fn_lifetime_fn.rs{SELECTED_MARKER} inlay_hints.rs - search: pub param_names_for_lifetime_elision_hints: bool, - search: param_names_for_lifetime_elision_hints: self + search: pub «param_names_for_lifetime_elision_hints»: bool, + search: «param_names_for_lifetime_elision_hints»: self static_index.rs - search: param_names_for_lifetime_elision_hints: false, + search: «param_names_for_lifetime_elision_hints»: false, rust-analyzer/src/ cli/ analysis_stats.rs - search: param_names_for_lifetime_elision_hints: true, + search: «param_names_for_lifetime_elision_hints»: true, config.rs - search: param_names_for_lifetime_elision_hints: self"#, + search: «param_names_for_lifetime_elision_hints»: self"#, ) ); }); @@ -5471,9 +5465,9 @@ mod tests { rust-analyzer/src/ cli/ analysis_stats.rs - search: param_names_for_lifetime_elision_hints: true, + search: «param_names_for_lifetime_elision_hints»: true, config.rs - search: param_names_for_lifetime_elision_hints: self"#, + search: «param_names_for_lifetime_elision_hints»: self"#, ) ); }); @@ -5553,21 +5547,21 @@ mod tests { ide/src/ inlay_hints/ fn_lifetime_fn.rs - search: match config.param_names_for_lifetime_elision_hints { - search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { - search: Some(it) if 
config.param_names_for_lifetime_elision_hints => { - search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + search: match config.«param_names_for_lifetime_elision_hints» { + search: allocated_lifetimes.push(if config.«param_names_for_lifetime_elision_hints» { + search: Some(it) if config.«param_names_for_lifetime_elision_hints» => { + search: InlayHintsConfig { «param_names_for_lifetime_elision_hints»: true, ..TEST_CONFIG }, inlay_hints.rs - search: pub param_names_for_lifetime_elision_hints: bool, - search: param_names_for_lifetime_elision_hints: self + search: pub «param_names_for_lifetime_elision_hints»: bool, + search: «param_names_for_lifetime_elision_hints»: self static_index.rs - search: param_names_for_lifetime_elision_hints: false, + search: «param_names_for_lifetime_elision_hints»: false, rust-analyzer/src/ cli/ analysis_stats.rs - search: param_names_for_lifetime_elision_hints: true, + search: «param_names_for_lifetime_elision_hints»: true, config.rs - search: param_names_for_lifetime_elision_hints: self"# + search: «param_names_for_lifetime_elision_hints»: self"# .to_string(); cx.executor() @@ -5692,30 +5686,40 @@ mod tests { ide/src/ inlay_hints/ fn_lifetime_fn.rs - search: match config.param_names_for_lifetime_elision_hints { - search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { - search: Some(it) if config.param_names_for_lifetime_elision_hints => { - search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + search: match config.«param_names_for_lifetime_elision_hints» { + search: allocated_lifetimes.push(if config.«param_names_for_lifetime_elision_hints» { + search: Some(it) if config.«param_names_for_lifetime_elision_hints» => { + search: InlayHintsConfig { «param_names_for_lifetime_elision_hints»: true, ..TEST_CONFIG }, inlay_hints.rs - search: pub param_names_for_lifetime_elision_hints: bool, - search: param_names_for_lifetime_elision_hints: 
self + search: pub «param_names_for_lifetime_elision_hints»: bool, + search: «param_names_for_lifetime_elision_hints»: self static_index.rs - search: param_names_for_lifetime_elision_hints: false, + search: «param_names_for_lifetime_elision_hints»: false, rust-analyzer/src/ cli/ analysis_stats.rs - search: param_names_for_lifetime_elision_hints: true, + search: «param_names_for_lifetime_elision_hints»: true, config.rs - search: param_names_for_lifetime_elision_hints: self"# + search: «param_names_for_lifetime_elision_hints»: self"# .to_string(); let select_first_in_all_matches = |line_to_select: &str| { - assert!(all_matches.contains(line_to_select)); + assert!( + all_matches.contains(line_to_select), + "`{line_to_select}` was not found in all matches `{all_matches}`" + ); all_matches.replacen( line_to_select, &format!("{line_to_select}{SELECTED_MARKER}"), 1, ) }; + let clear_outline_metadata = |input: &str| { + input + .replace("search: ", "") + .replace("«", "") + .replace("»", "") + }; + cx.executor() .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); cx.run_until_parked(); @@ -5726,7 +5730,7 @@ mod tests { .expect("should have an active editor open") }); let initial_outline_selection = - "search: match config.param_names_for_lifetime_elision_hints {"; + "search: match config.«param_names_for_lifetime_elision_hints» {"; outline_panel.update_in(cx, |outline_panel, window, cx| { assert_eq!( display_entries( @@ -5740,7 +5744,7 @@ mod tests { ); assert_eq!( selected_row_text(&active_editor, cx), - initial_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + clear_outline_metadata(initial_outline_selection), "Should place the initial editor selection on the corresponding search result" ); @@ -5749,7 +5753,7 @@ mod tests { }); let navigated_outline_selection = - "search: Some(it) if config.param_names_for_lifetime_elision_hints => {"; + "search: Some(it) if config.«param_names_for_lifetime_elision_hints» => {"; 
outline_panel.update(cx, |outline_panel, cx| { assert_eq!( display_entries( @@ -5767,7 +5771,7 @@ mod tests { outline_panel.update(cx, |_, cx| { assert_eq!( selected_row_text(&active_editor, cx), - navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + clear_outline_metadata(navigated_outline_selection), "Should still have the initial caret position after SelectNext calls" ); }); @@ -5778,7 +5782,7 @@ mod tests { outline_panel.update(cx, |_outline_panel, cx| { assert_eq!( selected_row_text(&active_editor, cx), - navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + clear_outline_metadata(navigated_outline_selection), "After opening, should move the caret to the opened outline entry's position" ); }); @@ -5786,7 +5790,7 @@ mod tests { outline_panel.update_in(cx, |outline_panel, window, cx| { outline_panel.select_next(&SelectNext, window, cx); }); - let next_navigated_outline_selection = "search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },"; + let next_navigated_outline_selection = "search: InlayHintsConfig { «param_names_for_lifetime_elision_hints»: true, ..TEST_CONFIG },"; outline_panel.update(cx, |outline_panel, cx| { assert_eq!( display_entries( @@ -5804,7 +5808,7 @@ mod tests { outline_panel.update(cx, |_outline_panel, cx| { assert_eq!( selected_row_text(&active_editor, cx), - next_navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + clear_outline_metadata(next_navigated_outline_selection), "Should again preserve the selection after another SelectNext call" ); }); @@ -5837,7 +5841,7 @@ mod tests { ); assert_eq!( selected_row_text(&new_active_editor, cx), - next_navigated_outline_selection.replace("search: ", ""), // Clear outline metadata prefixes + clear_outline_metadata(next_navigated_outline_selection), "When opening the excerpt, should navigate to the place corresponding the outline entry" ); }); @@ -5939,11 
+5943,11 @@ mod tests { format!( r#"one/ a.txt - search: aaa aaa <==== selected - search: aaa aaa + search: «aaa» aaa <==== selected + search: aaa «aaa» two/ b.txt - search: a aaa"#, + search: a «aaa»"#, ), ); }); @@ -5969,7 +5973,7 @@ two/ a.txt <==== selected two/ b.txt - search: a aaa"#, + search: a «aaa»"#, ), ); }); @@ -6018,7 +6022,7 @@ two/ <==== selected"#, a.txt two/ <==== selected b.txt - search: a aaa"#, + search: a «aaa»"#, ) ); }); @@ -6483,18 +6487,18 @@ outline: struct OutlineEntryExcerpt r#"frontend-project/ public/lottie/ syntax-tree.json - search: {{ "something": "static" }} <==== selected + search: {{ "something": "«static»" }} <==== selected src/ app/(site)/ (about)/jobs/[slug]/ page.tsx - search: static + search: «static» (blog)/post/[slug]/ page.tsx - search: static + search: «static» components/ ErrorBoundary.tsx - search: static"# + search: «static»"# ) ); }); @@ -6522,12 +6526,12 @@ outline: struct OutlineEntryExcerpt r#"frontend-project/ public/lottie/ syntax-tree.json - search: {{ "something": "static" }} + search: {{ "something": "«static»" }} src/ app/(site)/ <==== selected components/ ErrorBoundary.tsx - search: static"# + search: «static»"# ) ); }); @@ -6552,12 +6556,12 @@ outline: struct OutlineEntryExcerpt r#"frontend-project/ public/lottie/ syntax-tree.json - search: {{ "something": "static" }} + search: {{ "something": "«static»" }} src/ app/(site)/ components/ ErrorBoundary.tsx - search: static <==== selected"# + search: «static» <==== selected"# ) ); }); @@ -6586,7 +6590,7 @@ outline: struct OutlineEntryExcerpt r#"frontend-project/ public/lottie/ syntax-tree.json - search: {{ "something": "static" }} + search: {{ "something": "«static»" }} src/ app/(site)/ components/ @@ -6619,12 +6623,12 @@ outline: struct OutlineEntryExcerpt r#"frontend-project/ public/lottie/ syntax-tree.json - search: {{ "something": "static" }} + search: {{ "something": "«static»" }} src/ app/(site)/ components/ ErrorBoundary.tsx <==== selected - search: 
static"# + search: «static»"# ) ); }); @@ -6667,18 +6671,18 @@ outline: struct OutlineEntryExcerpt r#"frontend-project/ public/lottie/ syntax-tree.json - search: {{ "something": "static" }} + search: {{ "something": "«static»" }} src/ app/(site)/ (about)/jobs/[slug]/ page.tsx - search: static + search: «static» (blog)/post/[slug]/ page.tsx - search: static + search: «static» components/ ErrorBoundary.tsx <==== selected - search: static"# + search: «static»"# ) ); }); @@ -6784,16 +6788,21 @@ outline: struct OutlineEntryExcerpt } }, PanelEntry::Search(search_entry) => { - format!( - "search: {}", - search_entry - .render_data - .get_or_init(|| SearchData::new( - &search_entry.match_range, - multi_buffer_snapshot - )) - .context_text - ) + let search_data = search_entry.render_data.get_or_init(|| { + SearchData::new(&search_entry.match_range, multi_buffer_snapshot) + }); + let mut search_result = String::new(); + let mut last_end = 0; + for range in &search_data.search_match_indices { + search_result.push_str(&search_data.context_text[last_end..range.start]); + search_result.push('«'); + search_result.push_str(&search_data.context_text[range.start..range.end]); + search_result.push('»'); + last_end = range.end; + } + search_result.push_str(&search_data.context_text[last_end..]); + + format!("search: {search_result}") } }; @@ -6816,6 +6825,7 @@ outline: struct OutlineEntryExcerpt workspace::init_settings(cx); Project::init_settings(cx); project_search::init(cx); + buffer_search::init(cx); super::init(cx); }); } @@ -7827,4 +7837,102 @@ outline: fn main()" }; }); } + + #[gpui::test] + async fn test_buffer_search(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/test", + json!({ + "foo.txt": r#"<_constitution> + + + + + +## 📊 Output + +| Field | Meaning | +"# + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let workspace = add_outline_panel(&project, 
cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let editor = workspace + .update(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from("/test/foo.txt"), + OpenOptions { + visible: Some(OpenVisible::All), + ..OpenOptions::default() + }, + window, + cx, + ) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); + + let search_bar = workspace + .update(cx, |_, window, cx| { + cx.new(|cx| { + let mut search_bar = BufferSearchBar::new(None, window, cx); + search_bar.set_active_pane_item(Some(&editor), window, cx); + search_bar.show(window, cx); + search_bar + }) + }) + .unwrap(); + + let outline_panel = outline_panel(&workspace, cx); + + outline_panel.update_in(cx, |outline_panel, window, cx| { + outline_panel.set_active(true, window, cx) + }); + + search_bar + .update_in(cx, |search_bar, window, cx| { + search_bar.search(" ", None, true, window, cx) + }) + .await + .unwrap(); + + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(500)); + cx.run_until_parked(); + + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &project, + &snapshot(outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry(), + cx, + ), + "search: | Field« » | Meaning | <==== selected +search: | Field « » | Meaning | +search: | Field « » | Meaning | +search: | Field « » | Meaning | +search: | Field « »| Meaning | +search: | Field | Meaning« » | +search: | Field | Meaning « » | +search: | Field | Meaning « » | +search: | Field | Meaning « » | +search: | Field | Meaning « » | +search: | Field | Meaning « » | +search: | Field | Meaning « » | +search: | Field | Meaning « »|" + ); + }); + } } diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index a6efa1ef75786d3f0dc77ed2e57ec0edec42fc8c..af0c97013e9d66fc01e2f35b03ffafe6d0b4e6a7 100644 --- a/crates/project/src/agent_server_store.rs +++ 
b/crates/project/src/agent_server_store.rs @@ -1,7 +1,6 @@ use std::{ any::Any, borrow::Borrow, - collections::HashSet, path::{Path, PathBuf}, str::FromStr as _, sync::Arc, @@ -137,7 +136,7 @@ impl EventEmitter for AgentServerStore {} #[cfg(test)] mod ext_agent_tests { use super::*; - use std::fmt::Write as _; + use std::{collections::HashSet, fmt::Write as _}; // Helper to build a store in Collab mode so we can mutate internal maps without // needing to spin up a full project environment. @@ -244,25 +243,18 @@ impl AgentServerStore { // Collect manifests first so we can iterate twice let manifests: Vec<_> = manifests.into_iter().collect(); - // Remove existing extension-provided agents by tracking which ones we're about to add - let extension_agent_names: HashSet<_> = manifests - .iter() - .flat_map(|(_, manifest)| manifest.agent_servers.keys().map(|k| k.to_string())) - .collect(); - - let keys_to_remove: Vec<_> = self - .external_agents - .keys() - .filter(|name| { - // Remove if it matches an extension agent name from any extension - extension_agent_names.contains(name.0.as_ref()) - }) - .cloned() - .collect(); - for key in &keys_to_remove { - self.external_agents.remove(key); - self.agent_icons.remove(key); - } + // Remove all extension-provided agents + // (They will be re-added below if they're in the currently installed extensions) + self.external_agents.retain(|name, agent| { + if agent.downcast_mut::().is_some() { + self.agent_icons.remove(name); + false + } else { + // Keep the hardcoded external agents that don't come from extensions + // (In the future we may move these over to being extensions too.) 
+ true + } + }); // Insert agent servers from extension manifests match &self.state { @@ -1037,7 +1029,7 @@ impl ExternalAgentServer for LocalGemini { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_local_directory_environment( + project_environment.local_directory_environment( &Shell::System, root_dir.clone(), cx, @@ -1133,7 +1125,7 @@ impl ExternalAgentServer for LocalClaudeCode { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_local_directory_environment( + project_environment.local_directory_environment( &Shell::System, root_dir.clone(), cx, @@ -1227,7 +1219,7 @@ impl ExternalAgentServer for LocalCodex { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_local_directory_environment( + project_environment.local_directory_environment( &Shell::System, root_dir.clone(), cx, @@ -1402,7 +1394,7 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { // Get project environment let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_local_directory_environment( + project_environment.local_directory_environment( &Shell::System, root_dir.clone(), cx, @@ -1585,7 +1577,7 @@ impl ExternalAgentServer for LocalCustomAgent { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_local_directory_environment( + project_environment.local_directory_environment( &Shell::System, root_dir.clone(), cx, @@ -1702,6 +1694,8 @@ impl settings::Settings for AllAgentServersSettings { #[cfg(test)] mod extension_agent_tests { + use crate::worktree_store::WorktreeStore; + use super::*; use gpui::TestAppContext; use std::sync::Arc; @@ -1826,7 +1820,9 @@ mod extension_agent_tests { async fn 
archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) { let fs = fs::FakeFs::new(cx.background_executor.clone()); let http_client = http_client::FakeHttpClient::with_404_response(); - let project_environment = cx.new(|cx| crate::ProjectEnvironment::new(None, cx)); + let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone())); + let project_environment = + cx.new(|cx| crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, cx)); let agent = LocalExtensionArchiveAgent { fs, diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 3fb702518690585d3237324c04802c9deec0892e..39e302a2d9b1ae92cce9691c957cb9fcfbf26d7d 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -180,13 +180,7 @@ impl RemoteBufferStore { buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?) as Arc); } - Buffer::from_proto( - replica_id, - capability, - state, - buffer_file, - cx.background_executor(), - ) + Buffer::from_proto(replica_id, capability, state, buffer_file) }); match buffer_result { @@ -634,10 +628,9 @@ impl LocalBufferStore { Ok(loaded) => { let reservation = cx.reserve_entity::()?; let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); - let executor = cx.background_executor().clone(); let text_buffer = cx .background_spawn(async move { - text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text, &executor) + text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text) }) .await; cx.insert_entity(reservation, |_| { @@ -646,12 +639,7 @@ impl LocalBufferStore { } Err(error) if is_not_found_error(&error) => cx.new(|cx| { let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); - let text_buffer = text::Buffer::new( - ReplicaId::LOCAL, - buffer_id, - "", - cx.background_executor(), - ); + let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, ""); Buffer::build( text_buffer, Some(Arc::new(File { diff --git 
a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 7d80c563e9678ec097dab030bdca047a967e2cf0..0b733aac29843090361cd5868799f6cb1db630f6 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -49,7 +49,7 @@ use std::{ path::{Path, PathBuf}, sync::{Arc, Once}, }; -use task::{DebugScenario, Shell, SpawnInTerminal, TaskContext, TaskTemplate}; +use task::{DebugScenario, SpawnInTerminal, TaskContext, TaskTemplate}; use util::{ResultExt as _, rel_path::RelPath}; use worktree::Worktree; @@ -267,8 +267,8 @@ impl DapStore { let user_env = dap_settings.map(|s| s.env.clone()); let delegate = self.delegate(worktree, console, cx); - let cwd: Arc = worktree.read(cx).abs_path().as_ref().into(); + let worktree = worktree.clone(); cx.spawn(async move |this, cx| { let mut binary = adapter .get_binary( @@ -287,11 +287,7 @@ impl DapStore { .unwrap() .environment .update(cx, |environment, cx| { - environment.get_local_directory_environment( - &Shell::System, - cwd, - cx, - ) + environment.worktree_environment(worktree, cx) }) })? 
.await; @@ -607,9 +603,9 @@ impl DapStore { local_store.node_runtime.clone(), local_store.http_client.clone(), local_store.toolchain_store.clone(), - local_store.environment.update(cx, |env, cx| { - env.get_worktree_environment(worktree.clone(), cx) - }), + local_store + .environment + .update(cx, |env, cx| env.worktree_environment(worktree.clone(), cx)), local_store.is_headless, )) } diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 0f713b7deb3aca07ea7f867fc768ab2af9716c15..cc14611edbcec922c439cb06c63566036dc64cc6 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -5,11 +5,12 @@ use remote::RemoteClient; use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID}; use std::{collections::VecDeque, path::Path, sync::Arc}; use task::{Shell, shell_to_proto}; -use util::ResultExt; +use terminal::terminal_settings::TerminalSettings; +use util::{ResultExt, rel_path::RelPath}; use worktree::Worktree; use collections::HashMap; -use gpui::{AppContext as _, Context, Entity, EventEmitter, Task}; +use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task, WeakEntity}; use settings::Settings as _; use crate::{ @@ -23,6 +24,8 @@ pub struct ProjectEnvironment { remote_environments: HashMap<(Shell, Arc), Shared>>>>, environment_error_messages: VecDeque, environment_error_messages_tx: mpsc::UnboundedSender, + worktree_store: WeakEntity, + remote_client: Option>, _tasks: Vec>, } @@ -33,7 +36,12 @@ pub enum ProjectEnvironmentEvent { impl EventEmitter for ProjectEnvironment {} impl ProjectEnvironment { - pub fn new(cli_environment: Option>, cx: &mut Context) -> Self { + pub fn new( + cli_environment: Option>, + worktree_store: WeakEntity, + remote_client: Option>, + cx: &mut Context, + ) -> Self { let (tx, mut rx) = mpsc::unbounded(); let task = cx.spawn(async move |this, cx| { while let Some(message) = rx.next().await { @@ -50,12 +58,17 @@ impl ProjectEnvironment { remote_environments: Default::default(), 
environment_error_messages: Default::default(), environment_error_messages_tx: tx, + worktree_store, + remote_client, _tasks: vec![task], } } /// Returns the inherited CLI environment, if this project was opened from the Zed CLI. pub(crate) fn get_cli_environment(&self) -> Option> { + if cfg!(any(test, feature = "test-support")) { + return Some(HashMap::default()); + } if let Some(mut env) = self.cli_environment.clone() { set_origin_marker(&mut env, EnvironmentOrigin::Cli); Some(env) @@ -64,16 +77,12 @@ impl ProjectEnvironment { } } - pub(crate) fn get_buffer_environment( + pub fn buffer_environment( &mut self, buffer: &Entity, worktree_store: &Entity, cx: &mut Context, ) -> Shared>>> { - if cfg!(any(test, feature = "test-support")) { - return Task::ready(Some(HashMap::default())).shared(); - } - if let Some(cli_environment) = self.get_cli_environment() { log::debug!("using project environment variables from CLI"); return Task::ready(Some(cli_environment)).shared(); @@ -87,54 +96,105 @@ impl ProjectEnvironment { else { return Task::ready(None).shared(); }; - - self.get_worktree_environment(worktree, cx) + self.worktree_environment(worktree, cx) } - pub fn get_worktree_environment( + pub fn worktree_environment( &mut self, worktree: Entity, - cx: &mut Context, + cx: &mut App, ) -> Shared>>> { - if cfg!(any(test, feature = "test-support")) { - return Task::ready(Some(HashMap::default())).shared(); - } - if let Some(cli_environment) = self.get_cli_environment() { log::debug!("using project environment variables from CLI"); return Task::ready(Some(cli_environment)).shared(); } - let mut abs_path = worktree.read(cx).abs_path(); - if !worktree.read(cx).is_local() { - log::error!( - "attempted to get project environment for a non-local worktree at {abs_path:?}" - ); - return Task::ready(None).shared(); - } else if worktree.read(cx).is_single_file() { + let worktree = worktree.read(cx); + let mut abs_path = worktree.abs_path(); + if worktree.is_single_file() { let 
Some(parent) = abs_path.parent() else { return Task::ready(None).shared(); }; abs_path = parent.into(); } - self.get_local_directory_environment(&Shell::System, abs_path, cx) + let remote_client = self.remote_client.as_ref().and_then(|it| it.upgrade()); + match remote_client { + Some(remote_client) => remote_client.clone().read(cx).shell().map(|shell| { + self.remote_directory_environment( + &Shell::Program(shell), + abs_path, + remote_client, + cx, + ) + }), + None => Some({ + let shell = TerminalSettings::get( + Some(settings::SettingsLocation { + worktree_id: worktree.id(), + path: RelPath::empty(), + }), + cx, + ) + .shell + .clone(); + + self.local_directory_environment(&shell, abs_path, cx) + }), + } + .unwrap_or_else(|| Task::ready(None).shared()) + } + + pub fn directory_environment( + &mut self, + abs_path: Arc, + cx: &mut App, + ) -> Shared>>> { + let remote_client = self.remote_client.as_ref().and_then(|it| it.upgrade()); + match remote_client { + Some(remote_client) => remote_client.clone().read(cx).shell().map(|shell| { + self.remote_directory_environment( + &Shell::Program(shell), + abs_path, + remote_client, + cx, + ) + }), + None => self + .worktree_store + .read_with(cx, |worktree_store, cx| { + worktree_store.find_worktree(&abs_path, cx) + }) + .ok() + .map(|worktree| { + let shell = terminal::terminal_settings::TerminalSettings::get( + worktree + .as_ref() + .map(|(worktree, path)| settings::SettingsLocation { + worktree_id: worktree.read(cx).id(), + path: &path, + }), + cx, + ) + .shell + .clone(); + + self.local_directory_environment(&shell, abs_path, cx) + }), + } + .unwrap_or_else(|| Task::ready(None).shared()) } /// Returns the project environment, if possible. /// If the project was opened from the CLI, then the inherited CLI environment is returned. /// If it wasn't opened from the CLI, and an absolute path is given, then a shell is spawned in /// that directory, to get environment variables as if the user has `cd`'d there. 
- pub fn get_local_directory_environment( + pub fn local_directory_environment( &mut self, shell: &Shell, abs_path: Arc, - cx: &mut Context, + cx: &mut App, ) -> Shared>>> { - if cfg!(any(test, feature = "test-support")) { - return Task::ready(Some(HashMap::default())).shared(); - } - if let Some(cli_environment) = self.get_cli_environment() { log::debug!("using project environment variables from CLI"); return Task::ready(Some(cli_environment)).shared(); @@ -146,7 +206,7 @@ impl ProjectEnvironment { let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); let shell = shell.clone(); let tx = self.environment_error_messages_tx.clone(); - cx.spawn(async move |_, cx| { + cx.spawn(async move |cx| { let mut shell_env = cx .background_spawn(load_directory_shell_environment( shell, @@ -178,12 +238,12 @@ impl ProjectEnvironment { .clone() } - pub fn get_remote_directory_environment( + pub fn remote_directory_environment( &mut self, shell: &Shell, abs_path: Arc, remote_client: Entity, - cx: &mut Context, + cx: &mut App, ) -> Shared>>> { if cfg!(any(test, feature = "test-support")) { return Task::ready(Some(HashMap::default())).shared(); @@ -201,7 +261,7 @@ impl ProjectEnvironment { shell: Some(shell_to_proto(shell.clone())), directory: abs_path.to_string_lossy().to_string(), }); - cx.spawn(async move |_, _| { + cx.background_spawn(async move { let environment = response.await.log_err()?; Some(environment.environment.into_iter().collect()) }) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index e29710682b45125ff06a0cc8390e768a11289c6d..2ada4a94eff69e73cd4e9d5fc360443d583ced91 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -4804,7 +4804,7 @@ impl Repository { .upgrade() .context("missing project environment")? 
.update(cx, |project_environment, cx| { - project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx) + project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx) })? .await .unwrap_or_else(|| { diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 46c2e1f92415044ce1d9e8bdf9053a3d3768f372..160a384a4a0ff4481c97b6eda75faded28f01624 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -276,8 +276,8 @@ mod tests { use util::{path, rel_path::rel_path}; use worktree::WorktreeSettings; - #[gpui::test] - fn test_parse_conflicts_in_buffer(cx: &mut TestAppContext) { + #[test] + fn test_parse_conflicts_in_buffer() { // Create a buffer with conflict markers let test_content = r#" This is some text before the conflict. @@ -299,12 +299,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new( - ReplicaId::LOCAL, - buffer_id, - test_content, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -360,8 +355,8 @@ mod tests { assert_eq!(conflicts_in_range.len(), 0); } - #[gpui::test] - fn test_nested_conflict_markers(cx: &mut TestAppContext) { + #[test] + fn test_nested_conflict_markers() { // Create a buffer with nested conflict markers let test_content = r#" This is some text before the conflict. 
@@ -379,12 +374,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new( - ReplicaId::LOCAL, - buffer_id, - test_content, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -406,8 +396,8 @@ mod tests { assert_eq!(their_text, "This is their version in a nested conflict\n"); } - #[gpui::test] - fn test_conflict_markers_at_eof(cx: &mut TestAppContext) { + #[test] + fn test_conflict_markers_at_eof() { let test_content = r#" <<<<<<< ours ======= @@ -415,20 +405,15 @@ mod tests { >>>>>>> "# .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new( - ReplicaId::LOCAL, - buffer_id, - test_content, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); assert_eq!(conflict_snapshot.conflicts.len(), 1); } - #[gpui::test] - fn test_conflicts_in_range(cx: &mut TestAppContext) { + #[test] + fn test_conflicts_in_range() { // Create a buffer with conflict markers let test_content = r#" one @@ -462,12 +447,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new( - ReplicaId::LOCAL, - buffer_id, - test_content.clone(), - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone()); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 762070796f068fb01b19522b4a506eb693b9bd63..5ed0d39de47a56e4aec5e0e215f220854b11c32e 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -75,14 +75,14 @@ use language::{ range_from_lsp, range_to_lsp, }; use lsp::{ - AdapterServerCapabilities, CodeActionKind, 
CompletionContext, DiagnosticServerCapabilities, - DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, - FileOperationFilter, FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, - FileSystemWatcher, LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, - LanguageServerBinaryOptions, LanguageServerId, LanguageServerName, LanguageServerSelector, - LspRequestFuture, MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, - TextDocumentSyncSaveOptions, TextEdit, Uri, WillRenameFiles, WorkDoneProgressCancelParams, - WorkspaceFolder, notification::DidRenameFiles, + AdapterServerCapabilities, CodeActionKind, CompletionContext, CompletionOptions, + DiagnosticServerCapabilities, DiagnosticSeverity, DiagnosticTag, + DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, FileOperationPatternKind, + FileOperationRegistrationOptions, FileRename, FileSystemWatcher, LSP_REQUEST_TIMEOUT, + LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, + LanguageServerName, LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType, + OneOf, RenameFilesParams, SymbolKind, TextDocumentSyncSaveOptions, TextEdit, Uri, + WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles, }; use node_runtime::read_package_installed_version; use parking_lot::Mutex; @@ -853,23 +853,32 @@ impl LocalLspStore { language_server .on_request::({ let lsp_store = lsp_store.clone(); + let request_id = Arc::new(AtomicUsize::new(0)); move |(), cx| { - let this = lsp_store.clone(); + let lsp_store = lsp_store.clone(); + let request_id = request_id.clone(); let mut cx = cx.clone(); async move { - this.update(&mut cx, |lsp_store, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints(server_id)); - lsp_store - .downstream_client - .as_ref() - .map(|(client, project_id)| { - client.send(proto::RefreshInlayHints { - project_id: *project_id, - server_id: 
server_id.to_proto(), + lsp_store + .update(&mut cx, |lsp_store, cx| { + let request_id = + Some(request_id.fetch_add(1, atomic::Ordering::AcqRel)); + cx.emit(LspStoreEvent::RefreshInlayHints { + server_id, + request_id, + }); + lsp_store + .downstream_client + .as_ref() + .map(|(client, project_id)| { + client.send(proto::RefreshInlayHints { + project_id: *project_id, + server_id: server_id.to_proto(), + request_id: request_id.map(|id| id as u64), + }) }) - }) - })? - .transpose()?; + })? + .transpose()?; Ok(()) } } @@ -3659,7 +3668,10 @@ pub enum LspStoreEvent { new_language: Option>, }, Notification(String), - RefreshInlayHints(LanguageServerId), + RefreshInlayHints { + server_id: LanguageServerId, + request_id: Option, + }, RefreshCodeLens, DiagnosticsUpdated { server_id: LanguageServerId, @@ -5329,8 +5341,8 @@ impl LspStore { request.to_proto(project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { return Ok(None); }; let Some(responses) = request_task.await? else { @@ -5339,7 +5351,7 @@ impl LspStore { let actions = join_all(responses.payload.into_iter().map(|response| { GetDefinitions { position }.response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ) @@ -5395,8 +5407,8 @@ impl LspStore { request.to_proto(project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { return Ok(None); }; let Some(responses) = request_task.await? 
else { @@ -5405,7 +5417,7 @@ impl LspStore { let actions = join_all(responses.payload.into_iter().map(|response| { GetDeclarations { position }.response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ) @@ -5461,8 +5473,8 @@ impl LspStore { request.to_proto(project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { return Ok(None); }; let Some(responses) = request_task.await? else { @@ -5471,7 +5483,7 @@ impl LspStore { let actions = join_all(responses.payload.into_iter().map(|response| { GetTypeDefinitions { position }.response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ) @@ -5527,8 +5539,8 @@ impl LspStore { request.to_proto(project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { return Ok(None); }; let Some(responses) = request_task.await? else { @@ -5537,7 +5549,7 @@ impl LspStore { let actions = join_all(responses.payload.into_iter().map(|response| { GetImplementations { position }.response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ) @@ -5594,8 +5606,8 @@ impl LspStore { request.to_proto(project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { return Ok(None); }; let Some(responses) = request_task.await? 
else { @@ -5605,7 +5617,7 @@ impl LspStore { let locations = join_all(responses.payload.into_iter().map(|lsp_response| { GetReferences { position }.response_from_proto( lsp_response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ) @@ -5662,8 +5674,8 @@ impl LspStore { request.to_proto(project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { return Ok(None); }; let Some(responses) = request_task.await? else { @@ -5676,7 +5688,7 @@ impl LspStore { } .response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ) @@ -6636,14 +6648,22 @@ impl LspStore { cx: &mut Context, ) -> HashMap, Task>> { let buffer_snapshot = buffer.read(cx).snapshot(); - let for_server = if let InvalidationStrategy::RefreshRequested(server_id) = invalidate { + let next_hint_id = self.next_hint_id.clone(); + let lsp_data = self.latest_lsp_data(&buffer, cx); + let mut lsp_refresh_requested = false; + let for_server = if let InvalidationStrategy::RefreshRequested { + server_id, + request_id, + } = invalidate + { + let invalidated = lsp_data + .inlay_hints + .invalidate_for_server_refresh(server_id, request_id); + lsp_refresh_requested = invalidated; Some(server_id) } else { None }; - let invalidate_cache = invalidate.should_invalidate(); - let next_hint_id = self.next_hint_id.clone(); - let lsp_data = self.latest_lsp_data(&buffer, cx); let existing_inlay_hints = &mut lsp_data.inlay_hints; let known_chunks = known_chunks .filter(|(known_version, _)| !lsp_data.buffer_version.changed_since(known_version)) @@ -6651,8 +6671,8 @@ impl LspStore { .unwrap_or_default(); let mut hint_fetch_tasks = Vec::new(); - let mut cached_inlay_hints = HashMap::default(); - let mut ranges_to_query = Vec::new(); + let mut cached_inlay_hints = None; 
+ let mut ranges_to_query = None; let applicable_chunks = existing_inlay_hints .applicable_chunks(ranges.as_slice()) .filter(|chunk| !known_chunks.contains(&(chunk.start..chunk.end))) @@ -6667,12 +6687,12 @@ impl LspStore { match ( existing_inlay_hints .cached_hints(&row_chunk) - .filter(|_| !invalidate_cache) + .filter(|_| !lsp_refresh_requested) .cloned(), existing_inlay_hints .fetched_hints(&row_chunk) .as_ref() - .filter(|_| !invalidate_cache) + .filter(|_| !lsp_refresh_requested) .cloned(), ) { (None, None) => { @@ -6681,19 +6701,18 @@ impl LspStore { } else { Point::new(row_chunk.end, 0) }; - ranges_to_query.push(( + ranges_to_query.get_or_insert_with(Vec::new).push(( row_chunk, buffer_snapshot.anchor_before(Point::new(row_chunk.start, 0)) ..buffer_snapshot.anchor_after(end), )); } - (None, Some(fetched_hints)) => { - hint_fetch_tasks.push((row_chunk, fetched_hints.clone())) - } + (None, Some(fetched_hints)) => hint_fetch_tasks.push((row_chunk, fetched_hints)), (Some(cached_hints), None) => { for (server_id, cached_hints) in cached_hints { if for_server.is_none_or(|for_server| for_server == server_id) { cached_inlay_hints + .get_or_insert_with(HashMap::default) .entry(row_chunk.start..row_chunk.end) .or_insert_with(HashMap::default) .entry(server_id) @@ -6703,10 +6722,11 @@ impl LspStore { } } (Some(cached_hints), Some(fetched_hints)) => { - hint_fetch_tasks.push((row_chunk, fetched_hints.clone())); + hint_fetch_tasks.push((row_chunk, fetched_hints)); for (server_id, cached_hints) in cached_hints { if for_server.is_none_or(|for_server| for_server == server_id) { cached_inlay_hints + .get_or_insert_with(HashMap::default) .entry(row_chunk.start..row_chunk.end) .or_insert_with(HashMap::default) .entry(server_id) @@ -6718,18 +6738,18 @@ impl LspStore { } } - let cached_chunk_data = cached_inlay_hints - .into_iter() - .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) - .collect(); - if hint_fetch_tasks.is_empty() && ranges_to_query.is_empty() { - 
cached_chunk_data + if hint_fetch_tasks.is_empty() + && ranges_to_query + .as_ref() + .is_none_or(|ranges| ranges.is_empty()) + && let Some(cached_inlay_hints) = cached_inlay_hints + { + cached_inlay_hints + .into_iter() + .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) + .collect() } else { - if invalidate_cache { - lsp_data.inlay_hints.clear(); - } - - for (chunk, range_to_query) in ranges_to_query { + for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() { let next_hint_id = next_hint_id.clone(); let buffer = buffer.clone(); let new_inlay_hints = cx @@ -6745,31 +6765,38 @@ impl LspStore { let update_cache = !lsp_data .buffer_version .changed_since(&buffer.read(cx).version()); - new_hints_by_server - .into_iter() - .map(|(server_id, new_hints)| { - let new_hints = new_hints - .into_iter() - .map(|new_hint| { - ( - InlayId::Hint(next_hint_id.fetch_add( - 1, - atomic::Ordering::AcqRel, - )), - new_hint, - ) - }) - .collect::>(); - if update_cache { - lsp_data.inlay_hints.insert_new_hints( - chunk, - server_id, - new_hints.clone(), - ); - } - (server_id, new_hints) - }) - .collect() + if new_hints_by_server.is_empty() { + if update_cache { + lsp_data.inlay_hints.invalidate_for_chunk(chunk); + } + HashMap::default() + } else { + new_hints_by_server + .into_iter() + .map(|(server_id, new_hints)| { + let new_hints = new_hints + .into_iter() + .map(|new_hint| { + ( + InlayId::Hint(next_hint_id.fetch_add( + 1, + atomic::Ordering::AcqRel, + )), + new_hint, + ) + }) + .collect::>(); + if update_cache { + lsp_data.inlay_hints.insert_new_hints( + chunk, + server_id, + new_hints.clone(), + ); + } + (server_id, new_hints) + }) + .collect() + } }) }) .map_err(Arc::new) @@ -6781,22 +6808,25 @@ impl LspStore { hint_fetch_tasks.push((chunk, new_inlay_hints)); } - let mut combined_data = cached_chunk_data; - combined_data.extend(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| { - ( - chunk.start..chunk.end, - cx.spawn(async move |_, _| { - 
hints_fetch.await.map_err(|e| { - if e.error_code() != ErrorCode::Internal { - anyhow!(e.error_code()) - } else { - anyhow!("{e:#}") - } - }) - }), - ) - })); - combined_data + cached_inlay_hints + .unwrap_or_default() + .into_iter() + .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) + .chain(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| { + ( + chunk.start..chunk.end, + cx.spawn(async move |_, _| { + hints_fetch.await.map_err(|e| { + if e.error_code() != ErrorCode::Internal { + anyhow!(e.error_code()) + } else { + anyhow!("{e:#}") + } + }) + }), + ) + })) + .collect() } } @@ -7157,7 +7187,7 @@ impl LspStore { ); let buffer = buffer.clone(); cx.spawn(async move |lsp_store, cx| { - let Some(project) = lsp_store.upgrade() else { + let Some(lsp_store) = lsp_store.upgrade() else { return Ok(None); }; let colors = join_all( @@ -7171,7 +7201,7 @@ impl LspStore { .map(|color_response| { let response = request.response_from_proto( color_response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ); @@ -7235,8 +7265,8 @@ impl LspStore { request.to_proto(upstream_project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let project = weak_project.upgrade()?; + cx.spawn(async move |weak_lsp_store, cx| { + let lsp_store = weak_lsp_store.upgrade()?; let signatures = join_all( request_task .await @@ -7248,7 +7278,7 @@ impl LspStore { .map(|response| { let response = GetSignatureHelp { position }.response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ); @@ -7299,8 +7329,8 @@ impl LspStore { request.to_proto(upstream_project_id, buffer.read(cx)), ); let buffer = buffer.clone(); - cx.spawn(async move |weak_project, cx| { - let project = weak_project.upgrade()?; + cx.spawn(async move |weak_lsp_store, cx| { + let lsp_store = weak_lsp_store.upgrade()?; let hovers = join_all( request_task .await @@ -7312,7 +7342,7 @@ impl LspStore { 
.map(|response| { let response = GetHover { position }.response_from_proto( response.response, - project.clone(), + lsp_store.clone(), buffer.clone(), cx.clone(), ); @@ -9604,7 +9634,10 @@ impl LspStore { if let Some(work) = status.pending_work.remove(&token) && !work.is_disk_based_diagnostics_progress { - cx.emit(LspStoreEvent::RefreshInlayHints(language_server_id)); + cx.emit(LspStoreEvent::RefreshInlayHints { + server_id: language_server_id, + request_id: None, + }); } cx.notify(); } @@ -9743,9 +9776,10 @@ impl LspStore { mut cx: AsyncApp, ) -> Result { lsp_store.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints( - LanguageServerId::from_proto(envelope.payload.server_id), - )); + cx.emit(LspStoreEvent::RefreshInlayHints { + server_id: LanguageServerId::from_proto(envelope.payload.server_id), + request_id: envelope.payload.request_id.map(|id| id as usize), + }); })?; Ok(proto::Ack {}) } @@ -10130,7 +10164,7 @@ impl LspStore { ) -> Shared>>> { if let Some(environment) = &self.as_local().map(|local| local.environment.clone()) { environment.update(cx, |env, cx| { - env.get_buffer_environment(buffer, &self.worktree_store, cx) + env.buffer_environment(buffer, &self.worktree_store, cx) }) } else { Task::ready(None).shared() @@ -10972,7 +11006,6 @@ impl LspStore { language_server.name(), Some(key.worktree_id), )); - cx.emit(LspStoreEvent::RefreshInlayHints(server_id)); let server_capabilities = language_server.capabilities(); if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { @@ -11898,12 +11931,38 @@ impl LspStore { "textDocument/completion" => { if let Some(caps) = reg .register_options - .map(serde_json::from_value) + .map(serde_json::from_value::) .transpose()? 
{ server.update_capabilities(|capabilities| { - capabilities.completion_provider = Some(caps); + capabilities.completion_provider = Some(caps.clone()); }); + + if let Some(local) = self.as_local() { + let mut buffers_with_language_server = Vec::new(); + for handle in self.buffer_store.read(cx).buffers() { + let buffer_id = handle.read(cx).remote_id(); + if local + .buffers_opened_in_servers + .get(&buffer_id) + .filter(|s| s.contains(&server_id)) + .is_some() + { + buffers_with_language_server.push(handle); + } + } + let triggers = caps + .trigger_characters + .unwrap_or_default() + .into_iter() + .collect::>(); + for handle in buffers_with_language_server { + let triggers = triggers.clone(); + let _ = handle.update(cx, move |buffer, cx| { + buffer.set_completion_triggers(server_id, triggers, cx); + }); + } + } notify_server_capabilities_updated(&server, cx); } } @@ -12890,7 +12949,7 @@ impl LanguageServerWatchedPathsBuilder { language_server_id: LanguageServerId, cx: &mut Context, ) -> LanguageServerWatchedPaths { - let project = cx.weak_entity(); + let lsp_store = cx.weak_entity(); const LSP_ABS_PATH_OBSERVE: Duration = Duration::from_millis(100); let abs_paths = self @@ -12901,7 +12960,7 @@ impl LanguageServerWatchedPathsBuilder { let abs_path = abs_path.clone(); let fs = fs.clone(); - let lsp_store = project.clone(); + let lsp_store = lsp_store.clone(); async move |_, cx| { maybe!(async move { let mut push_updates = fs.watch(&abs_path, LSP_ABS_PATH_OBSERVE).await; @@ -13369,9 +13428,8 @@ impl LocalLspAdapterDelegate { fs: Arc, cx: &mut App, ) -> Arc { - let load_shell_env_task = environment.update(cx, |env, cx| { - env.get_worktree_environment(worktree.clone(), cx) - }); + let load_shell_env_task = + environment.update(cx, |env, cx| env.worktree_environment(worktree.clone(), cx)); Arc::new(Self { lsp_store, diff --git a/crates/project/src/lsp_store/inlay_hint_cache.rs b/crates/project/src/lsp_store/inlay_hint_cache.rs index 
7d3ec27e5af83c4d83b269c171943d90754bd1a6..51189d8fdae788c7c12546f2c9ac1735930c3095 100644 --- a/crates/project/src/lsp_store/inlay_hint_cache.rs +++ b/crates/project/src/lsp_store/inlay_hint_cache.rs @@ -19,7 +19,10 @@ pub enum InvalidationStrategy { /// Demands to re-query all inlay hints needed and invalidate all cached entries, but does not require instant update with invalidation. /// /// Despite nothing forbids language server from sending this request on every edit, it is expected to be sent only when certain internal server state update, invisible for the editor otherwise. - RefreshRequested(LanguageServerId), + RefreshRequested { + server_id: LanguageServerId, + request_id: Option, + }, /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited at least on one place. /// Neither editor nor LSP is able to tell which open file hints' are not affected, so all of them have to be invalidated, re-queried and do that fast enough to avoid being slow, but also debounce to avoid loading hints on every fast keystroke sequence. BufferEdited, @@ -36,7 +39,7 @@ impl InvalidationStrategy { pub fn should_invalidate(&self) -> bool { matches!( self, - InvalidationStrategy::RefreshRequested(_) | InvalidationStrategy::BufferEdited + InvalidationStrategy::RefreshRequested { .. 
} | InvalidationStrategy::BufferEdited ) } } @@ -47,6 +50,7 @@ pub struct BufferInlayHints { hints_by_chunks: Vec>, fetches_by_chunks: Vec>, hints_by_id: HashMap, + latest_invalidation_requests: HashMap>, pub(super) hint_resolves: HashMap>>, } @@ -104,6 +108,7 @@ impl BufferInlayHints { Self { hints_by_chunks: vec![None; buffer_chunks.len()], fetches_by_chunks: vec![None; buffer_chunks.len()], + latest_invalidation_requests: HashMap::default(), hints_by_id: HashMap::default(), hint_resolves: HashMap::default(), snapshot, @@ -176,6 +181,7 @@ impl BufferInlayHints { self.fetches_by_chunks = vec![None; self.buffer_chunks.len()]; self.hints_by_id.clear(); self.hint_resolves.clear(); + self.latest_invalidation_requests.clear(); } pub fn insert_new_hints( @@ -222,4 +228,48 @@ impl BufferInlayHints { pub fn buffer_chunks_len(&self) -> usize { self.buffer_chunks.len() } + + pub(crate) fn invalidate_for_server_refresh( + &mut self, + for_server: LanguageServerId, + request_id: Option, + ) -> bool { + match self.latest_invalidation_requests.entry(for_server) { + hash_map::Entry::Occupied(mut o) => { + if request_id > *o.get() { + o.insert(request_id); + } else { + return false; + } + } + hash_map::Entry::Vacant(v) => { + v.insert(request_id); + } + } + + for (chunk_id, chunk_data) in self.hints_by_chunks.iter_mut().enumerate() { + if let Some(removed_hints) = chunk_data + .as_mut() + .and_then(|chunk_data| chunk_data.remove(&for_server)) + { + for (id, _) in removed_hints { + self.hints_by_id.remove(&id); + self.hint_resolves.remove(&id); + } + self.fetches_by_chunks[chunk_id] = None; + } + } + + true + } + + pub(crate) fn invalidate_for_chunk(&mut self, chunk: BufferChunk) { + self.fetches_by_chunks[chunk.id] = None; + if let Some(hints_by_server) = self.hints_by_chunks[chunk.id].take() { + for (hint_id, _) in hints_by_server.into_values().flatten() { + self.hints_by_id.remove(&hint_id); + self.hint_resolves.remove(&hint_id); + } + } + } } diff --git 
a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 3743f9769eaaff7f3acd1cc5bad16e31f6e80987..40deac76404ddb4378fe08cae931d0f0e3583487 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -13,9 +13,7 @@ use futures::{ future::{self, Shared}, stream::FuturesUnordered, }; -use gpui::{ - AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, WeakEntity, -}; +use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity}; use language::{ Buffer, LanguageRegistry, LocalFile, language_settings::{Formatter, LanguageSettings}, @@ -560,137 +558,99 @@ impl PrettierStore { let plugins_to_install = new_plugins.clone(); let fs = Arc::clone(&self.fs); let new_installation_task = cx - .spawn(async move |prettier_store, cx| { - cx.background_executor() - .timer(Duration::from_millis(30)) - .await; + .spawn(async move |prettier_store, cx| { + cx.background_executor().timer(Duration::from_millis(30)).await; let location_data = prettier_store.update(cx, |prettier_store, cx| { - worktree - .and_then(|worktree_id| { - prettier_store - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - .map(|worktree| worktree.read(cx).abs_path()) - }) - .map(|locate_from| { - let installed_prettiers = - prettier_store.prettier_instances.keys().cloned().collect(); - (locate_from, installed_prettiers) - }) + worktree.and_then(|worktree_id| { + prettier_store.worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }).map(|locate_from| { + let installed_prettiers = prettier_store.prettier_instances.keys().cloned().collect(); + (locate_from, installed_prettiers) + }) })?; let locate_prettier_installation = match location_data { - Some((locate_from, installed_prettiers)) => { - Prettier::locate_prettier_installation( - fs.as_ref(), - &installed_prettiers, - locate_from.as_ref(), - ) - .await - .context("locate 
prettier installation") - .map_err(Arc::new)? - } + Some((locate_from, installed_prettiers)) => Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + locate_from.as_ref(), + ) + .await + .context("locate prettier installation").map_err(Arc::new)?, None => ControlFlow::Continue(None), }; - match locate_prettier_installation { + match locate_prettier_installation + { ControlFlow::Break(()) => return Ok(()), ControlFlow::Continue(prettier_path) => { if prettier_path.is_some() { new_plugins.clear(); } - let mut needs_install = - should_write_prettier_server_file(fs.as_ref()).await; + let mut needs_install = should_write_prettier_server_file(fs.as_ref()).await; if let Some(previous_installation_task) = previous_installation_task - && let Err(e) = previous_installation_task.await - { - log::error!("Failed to install default prettier: {e:#}"); - prettier_store.update(cx, |prettier_store, _| { - if let PrettierInstallation::NotInstalled { - attempts, - not_installed_plugins, - .. - } = &mut prettier_store.default_prettier.prettier - { - *attempts += 1; - new_plugins.extend(not_installed_plugins.iter().cloned()); - installation_attempt = *attempts; - needs_install = true; - }; - })?; - }; + && let Err(e) = previous_installation_task.await { + log::error!("Failed to install default prettier: {e:#}"); + prettier_store.update(cx, |prettier_store, _| { + if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier { + *attempts += 1; + new_plugins.extend(not_installed_plugins.iter().cloned()); + installation_attempt = *attempts; + needs_install = true; + }; + })?; + }; if installation_attempt > prettier::FAIL_THRESHOLD { prettier_store.update(cx, |prettier_store, _| { - if let PrettierInstallation::NotInstalled { - installation_task, - .. - } = &mut prettier_store.default_prettier.prettier - { + if let PrettierInstallation::NotInstalled { installation_task, .. 
} = &mut prettier_store.default_prettier.prettier { *installation_task = None; }; })?; log::warn!( - "Default prettier installation had failed {installation_attempt} \ - times, not attempting again", + "Default prettier installation had failed {installation_attempt} times, not attempting again", ); return Ok(()); } prettier_store.update(cx, |prettier_store, _| { new_plugins.retain(|plugin| { - !prettier_store - .default_prettier - .installed_plugins - .contains(plugin) + !prettier_store.default_prettier.installed_plugins.contains(plugin) }); - if let PrettierInstallation::NotInstalled { - not_installed_plugins, - .. - } = &mut prettier_store.default_prettier.prettier - { + if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier { not_installed_plugins.retain(|plugin| { - !prettier_store - .default_prettier - .installed_plugins - .contains(plugin) + !prettier_store.default_prettier.installed_plugins.contains(plugin) }); not_installed_plugins.extend(new_plugins.iter().cloned()); } needs_install |= !new_plugins.is_empty(); })?; if needs_install { - log::info!( - "Initializing default prettier with plugins {new_plugins:?}" - ); + log::info!("Initializing default prettier with plugins {new_plugins:?}"); let installed_plugins = new_plugins.clone(); - let executor = cx.background_executor().clone(); cx.background_spawn(async move { install_prettier_packages(fs.as_ref(), new_plugins, node).await?; // Save the server file last, so the reinstall need could be determined by the absence of the file. 
- save_prettier_server_file(fs.as_ref(), &executor).await?; + save_prettier_server_file(fs.as_ref()).await?; anyhow::Ok(()) }) - .await - .context("prettier & plugins install") - .map_err(Arc::new)?; - log::info!( - "Initialized default prettier with plugins: {installed_plugins:?}" - ); + .await + .context("prettier & plugins install") + .map_err(Arc::new)?; + log::info!("Initialized default prettier with plugins: {installed_plugins:?}"); prettier_store.update(cx, |prettier_store, _| { prettier_store.default_prettier.prettier = PrettierInstallation::Installed(PrettierInstance { attempt: 0, prettier: None, }); - prettier_store - .default_prettier + prettier_store.default_prettier .installed_plugins .extend(installed_plugins); })?; } else { prettier_store.update(cx, |prettier_store, _| { - if let PrettierInstallation::NotInstalled { .. } = - &mut prettier_store.default_prettier.prettier - { + if let PrettierInstallation::NotInstalled { .. } = &mut prettier_store.default_prettier.prettier { prettier_store.default_prettier.prettier = PrettierInstallation::Installed(PrettierInstance { attempt: 0, @@ -976,14 +936,11 @@ async fn install_prettier_packages( anyhow::Ok(()) } -async fn save_prettier_server_file( - fs: &dyn Fs, - executor: &BackgroundExecutor, -) -> anyhow::Result<()> { +async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> { let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); fs.save( &prettier_wrapper_path, - &text::Rope::from_str(prettier::PRETTIER_SERVER_JS, executor), + &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix, ) .await diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7c7fe9a43091611a53dbde0ecbaf6691b7d768d0..c65a0b5c3a6e46b4326fecd283736edbb9ec9626 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -33,7 +33,6 @@ pub mod search_history; mod yarn; use dap::inline_value::{InlineValueLocation, VariableLookupKind, 
VariableScope}; -use task::Shell; use crate::{ agent_server_store::AllAgentServersSettings, @@ -68,7 +67,7 @@ use futures::future::join_all; use futures::{ StreamExt, channel::mpsc::{self, UnboundedReceiver}, - future::{Shared, try_join_all}, + future::try_join_all, }; pub use image_store::{ImageItem, ImageStore}; use image_store::{ImageItemEvent, ImageStoreEvent}; @@ -337,7 +336,10 @@ pub enum Event { HostReshared, Reshared, Rejoined, - RefreshInlayHints(LanguageServerId), + RefreshInlayHints { + server_id: LanguageServerId, + request_id: Option, + }, RefreshCodeLens, RevealInProjectPanel(ProjectEntryId), SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>), @@ -712,10 +714,8 @@ pub enum ResolveState { impl InlayHint { pub fn text(&self) -> Rope { match &self.label { - InlayHintLabel::String(s) => Rope::from_str_small(s), - InlayHintLabel::LabelParts(parts) => { - Rope::from_iter_small(parts.iter().map(|part| &*part.value)) - } + InlayHintLabel::String(s) => Rope::from(s), + InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &*part.value).collect(), } } } @@ -1070,9 +1070,10 @@ impl Project { let weak_self = cx.weak_entity(); let context_server_store = - cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self, cx)); + cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self.clone(), cx)); - let environment = cx.new(|cx| ProjectEnvironment::new(env, cx)); + let environment = + cx.new(|cx| ProjectEnvironment::new(env, worktree_store.downgrade(), None, cx)); let manifest_tree = ManifestTree::new(worktree_store.clone(), cx); let toolchain_store = cx.new(|cx| { ToolchainStore::local( @@ -1261,7 +1262,7 @@ impl Project { let weak_self = cx.weak_entity(); let context_server_store = - cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self, cx)); + cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self.clone(), cx)); let buffer_store = cx.new(|cx| { BufferStore::remote( @@ -1307,7 +1308,14 @@ impl Project { 
cx.subscribe(&settings_observer, Self::on_settings_observer_event) .detach(); - let environment = cx.new(|cx| ProjectEnvironment::new(None, cx)); + let environment = cx.new(|cx| { + ProjectEnvironment::new( + None, + worktree_store.downgrade(), + Some(remote.downgrade()), + cx, + ) + }); let lsp_store = cx.new(|cx| { LspStore::new_remote( @@ -1520,8 +1528,8 @@ impl Project { ImageStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) })?; - let environment = cx.new(|cx| ProjectEnvironment::new(None, cx))?; - + let environment = + cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, cx))?; let breakpoint_store = cx.new(|_| BreakpointStore::remote(remote_id, client.clone().into()))?; let dap_store = cx.new(|cx| { @@ -1925,32 +1933,6 @@ impl Project { self.environment.read(cx).get_cli_environment() } - pub fn buffer_environment<'a>( - &'a self, - buffer: &Entity, - worktree_store: &Entity, - cx: &'a mut App, - ) -> Shared>>> { - self.environment.update(cx, |environment, cx| { - environment.get_buffer_environment(buffer, worktree_store, cx) - }) - } - - pub fn directory_environment( - &self, - shell: &Shell, - abs_path: Arc, - cx: &mut App, - ) -> Shared>>> { - self.environment.update(cx, |environment, cx| { - if let Some(remote_client) = self.remote_client.clone() { - environment.get_remote_directory_environment(shell, abs_path, remote_client, cx) - } else { - environment.get_local_directory_environment(shell, abs_path, cx) - } - }) - } - #[inline] pub fn peek_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a String> { self.environment.read(cx).peek_environment_error() @@ -3076,9 +3058,13 @@ impl Project { return; }; } - LspStoreEvent::RefreshInlayHints(server_id) => { - cx.emit(Event::RefreshInlayHints(*server_id)) - } + LspStoreEvent::RefreshInlayHints { + server_id, + request_id, + } => cx.emit(Event::RefreshInlayHints { + server_id: *server_id, + request_id: *request_id, + }), LspStoreEvent::RefreshCodeLens => 
cx.emit(Event::RefreshCodeLens), LspStoreEvent::LanguageServerPrompt(prompt) => { cx.emit(Event::LanguageServerPrompt(prompt.clone())) @@ -5404,12 +5390,7 @@ impl Project { worktree .update(cx, |worktree, cx| { let line_ending = text::LineEnding::detect(&new_text); - worktree.write_file( - rel_path.clone(), - Rope::from_str(&new_text, cx.background_executor()), - line_ending, - cx, - ) + worktree.write_file(rel_path.clone(), new_text.into(), line_ending, cx) })? .await .context("Failed to write settings file")?; diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 3dc918d5a757af56038471e1a601d6f2cf7dbbe1..3a824cb16eeaa0b2e82d14c89cc906e52e74cd7a 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1461,21 +1461,21 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon .unwrap(); fs.save( path!("/the-root/Cargo.lock").as_ref(), - &Rope::default(), + &"".into(), Default::default(), ) .await .unwrap(); fs.save( path!("/the-stdlib/LICENSE").as_ref(), - &Rope::default(), + &"".into(), Default::default(), ) .await .unwrap(); fs.save( path!("/the/stdlib/src/string.rs").as_ref(), - &Rope::default(), + &"".into(), Default::default(), ) .await @@ -1815,10 +1815,6 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { fake_server .start_progress(format!("{}/0", progress_token)) .await; - assert_eq!( - events.next().await.unwrap(), - Event::RefreshInlayHints(fake_server.server.server_id()) - ); assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsStarted { @@ -1957,10 +1953,6 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC Some(worktree_id) ) ); - assert_eq!( - events.next().await.unwrap(), - Event::RefreshInlayHints(fake_server.server.server_id()) - ); fake_server.start_progress(progress_token).await; assert_eq!( events.next().await.unwrap(), @@ -4072,7 +4064,7 @@ async fn 
test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // to be detected by the worktree, so that the buffer starts reloading. fs.save( path!("/dir/file1").as_ref(), - &Rope::from_str("the first contents", cx.background_executor()), + &"the first contents".into(), Default::default(), ) .await @@ -4083,7 +4075,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // previous file change may still be in progress. fs.save( path!("/dir/file1").as_ref(), - &Rope::from_str("the second contents", cx.background_executor()), + &"the second contents".into(), Default::default(), ) .await @@ -4127,7 +4119,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { // to be detected by the worktree, so that the buffer starts reloading. fs.save( path!("/dir/file1").as_ref(), - &Rope::from_str("the first contents", cx.background_executor()), + &"the first contents".into(), Default::default(), ) .await @@ -4805,7 +4797,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n"); fs.save( path!("/dir/the-file").as_ref(), - &Rope::from_str(new_contents.as_str(), cx.background_executor()), + &new_contents.as_str().into(), LineEnding::Unix, ) .await @@ -4837,7 +4829,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { // Change the file on disk again, adding blank lines to the beginning. fs.save( path!("/dir/the-file").as_ref(), - &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()), + &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(), LineEnding::Unix, ) .await @@ -4889,7 +4881,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) { // state updates correctly. 
fs.save( path!("/dir/file1").as_ref(), - &Rope::from_str("aaa\nb\nc\n", cx.background_executor()), + &"aaa\nb\nc\n".into(), LineEnding::Windows, ) .await diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 0de5e239798c6a95078d79c2a25775c914a13611..462b164e83b6d7dd91c11edc8482290079019bf3 100644 --- a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -317,7 +317,7 @@ fn local_task_context_for_location( cx.spawn(async move |cx| { let project_env = environment .update(cx, |environment, cx| { - environment.get_buffer_environment(&location.buffer, &worktree_store, cx) + environment.buffer_environment(&location.buffer, &worktree_store, cx) }) .ok()? .await; diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 5ea9824916520cfb53673f82f17c1d0e5d31ede3..17564b17dd4d6623d7ca72fadbd0aa8defd1f9cc 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -8,7 +8,6 @@ use remote::RemoteClient; use settings::{Settings, SettingsLocation}; use smol::channel::bounded; use std::{ - borrow::Cow, path::{Path, PathBuf}, sync::Arc, }; @@ -122,6 +121,7 @@ impl Project { let lang_registry = self.languages.clone(); cx.spawn(async move |project, cx| { let shell_kind = ShellKind::new(&shell, is_windows); + let activation_script = maybe!(async { for toolchain in toolchains { let Some(toolchain) = toolchain.await else { @@ -143,14 +143,8 @@ impl Project { .update(cx, move |_, cx| { let format_to_run = || { if let Some(command) = &spawn_task.command { - let mut command: Option> = shell_kind.try_quote(command); - if let Some(command) = &mut command - && command.starts_with('"') - && let Some(prefix) = shell_kind.command_prefix() - { - *command = Cow::Owned(format!("{prefix}{command}")); - } - + let command = shell_kind.prepend_command_prefix(command); + let command = shell_kind.try_quote_prefix_aware(&command); let args = spawn_task .args .iter() @@ -172,12 +166,13 @@ impl 
Project { let activation_script = activation_script.join(&format!("{separator} ")); let to_run = format_to_run(); + + let arg = format!("{activation_script}{separator} {to_run}"); + let args = shell_kind.args_for_shell(false, arg); let shell = remote_client .read(cx) .shell() .unwrap_or_else(get_default_system_shell); - let arg = format!("{activation_script}{separator} {to_run}"); - let args = shell_kind.args_for_shell(false, arg); create_remote_shell( Some((&shell, &args)), diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index d1c4fc629698bb70d156786837bc2540533d4867..21b74bd784d1d9af12fe43e3fe82051afc103b0d 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -527,7 +527,7 @@ impl LocalToolchainStore { let project_env = environment .update(cx, |environment, cx| { - environment.get_local_directory_environment( + environment.local_directory_environment( &Shell::System, abs_path.as_path().into(), cx, @@ -590,7 +590,7 @@ impl LocalToolchainStore { let project_env = environment .update(cx, |environment, cx| { - environment.get_local_directory_environment( + environment.local_directory_environment( &Shell::System, path.as_path().into(), cx, diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 30059431094bf1b11c1e481979ed5ea651f1d40b..644e492ef6a5d639a99f75b18465ca93b0c0ef92 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -466,6 +466,7 @@ message ResolveInlayHintResponse { message RefreshInlayHints { uint64 project_id = 1; uint64 server_id = 2; + optional uint64 request_id = 3; } message CodeLens { diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index c371b27ce1dcfe665d96f548bca2c893559005ec..7c8557f9dac2131a84c54cc60657e105d2839658 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -574,6 
+574,7 @@ pub async fn open_remote_project( open_options: workspace::OpenOptions, cx: &mut AsyncApp, ) -> Result<()> { + let created_new_window = open_options.replace_window.is_none(); let window = if let Some(window) = open_options.replace_window { window } else { @@ -648,7 +649,45 @@ pub async fn open_remote_project( let Some(delegate) = delegate else { break }; let remote_connection = - remote::connect(connection_options.clone(), delegate.clone(), cx).await?; + match remote::connect(connection_options.clone(), delegate.clone(), cx).await { + Ok(connection) => connection, + Err(e) => { + window + .update(cx, |workspace, _, cx| { + if let Some(ui) = workspace.active_modal::(cx) { + ui.update(cx, |modal, cx| modal.finished(cx)) + } + }) + .ok(); + log::error!("Failed to open project: {e:?}"); + let response = window + .update(cx, |_, window, cx| { + window.prompt( + PromptLevel::Critical, + match connection_options { + RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH", + RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", + }, + Some(&e.to_string()), + &["Retry", "Cancel"], + cx, + ) + })? + .await; + + if response == Ok(0) { + continue; + } + + if created_new_window { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } + break; + } + }; + let (paths, paths_with_positions) = determine_paths_with_positions(&remote_connection, paths.clone()).await; @@ -686,7 +725,7 @@ pub async fn open_remote_project( RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", }, Some(&e.to_string()), - &["Retry", "Ok"], + &["Retry", "Cancel"], cx, ) })? 
@@ -694,7 +733,14 @@ pub async fn open_remote_project( if response == Ok(0) { continue; } + + if created_new_window { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } } + Ok(items) => { for (item, path) in items.into_iter().zip(paths_with_positions) { let Some(item) = item else { diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 86d93ac2454a41a45d531dd8076066988634e5ce..18a4f64de28d1665deb4c788d7e4673e1e3b9ec5 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -39,6 +39,7 @@ pub(crate) struct SshRemoteConnection { ssh_platform: RemotePlatform, ssh_path_style: PathStyle, ssh_shell: String, + ssh_shell_kind: ShellKind, ssh_default_system_shell: String, _temp_dir: TempDir, } @@ -241,6 +242,7 @@ impl RemoteConnection for SshRemoteConnection { let Self { ssh_path_style, socket, + ssh_shell_kind, ssh_shell, .. } = self; @@ -254,6 +256,7 @@ impl RemoteConnection for SshRemoteConnection { env, *ssh_path_style, ssh_shell, + *ssh_shell_kind, socket.ssh_args(), ) } @@ -367,7 +370,7 @@ impl RemoteConnection for SshRemoteConnection { let ssh_proxy_process = match self .socket - .ssh_command("env", &proxy_args) + .ssh_command(self.ssh_shell_kind, "env", &proxy_args) // IMPORTANT: we kill this process when we drop the task that uses it. 
.kill_on_drop(true) .spawn() @@ -490,6 +493,13 @@ impl SshRemoteConnection { _ => PathStyle::Posix, }; let ssh_default_system_shell = String::from("/bin/sh"); + let ssh_shell_kind = ShellKind::new( + &ssh_shell, + match ssh_platform.os { + "windows" => true, + _ => false, + }, + ); let mut this = Self { socket, @@ -499,6 +509,7 @@ impl SshRemoteConnection { ssh_path_style, ssh_platform, ssh_shell, + ssh_shell_kind, ssh_default_system_shell, }; @@ -563,7 +574,11 @@ impl SshRemoteConnection { if self .socket - .run_command(&dst_path.display(self.path_style()), &["version"]) + .run_command( + self.ssh_shell_kind, + &dst_path.display(self.path_style()), + &["version"], + ) .await .is_ok() { @@ -632,7 +647,11 @@ impl SshRemoteConnection { ) -> Result<()> { if let Some(parent) = tmp_path_gz.parent() { self.socket - .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()]) + .run_command( + self.ssh_shell_kind, + "mkdir", + &["-p", parent.display(self.path_style()).as_ref()], + ) .await?; } @@ -641,6 +660,7 @@ impl SshRemoteConnection { match self .socket .run_command( + self.ssh_shell_kind, "curl", &[ "-f", @@ -660,13 +680,19 @@ impl SshRemoteConnection { { Ok(_) => {} Err(e) => { - if self.socket.run_command("which", &["curl"]).await.is_ok() { + if self + .socket + .run_command(self.ssh_shell_kind, "which", &["curl"]) + .await + .is_ok() + { return Err(e); } match self .socket .run_command( + self.ssh_shell_kind, "wget", &[ "--header=Content-Type: application/json", @@ -681,7 +707,12 @@ impl SshRemoteConnection { { Ok(_) => {} Err(e) => { - if self.socket.run_command("which", &["wget"]).await.is_ok() { + if self + .socket + .run_command(self.ssh_shell_kind, "which", &["wget"]) + .await + .is_ok() + { return Err(e); } else { anyhow::bail!("Neither curl nor wget is available"); @@ -703,7 +734,11 @@ impl SshRemoteConnection { ) -> Result<()> { if let Some(parent) = tmp_path_gz.parent() { self.socket - .run_command("mkdir", &["-p", 
parent.display(self.path_style()).as_ref()]) + .run_command( + self.ssh_shell_kind, + "mkdir", + &["-p", parent.display(self.path_style()).as_ref()], + ) .await?; } @@ -750,7 +785,7 @@ impl SshRemoteConnection { format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",) }; let args = shell_kind.args_for_shell(false, script.to_string()); - self.socket.run_command("sh", &args).await?; + self.socket.run_command(shell_kind, "sh", &args).await?; Ok(()) } @@ -894,11 +929,16 @@ impl SshSocket { // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing // into a machine. You must use `cd` to get back to $HOME. // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'" - fn ssh_command(&self, program: &str, args: &[impl AsRef]) -> process::Command { - let shell_kind = ShellKind::Posix; + fn ssh_command( + &self, + shell_kind: ShellKind, + program: &str, + args: &[impl AsRef], + ) -> process::Command { let mut command = util::command::new_smol_command("ssh"); + let program = shell_kind.prepend_command_prefix(program); let mut to_run = shell_kind - .try_quote(program) + .try_quote_prefix_aware(&program) .expect("shell quoting") .into_owned(); for arg in args { @@ -920,8 +960,13 @@ impl SshSocket { command } - async fn run_command(&self, program: &str, args: &[impl AsRef]) -> Result { - let output = self.ssh_command(program, args).output().await?; + async fn run_command( + &self, + shell_kind: ShellKind, + program: &str, + args: &[impl AsRef], + ) -> Result { + let output = self.ssh_command(shell_kind, program, args).output().await?; anyhow::ensure!( output.status.success(), "failed to run command: {}", @@ -994,12 +1039,7 @@ impl SshSocket { } async fn platform(&self, shell: ShellKind) -> Result { - let program = if shell == ShellKind::Nushell { - "^uname" - } else { - "uname" - }; - let uname = self.run_command(program, &["-sm"]).await?; + let uname = self.run_command(shell, "uname", &["-sm"]).await?; let Some((os, arch)) = 
uname.split_once(" ") else { anyhow::bail!("unknown uname: {uname:?}") }; @@ -1030,7 +1070,10 @@ impl SshSocket { } async fn shell(&self) -> String { - match self.run_command("sh", &["-c", "echo $SHELL"]).await { + match self + .run_command(ShellKind::Posix, "sh", &["-c", "echo $SHELL"]) + .await + { Ok(shell) => shell.trim().to_owned(), Err(e) => { log::error!("Failed to get shell: {e}"); @@ -1256,11 +1299,11 @@ fn build_command( ssh_env: HashMap, ssh_path_style: PathStyle, ssh_shell: &str, + ssh_shell_kind: ShellKind, ssh_args: Vec, ) -> Result { use std::fmt::Write as _; - let shell_kind = ShellKind::new(ssh_shell, false); let mut exec = String::new(); if let Some(working_dir) = working_dir { let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string(); @@ -1270,12 +1313,24 @@ fn build_command( const TILDE_PREFIX: &'static str = "~/"; if working_dir.starts_with(TILDE_PREFIX) { let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/"); - write!(exec, "cd \"$HOME/{working_dir}\" && ",)?; + write!( + exec, + "cd \"$HOME/{working_dir}\" {} ", + ssh_shell_kind.sequential_and_commands_separator() + )?; } else { - write!(exec, "cd \"{working_dir}\" && ",)?; + write!( + exec, + "cd \"{working_dir}\" {} ", + ssh_shell_kind.sequential_and_commands_separator() + )?; } } else { - write!(exec, "cd && ")?; + write!( + exec, + "cd {} ", + ssh_shell_kind.sequential_and_commands_separator() + )?; }; write!(exec, "exec env ")?; @@ -1284,7 +1339,7 @@ fn build_command( exec, "{}={} ", k, - shell_kind.try_quote(v).context("shell quoting")? + ssh_shell_kind.try_quote(v).context("shell quoting")? )?; } @@ -1292,12 +1347,12 @@ fn build_command( write!( exec, "{}", - shell_kind - .try_quote(&input_program) + ssh_shell_kind + .try_quote_prefix_aware(&input_program) .context("shell quoting")? 
)?; for arg in input_args { - let arg = shell_kind.try_quote(&arg).context("shell quoting")?; + let arg = ssh_shell_kind.try_quote(&arg).context("shell quoting")?; write!(exec, " {}", &arg)?; } } else { @@ -1341,6 +1396,7 @@ mod tests { env.clone(), PathStyle::Posix, "/bin/fish", + ShellKind::Fish, vec!["-p".to_string(), "2222".to_string()], )?; @@ -1370,6 +1426,7 @@ mod tests { env.clone(), PathStyle::Posix, "/bin/fish", + ShellKind::Fish, vec!["-p".to_string(), "2222".to_string()], )?; diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index e6827347914cc35e266080dab7c83fd182e16a64..1bfa5e640d991f939456418750b633d87cbde3f6 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -44,6 +44,7 @@ pub(crate) struct WslRemoteConnection { remote_binary_path: Option>, platform: RemotePlatform, shell: String, + shell_kind: ShellKind, default_system_shell: String, connection_options: WslConnectionOptions, can_exec: bool, @@ -73,16 +74,17 @@ impl WslRemoteConnection { remote_binary_path: None, platform: RemotePlatform { os: "", arch: "" }, shell: String::new(), + shell_kind: ShellKind::Posix, default_system_shell: String::from("/bin/sh"), can_exec: true, }; delegate.set_status(Some("Detecting WSL environment"), cx); this.shell = this.detect_shell().await?; - let shell = ShellKind::new(&this.shell, false); - this.can_exec = this.detect_can_exec(shell).await?; - this.platform = this.detect_platform(shell).await?; + this.shell_kind = ShellKind::new(&this.shell, false); + this.can_exec = this.detect_can_exec().await?; + this.platform = this.detect_platform().await?; this.remote_binary_path = Some( - this.ensure_server_binary(&delegate, release_channel, version, commit, shell, cx) + this.ensure_server_binary(&delegate, release_channel, version, commit, cx) .await?, ); log::debug!("Detected WSL environment: {this:#?}"); @@ -90,20 +92,16 @@ impl WslRemoteConnection { Ok(this) } - async fn detect_can_exec(&self, 
shell: ShellKind) -> Result { + async fn detect_can_exec(&self) -> Result { let options = &self.connection_options; - let program = if shell == ShellKind::Nushell { - "^uname" - } else { - "uname" - }; + let program = self.shell_kind.prepend_command_prefix("uname"); let args = &["-m"]; - let output = wsl_command_impl(options, program, args, true) + let output = wsl_command_impl(options, &program, args, true) .output() .await?; if !output.status.success() { - let output = wsl_command_impl(options, program, args, false) + let output = wsl_command_impl(options, &program, args, false) .output() .await?; @@ -120,14 +118,9 @@ impl WslRemoteConnection { Ok(true) } } - async fn detect_platform(&self, shell: ShellKind) -> Result { - let arch_str = if shell == ShellKind::Nushell { - // https://github.com/nushell/nushell/issues/12570 - self.run_wsl_command("sh", &["-c", "uname -m"]) - } else { - self.run_wsl_command("uname", &["-m"]) - } - .await?; + async fn detect_platform(&self) -> Result { + let program = self.shell_kind.prepend_command_prefix("uname"); + let arch_str = self.run_wsl_command(&program, &["-m"]).await?; let arch_str = arch_str.trim().to_string(); let arch = match arch_str.as_str() { "x86_64" => "x86_64", @@ -163,7 +156,6 @@ impl WslRemoteConnection { release_channel: ReleaseChannel, version: SemanticVersion, commit: Option, - shell: ShellKind, cx: &mut AsyncApp, ) -> Result> { let version_str = match release_channel { @@ -186,12 +178,9 @@ impl WslRemoteConnection { if let Some(parent) = dst_path.parent() { let parent = parent.display(PathStyle::Posix); - if shell == ShellKind::Nushell { - self.run_wsl_command("mkdir", &[&parent]).await - } else { - self.run_wsl_command("mkdir", &["-p", &parent]).await - } - .map_err(|e| anyhow!("Failed to create directory: {}", e))?; + self.run_wsl_command("mkdir", &["-p", &parent]) + .await + .map_err(|e| anyhow!("Failed to create directory: {}", e))?; } #[cfg(debug_assertions)] @@ -206,7 +195,7 @@ impl WslRemoteConnection 
{ )) .unwrap(), ); - self.upload_file(&remote_server_path, &tmp_path, delegate, &shell, cx) + self.upload_file(&remote_server_path, &tmp_path, delegate, cx) .await?; self.extract_and_install(&tmp_path, &dst_path, delegate, cx) .await?; @@ -239,8 +228,7 @@ impl WslRemoteConnection { ); let tmp_path = RelPath::unix(&tmp_path).unwrap(); - self.upload_file(&src_path, &tmp_path, delegate, &shell, cx) - .await?; + self.upload_file(&src_path, &tmp_path, delegate, cx).await?; self.extract_and_install(&tmp_path, &dst_path, delegate, cx) .await?; @@ -252,19 +240,15 @@ impl WslRemoteConnection { src_path: &Path, dst_path: &RelPath, delegate: &Arc, - shell: &ShellKind, cx: &mut AsyncApp, ) -> Result<()> { delegate.set_status(Some("Uploading remote server to WSL"), cx); if let Some(parent) = dst_path.parent() { let parent = parent.display(PathStyle::Posix); - if *shell == ShellKind::Nushell { - self.run_wsl_command("mkdir", &[&parent]).await - } else { - self.run_wsl_command("mkdir", &["-p", &parent]).await - } - .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; + self.run_wsl_command("mkdir", &["-p", &parent]) + .await + .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; } let t0 = Instant::now(); @@ -441,7 +425,7 @@ impl RemoteConnection for WslRemoteConnection { bail!("WSL shares the network interface with the host system"); } - let shell_kind = ShellKind::new(&self.shell, false); + let shell_kind = self.shell_kind; let working_dir = working_dir .map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string()) .unwrap_or("~".to_string()); @@ -461,7 +445,9 @@ impl RemoteConnection for WslRemoteConnection { write!( exec, "{}", - shell_kind.try_quote(&program).context("shell quoting")? + shell_kind + .try_quote_prefix_aware(&program) + .context("shell quoting")? 
)?; for arg in args { let arg = shell_kind.try_quote(&arg).context("shell quoting")?; diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 5d50853601b3949835a350559d48ef755419c93d..57d77f3696283d2e8074713ecd69916eaff07cac 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -94,7 +94,8 @@ impl HeadlessProject { store }); - let environment = cx.new(|cx| ProjectEnvironment::new(None, cx)); + let environment = + cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, cx)); let manifest_tree = ManifestTree::new(worktree_store.clone(), cx); let toolchain_store = cx.new(|cx| { ToolchainStore::local( @@ -786,7 +787,7 @@ impl HeadlessProject { let environment = this .update(&mut cx, |this, cx| { this.environment.update(cx, |environment, cx| { - environment.get_local_directory_environment(&shell, directory.into(), cx) + environment.local_directory_environment(&shell, directory.into(), cx) }) })? 
.await diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index c7e09e3f681d770959709893561cf7a1ba377b37..969363fb2bd02e7bc514cd68d488ca57aef9f0b9 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -13,7 +13,7 @@ use fs::{FakeFs, Fs}; use gpui::{AppContext as _, Entity, SemanticVersion, SharedString, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; use language::{ - Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, Rope, + Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, language_settings::{AllLanguageSettings, language_settings}, }; use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind, LanguageServerName}; @@ -120,7 +120,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test // sees the new file. fs.save( path!("/code/project1/src/main.rs").as_ref(), - &Rope::from_str_small("fn main() {}"), + &"fn main() {}".into(), Default::default(), ) .await @@ -766,7 +766,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont fs.save( &PathBuf::from(path!("/code/project1/src/lib.rs")), - &Rope::from_str_small("bangles"), + &("bangles".to_string().into()), LineEnding::Unix, ) .await @@ -781,7 +781,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont fs.save( &PathBuf::from(path!("/code/project1/src/lib.rs")), - &Rope::from_str_small("bloop"), + &("bloop".to_string().into()), LineEnding::Unix, ) .await diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs index 4e30c22e7f4da2f2656861b792ada5ef6fa9311b..2af9988f032c5dc9651e1da6e8c3b52c6c668866 100644 --- a/crates/rich_text/src/rich_text.rs +++ b/crates/rich_text/src/rich_text.rs @@ -1,10 +1,9 @@ use futures::FutureExt; use gpui::{ - AnyElement, AnyView, App, 
BackgroundExecutor, ElementId, FontStyle, FontWeight, HighlightStyle, - InteractiveText, IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, - Window, + AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText, + IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window, }; -use language::{HighlightId, Language, LanguageRegistry, Rope}; +use language::{HighlightId, Language, LanguageRegistry}; use std::{ops::Range, sync::Arc}; use theme::ActiveTheme; use ui::LinkPreview; @@ -57,7 +56,6 @@ impl RichText { block: String, mentions: &[Mention], language_registry: &Arc, - executor: &BackgroundExecutor, ) -> Self { let mut text = String::new(); let mut highlights = Vec::new(); @@ -72,7 +70,6 @@ impl RichText { &mut highlights, &mut link_ranges, &mut link_urls, - executor, ); text.truncate(text.trim_end().len()); @@ -187,7 +184,6 @@ pub fn render_markdown_mut( highlights: &mut Vec<(Range, Highlight)>, link_ranges: &mut Vec>, link_urls: &mut Vec, - executor: &BackgroundExecutor, ) { use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd}; @@ -206,7 +202,7 @@ pub fn render_markdown_mut( match event { Event::Text(t) => { if let Some(language) = ¤t_language { - render_code(text, highlights, t.as_ref(), language, executor); + render_code(text, highlights, t.as_ref(), language); } else { while let Some(mention) = mentions.first() { if !source_range.contains_inclusive(&mention.range) { @@ -377,14 +373,11 @@ pub fn render_code( highlights: &mut Vec<(Range, Highlight)>, content: &str, language: &Arc, - executor: &BackgroundExecutor, ) { let prev_len = text.len(); text.push_str(content); let mut offset = 0; - for (range, highlight_id) in - language.highlight_text(&Rope::from_str(content, executor), 0..content.len()) - { + for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) { if range.start > offset { highlights.push((prev_len + offset..prev_len + 
range.start, Highlight::Code)); } diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 30f702292bf1e04524fe0c2489b1c4a8783e9ca4..4107c2e012debc13b0cc44003250f4da63e5039f 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -14,10 +14,10 @@ path = "src/rope.rs" [dependencies] arrayvec = "0.7.1" log.workspace = true +rayon.workspace = true sum_tree.workspace = true unicode-segmentation.workspace = true util.workspace = true -gpui.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/rope/benches/rope_benchmark.rs b/crates/rope/benches/rope_benchmark.rs index 5075dff788dfadd49783e89937e19986d9234580..030bec01df4d223cd5288842ba0f9c1386dac31b 100644 --- a/crates/rope/benches/rope_benchmark.rs +++ b/crates/rope/benches/rope_benchmark.rs @@ -3,7 +3,6 @@ use std::ops::Range; use criterion::{ BatchSize, BenchmarkId, Criterion, Throughput, black_box, criterion_group, criterion_main, }; -use gpui::{AsyncApp, TestAppContext}; use rand::prelude::*; use rand::rngs::StdRng; use rope::{Point, Rope}; @@ -27,10 +26,10 @@ fn generate_random_text(rng: &mut StdRng, len: usize) -> String { str } -fn generate_random_rope(rng: &mut StdRng, text_len: usize, cx: &AsyncApp) -> Rope { +fn generate_random_rope(rng: &mut StdRng, text_len: usize) -> Rope { let text = generate_random_text(rng, text_len); let mut rope = Rope::new(); - rope.push(&text, cx.background_executor()); + rope.push(&text); rope } @@ -83,13 +82,11 @@ fn rope_benchmarks(c: &mut Criterion) { group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); let text = generate_random_text(&mut rng, *size); - let cx = TestAppContext::single(); - let cx = cx.to_async(); b.iter(|| { let mut rope = Rope::new(); for _ in 0..10 { - rope.push(&text, cx.background_executor()); + rope.push(&text); } }); }); @@ -102,10 +99,8 @@ fn rope_benchmarks(c: &mut Criterion) { group.bench_with_input(BenchmarkId::from_parameter(size), &size, 
|b, &size| { let mut rng = StdRng::seed_from_u64(SEED); let mut random_ropes = Vec::new(); - let cx = TestAppContext::single(); - let cx = cx.to_async(); for _ in 0..5 { - let rope = generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); random_ropes.push(rope); } @@ -124,9 +119,7 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let cx = TestAppContext::single(); - let cx = cx.to_async(); - let rope = generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( || generate_random_rope_ranges(&mut rng, &rope), @@ -146,9 +139,7 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let cx = TestAppContext::single(); - let cx = cx.to_async(); - let rope = generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( || generate_random_rope_ranges(&mut rng, &rope), @@ -169,9 +160,7 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let cx = TestAppContext::single(); - let cx = cx.to_async(); - let rope = generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); b.iter(|| { let chars = rope.chars().count(); @@ -186,9 +175,7 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let cx = TestAppContext::single(); - let cx = cx.to_async(); - let rope 
= generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( || generate_random_rope_points(&mut rng, &rope), @@ -209,9 +196,7 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let cx = TestAppContext::single(); - let cx = cx.to_async(); - let rope = generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( || generate_random_rope_points(&mut rng, &rope), @@ -231,9 +216,7 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let cx = TestAppContext::single(); - let cx = cx.to_async(); - let rope = generate_random_rope(&mut rng, *size, &cx); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( || { diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index b515f46ea89ddd5f8f29ca7d462b48fe8fff1d38..394e6ef0ca589d19ffcf7cf07a92bcd15c8e4a18 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -5,7 +5,7 @@ mod point_utf16; mod unclipped; use arrayvec::ArrayVec; -use gpui::BackgroundExecutor; +use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; use std::{ cmp, fmt, io, mem, ops::{self, AddAssign, Range}, @@ -31,41 +31,6 @@ impl Rope { Self::default() } - /// Create a new rope from a string without trying to parallelize the construction for large strings. - pub fn from_str_small(text: &str) -> Self { - let mut rope = Self::new(); - rope.push_small(text); - rope - } - - /// Create a new rope from a string. 
- pub fn from_str(text: &str, executor: &BackgroundExecutor) -> Self { - let mut rope = Self::new(); - rope.push(text, executor); - rope - } - - /// Create a new rope from a string without trying to parallelize the construction for large strings. - pub fn from_iter_small<'a, T: IntoIterator>(iter: T) -> Self { - let mut rope = Rope::new(); - for chunk in iter { - rope.push_small(chunk); - } - rope - } - - /// Create a new rope from a string. - pub fn from_iter<'a, T: IntoIterator>( - iter: T, - executor: &BackgroundExecutor, - ) -> Self { - let mut rope = Rope::new(); - for chunk in iter { - rope.push(chunk, executor); - } - rope - } - /// Checks that `index`-th byte is the first byte in a UTF-8 code point /// sequence or the end of the string. /// @@ -180,12 +145,12 @@ impl Rope { self.check_invariants(); } - pub fn replace(&mut self, range: Range, text: &str, executor: &BackgroundExecutor) { + pub fn replace(&mut self, range: Range, text: &str) { let mut new_rope = Rope::new(); let mut cursor = self.cursor(0); new_rope.append(cursor.slice(range.start)); cursor.seek_forward(range.end); - new_rope.push(text, executor); + new_rope.push(text); new_rope.append(cursor.suffix()); *self = new_rope; } @@ -203,12 +168,28 @@ impl Rope { self.slice(start..end) } - pub fn push(&mut self, mut text: &str, executor: &BackgroundExecutor) { - self.fill_last_chunk(&mut text); + pub fn push(&mut self, mut text: &str) { + self.chunks.update_last( + |last_chunk| { + let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE { + text.len() + } else { + let mut split_ix = cmp::min( + chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), + text.len(), + ); + while !text.is_char_boundary(split_ix) { + split_ix += 1; + } + split_ix + }; - if text.is_empty() { - return; - } + let (suffix, remainder) = text.split_at(split_ix); + last_chunk.push_str(suffix); + text = remainder; + }, + (), + ); #[cfg(all(test, not(rust_analyzer)))] const NUM_CHUNKS: usize = 16; @@ -219,8 +200,7 
@@ impl Rope { // but given the chunk boundary can land within a character // we need to accommodate for the worst case where every chunk gets cut short by up to 4 bytes if text.len() > NUM_CHUNKS * chunk::MAX_BASE - NUM_CHUNKS * 4 { - let future = self.push_large(text, executor.clone()); - return executor.block(future); + return self.push_large(text); } // 16 is enough as otherwise we will hit the branch above let mut new_chunks = ArrayVec::<_, NUM_CHUNKS>::new(); @@ -240,57 +220,8 @@ impl Rope { self.check_invariants(); } - /// Pushes a string into the rope. Unlike [`push`], this method does not parallelize the construction on large strings. - pub fn push_small(&mut self, mut text: &str) { - self.fill_last_chunk(&mut text); - if text.is_empty() { - return; - } - - // 16 is enough as otherwise we will hit the branch above - let mut new_chunks = Vec::new(); - - while !text.is_empty() { - let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); - while !text.is_char_boundary(split_ix) { - split_ix -= 1; - } - let (chunk, remainder) = text.split_at(split_ix); - new_chunks.push(chunk); - text = remainder; - } - self.chunks - .extend(new_chunks.into_iter().map(Chunk::new), ()); - - self.check_invariants(); - } - - fn fill_last_chunk(&mut self, text: &mut &str) { - self.chunks.update_last( - |last_chunk| { - let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE { - text.len() - } else { - let mut split_ix = cmp::min( - chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), - text.len(), - ); - while !text.is_char_boundary(split_ix) { - split_ix += 1; - } - split_ix - }; - - let (suffix, remainder) = text.split_at(split_ix); - last_chunk.push_str(suffix); - *text = remainder; - }, - (), - ); - } - /// A copy of `push` specialized for working with large quantities of text. 
- async fn push_large(&mut self, mut text: &str, executor: BackgroundExecutor) { + fn push_large(&mut self, mut text: &str) { // To avoid frequent reallocs when loading large swaths of file contents, // we estimate worst-case `new_chunks` capacity; // Chunk is a fixed-capacity buffer. If a character falls on @@ -323,22 +254,8 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - let cx2 = executor.clone(); - executor - .scoped(|scope| { - // SAFETY: transmuting to 'static is safe because the future is scoped - // and the underlying string data cannot go out of scope because dropping the scope - // will wait for the task to finish - let new_chunks = - unsafe { std::mem::transmute::, Vec<&'static str>>(new_chunks) }; - - let async_extend = self - .chunks - .async_extend(new_chunks.into_iter().map(Chunk::new), cx2); - - scope.spawn(async_extend); - }) - .await; + self.chunks + .par_extend(new_chunks.into_par_iter().map(Chunk::new), ()); } else { self.chunks .extend(new_chunks.into_iter().map(Chunk::new), ()); @@ -375,13 +292,8 @@ impl Rope { } } - pub fn push_front(&mut self, text: &str, cx: &BackgroundExecutor) { - let suffix = mem::replace(self, Rope::from_str(text, cx)); - self.append(suffix); - } - - pub fn push_front_small(&mut self, text: &str) { - let suffix = mem::replace(self, Rope::from_str_small(text)); + pub fn push_front(&mut self, text: &str) { + let suffix = mem::replace(self, Rope::from(text)); self.append(suffix); } @@ -665,19 +577,37 @@ impl Rope { } } -// impl From for Rope { -// #[inline(always)] -// fn from(text: String) -> Self { -// Rope::from(text.as_str()) -// } -// } +impl<'a> From<&'a str> for Rope { + fn from(text: &'a str) -> Self { + let mut rope = Self::new(); + rope.push(text); + rope + } +} -// impl From<&String> for Rope { -// #[inline(always)] -// fn from(text: &String) -> Self { -// Rope::from(text.as_str()) -// } -// } +impl<'a> FromIterator<&'a str> for Rope { 
+ fn from_iter>(iter: T) -> Self { + let mut rope = Rope::new(); + for chunk in iter { + rope.push(chunk); + } + rope + } +} + +impl From for Rope { + #[inline(always)] + fn from(text: String) -> Self { + Rope::from(text.as_str()) + } +} + +impl From<&String> for Rope { + #[inline(always)] + fn from(text: &String) -> Self { + Rope::from(text.as_str()) + } +} impl fmt::Display for Rope { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1709,7 +1639,6 @@ where mod tests { use super::*; use Bias::{Left, Right}; - use gpui::TestAppContext; use rand::prelude::*; use std::{cmp::Ordering, env, io::Read}; use util::RandomCharIter; @@ -1719,17 +1648,17 @@ mod tests { zlog::init_test(); } - #[gpui::test] - async fn test_all_4_byte_chars(cx: &mut TestAppContext) { + #[test] + fn test_all_4_byte_chars() { let mut rope = Rope::new(); let text = "🏀".repeat(256); - rope.push(&text, cx.background_executor()); + rope.push(&text); assert_eq!(rope.text(), text); } - #[gpui::test] - fn test_clip(cx: &mut TestAppContext) { - let rope = Rope::from_str("🧘", cx.background_executor()); + #[test] + fn test_clip() { + let rope = Rope::from("🧘"); assert_eq!(rope.clip_offset(1, Bias::Left), 0); assert_eq!(rope.clip_offset(1, Bias::Right), 4); @@ -1775,9 +1704,9 @@ mod tests { ); } - #[gpui::test] - fn test_prev_next_line(cx: &mut TestAppContext) { - let rope = Rope::from_str("abc\ndef\nghi\njkl", cx.background_executor()); + #[test] + fn test_prev_next_line() { + let rope = Rope::from("abc\ndef\nghi\njkl"); let mut chunks = rope.chunks(); assert_eq!(chunks.peek().unwrap().chars().next().unwrap(), 'a'); @@ -1819,16 +1748,16 @@ mod tests { assert_eq!(chunks.peek(), None); } - #[gpui::test] - fn test_lines(cx: &mut TestAppContext) { - let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor()); + #[test] + fn test_lines() { + let rope = Rope::from("abc\ndefg\nhi"); let mut lines = rope.chunks().lines(); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), 
Some("defg")); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), None); - let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor()); + let rope = Rope::from("abc\ndefg\nhi\n"); let mut lines = rope.chunks().lines(); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), Some("defg")); @@ -1836,14 +1765,14 @@ mod tests { assert_eq!(lines.next(), Some("")); assert_eq!(lines.next(), None); - let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor()); + let rope = Rope::from("abc\ndefg\nhi"); let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines(); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), Some("defg")); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), None); - let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor()); + let rope = Rope::from("abc\ndefg\nhi\n"); let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines(); assert_eq!(lines.next(), Some("")); assert_eq!(lines.next(), Some("hi")); @@ -1851,14 +1780,14 @@ mod tests { assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), None); - let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor()); + let rope = Rope::from("abc\nlonger line test\nhi"); let mut lines = rope.chunks().lines(); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), Some("longer line test")); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), None); - let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor()); + let rope = Rope::from("abc\nlonger line test\nhi"); let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines(); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), Some("longer line test")); @@ -1867,7 +1796,7 @@ mod tests { } #[gpui::test(iterations = 100)] - async fn test_random_rope(cx: &mut TestAppContext, mut rng: StdRng) { + fn test_random_rope(mut rng: StdRng) { let operations = env::var("OPERATIONS") .map(|i| 
i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); @@ -1883,7 +1812,7 @@ mod tests { let mut new_actual = Rope::new(); let mut cursor = actual.cursor(0); new_actual.append(cursor.slice(start_ix)); - new_actual.push(&new_text, cx.background_executor()); + new_actual.push(&new_text); cursor.seek_forward(end_ix); new_actual.append(cursor.suffix()); actual = new_actual; @@ -2183,10 +2112,10 @@ mod tests { } } - #[gpui::test] - fn test_chunks_equals_str(cx: &mut TestAppContext) { + #[test] + fn test_chunks_equals_str() { let text = "This is a multi-chunk\n& multi-line test string!"; - let rope = Rope::from_str(text, cx.background_executor()); + let rope = Rope::from(text); for start in 0..text.len() { for end in start..text.len() { let range = start..end; @@ -2229,37 +2158,34 @@ mod tests { } } - let rope = Rope::from_str("", cx.background_executor()); + let rope = Rope::from(""); assert!(rope.chunks_in_range(0..0).equals_str("")); assert!(rope.reversed_chunks_in_range(0..0).equals_str("")); assert!(!rope.chunks_in_range(0..0).equals_str("foo")); assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo")); } - #[gpui::test] - fn test_is_char_boundary(cx: &mut TestAppContext) { + #[test] + fn test_is_char_boundary() { let fixture = "地"; - let rope = Rope::from_str("地", cx.background_executor()); + let rope = Rope::from("地"); for b in 0..=fixture.len() { assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); } let fixture = ""; - let rope = Rope::from_str("", cx.background_executor()); + let rope = Rope::from(""); for b in 0..=fixture.len() { assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); } let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; - let rope = Rope::from_str( - "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩", - cx.background_executor(), - ); + let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); for b in 0..=fixture.len() { assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); } } - #[gpui::test] 
- fn test_floor_char_boundary(cx: &mut TestAppContext) { + #[test] + fn test_floor_char_boundary() { // polyfill of str::floor_char_boundary fn floor_char_boundary(str: &str, index: usize) -> usize { if index >= str.len() { @@ -2275,7 +2201,7 @@ mod tests { } let fixture = "地"; - let rope = Rope::from_str("地", cx.background_executor()); + let rope = Rope::from("地"); for b in 0..=fixture.len() { assert_eq!( rope.floor_char_boundary(b), @@ -2284,7 +2210,7 @@ mod tests { } let fixture = ""; - let rope = Rope::from_str("", cx.background_executor()); + let rope = Rope::from(""); for b in 0..=fixture.len() { assert_eq!( rope.floor_char_boundary(b), @@ -2293,10 +2219,7 @@ mod tests { } let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; - let rope = Rope::from_str( - "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩", - cx.background_executor(), - ); + let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); for b in 0..=fixture.len() { assert_eq!( rope.floor_char_boundary(b), @@ -2305,8 +2228,8 @@ mod tests { } } - #[gpui::test] - fn test_ceil_char_boundary(cx: &mut TestAppContext) { + #[test] + fn test_ceil_char_boundary() { // polyfill of str::ceil_char_boundary fn ceil_char_boundary(str: &str, index: usize) -> usize { if index > str.len() { @@ -2321,22 +2244,19 @@ mod tests { } let fixture = "地"; - let rope = Rope::from_str("地", cx.background_executor()); + let rope = Rope::from("地"); for b in 0..=fixture.len() { assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); } let fixture = ""; - let rope = Rope::from_str("", cx.background_executor()); + let rope = Rope::from(""); for b in 0..=fixture.len() { assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); } let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; - let rope = Rope::from_str( - "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩", - cx.background_executor(), - ); + let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); for b in 0..=fixture.len() { assert_eq!(rope.ceil_char_boundary(b), 
ceil_char_boundary(&fixture, b)); } diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index 3cc05fd2d26fa52282030ad1eb564e3cfd8cb609..207a9841e41bf35e1f63bb00b0c62073c1cf0224 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -554,7 +554,7 @@ impl RulesLibrary { let prompt_id = PromptId::new(); let save = self.store.update(cx, |store, cx| { - store.save(prompt_id, None, false, Default::default(), cx) + store.save(prompt_id, None, false, "".into(), cx) }); self.picker .update(cx, |picker, cx| picker.refresh(window, cx)); @@ -888,13 +888,7 @@ impl RulesLibrary { let new_id = PromptId::new(); let body = rule.body_editor.read(cx).text(cx); let save = self.store.update(cx, |store, cx| { - store.save( - new_id, - Some(title.into()), - false, - Rope::from_str(&body, cx.background_executor()), - cx, - ) + store.save(new_id, Some(title.into()), false, body.into(), cx) }); self.picker .update(cx, |picker, cx| picker.refresh(window, cx)); diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index f01073b6228ed3d314990187e63262a111f365c5..cd25c6c1bff63839a3f15a2d1cd50f7f55987a74 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -2813,6 +2813,7 @@ mod tests { case_sensitive: false, include_ignored: false, regex: false, + center_on_match: false, }, cx, ); @@ -2875,6 +2876,7 @@ mod tests { case_sensitive: true, include_ignored: false, regex: false, + center_on_match: false, }, cx, ); @@ -2912,6 +2914,7 @@ mod tests { case_sensitive: true, include_ignored: false, regex: false, + center_on_match: false, }, cx, ); @@ -2938,6 +2941,7 @@ mod tests { case_sensitive: Some(search_settings.case_sensitive), include_ignored: Some(search_settings.include_ignored), regex: Some(search_settings.regex), + center_on_match: Some(search_settings.center_on_match), }); }); }); diff --git a/crates/search/src/project_search.rs 
b/crates/search/src/project_search.rs index 0bb05ecb93cd5cc6c9730307792c1737531a39a5..f5a9c272d4846a94230286cc3ae2f7903608dd7d 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -12,7 +12,9 @@ use editor::{ SelectionEffects, VimFlavor, actions::{Backtab, SelectAll, Tab}, items::active_match_index, - multibuffer_context_lines, vim_flavor, + multibuffer_context_lines, + scroll::Autoscroll, + vim_flavor, }; use futures::{StreamExt, stream::FuturesOrdered}; use gpui::{ @@ -55,7 +57,9 @@ actions!( /// Moves to the next input field. NextField, /// Toggles the search filters panel. - ToggleFilters + ToggleFilters, + /// Toggles collapse/expand state of all search result excerpts. + ToggleAllSearchResults ] ); @@ -118,6 +122,20 @@ pub fn init(cx: &mut App) { ProjectSearchView::search_in_new(workspace, action, window, cx) }); + register_workspace_action_for_present_search( + workspace, + |workspace, action: &ToggleAllSearchResults, window, cx| { + if let Some(search_view) = workspace + .active_item(cx) + .and_then(|item| item.downcast::()) + { + search_view.update(cx, |search_view, cx| { + search_view.toggle_all_search_results(action, window, cx); + }); + } + }, + ); + register_workspace_action_for_present_search( workspace, |workspace, _: &menu::Cancel, window, cx| { @@ -217,6 +235,7 @@ pub struct ProjectSearchView { replace_enabled: bool, included_opened_only: bool, regex_language: Option>, + results_collapsed: bool, _subscriptions: Vec, } @@ -649,6 +668,44 @@ impl Item for ProjectSearchView { fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { self.results_editor.breadcrumbs(theme, cx) } + + fn breadcrumb_prefix( + &self, + _window: &mut Window, + cx: &mut Context, + ) -> Option { + if !self.has_matches() { + return None; + } + + let is_collapsed = self.results_collapsed; + + let (icon, tooltip_label) = if is_collapsed { + (IconName::ChevronUpDown, "Expand All Search Results") + } else { + 
(IconName::ChevronDownUp, "Collapse All Search Results") + }; + + let focus_handle = self.query_editor.focus_handle(cx); + + Some( + IconButton::new("project-search-collapse-expand", icon) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + tooltip_label, + &ToggleAllSearchResults, + &focus_handle, + cx, + ) + }) + .on_click(cx.listener(|this, _, window, cx| { + this.toggle_all_search_results(&ToggleAllSearchResults, window, cx); + })) + .into_any_element(), + ) + } } impl ProjectSearchView { @@ -751,6 +808,34 @@ impl ProjectSearchView { }); } + fn toggle_all_search_results( + &mut self, + _: &ToggleAllSearchResults, + _window: &mut Window, + cx: &mut Context, + ) { + self.results_collapsed = !self.results_collapsed; + self.update_results_visibility(cx); + } + + fn update_results_visibility(&mut self, cx: &mut Context) { + self.results_editor.update(cx, |editor, cx| { + let multibuffer = editor.buffer().read(cx); + let buffer_ids = multibuffer.excerpt_buffer_ids(); + + if self.results_collapsed { + for buffer_id in buffer_ids { + editor.fold_buffer(buffer_id, cx); + } + } else { + for buffer_id in buffer_ids { + editor.unfold_buffer(buffer_id, cx); + } + } + }); + cx.notify(); + } + pub fn new( workspace: WeakEntity, entity: Entity, @@ -909,8 +994,10 @@ impl ProjectSearchView { replace_enabled: false, included_opened_only: false, regex_language: None, + results_collapsed: false, _subscriptions: subscriptions, }; + this.entity_changed(window, cx); this } @@ -1346,8 +1433,13 @@ impl ProjectSearchView { self.results_editor.update(cx, |editor, cx| { let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); let range_to_select = editor.range_for_match(&range_to_select, collapse); + let autoscroll = if EditorSettings::get_global(cx).search.center_on_match { + Autoscroll::center() + } else { + Autoscroll::fit() + }; editor.unfold_ranges(std::slice::from_ref(&range_to_select), false, true, cx); - 
editor.change_selections(Default::default(), window, cx, |s| { + editor.change_selections(SelectionEffects::scroll(autoscroll), window, cx, |s| { s.select_ranges([range_to_select]) }); }); @@ -1404,6 +1496,7 @@ impl ProjectSearchView { fn entity_changed(&mut self, window: &mut Window, cx: &mut Context) { let match_ranges = self.entity.read(cx).match_ranges.clone(); + if match_ranges.is_empty() { self.active_match_index = None; self.results_editor.update(cx, |editor, cx| { @@ -1961,6 +2054,8 @@ impl Render for ProjectSearchBar { }) .unwrap_or_else(|| "0/0".to_string()); + let query_focus = search.query_editor.focus_handle(cx); + let query_column = input_base_styles(InputPanel::Query) .on_action(cx.listener(|this, action, window, cx| this.confirm(action, window, cx))) .on_action(cx.listener(|this, action, window, cx| { @@ -1990,11 +2085,9 @@ impl Render for ProjectSearchBar { )), ); - let query_focus = search.query_editor.focus_handle(cx); - let matches_column = h_flex() - .pl_2() - .ml_2() + .ml_1() + .pl_1p5() .border_l_1() .border_color(theme_colors.border_variant) .child(render_action_button( @@ -2346,7 +2439,15 @@ pub fn perform_project_search( #[cfg(test)] pub mod tests { - use std::{ops::Deref as _, sync::Arc, time::Duration}; + use std::{ + ops::Deref as _, + path::PathBuf, + sync::{ + Arc, + atomic::{self, AtomicUsize}, + }, + time::Duration, + }; use super::*; use editor::{DisplayPoint, display_map::DisplayRow}; @@ -4247,6 +4348,8 @@ pub mod tests { ) .await; + let requests_count = Arc::new(AtomicUsize::new(0)); + let closure_requests_count = requests_count.clone(); let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let language = rust_lang(); @@ -4258,21 +4361,26 @@ pub mod tests { inlay_hint_provider: Some(lsp::OneOf::Left(true)), ..lsp::ServerCapabilities::default() }, - initializer: Some(Box::new(|fake_server| { - 
fake_server.set_request_handler::( - move |_, _| async move { - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, 17), - label: lsp::InlayHintLabel::String(": i32".to_owned()), - kind: Some(lsp::InlayHintKind::TYPE), - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - }, - ); + initializer: Some(Box::new(move |fake_server| { + let requests_count = closure_requests_count.clone(); + fake_server.set_request_handler::({ + move |_, _| { + let requests_count = requests_count.clone(); + async move { + requests_count.fetch_add(1, atomic::Ordering::Release); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 17), + label: lsp::InlayHintLabel::String(": i32".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + } + }); })), ..FakeLspAdapter::default() }, @@ -4286,7 +4394,7 @@ pub mod tests { }); perform_search(search_view, "let ", cx); - let _fake_server = fake_servers.next().await.unwrap(); + let fake_server = fake_servers.next().await.unwrap(); cx.executor().advance_clock(Duration::from_secs(1)); cx.executor().run_until_parked(); search_view @@ -4299,11 +4407,127 @@ pub mod tests { ); }) .unwrap(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 1, + "New hints should have been queried", + ); // Can do the 2nd search without any panics perform_search(search_view, "let ", cx); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + search_view + .update(cx, |search_view, _, cx| { + assert_eq!( + search_view + .results_editor + .update(cx, |editor, cx| editor.display_text(cx)), + "\n\nfn main() { let a: i32 = 2; }\n" + ); + }) + .unwrap(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 2, + "We did drop the previous buffer when cleared the old project search results, hence another query was made", + ); + + let 
singleton_editor = window + .update(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from(path!("/dir/main.rs")), + workspace::OpenOptions::default(), + window, + cx, + ) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); + singleton_editor.update(cx, |editor, cx| { + assert_eq!( + editor.display_text(cx), + "fn main() { let a: i32 = 2; }\n", + "Newly opened editor should have the correct text with hints", + ); + }); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 2, + "Opening the same buffer again should reuse the cached hints", + ); + + window + .update(cx, |_, window, cx| { + singleton_editor.update(cx, |editor, cx| { + editor.handle_input("test", window, cx); + }); + }) + .unwrap(); + + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + singleton_editor.update(cx, |editor, cx| { + assert_eq!( + editor.display_text(cx), + "testfn main() { l: i32et a = 2; }\n", + "Newly opened editor should have the correct text with hints", + ); + }); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 3, + "We have edited the buffer and should send a new request", + ); + + window + .update(cx, |_, window, cx| { + singleton_editor.update(cx, |editor, cx| { + editor.undo(&editor::actions::Undo, window, cx); + }); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 4, + "We have edited the buffer again and should send a new request again", + ); + singleton_editor.update(cx, |editor, cx| { + assert_eq!( + editor.display_text(cx), + "fn main() { let a: i32 = 2; }\n", + "Newly opened editor should have the correct text with hints", + ); + }); + project.update(cx, |_, cx| { + cx.emit(project::Event::RefreshInlayHints { + server_id: fake_server.server.server_id(), + 
request_id: Some(1), + }); + }); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 5, + "After a simulated server refresh request, we should have sent another request", + ); + + perform_search(search_view, "let ", cx); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 5, + "New project search should reuse the cached hints", + ); search_view .update(cx, |search_view, _, cx| { assert_eq!( diff --git a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 14a5fefcf7341694260da96a8f2c43d149356074..61fa46ed9770fbaf49b43979d366655c1b658fc3 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -46,7 +46,6 @@ pub(crate) fn input_base_styles(border_color: Hsla, map: impl FnOnce(Div) -> Div .h_8() .pl_2() .pr_1() - .py_1() .border_1() .border_color(border_color) .rounded_md() diff --git a/crates/settings/src/settings_content/editor.rs b/crates/settings/src/settings_content/editor.rs index 920f02a0f6597454c82d421247787e8ad6f7f74b..a1567f9d0e5b16ed6058cef9ca954c0e842605be 100644 --- a/crates/settings/src/settings_content/editor.rs +++ b/crates/settings/src/settings_content/editor.rs @@ -197,6 +197,19 @@ pub struct EditorSettingsContent { /// /// Default: [`DocumentColorsRenderMode::Inlay`] pub lsp_document_colors: Option, + /// When to show the scrollbar in the completion menu. + /// This setting can take four values: + /// + /// 1. Show the scrollbar if there's important information or + /// follow the system's configured behavior + /// "auto" + /// 2. Match the system's configured behavior: + /// "system" + /// 3. Always show the scrollbar: + /// "always" + /// 4. 
Never show the scrollbar: + /// "never" (default) + pub completion_menu_scrollbar: Option, } // Toolbar related settings @@ -699,6 +712,8 @@ pub struct SearchSettingsContent { pub case_sensitive: Option, pub include_ignored: Option, pub regex: Option, + /// Whether to center the cursor on each search match when navigating. + pub center_on_match: Option, } #[skip_serializing_none] diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 8f9c60960ce9dddf49109d0374d611f7672077ad..ba8392bcd3a3a775af7e4caaa949a79095703817 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -299,6 +299,7 @@ impl VsCodeSettings { toolbar: None, use_smartcase_search: self.read_bool("search.smartCase"), vertical_scroll_margin: self.read_f32("editor.cursorSurroundingLines"), + completion_menu_scrollbar: None, } } diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 8075e6396ae0011d00f7a9a65fc3732c08823787..89986d925ce88d50629a58fb48faa629f63cab12 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -2450,6 +2450,29 @@ pub(crate) fn settings_data(cx: &App) -> Vec { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Center on Match", + description: "Whether to center the current match in the editor", + field: Box::new(SettingField { + json_path: Some("editor.search.center_on_match"), + pick: |settings_content| { + settings_content + .editor + .search + .as_ref() + .and_then(|search| search.center_on_match.as_ref()) + }, + write: |settings_content, value| { + settings_content + .editor + .search + .get_or_insert_default() + .center_on_match = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Seed Search Query From Cursor", description: "When to populate a new search's query based on the text under the cursor.", @@ -6518,6 +6541,19 @@ fn 
language_settings_data() -> Vec { metadata: None, files: USER | PROJECT, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Completion Menu Scrollbar", + description: "When to show the scrollbar in the completion menu.", + field: Box::new(SettingField { + json_path: Some("editor.completion_menu_scrollbar"), + pick: |settings_content| settings_content.editor.completion_menu_scrollbar.as_ref(), + write: |settings_content, value| { + settings_content.editor.completion_menu_scrollbar = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SectionHeader("Inlay Hints"), SettingsPageItem::SettingItem(SettingItem { title: "Enabled", diff --git a/crates/streaming_diff/Cargo.toml b/crates/streaming_diff/Cargo.toml index 8825914baa8d08734e66485b4bea418840d72228..b3645a182c3abf52c6ee2f2c23feaedeacf8574a 100644 --- a/crates/streaming_diff/Cargo.toml +++ b/crates/streaming_diff/Cargo.toml @@ -14,7 +14,6 @@ path = "src/streaming_diff.rs" [dependencies] ordered-float.workspace = true rope.workspace = true -gpui.workspace = true [dev-dependencies] rand.workspace = true diff --git a/crates/streaming_diff/src/streaming_diff.rs b/crates/streaming_diff/src/streaming_diff.rs index 34a74afa84431079b4d9d0815c96e0114248ca98..5677981b0dc9878963e01d09e7281749d6603c8f 100644 --- a/crates/streaming_diff/src/streaming_diff.rs +++ b/crates/streaming_diff/src/streaming_diff.rs @@ -503,12 +503,11 @@ fn is_line_end(point: Point, text: &Rope) -> bool { #[cfg(test)] mod tests { use super::*; - use gpui::BackgroundExecutor; use rand::prelude::*; use std::env; - #[gpui::test] - fn test_delete_first_of_two_lines(cx: &mut gpui::TestAppContext) { + #[test] + fn test_delete_first_of_two_lines() { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Delete { bytes: 5 }, @@ -524,18 +523,18 @@ mod tests { apply_line_operations(old_text, &new_text, &expected_line_ops) ); - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let 
line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!(line_ops, expected_line_ops); } - #[gpui::test] - fn test_delete_second_of_two_lines(cx: &mut gpui::TestAppContext) { + #[test] + fn test_delete_second_of_two_lines() { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, CharOperation::Delete { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -551,8 +550,8 @@ mod tests { ); } - #[gpui::test] - fn test_add_new_line(cx: &mut gpui::TestAppContext) { + #[test] + fn test_add_new_line() { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 9 }, @@ -560,7 +559,7 @@ mod tests { text: "\ncccc".into(), }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -575,15 +574,15 @@ mod tests { ); } - #[gpui::test] - fn test_delete_line_in_middle(cx: &mut gpui::TestAppContext) { + #[test] + fn test_delete_line_in_middle() { let old_text = "aaaa\nbbbb\ncccc"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, CharOperation::Delete { bytes: 5 }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -599,8 +598,8 @@ mod tests { ); } - #[gpui::test] - fn test_replace_line(cx: &mut gpui::TestAppContext) { + #[test] + fn test_replace_line() { let old_text = "aaaa\nbbbb\ncccc"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, @@ -610,7 +609,7 @@ mod tests { }, CharOperation::Keep { bytes: 5 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -627,8 +626,8 
@@ mod tests { ); } - #[gpui::test] - fn test_multiple_edits_on_different_lines(cx: &mut gpui::TestAppContext) { + #[test] + fn test_multiple_edits_on_different_lines() { let old_text = "aaaa\nbbbb\ncccc\ndddd"; let char_ops = vec![ CharOperation::Insert { text: "A".into() }, @@ -639,7 +638,7 @@ mod tests { text: "\nEEEE".into(), }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -657,15 +656,15 @@ mod tests { ); } - #[gpui::test] - fn test_edit_at_end_of_line(cx: &mut gpui::TestAppContext) { + #[test] + fn test_edit_at_end_of_line() { let old_text = "aaaa\nbbbb\ncccc"; let char_ops = vec![ CharOperation::Keep { bytes: 4 }, CharOperation::Insert { text: "A".into() }, CharOperation::Keep { bytes: 10 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -681,8 +680,8 @@ mod tests { ); } - #[gpui::test] - fn test_insert_newline_character(cx: &mut gpui::TestAppContext) { + #[test] + fn test_insert_newline_character() { let old_text = "aaaabbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 4 }, @@ -690,7 +689,7 @@ mod tests { CharOperation::Keep { bytes: 4 }, ]; let new_text = apply_char_operations(old_text, &char_ops); - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -704,14 +703,14 @@ mod tests { ); } - #[gpui::test] - fn test_insert_newline_at_beginning(cx: &mut gpui::TestAppContext) { + #[test] + fn test_insert_newline_at_beginning() { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Insert { text: "\n".into() }, CharOperation::Keep { bytes: 9 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = 
char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -726,15 +725,15 @@ mod tests { ); } - #[gpui::test] - fn test_delete_newline(cx: &mut gpui::TestAppContext) { + #[test] + fn test_delete_newline() { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 4 }, CharOperation::Delete { bytes: 1 }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -750,8 +749,8 @@ mod tests { ); } - #[gpui::test] - fn test_insert_multiple_newlines(cx: &mut gpui::TestAppContext) { + #[test] + fn test_insert_multiple_newlines() { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, @@ -760,7 +759,7 @@ mod tests { }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -776,15 +775,15 @@ mod tests { ); } - #[gpui::test] - fn test_delete_multiple_newlines(cx: &mut gpui::TestAppContext) { + #[test] + fn test_delete_multiple_newlines() { let old_text = "aaaa\n\n\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, CharOperation::Delete { bytes: 2 }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -800,8 +799,8 @@ mod tests { ); } - #[gpui::test] - fn test_complex_scenario(cx: &mut gpui::TestAppContext) { + #[test] + fn test_complex_scenario() { let old_text = "line1\nline2\nline3\nline4"; let char_ops = vec![ CharOperation::Keep { bytes: 6 }, @@ -815,7 +814,7 @@ mod tests { }, CharOperation::Keep { bytes: 6 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = 
char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -835,8 +834,8 @@ mod tests { ); } - #[gpui::test] - fn test_cleaning_up_common_suffix(cx: &mut gpui::TestAppContext) { + #[test] + fn test_cleaning_up_common_suffix() { let old_text = concat!( " for y in 0..size.y() {\n", " let a = 10;\n", @@ -884,7 +883,7 @@ mod tests { }, CharOperation::Keep { bytes: 1 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); + let line_ops = char_ops_to_line_ops(old_text, &char_ops); assert_eq!( line_ops, vec![ @@ -902,8 +901,8 @@ mod tests { ); } - #[gpui::test] - fn test_random_diffs(cx: &mut gpui::TestAppContext) { + #[test] + fn test_random_diffs() { random_test(|mut rng| { let old_text_len = env::var("OLD_TEXT_LEN") .map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable")) @@ -923,19 +922,15 @@ mod tests { assert_eq!(patched, new); // Test char_ops_to_line_ops - let line_ops = char_ops_to_line_ops(&old, &char_operations, cx.background_executor()); + let line_ops = char_ops_to_line_ops(&old, &char_operations); println!("line operations: {:?}", line_ops); let patched = apply_line_operations(&old, &new, &line_ops); assert_eq!(patched, new); }); } - fn char_ops_to_line_ops( - old_text: &str, - char_ops: &[CharOperation], - executor: &BackgroundExecutor, - ) -> Vec { - let old_rope = Rope::from_str(old_text, executor); + fn char_ops_to_line_ops(old_text: &str, char_ops: &[CharOperation]) -> Vec { + let old_rope = Rope::from(old_text); let mut diff = LineDiff::default(); for op in char_ops { diff.push_char_operation(op, &old_rope); diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index fd39bd4d83c65501b4731f31d3f357a3ff7f6fa3..81916c842225085ceec4721dbd8d212608f6bcb9 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -15,12 +15,10 @@ doctest = false [dependencies] arrayvec = "0.7.1" +rayon.workspace = true log.workspace = true -futures.workspace = true -itertools.workspace = 
true [dev-dependencies] ctor.workspace = true rand.workspace = true zlog.workspace = true -pollster = "0.4.0" diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8562766b1b49ac8eb1e3c816f210d1a60cae2aed..95fbd5ed0d5f5700d0c894cda68ed15ce6590ced 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -3,8 +3,7 @@ mod tree_map; use arrayvec::ArrayVec; pub use cursor::{Cursor, FilterCursor, Iter}; -use futures::{StreamExt, stream}; -use itertools::Itertools as _; +use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator as _}; use std::marker::PhantomData; use std::mem; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; @@ -15,18 +14,6 @@ pub const TREE_BASE: usize = 2; #[cfg(not(test))] pub const TREE_BASE: usize = 6; -pub trait BackgroundSpawn { - type Task: Future + Send + Sync - where - R: Send + Sync; - fn background_spawn( - &self, - future: impl Future + Send + Sync + 'static, - ) -> Self::Task - where - R: Send + Sync + 'static; -} - /// An item that can be stored in a [`SumTree`] /// /// Must be summarized by a type that implements [`Summary`] @@ -311,71 +298,62 @@ impl SumTree { } } - pub async fn from_iter_async(iter: I, spawn: S) -> Self + pub fn from_par_iter(iter: I, cx: ::Context<'_>) -> Self where - T: 'static + Send + Sync, - for<'a> T::Summary: Summary = ()> + Send + Sync, - S: BackgroundSpawn, - I: IntoIterator, + I: IntoParallelIterator, + Iter: IndexedParallelIterator, + T: Send + Sync, + T::Summary: Send + Sync, + for<'a> ::Context<'a>: Sync, { - let mut futures = vec![]; - let chunks = iter.into_iter().chunks(2 * TREE_BASE); - for chunk in chunks.into_iter() { - let items: ArrayVec = chunk.into_iter().collect(); - futures.push(async move { + let mut nodes = iter + .into_par_iter() + .chunks(2 * TREE_BASE) + .map(|items| { + let items: ArrayVec = items.into_iter().collect(); let item_summaries: ArrayVec = - items.iter().map(|item| 
item.summary(())).collect(); + items.iter().map(|item| item.summary(cx)).collect(); let mut summary = item_summaries[0].clone(); for item_summary in &item_summaries[1..] { - ::add_summary(&mut summary, item_summary, ()); + ::add_summary(&mut summary, item_summary, cx); } SumTree(Arc::new(Node::Leaf { summary, items, item_summaries, })) - }); - } - - let mut nodes = futures::stream::iter(futures) - .map(|future| spawn.background_spawn(future)) - .buffered(4) - .collect::>() - .await; + }) + .collect::>(); let mut height = 0; while nodes.len() > 1 { height += 1; - let current_nodes = mem::take(&mut nodes); - nodes = stream::iter(current_nodes) + nodes = nodes + .into_par_iter() .chunks(2 * TREE_BASE) - .map(|chunk| { - spawn.background_spawn(async move { - let child_trees: ArrayVec, { 2 * TREE_BASE }> = - chunk.into_iter().collect(); - let child_summaries: ArrayVec = child_trees - .iter() - .map(|child_tree| child_tree.summary().clone()) - .collect(); - let mut summary = child_summaries[0].clone(); - for child_summary in &child_summaries[1..] { - ::add_summary(&mut summary, child_summary, ()); - } - SumTree(Arc::new(Node::Internal { - height, - summary, - child_summaries, - child_trees, - })) - }) + .map(|child_nodes| { + let child_trees: ArrayVec, { 2 * TREE_BASE }> = + child_nodes.into_iter().collect(); + let child_summaries: ArrayVec = child_trees + .iter() + .map(|child_tree| child_tree.summary().clone()) + .collect(); + let mut summary = child_summaries[0].clone(); + for child_summary in &child_summaries[1..] 
{ + ::add_summary(&mut summary, child_summary, cx); + } + SumTree(Arc::new(Node::Internal { + height, + summary, + child_summaries, + child_trees, + })) }) - .buffered(4) - .collect::>() - .await; + .collect::>(); } if nodes.is_empty() { - Self::new(()) + Self::new(cx) } else { debug_assert_eq!(nodes.len(), 1); nodes.pop().unwrap() @@ -619,15 +597,15 @@ impl SumTree { self.append(Self::from_iter(iter, cx), cx); } - pub async fn async_extend(&mut self, iter: I, spawn: S) + pub fn par_extend(&mut self, iter: I, cx: ::Context<'_>) where - S: BackgroundSpawn, - I: IntoIterator + 'static, - T: 'static + Send + Sync, - for<'b> T::Summary: Summary = ()> + Send + Sync, + I: IntoParallelIterator, + Iter: IndexedParallelIterator, + T: Send + Sync, + T::Summary: Send + Sync, + for<'a> ::Context<'a>: Sync, { - let other = Self::from_iter_async(iter, spawn); - self.append(other.await, ()); + self.append(Self::from_par_iter(iter, cx), cx); } pub fn push(&mut self, item: T, cx: ::Context<'_>) { @@ -1092,23 +1070,6 @@ mod tests { #[test] fn test_random() { - struct NoSpawn; - impl BackgroundSpawn for NoSpawn { - type Task - = std::pin::Pin + Sync + Send>> - where - R: Send + Sync; - fn background_spawn( - &self, - future: impl Future + Send + Sync + 'static, - ) -> Self::Task - where - R: Send + Sync + 'static, - { - Box::pin(future) - } - } - let mut starting_seed = 0; if let Ok(value) = std::env::var("SEED") { starting_seed = value.parse().expect("invalid SEED variable"); @@ -1134,7 +1095,7 @@ mod tests { .sample_iter(StandardUniform) .take(count) .collect::>(); - pollster::block_on(tree.async_extend(items, NoSpawn)); + tree.par_extend(items, ()); } for _ in 0..num_operations { @@ -1156,7 +1117,7 @@ mod tests { if rng.random() { new_tree.extend(new_items, ()); } else { - pollster::block_on(new_tree.async_extend(new_items, NoSpawn)); + new_tree.par_extend(new_items, ()); } cursor.seek(&Count(splice_end), Bias::Right); new_tree.append(cursor.slice(&tree_end, Bias::Right), ()); 
diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index f82321feeb245b4ee3b6d56627387c8594d5db8e..0497512b762fd141e8bc727b66354f7fbcef7925 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -448,11 +448,12 @@ impl PickerDelegate for TasksModalDelegate { let template = resolved_task.original_task(); let display_label = resolved_task.display_label(); - let mut tooltip_label_text = if display_label != &template.label { - resolved_task.resolved_label.clone() - } else { - String::new() - }; + let mut tooltip_label_text = + if display_label != &template.label || source_kind == &TaskSourceKind::UserInput { + resolved_task.resolved_label.clone() + } else { + String::new() + }; if resolved_task.resolved.command_label != resolved_task.resolved_label { if !tooltip_label_text.trim().is_empty() { diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index e9f9279f0d0b41f651c2ac218adf58bd76af2021..ed02381eb83db5daececd159171a90072244a340 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -28,7 +28,6 @@ rope.workspace = true smallvec.workspace = true sum_tree.workspace = true util.workspace = true -gpui.workspace = true [dev-dependencies] collections = { workspace = true, features = ["test-support"] } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 6281c2f0e2ef21cb3756cfe5da814d294b49b108..c9e04e407ffdb8ffde6b139e01d78822e54e1a4b 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -14,29 +14,24 @@ fn init_logger() { zlog::init_test(); } -#[gpui::test] -fn test_edit(cx: &mut gpui::TestAppContext) { - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "abc", - cx.background_executor(), - ); +#[test] +fn test_edit() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc"); assert_eq!(buffer.text(), "abc"); - buffer.edit([(3..3, "def")], cx.background_executor()); + buffer.edit([(3..3, "def")]); 
assert_eq!(buffer.text(), "abcdef"); - buffer.edit([(0..0, "ghi")], cx.background_executor()); + buffer.edit([(0..0, "ghi")]); assert_eq!(buffer.text(), "ghiabcdef"); - buffer.edit([(5..5, "jkl")], cx.background_executor()); + buffer.edit([(5..5, "jkl")]); assert_eq!(buffer.text(), "ghiabjklcdef"); - buffer.edit([(6..7, "")], cx.background_executor()); + buffer.edit([(6..7, "")]); assert_eq!(buffer.text(), "ghiabjlcdef"); - buffer.edit([(4..9, "mno")], cx.background_executor()); + buffer.edit([(4..9, "mno")]); assert_eq!(buffer.text(), "ghiamnoef"); } #[gpui::test(iterations = 100)] -fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) { +fn test_random_edits(mut rng: StdRng) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); @@ -49,7 +44,6 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) { ReplicaId::LOCAL, BufferId::new(1).unwrap(), reference_string.clone(), - cx.background_executor(), ); LineEnding::normalize(&mut reference_string); @@ -62,7 +56,7 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) { ); for _i in 0..operations { - let (edits, _) = buffer.randomly_edit(&mut rng, 5, cx.background_executor()); + let (edits, _) = buffer.randomly_edit(&mut rng, 5); for (old_range, new_text) in edits.iter().rev() { reference_string.replace_range(old_range.clone(), new_text); } @@ -112,11 +106,7 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) { let mut text = old_buffer.visible_text.clone(); for edit in edits { let new_text: String = buffer.text_for_range(edit.new.clone()).collect(); - text.replace( - edit.new.start..edit.new.start + edit.old.len(), - &new_text, - cx.background_executor(), - ); + text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text); } assert_eq!(text.to_string(), buffer.text()); @@ -171,18 +161,14 @@ fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) { let mut text = 
old_buffer.visible_text.clone(); for edit in subscription_edits.into_inner() { let new_text: String = buffer.text_for_range(edit.new.clone()).collect(); - text.replace( - edit.new.start..edit.new.start + edit.old.len(), - &new_text, - cx.background_executor(), - ); + text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text); } assert_eq!(text.to_string(), buffer.text()); } } -#[gpui::test] -fn test_line_endings(cx: &mut gpui::TestAppContext) { +#[test] +fn test_line_endings() { assert_eq!(LineEnding::detect(&"🍐✅\n".repeat(1000)), LineEnding::Unix); assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix); assert_eq!( @@ -198,34 +184,25 @@ fn test_line_endings(cx: &mut gpui::TestAppContext) { ReplicaId::LOCAL, BufferId::new(1).unwrap(), "one\r\ntwo\rthree", - cx.background_executor(), ); assert_eq!(buffer.text(), "one\ntwo\nthree"); assert_eq!(buffer.line_ending(), LineEnding::Windows); buffer.check_invariants(); - buffer.edit( - [(buffer.len()..buffer.len(), "\r\nfour")], - cx.background_executor(), - ); - buffer.edit([(0..0, "zero\r\n")], cx.background_executor()); + buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]); + buffer.edit([(0..0, "zero\r\n")]); assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour"); assert_eq!(buffer.line_ending(), LineEnding::Windows); buffer.check_invariants(); } -#[gpui::test] -fn test_line_len(cx: &mut gpui::TestAppContext) { - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "", - cx.background_executor(), - ); - buffer.edit([(0..0, "abcd\nefg\nhij")], cx.background_executor()); - buffer.edit([(12..12, "kl\nmno")], cx.background_executor()); - buffer.edit([(18..18, "\npqrs\n")], cx.background_executor()); - buffer.edit([(18..21, "\nPQ")], cx.background_executor()); +#[test] +fn test_line_len() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); + buffer.edit([(0..0, "abcd\nefg\nhij")]); + buffer.edit([(12..12, "kl\nmno")]); + 
buffer.edit([(18..18, "\npqrs\n")]); + buffer.edit([(18..21, "\nPQ")]); assert_eq!(buffer.line_len(0), 4); assert_eq!(buffer.line_len(1), 3); @@ -235,15 +212,10 @@ fn test_line_len(cx: &mut gpui::TestAppContext) { assert_eq!(buffer.line_len(5), 0); } -#[gpui::test] -fn test_common_prefix_at_position(cx: &mut gpui::TestAppContext) { +#[test] +fn test_common_prefix_at_position() { let text = "a = str; b = δα"; - let buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - text, - cx.background_executor(), - ); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); let offset1 = offset_after(text, "str"); let offset2 = offset_after(text, "δα"); @@ -289,13 +261,12 @@ fn test_common_prefix_at_position(cx: &mut gpui::TestAppContext) { } } -#[gpui::test] -fn test_text_summary_for_range(cx: &mut gpui::TestAppContext) { +#[test] +fn test_text_summary_for_range() { let buffer = Buffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ab\nefg\nhklm\nnopqrs\ntuvwxyz", - cx.background_executor(), ); assert_eq!( buffer.text_summary_for_range::(0..2), @@ -383,18 +354,13 @@ fn test_text_summary_for_range(cx: &mut gpui::TestAppContext) { ); } -#[gpui::test] -fn test_chars_at(cx: &mut gpui::TestAppContext) { - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "", - cx.background_executor(), - ); - buffer.edit([(0..0, "abcd\nefgh\nij")], cx.background_executor()); - buffer.edit([(12..12, "kl\nmno")], cx.background_executor()); - buffer.edit([(18..18, "\npqrs")], cx.background_executor()); - buffer.edit([(18..21, "\nPQ")], cx.background_executor()); +#[test] +fn test_chars_at() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); + buffer.edit([(0..0, "abcd\nefgh\nij")]); + buffer.edit([(12..12, "kl\nmno")]); + buffer.edit([(18..18, "\npqrs")]); + buffer.edit([(18..21, "\nPQ")]); let chars = buffer.chars_at(Point::new(0, 0)); assert_eq!(chars.collect::(), 
"abcd\nefgh\nijkl\nmno\nPQrs"); @@ -412,53 +378,43 @@ fn test_chars_at(cx: &mut gpui::TestAppContext) { assert_eq!(chars.collect::(), "PQrs"); // Regression test: - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "", - cx.background_executor(), - ); - buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")], cx.background_executor()); - buffer.edit([(60..60, "\n")], cx.background_executor()); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); + buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")]); + buffer.edit([(60..60, "\n")]); let chars = buffer.chars_at(Point::new(6, 0)); assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); } -#[gpui::test] -fn test_anchors(cx: &mut gpui::TestAppContext) { - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "", - cx.background_executor(), - ); - buffer.edit([(0..0, "abc")], cx.background_executor()); +#[test] +fn test_anchors() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); + buffer.edit([(0..0, "abc")]); let left_anchor = buffer.anchor_before(2); let right_anchor = buffer.anchor_after(2); - buffer.edit([(1..1, "def\n")], cx.background_executor()); + buffer.edit([(1..1, "def\n")]); assert_eq!(buffer.text(), "adef\nbc"); assert_eq!(left_anchor.to_offset(&buffer), 6); assert_eq!(right_anchor.to_offset(&buffer), 6); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - buffer.edit([(2..3, "")], cx.background_executor()); + buffer.edit([(2..3, "")]); assert_eq!(buffer.text(), "adf\nbc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 5); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); 
assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - buffer.edit([(5..5, "ghi\n")], cx.background_executor()); + buffer.edit([(5..5, "ghi\n")]); assert_eq!(buffer.text(), "adf\nbghi\nc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 9); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); - buffer.edit([(7..9, "")], cx.background_executor()); + buffer.edit([(7..9, "")]); assert_eq!(buffer.text(), "adf\nbghc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 7); @@ -548,18 +504,13 @@ fn test_anchors(cx: &mut gpui::TestAppContext) { ); } -#[gpui::test] -fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) { - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "", - cx.background_executor(), - ); +#[test] +fn test_anchors_at_start_and_end() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); let before_start_anchor = buffer.anchor_before(0); let after_end_anchor = buffer.anchor_after(0); - buffer.edit([(0..0, "abc")], cx.background_executor()); + buffer.edit([(0..0, "abc")]); assert_eq!(buffer.text(), "abc"); assert_eq!(before_start_anchor.to_offset(&buffer), 0); assert_eq!(after_end_anchor.to_offset(&buffer), 3); @@ -567,8 +518,8 @@ fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) { let after_start_anchor = buffer.anchor_after(0); let before_end_anchor = buffer.anchor_before(3); - buffer.edit([(3..3, "def")], cx.background_executor()); - buffer.edit([(0..0, "ghi")], cx.background_executor()); + buffer.edit([(3..3, "def")]); + buffer.edit([(0..0, "ghi")]); assert_eq!(buffer.text(), "ghiabcdef"); assert_eq!(before_start_anchor.to_offset(&buffer), 0); assert_eq!(after_start_anchor.to_offset(&buffer), 3); @@ -576,20 +527,15 @@ fn test_anchors_at_start_and_end(cx: &mut 
gpui::TestAppContext) { assert_eq!(after_end_anchor.to_offset(&buffer), 9); } -#[gpui::test] -fn test_undo_redo(cx: &mut gpui::TestAppContext) { - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "1234", - cx.background_executor(), - ); +#[test] +fn test_undo_redo() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234"); // Set group interval to zero so as to not group edits in the undo stack. buffer.set_group_interval(Duration::from_secs(0)); - buffer.edit([(1..1, "abx")], cx.background_executor()); - buffer.edit([(3..4, "yzef")], cx.background_executor()); - buffer.edit([(3..5, "cd")], cx.background_executor()); + buffer.edit([(1..1, "abx")]); + buffer.edit([(3..4, "yzef")]); + buffer.edit([(3..5, "cd")]); assert_eq!(buffer.text(), "1abcdef234"); let entries = buffer.history.undo_stack.clone(); @@ -617,31 +563,26 @@ fn test_undo_redo(cx: &mut gpui::TestAppContext) { assert_eq!(buffer.text(), "1234"); } -#[gpui::test] -fn test_history(cx: &mut gpui::TestAppContext) { +#[test] +fn test_history() { let mut now = Instant::now(); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "123456", - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456"); buffer.set_group_interval(Duration::from_millis(300)); let transaction_1 = buffer.start_transaction_at(now).unwrap(); - buffer.edit([(2..4, "cd")], cx.background_executor()); + buffer.edit([(2..4, "cd")]); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56"); buffer.start_transaction_at(now); - buffer.edit([(4..5, "e")], cx.background_executor()); + buffer.edit([(4..5, "e")]); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "12cde6"); now += buffer.transaction_group_interval() + Duration::from_millis(1); buffer.start_transaction_at(now); - buffer.edit([(0..1, "a")], cx.background_executor()); - buffer.edit([(1..1, "b")], 
cx.background_executor()); + buffer.edit([(0..1, "a")]); + buffer.edit([(1..1, "b")]); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "ab2cde6"); @@ -668,7 +609,7 @@ fn test_history(cx: &mut gpui::TestAppContext) { // Redo stack gets cleared after performing an edit. buffer.start_transaction_at(now); - buffer.edit([(0..0, "X")], cx.background_executor()); + buffer.edit([(0..0, "X")]); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "X12cde6"); buffer.redo(); @@ -689,31 +630,26 @@ fn test_history(cx: &mut gpui::TestAppContext) { assert_eq!(buffer.text(), "X12cde6"); } -#[gpui::test] -fn test_finalize_last_transaction(cx: &mut gpui::TestAppContext) { +#[test] +fn test_finalize_last_transaction() { let now = Instant::now(); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "123456", - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456"); buffer.history.group_interval = Duration::from_millis(1); buffer.start_transaction_at(now); - buffer.edit([(2..4, "cd")], cx.background_executor()); + buffer.edit([(2..4, "cd")]); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56"); buffer.finalize_last_transaction(); buffer.start_transaction_at(now); - buffer.edit([(4..5, "e")], cx.background_executor()); + buffer.edit([(4..5, "e")]); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "12cde6"); buffer.start_transaction_at(now); - buffer.edit([(0..1, "a")], cx.background_executor()); - buffer.edit([(1..1, "b")], cx.background_executor()); + buffer.edit([(0..1, "a")]); + buffer.edit([(1..1, "b")]); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "ab2cde6"); @@ -730,19 +666,14 @@ fn test_finalize_last_transaction(cx: &mut gpui::TestAppContext) { assert_eq!(buffer.text(), "ab2cde6"); } -#[gpui::test] -fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) { +#[test] +fn 
test_edited_ranges_for_transaction() { let now = Instant::now(); - let mut buffer = Buffer::new( - ReplicaId::LOCAL, - BufferId::new(1).unwrap(), - "1234567", - cx.background_executor(), - ); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234567"); buffer.start_transaction_at(now); - buffer.edit([(2..4, "cd")], cx.background_executor()); - buffer.edit([(6..6, "efg")], cx.background_executor()); + buffer.edit([(2..4, "cd")]); + buffer.edit([(6..6, "efg")]); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56efg7"); @@ -754,7 +685,7 @@ fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) { [2..4, 6..9] ); - buffer.edit([(5..5, "hijk")], cx.background_executor()); + buffer.edit([(5..5, "hijk")]); assert_eq!(buffer.text(), "12cd5hijk6efg7"); assert_eq!( buffer @@ -763,7 +694,7 @@ fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) { [2..4, 10..13] ); - buffer.edit([(4..4, "l")], cx.background_executor()); + buffer.edit([(4..4, "l")]); assert_eq!(buffer.text(), "12cdl5hijk6efg7"); assert_eq!( buffer @@ -773,42 +704,27 @@ fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) { ); } -#[gpui::test] -fn test_concurrent_edits(cx: &mut gpui::TestAppContext) { +#[test] +fn test_concurrent_edits() { let text = "abcdef"; - let mut buffer1 = Buffer::new( - ReplicaId::new(1), - BufferId::new(1).unwrap(), - text, - cx.background_executor(), - ); - let mut buffer2 = Buffer::new( - ReplicaId::new(2), - BufferId::new(1).unwrap(), - text, - cx.background_executor(), - ); - let mut buffer3 = Buffer::new( - ReplicaId::new(3), - BufferId::new(1).unwrap(), - text, - cx.background_executor(), - ); + let mut buffer1 = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), text); + let mut buffer2 = Buffer::new(ReplicaId::new(2), BufferId::new(1).unwrap(), text); + let mut buffer3 = Buffer::new(ReplicaId::new(3), BufferId::new(1).unwrap(), text); - let buf1_op = buffer1.edit([(1..2, "12")], 
cx.background_executor()); + let buf1_op = buffer1.edit([(1..2, "12")]); assert_eq!(buffer1.text(), "a12cdef"); - let buf2_op = buffer2.edit([(3..4, "34")], cx.background_executor()); + let buf2_op = buffer2.edit([(3..4, "34")]); assert_eq!(buffer2.text(), "abc34ef"); - let buf3_op = buffer3.edit([(5..6, "56")], cx.background_executor()); + let buf3_op = buffer3.edit([(5..6, "56")]); assert_eq!(buffer3.text(), "abcde56"); - buffer1.apply_op(buf2_op.clone(), Some(cx.background_executor())); - buffer1.apply_op(buf3_op.clone(), Some(cx.background_executor())); - buffer2.apply_op(buf1_op.clone(), Some(cx.background_executor())); - buffer2.apply_op(buf3_op, Some(cx.background_executor())); - buffer3.apply_op(buf1_op, Some(cx.background_executor())); - buffer3.apply_op(buf2_op, Some(cx.background_executor())); + buffer1.apply_op(buf2_op.clone()); + buffer1.apply_op(buf3_op.clone()); + buffer2.apply_op(buf1_op.clone()); + buffer2.apply_op(buf3_op); + buffer3.apply_op(buf1_op); + buffer3.apply_op(buf2_op); assert_eq!(buffer1.text(), "a12c34e56"); assert_eq!(buffer2.text(), "a12c34e56"); @@ -816,7 +732,7 @@ fn test_concurrent_edits(cx: &mut gpui::TestAppContext) { } #[gpui::test(iterations = 100)] -fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) { +fn test_random_concurrent_edits(mut rng: StdRng) { let peers = env::var("PEERS") .map(|i| i.parse().expect("invalid `PEERS` variable")) .unwrap_or(5); @@ -837,7 +753,6 @@ fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) ReplicaId::new(i as u16), BufferId::new(1).unwrap(), base_text.clone(), - cx.background_executor(), ); buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200)); buffers.push(buffer); @@ -854,9 +769,7 @@ fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) let buffer = &mut buffers[replica_index]; match rng.random_range(0..=100) { 0..=50 if mutation_count != 0 => { - let op = buffer - 
.randomly_edit(&mut rng, 5, cx.background_executor()) - .1; + let op = buffer.randomly_edit(&mut rng, 5).1; network.broadcast(buffer.replica_id, vec![op]); log::info!("buffer {:?} text: {:?}", buffer.replica_id, buffer.text()); mutation_count -= 1; @@ -874,7 +787,7 @@ fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) replica_id, ops.len() ); - buffer.apply_ops(ops, Some(cx.background_executor())); + buffer.apply_ops(ops); } } _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index de559075403f53468e0b0cbeb4ecdc1754f4375b..8e98ac5f19b3d4f00bb573c32d555b0e5e387617 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -15,7 +15,6 @@ use anyhow::{Context as _, Result}; use clock::Lamport; pub use clock::ReplicaId; use collections::{HashMap, HashSet}; -use gpui::BackgroundExecutor; use locator::Locator; use operation_queue::OperationQueue; pub use patch::Patch; @@ -710,41 +709,11 @@ impl FromIterator for LineIndent { } impl Buffer { - /// Create a new buffer from a string. - pub fn new( - replica_id: ReplicaId, - remote_id: BufferId, - base_text: impl Into, - executor: &BackgroundExecutor, - ) -> Buffer { + pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into) -> Buffer { let mut base_text = base_text.into(); let line_ending = LineEnding::detect(&base_text); LineEnding::normalize(&mut base_text); - Self::new_normalized( - replica_id, - remote_id, - line_ending, - Rope::from_str(&base_text, executor), - ) - } - - /// Create a new buffer from a string. - /// - /// Unlike [`Buffer::new`], this does not construct the backing rope in parallel if it is large enough. 
- pub fn new_slow( - replica_id: ReplicaId, - remote_id: BufferId, - base_text: impl Into, - ) -> Buffer { - let mut base_text = base_text.into(); - let line_ending = LineEnding::detect(&base_text); - LineEnding::normalize(&mut base_text); - Self::new_normalized( - replica_id, - remote_id, - line_ending, - Rope::from_str_small(&base_text), - ) + Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text)) } pub fn new_normalized( @@ -839,7 +808,7 @@ impl Buffer { self.history.group_interval } - pub fn edit(&mut self, edits: R, cx: &BackgroundExecutor) -> Operation + pub fn edit(&mut self, edits: R) -> Operation where R: IntoIterator, I: ExactSizeIterator, T)>, @@ -852,7 +821,7 @@ impl Buffer { self.start_transaction(); let timestamp = self.lamport_clock.tick(); - let operation = Operation::Edit(self.apply_local_edit(edits, timestamp, cx)); + let operation = Operation::Edit(self.apply_local_edit(edits, timestamp)); self.history.push(operation.clone()); self.history.push_undo(operation.timestamp()); @@ -865,7 +834,6 @@ impl Buffer { &mut self, edits: impl ExactSizeIterator, T)>, timestamp: clock::Lamport, - executor: &BackgroundExecutor, ) -> EditOperation { let mut edits_patch = Patch::default(); let mut edit_op = EditOperation { @@ -954,7 +922,7 @@ impl Buffer { }); insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text.as_ref(), executor); + new_ropes.push_str(new_text.as_ref()); new_fragments.push(fragment, &None); insertion_offset += new_text.len(); } @@ -1033,26 +1001,22 @@ impl Buffer { self.snapshot.line_ending = line_ending; } - pub fn apply_ops>( - &mut self, - ops: I, - executor: Option<&BackgroundExecutor>, - ) { + pub fn apply_ops>(&mut self, ops: I) { let mut deferred_ops = Vec::new(); for op in ops { self.history.push(op.clone()); if self.can_apply_op(&op) { - self.apply_op(op, executor); + self.apply_op(op); } else { 
self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - self.flush_deferred_ops(executor); + self.flush_deferred_ops(); } - fn apply_op(&mut self, op: Operation, executor: Option<&BackgroundExecutor>) { + fn apply_op(&mut self, op: Operation) { match op { Operation::Edit(edit) => { if !self.version.observed(edit.timestamp) { @@ -1061,7 +1025,6 @@ impl Buffer { &edit.ranges, &edit.new_text, edit.timestamp, - executor, ); self.snapshot.version.observe(edit.timestamp); self.lamport_clock.observe(edit.timestamp); @@ -1092,7 +1055,6 @@ impl Buffer { ranges: &[Range], new_text: &[Arc], timestamp: clock::Lamport, - executor: Option<&BackgroundExecutor>, ) { if ranges.is_empty() { return; @@ -1208,10 +1170,7 @@ impl Buffer { }); insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); new_insertions.push(InsertionFragment::insert_new(&fragment)); - match executor { - Some(executor) => new_ropes.push_str(new_text, executor), - None => new_ropes.push_str_small(new_text), - } + new_ropes.push_str(new_text); new_fragments.push(fragment, &None); insertion_offset += new_text.len(); } @@ -1389,12 +1348,12 @@ impl Buffer { self.subscriptions.publish_mut(&edits); } - fn flush_deferred_ops(&mut self, executor: Option<&BackgroundExecutor>) { + fn flush_deferred_ops(&mut self) { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { - self.apply_op(op, executor); + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); @@ -1752,9 +1711,9 @@ impl Buffer { #[cfg(any(test, feature = "test-support"))] impl Buffer { #[track_caller] - pub fn edit_via_marked_text(&mut self, marked_string: &str, cx: &BackgroundExecutor) { + pub fn edit_via_marked_text(&mut self, marked_string: &str) { let edits = self.edits_for_marked_text(marked_string); - self.edit(edits, cx); + 
self.edit(edits); } #[track_caller] @@ -1891,7 +1850,6 @@ impl Buffer { &mut self, rng: &mut T, edit_count: usize, - executor: &BackgroundExecutor, ) -> (Vec<(Range, Arc)>, Operation) where T: rand::Rng, @@ -1899,7 +1857,7 @@ impl Buffer { let mut edits = self.get_random_edits(rng, edit_count); log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits); - let op = self.edit(edits.iter().cloned(), executor); + let op = self.edit(edits.iter().cloned()); if let Operation::Edit(edit) = &op { assert_eq!(edits.len(), edit.new_text.len()); for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) { @@ -2748,12 +2706,8 @@ impl<'a> RopeBuilder<'a> { } } - fn push_str(&mut self, text: &str, cx: &BackgroundExecutor) { - self.new_visible.push(text, cx); - } - - fn push_str_small(&mut self, text: &str) { - self.new_visible.push_small(text); + fn push_str(&mut self, text: &str) { + self.new_visible.push(text); } fn finish(mut self) -> (Rope, Rope) { diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 070952d1cec346e4ec41e26f69895b65cd74f082..16a0389efa46429d91c79f4eb1e99f62d01753b5 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -220,6 +220,8 @@ impl TitleBar { .on_click({ let peer_id = collaborator.peer_id; cx.listener(move |this, _, window, cx| { + cx.stop_propagation(); + this.workspace .update(cx, |workspace, cx| { if is_following { diff --git a/crates/ui/src/components/popover_menu.rs b/crates/ui/src/components/popover_menu.rs index 439b53f0388114aa37adcf5277e87744e6f4f9e4..b1a52bec8fdf1f7030b5b321bed7702d602ff212 100644 --- a/crates/ui/src/components/popover_menu.rs +++ b/crates/ui/src/components/popover_menu.rs @@ -270,11 +270,11 @@ fn show_menu( window: &mut Window, cx: &mut App, ) { + let previous_focus_handle = window.focused(cx); let Some(new_menu) = (builder)(window, cx) else { return; }; let menu2 = menu.clone(); - let previous_focus_handle = window.focused(cx); window 
.subscribe(&new_menu, cx, move |modal, _: &DismissEvent, window, cx| { diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index b7548218371d0772b422adb04f1e326de040241f..d3d33a296bbd65edb24371d8f5f1e6462e77e3fe 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -392,7 +392,7 @@ pub struct Scrollbars { impl Scrollbars { pub fn new(show_along: ScrollAxes) -> Self { - Self::new_with_setting(show_along, |_| ShowScrollbar::Always) + Self::new_with_setting(show_along, |_| ShowScrollbar::default()) } pub fn for_settings() -> Scrollbars { diff --git a/crates/util/src/shell.rs b/crates/util/src/shell.rs index 7ab214d5105fb81c930954a1aaf9c4aa6fb865c5..e2da1c394b7d151a9ac4c7059c7d4f25e0d5fea5 100644 --- a/crates/util/src/shell.rs +++ b/crates/util/src/shell.rs @@ -408,6 +408,15 @@ impl ShellKind { } } + pub fn prepend_command_prefix<'a>(&self, command: &'a str) -> Cow<'a, str> { + match self.command_prefix() { + Some(prefix) if !command.starts_with(prefix) => { + Cow::Owned(format!("{prefix}{command}")) + } + _ => Cow::Borrowed(command), + } + } + pub const fn sequential_commands_separator(&self) -> char { match self { ShellKind::Cmd => '&', @@ -422,6 +431,20 @@ impl ShellKind { } } + pub const fn sequential_and_commands_separator(&self) -> &'static str { + match self { + ShellKind::Cmd + | ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::PowerShell + | ShellKind::Xonsh => "&&", + ShellKind::Nushell => ";", + } + } + pub fn try_quote<'a>(&self, arg: &'a str) -> Option> { shlex::try_quote(arg).ok().map(|arg| match self { // If we are running in PowerShell, we want to take extra care when escaping strings. @@ -438,6 +461,42 @@ impl ShellKind { }) } + /// Quotes the given argument if necessary, taking into account the command prefix. 
+ /// + /// In other words, this will consider quoting arg without its command prefix to not break the command. + /// You should use this over `try_quote` when you want to quote a shell command. + pub fn try_quote_prefix_aware<'a>(&self, arg: &'a str) -> Option> { + if let Some(char) = self.command_prefix() { + if let Some(arg) = arg.strip_prefix(char) { + // we have a command that is prefixed + for quote in ['\'', '"'] { + if let Some(arg) = arg + .strip_prefix(quote) + .and_then(|arg| arg.strip_suffix(quote)) + { + // and the command itself is wrapped as a literal, that + // means the prefix exists to interpret a literal as a + // command. So strip the quotes, quote the command, and + // re-add the quotes if they are missing after requoting + let quoted = self.try_quote(arg)?; + return Some(if quoted.starts_with(['\'', '"']) { + Cow::Owned(self.prepend_command_prefix("ed).into_owned()) + } else { + Cow::Owned( + self.prepend_command_prefix(&format!("{quote}{quoted}{quote}")) + .into_owned(), + ) + }); + } + } + return self + .try_quote(arg) + .map(|quoted| Cow::Owned(self.prepend_command_prefix("ed).into_owned())); + } + } + self.try_quote(arg) + } + pub fn split(&self, input: &str) -> Option> { shlex::split(input) } @@ -525,4 +584,75 @@ mod tests { "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string() ); } + + #[test] + fn test_try_quote_nu_command() { + let shell_kind = ShellKind::Nushell; + assert_eq!( + shell_kind.try_quote("'uname'").unwrap().into_owned(), + "\"'uname'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("'uname'") + .unwrap() + .into_owned(), + "\"'uname'\"".to_string() + ); + assert_eq!( + shell_kind.try_quote("^uname").unwrap().into_owned(), + "'^uname'".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("^uname") + .unwrap() + .into_owned(), + "^uname".to_string() + ); + assert_eq!( + 
shell_kind.try_quote("^'uname'").unwrap().into_owned(), + "'^'\"'uname\'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("^'uname'") + .unwrap() + .into_owned(), + "^'uname'".to_string() + ); + assert_eq!( + shell_kind.try_quote("'uname a'").unwrap().into_owned(), + "\"'uname a'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("'uname a'") + .unwrap() + .into_owned(), + "\"'uname a'\"".to_string() + ); + assert_eq!( + shell_kind.try_quote("^'uname a'").unwrap().into_owned(), + "'^'\"'uname a'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("^'uname a'") + .unwrap() + .into_owned(), + "^'uname a'".to_string() + ); + assert_eq!( + shell_kind.try_quote("uname").unwrap().into_owned(), + "uname".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("uname") + .unwrap() + .into_owned(), + "uname".to_string() + ); + } } diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index a885538e13e11b9720c3d7ffed5f7e6461943598..2da1083ee6623cc8a463ef31be7e90dca0063b34 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -3096,7 +3096,6 @@ mod test { use indoc::indoc; use language::Point; use multi_buffer::MultiBufferRow; - use text::Rope; #[gpui::test] async fn test_start_end_of_paragraph(cx: &mut gpui::TestAppContext) { @@ -3823,7 +3822,7 @@ mod test { cx.update_editor(|editor, _window, cx| { let range = editor.selections.newest_anchor().range(); let inlay_text = " field: int,\n field2: string\n field3: float"; - let inlay = Inlay::edit_prediction(1, range.start, Rope::from_str_small(inlay_text)); + let inlay = Inlay::edit_prediction(1, range.start, inlay_text); editor.splice_inlays(&[], vec![inlay], cx); }); @@ -3855,7 +3854,7 @@ mod test { let end_of_line = snapshot.anchor_after(Point::new(0, snapshot.line_len(MultiBufferRow(0)))); let inlay_text = " hint"; - let inlay = Inlay::edit_prediction(1, end_of_line, Rope::from_str_small(inlay_text)); + let inlay = 
Inlay::edit_prediction(1, end_of_line, inlay_text); editor.splice_inlays(&[], vec![inlay], cx); }); cx.simulate_keystrokes("$"); @@ -3894,7 +3893,7 @@ mod test { // The empty line is at line 3 (0-indexed) let line_start = snapshot.anchor_after(Point::new(3, 0)); let inlay_text = ": Vec"; - let inlay = Inlay::edit_prediction(1, line_start, Rope::from_str_small(inlay_text)); + let inlay = Inlay::edit_prediction(1, line_start, inlay_text); editor.splice_inlays(&[], vec![inlay], cx); }); @@ -3938,8 +3937,7 @@ mod test { let snapshot = editor.buffer().read(cx).snapshot(cx); let empty_line_start = snapshot.anchor_after(Point::new(2, 0)); let inlay_text = ": i32"; - let inlay = - Inlay::edit_prediction(2, empty_line_start, Rope::from_str_small(inlay_text)); + let inlay = Inlay::edit_prediction(2, empty_line_start, inlay_text); editor.splice_inlays(&[], vec![inlay], cx); }); diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index b77075f92bf69dc292cf69ac7eac147043d7d8b7..ee9a10d9c5344cfa372bf88a95e46de1705ee093 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -296,6 +296,15 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { None } + /// Returns optional elements to render to the left of the breadcrumb. 
+ fn breadcrumb_prefix( + &self, + _window: &mut Window, + _cx: &mut Context, + ) -> Option { + None + } + fn added_to_workspace( &mut self, _workspace: &mut Workspace, @@ -479,6 +488,7 @@ pub trait ItemHandle: 'static + Send { fn to_searchable_item_handle(&self, cx: &App) -> Option>; fn breadcrumb_location(&self, cx: &App) -> ToolbarItemLocation; fn breadcrumbs(&self, theme: &Theme, cx: &App) -> Option>; + fn breadcrumb_prefix(&self, window: &mut Window, cx: &mut App) -> Option; fn show_toolbar(&self, cx: &App) -> bool; fn pixel_position_of_cursor(&self, cx: &App) -> Option>; fn downgrade_item(&self) -> Box; @@ -979,6 +989,10 @@ impl ItemHandle for Entity { self.read(cx).breadcrumbs(theme, cx) } + fn breadcrumb_prefix(&self, window: &mut Window, cx: &mut App) -> Option { + self.update(cx, |item, cx| item.breadcrumb_prefix(window, cx)) + } + fn show_toolbar(&self, cx: &App) -> bool { self.read(cx).show_toolbar() } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 62e29f215146c03060afe81ee67b78e3b3ea8a59..b1de240eb62bcca6967333641bf8234825730300 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7580,13 +7580,13 @@ pub fn create_and_open_local_file( path: &'static Path, window: &mut Window, cx: &mut Context, - default_content: impl 'static + Send + FnOnce(&mut AsyncApp) -> Rope, + default_content: impl 'static + Send + FnOnce() -> Rope, ) -> Task>> { cx.spawn_in(window, async move |workspace, cx| { let fs = workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?; if !fs.is_file(path).await { fs.create_file(path, Default::default()).await?; - fs.save(path, &default_content(cx), Default::default()) + fs.save(path, &default_content(), Default::default()) .await?; } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 1cce23712ae88f0e42faf240099ebecd9000fc4e..d89e1ef4e4df7dbef3cf51789c1f1fc8a5309eb1 100644 --- 
a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -20,7 +20,6 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use text::Rope; use util::{ ResultExt, path, rel_path::{RelPath, rel_path}, @@ -647,13 +646,9 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { // Update the gitignore so that node_modules is no longer ignored, // but a subdirectory is ignored - fs.save( - "/root/.gitignore".as_ref(), - &Rope::from_str("e", cx.background_executor()), - Default::default(), - ) - .await - .unwrap(); + fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default()) + .await + .unwrap(); cx.executor().run_until_parked(); // All of the directories that are no longer ignored are now loaded. @@ -721,7 +716,7 @@ async fn test_write_file(cx: &mut TestAppContext) { .update(cx, |tree, cx| { tree.write_file( rel_path("tracked-dir/file.txt").into(), - Rope::from_str("hello", cx.background_executor()), + "hello".into(), Default::default(), cx, ) @@ -732,7 +727,7 @@ async fn test_write_file(cx: &mut TestAppContext) { .update(cx, |tree, cx| { tree.write_file( rel_path("ignored-dir/file.txt").into(), - Rope::from_str("world", cx.background_executor()), + "world".into(), Default::default(), cx, ) @@ -1470,7 +1465,7 @@ async fn test_random_worktree_operations_during_initial_scan( let fs = FakeFs::new(cx.background_executor.clone()) as Arc; fs.as_fake().insert_tree(root_dir, json!({})).await; for _ in 0..initial_entries { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await; + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; } log::info!("generated initial tree"); @@ -1560,7 +1555,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) let fs = FakeFs::new(cx.background_executor.clone()) as Arc; fs.as_fake().insert_tree(root_dir, json!({})).await; for _ in 0..initial_entries { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await; + 
randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; } log::info!("generated initial tree"); @@ -1603,7 +1598,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) .await .log_err(); } else { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await; + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; } let buffered_event_count = fs.as_fake().buffered_event_count(); @@ -1612,7 +1607,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) log::info!("flushing {} events", len); fs.as_fake().flush_events(len); } else { - randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng, cx.background_executor()).await; + randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; mutations_len -= 1; } @@ -1764,12 +1759,8 @@ fn randomly_mutate_worktree( }) } else { log::info!("overwriting file {:?} ({})", &entry.path, entry.id.0); - let task = worktree.write_file( - entry.path.clone(), - Rope::default(), - Default::default(), - cx, - ); + let task = + worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx); cx.background_spawn(async move { task.await?; Ok(()) @@ -1784,7 +1775,6 @@ async fn randomly_mutate_fs( root_path: &Path, insertion_probability: f64, rng: &mut impl Rng, - executor: &BackgroundExecutor, ) { log::info!("mutating fs"); let mut files = Vec::new(); @@ -1859,7 +1849,7 @@ async fn randomly_mutate_fs( ); fs.save( &ignore_path, - &Rope::from_str(ignore_contents.as_str(), executor), + &ignore_contents.as_str().into(), Default::default(), ) .await diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 9f6196c1482bcff2db9b7812dfb75b1471fec273..ebb3d8beb321cb6ee42cec84ddf7f456672a0265 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -73,6 +73,7 @@ gpui = { workspace = true, features = [ "windows-manifest", ] } gpui_tokio.workspace = true +rayon.workspace = true edit_prediction_button.workspace = true http_client.workspace = true diff --git 
a/crates/zed/src/main.rs b/crates/zed/src/main.rs index b873a58d3b61338b25c5908c2f87b62acb95d6f6..18903d888090bbc1fa0955d46417486a3f9fe13b 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -257,6 +257,13 @@ pub fn main() { return; } + rayon::ThreadPoolBuilder::new() + .num_threads(4) + .stack_size(10 * 1024 * 1024) + .thread_name(|ix| format!("RayonWorker{}", ix)) + .build_global() + .unwrap(); + log::info!( "========== starting zed version {}, sha {} ==========", app_version, diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index bd0a600ce52a265f9785b1e26e7a123f270ce263..2d7d47e968e93eef3d455cec9c324a4d4e0cff42 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -28,10 +28,10 @@ use git_ui::commit_view::CommitViewToolbar; use git_ui::git_panel::GitPanel; use git_ui::project_diff::ProjectDiffToolbar; use gpui::{ - Action, App, AppContext as _, AsyncApp, Context, DismissEvent, Element, Entity, Focusable, - KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, - Task, TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, - point, px, retain_all, + Action, App, AppContext as _, Context, DismissEvent, Element, Entity, Focusable, KeyBinding, + ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, Task, + TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, point, + px, retain_all, }; use image_viewer::ImageInfo; use language::Capability; @@ -201,12 +201,7 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::keymap_file(), - |cx| { - Rope::from_str( - settings::initial_keymap_content().as_ref(), - cx.background_executor(), - ) - }, + || settings::initial_keymap_content().as_ref().into(), window, cx, ); @@ -216,12 +211,7 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( 
paths::settings_file(), - |cx| { - Rope::from_str( - settings::initial_user_settings_content().as_ref(), - cx.background_executor(), - ) - }, + || settings::initial_user_settings_content().as_ref().into(), window, cx, ); @@ -236,12 +226,7 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::tasks_file(), - |cx| { - Rope::from_str( - settings::initial_tasks_content().as_ref(), - cx.background_executor(), - ) - }, + || settings::initial_tasks_content().as_ref().into(), window, cx, ); @@ -251,12 +236,7 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::debug_scenarios_file(), - |cx| { - Rope::from_str( - settings::initial_debug_tasks_content().as_ref(), - cx.background_executor(), - ) - }, + || settings::initial_debug_tasks_content().as_ref().into(), window, cx, ); @@ -1959,7 +1939,7 @@ fn open_bundled_file( fn open_settings_file( abs_path: &'static Path, - default_content: impl FnOnce(&mut AsyncApp) -> Rope + Send + 'static, + default_content: impl FnOnce() -> Rope + Send + 'static, window: &mut Window, cx: &mut Context, ) { @@ -4375,7 +4355,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#), + &r#"{"base_keymap": "Atom"}"#.into(), Default::default(), ) .await @@ -4385,7 +4365,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#), + &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(), Default::default(), ) .await @@ -4433,7 +4413,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#), + &r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#.into(), Default::default(), ) .await @@ -4453,7 +4433,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#), + 
&r#"{"base_keymap": "JetBrains"}"#.into(), Default::default(), ) .await @@ -4493,7 +4473,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#), + &r#"{"base_keymap": "Atom"}"#.into(), Default::default(), ) .await @@ -4502,7 +4482,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#), + &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(), Default::default(), ) .await @@ -4545,7 +4525,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &Rope::from_str_small(r#"[{"bindings": {"backspace": null}}]"#), + &r#"[{"bindings": {"backspace": null}}]"#.into(), Default::default(), ) .await @@ -4565,7 +4545,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#), + &r#"{"base_keymap": "JetBrains"}"#.into(), Default::default(), ) .await diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index bc6c25105e69eb85e8db3714c48dc30791683109..3abb76715d67e3d288cf812fc6a4bff58ac3ddfe 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -861,7 +861,7 @@ mod tests { .fs .save( Path::new(file1_path), - &Rope::from_str("content1", cx.background_executor()), + &Rope::from("content1"), LineEnding::Unix, ) .await @@ -875,7 +875,7 @@ mod tests { .fs .save( Path::new(file2_path), - &Rope::from_str("content2", cx.background_executor()), + &Rope::from("content2"), LineEnding::Unix, ) .await diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index ca2edd0682e181c8db7b8f1973386d3190eab12d..454a1526a9e8c6a75d47bda875feb6843b454a0d 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -1836,13 +1836,12 @@ mod tests { let fs = project::FakeFs::new(cx.executor()); let project = Project::test(fs.clone(), [], cx).await; - let buffer = cx.new(|cx| { + let buffer = cx.new(|_cx| { Buffer::remote( 
language::BufferId::new(1).unwrap(), ReplicaId::new(1), language::Capability::ReadWrite, "fn main() {\n println!(\"Hello\");\n}", - cx.background_executor(), ) }); diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 9e5bbb1413fec9b021d73dce0f002c1e039c5da9..1a4783cdf5342c0ab92d4eea45260c416fc68cd8 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -165,6 +165,5 @@ - [Local Collaboration](./development/local-collaboration.md) - [Using Debuggers](./development/debuggers.md) - [Glossary](./development/glossary.md) -- [Release Process](./development/releases.md) - [Release Notes](./development/release-notes.md) - [Debugging Crashes](./development/debugging-crashes.md) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 054b6b1b5c812bed95dc7db6e63522b11b86c09c..77906a83a499a2e09ebbb93842545075bd46b8a7 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -3163,6 +3163,12 @@ Non-negative `integer` values - Setting: `search_wrap` - Default: `true` +## Center on Match + +- Description: If `center_on_match` is enabled, the editor will center the cursor on the current match when searching. +- Setting: `center_on_match` +- Default: `false` + ## Seed Search Query From Cursor - Description: When to populate a new search's query based on the text under the cursor. diff --git a/docs/src/development.md b/docs/src/development.md index 6cb5f0b8271ab0347d33ee0cf634b60e790f3ba0..31bb245ac42f80c830a0faba405323d1097e3f51 100644 --- a/docs/src/development.md +++ b/docs/src/development.md @@ -88,7 +88,6 @@ in-depth examples and explanations. 
 ## Contributor links
 
 - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md)
-- [Releases](./development/releases.md)
 - [Debugging Crashes](./development/debugging-crashes.md)
 - [Code of Conduct](https://zed.dev/code-of-conduct)
 - [Zed Contributor License](https://zed.dev/cla)
diff --git a/docs/src/development/linux.md b/docs/src/development/linux.md
index a6799378bc00b5c992fd99562b6446729de22559..df3b840fa17a547efd4324f3bdaa119b8ade8738 100644
--- a/docs/src/development/linux.md
+++ b/docs/src/development/linux.md
@@ -165,6 +165,58 @@ $ cargo heaptrack -b zed
 When this zed instance is exited, terminal output will include a command to run `heaptrack_interpret` to convert the `*.raw.zst` profile to a `*.zst` file which can be passed to `heaptrack_gui` for viewing.
 
+## Perf recording
+
+How to get a flamegraph with resolved symbols from a running zed instance. Use
+when zed is using a lot of CPU. Not useful for hangs.
+
+### During the incident
+
+- Find the PID (process ID) using:
+  `ps -eo size,pid,comm | grep zed | sort | head -n 1 | cut -d ' ' -f 2`
+  Or find the pid of the command zed-editor with the most ram usage in something
+  like htop/btop/top.
+
+- Install perf:
+  On Ubuntu (derivatives) run `sudo apt install linux-tools`.
+
+- Perf Record:
+  run `sudo perf record -p <PID>`, wait a few seconds to gather data then press Ctrl+C. You should now have a perf.data file
+
+- Make the output file user owned:
+  run `sudo chown $USER:$USER perf.data`
+
+- Get build info:
+  Run zed again and type `zed: about` in the command palette to get the exact commit.
+
+The `perf.data` file can be sent to Zed together with the exact commit.
+
+### Later
+
+This can be done by Zed staff.
+
+- Build Zed with symbols:
+  Check out the commit found previously and modify `Cargo.toml`.
+  Apply the following diff then make a release build.
+
+```diff
+[profile.release]
+-debug = "limited"
++debug = "full"
+```
+
+- Add the symbols to perf database:
+  `perf buildid-cache -v -a <path-to-zed-binary>`
+
+- Resolve the symbols from the db:
+  `perf inject -i perf.data -o perf_with_symbols.data`
+
+- Install flamegraph:
+  `cargo install cargo-flamegraph`
+
+- Render the flamegraph:
+  `flamegraph --perfdata perf_with_symbols.data`
+
 ## Troubleshooting
 
 ### Cargo errors claiming that a dependency is using unstable features
diff --git a/docs/src/development/releases.md b/docs/src/development/releases.md
deleted file mode 100644
index 6cb3deb31680f8c038195c93ebf12fe6699354e2..0000000000000000000000000000000000000000
--- a/docs/src/development/releases.md
+++ /dev/null
@@ -1,147 +0,0 @@
-# Zed Releases
-
-Read about Zed's [release channels here](https://zed.dev/faq#what-are-the-release-channels).
-
-## Wednesday Release Process
-
-You will need write access to the Zed repository to do this.
-
-Credentials for various services used in this process can be found in 1Password.
-
-Use the `releases` Slack channel to notify the team that releases will be starting.
-This is mostly a formality on Wednesday's minor update releases, but can be beneficial when doing patch releases, as other devs may have landed fixes they'd like to cherry pick.
-
-### Starting the Builds
-
-1. Checkout `main` and ensure your working copy is clean.
-
-1. Run `git fetch && git pull` to ensure you have the latest commits locally.
-
-1. Run `git fetch --tags --force` to forcibly ensure your local tags are in sync with the remote.
-
-1. Run `./script/get-stable-channel-release-notes` and store output locally.
-
-1. Run `./script/bump-zed-minor-versions`.
-
-   - Push the tags and branches as instructed.
-
-1. Run `./script/get-preview-channel-changes` and store output locally.
-
-> **Note:** Always prioritize the stable release.
-> If you've completed aggregating stable release notes, you can move on to working on aggregating preview release notes, but once the stable build has finished, work through the rest of the stable steps to fully publish. -> Preview can be finished up after. - -### Stable Release - -1. Aggregate stable release notes. - - - Follow the instructions at the end of the script and aggregate the release notes into one structure. - -1. Once the stable release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the stable release notes into it and **save**. - - - **Do not publish the draft!** - -1. Check the stable release assets. - - - Ensure the stable release job has finished without error. - - Ensure the draft has the proper number of assets—releases currently have 12 assets each (as of v0.211). - - Download the artifacts for the stable release draft and test that you can run them locally. - -1. Publish the stable draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). - - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. - -1. Post the stable release notes to social media. - - - Bluesky and X posts will already be built as drafts in [Buffer](https://buffer.com). - - Double-check links. - - Publish both, one at a time, ensuring both are posted to each respective platform. - -1. Send the stable release notes email. - - - The email broadcast will already be built as a draft in [Kit](https://kit.com). - - Double-check links. - - Publish the email. - -### Preview Release - -1. Aggregate preview release notes. - - - Take the script's output and build release notes by organizing each release note line into a category. - - Use a prior release for the initial outline. - - Make sure to append the `Credit` line, if present, to the end of each release note line. - -1. 
Once the preview release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the preview release notes into it and **save**. - - - **Do not publish the draft!** - -1. Check the preview release assets. - - - Ensure the preview release job has finished without error. - - Ensure the draft has the proper number of assets—releases currently have 12 assets each (as of v0.211). - - Download the artifacts for the preview release draft and test that you can run them locally. - -1. Publish the preview draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. - -### Prep Content for Next Week's Stable Release - -1. Build social media posts based on the popular items in preview. - - - Draft the copy in the [tweets](https://zed.dev/channel/tweets-23331) channel. - - Create the preview media (videos, screenshots). - - For features that you film videos around, try to create alternative photo-only versions to be used in the email, as videos and GIFs aren't great for email. - - Store all created media in `Feature Media` in our Google Drive. - - Build X and Bluesky post drafts (copy and media) in [Buffer](https://buffer.com), to be sent for next week's stable release. - - **Note: These are preview items and you may discover bugs.** - **This is a very good time to report these findings to the team!** - -1. Build email based on the popular items in preview. - - - You can reuse the copy and photo media from the preview social media posts. - - Create a draft email in [Kit](https://kit.com), to be sent for next week's stable release. - -## Patch Release Process - -If your PR fixes a panic or a crash, you should cherry-pick it to the current stable and preview branches. -If your PR fixes a regression in recently released code, you should cherry-pick it to preview. 
- -You will need write access to the Zed repository to do this: - ---- - -1. Send a PR containing your change to `main` as normal. - -1. Once it is merged, cherry-pick the commit locally to either of the release branches (`v0.XXX.x`). - - - In some cases, you may have to handle a merge conflict. - More often than not, this will happen when cherry-picking to stable, as the stable branch is more "stale" than the preview branch. - -1. After the commit is cherry-picked, run `./script/trigger-release {preview|stable}`. - This will bump the version numbers, create a new release tag, and kick off a release build. - - - This can also be run from the [GitHub Actions UI](https://github.com/zed-industries/zed/actions/workflows/bump_patch_version.yml): - ![](https://github.com/zed-industries/zed/assets/1486634/9e31ae95-09e1-4c7f-9591-944f4f5b63ea) - -1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), proofread and edit the release notes as needed and **save**. - - - **Do not publish the drafts, yet.** - -1. Check the release assets. - - - Ensure the stable / preview release jobs have finished without error. - - Ensure each draft has the proper number of assets—releases currently have 10 assets each. - - Download the artifacts for each release draft and test that you can run them locally. - -1. Publish stable / preview drafts, one at a time. - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. - -## Nightly release process - -In addition to the public releases, we also have a nightly build that we encourage employees to use. -Nightly is released by cron once a day, and can be shipped as often as you'd like. -There are no release notes or announcements, so you can just merge your changes to main and run `./script/trigger-release nightly`. 
diff --git a/docs/src/extensions/icon-themes.md b/docs/src/extensions/icon-themes.md
index 697723a59677c25dd14982a1c7f7cf92d1950a70..676cae59cd343a3f73ce5e0504e370e92c699d2b 100644
--- a/docs/src/extensions/icon-themes.md
+++ b/docs/src/extensions/icon-themes.md
@@ -11,7 +11,7 @@ The [Material Icon Theme](https://github.com/zed-extensions/material-icon-theme)
 There are two important directories for an icon theme extension:
 
 - `icon_themes`: This directory will contain one or more JSON files containing the icon theme definitions.
-- `icons`: This directory contains the icons assets that will be distributed with the extension. You can created subdirectories in this directory, if so desired.
+- `icons`: This directory contains the icon assets that will be distributed with the extension. You can create subdirectories in this directory, if so desired.
 
 Each icon theme file should adhere to the JSON schema specified at [`https://zed.dev/schema/icon_themes/v0.3.0.json`](https://zed.dev/schema/icon_themes/v0.3.0.json).
 
diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md
index 5c63b880c875701e1721b8d6298dc49da6b45a98..7eb6a355dbfcafaa01ca885789d41e28c474d2f4 100644
--- a/docs/src/extensions/languages.md
+++ b/docs/src/extensions/languages.md
@@ -324,7 +324,7 @@ This query marks number and string values in key-value pairs and arrays for reda
 
 The `runnables.scm` file defines rules for detecting runnable code.
-Here's an example from an `runnables.scm` file for JSON: +Here's an example from a `runnables.scm` file for JSON: ```scheme ( diff --git a/docs/src/icon-themes.md b/docs/src/icon-themes.md index e035c7171ef84d77f3d18ae704af8b369c23947e..72fc51b834acc7f4cd03eee83246f9d7b1f9b756 100644 --- a/docs/src/icon-themes.md +++ b/docs/src/icon-themes.md @@ -4,19 +4,21 @@ Zed comes with a built-in icon theme, with more icon themes available as extensi ## Selecting an Icon Theme -See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with "icon theme selector: toggle". +See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with `icon theme selector: toggle`. Navigating through the icon theme list by moving up and down will change the icon theme in real time and hitting enter will save it to your settings file. ## Installing more Icon Themes -More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions). +More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=icon-themes). ## Configuring Icon Themes -Your selected icon theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux). +Your selected icon theme is stored in your settings file. +You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}). -Just like with themes, Zed allows for configuring different icon themes for light and dark mode. You can set the mode to `"light"` or `"dark"` to ignore the current system mode. 
+Just like with themes, Zed allows for configuring different icon themes for light and dark mode. +You can set the mode to `"light"` or `"dark"` to ignore the current system mode. ```json [settings] { diff --git a/docs/src/installation.md b/docs/src/installation.md index 7f8baf0d49bb5ae7873c09424c6f464af044f241..f22c267396abe5f6272d48d970fb2190e24a90b6 100644 --- a/docs/src/installation.md +++ b/docs/src/installation.md @@ -50,6 +50,7 @@ Zed supports the follow macOS releases: | Version | Codename | Apple Status | Zed Status | | ------------- | -------- | -------------- | ------------------- | +| macOS 26.x | Tahoe | Supported | Supported | | macOS 15.x | Sequoia | Supported | Supported | | macOS 14.x | Sonoma | Supported | Supported | | macOS 13.x | Ventura | Supported | Supported | diff --git a/docs/src/languages/rego.md b/docs/src/languages/rego.md index 21192a5c53a2e05a34754eb80421d60fc77467ac..14231c65620ee2c88ac3bb100d6ac91b941c80f4 100644 --- a/docs/src/languages/rego.md +++ b/docs/src/languages/rego.md @@ -7,7 +7,7 @@ Rego language support in Zed is provided by the community-maintained [Rego exten ## Installation -The extensions is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. +The extension is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. 
## Configuration diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 21aed43452318863b735a9b46cd5399a8bfca1c6..29ecd9bc850b919dbc63a87e2f1bf9477901a33d 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -1,6 +1,6 @@ # Snippets -Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit a existing snippets file for a specified [scope](#scopes). +Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit an existing snippets file for a specified [scope](#scopes). The snippets are located in `~/.config/zed/snippets` directory to which you can navigate to with the {#action snippets::OpenFolder} action. diff --git a/docs/src/themes.md b/docs/src/themes.md index 00c2a9571c82c044864d181f8547f2d28ef1a489..0bbea57ebfd7c9d55031c2ca9ff31b67b360bcdd 100644 --- a/docs/src/themes.md +++ b/docs/src/themes.md @@ -4,21 +4,23 @@ Zed comes with a number of built-in themes, with more themes available as extens ## Selecting a Theme -See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with "theme selector: Toggle" (bound to `cmd-k cmd-t` on macOS and `ctrl-k ctrl-t` on Linux). +See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with `theme selector: toggle` (bound to {#kb theme_selector::Toggle}). Navigating through the theme list by moving up and down will change the theme in real time and hitting enter will save it to your settings file. ## Installing more Themes -More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions). +More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=themes). 
Many popular themes have been ported to Zed, and if you're struggling to choose one, visit [zed-themes.com](https://zed-themes.com), a third-party gallery with visible previews for many of them. ## Configuring a Theme -Your selected theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux). +Your selected theme is stored in your settings file. +You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}). -By default, Zed maintains two themes: one for light mode and one for dark mode. You can set the mode to `"dark"` or `"light"` to ignore the current system mode. +By default, Zed maintains two themes: one for light mode and one for dark mode. +You can set the mode to `"dark"` or `"light"` to ignore the current system mode. ```json [settings] { @@ -32,7 +34,8 @@ By default, Zed maintains two themes: one for light mode and one for dark mode. ## Theme Overrides -To override specific attributes of a theme, use the `theme_overrides` setting. This setting can be used to configure theme-specific overrides. +To override specific attributes of a theme, use the `theme_overrides` setting. +This setting can be used to configure theme-specific overrides. For example, add the following to your `settings.json` if you wish to override the background color of the editor and display comments and doc comments as italics: @@ -54,17 +57,17 @@ For example, add the following to your `settings.json` if you wish to override t } ``` -To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see: [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). +To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). 
-To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes. +To see a list of available theme attributes look at the JSON file for your theme. +For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes. ## Local Themes Store new themes locally by placing them in the `~/.config/zed/themes` directory (macOS and Linux) or `%USERPROFILE%\AppData\Roaming\Zed\themes\` (Windows). -For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. It will be available in the theme selector the next time Zed loads. - -Find more themes at [zed-themes.com](https://zed-themes.com). +For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. +It will be available in the theme selector the next time Zed loads. ## Theme Development diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index b353377dd764d2506abd4cce46352df3ca47dfcb..a5c4ccf8490458ff671fdc3425f480471df077a9 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -1,14 +1,14 @@ # Visual Customization -Various aspects of Zed's visual layout can be configured via Zed settings.json which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}). +Various aspects of Zed's visual layout can be configured via either the settings window or the `settings.json` file, which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}) and {#action zed::OpenSettingsFile} ({#kb zed::OpenSettingsFile}) respectively. See [Configuring Zed](./configuring-zed.md) for additional information and other non-visual settings. 
## Themes -Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu. +You can install many [themes](./themes.md) and [icon themes](./icon-themes.md) in form of extensions by running {#action zed::Extensions} from the command palette. -You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings: +You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and {#action icon_theme_selector::Toggle} ({#kb icon_theme_selector::Toggle}) which will modify the following settings: ```json [settings] { @@ -61,15 +61,20 @@ If you would like to use distinct themes for light mode/dark mode that can be se "line_height": "standard", }, - // Agent Panel Font Settings - "agent_font_size": 15 + // Controls the font size for agent responses in the agent panel. + // If not specified, it falls back to the UI font size. + "agent_ui_font_size": 15, + // Controls the font size for the agent panel's message editor, user message, + // and any other snippet of code. + "agent_buffer_font_size": 12 ``` ### Font ligatures By default Zed enable font ligatures which will visually combines certain adjacent characters. -For example `=>` will be displayed as `→` and `!=` will be `≠`. This is purely cosmetic and the individual characters remain unchanged. +For example `=>` will be displayed as `→` and `!=` will be `≠`. +This is purely cosmetic and the individual characters remain unchanged. To disable this behavior use: @@ -363,6 +368,8 @@ TBD: Centered layout related settings // How to render LSP `textDocument/documentColor` colors in the editor. 
"lsp_document_colors": "inlay", // none, inlay, border, background + // When to show the scrollbar in the completion menu. + "completion_menu_scrollbar": "never", // auto, system, always, never ``` ### Edit Predictions {#editor-ai} @@ -464,7 +471,12 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k "default_width": 640, // Default width (left/right docked) "default_height": 320, // Default height (bottom docked) }, - "agent_font_size": 16 + // Controls the font size for agent responses in the agent panel. + // If not specified, it falls back to the UI font size. + "agent_ui_font_size": 15, + // Controls the font size for the agent panel's message editor, user message, + // and any other snippet of code. + "agent_buffer_font_size": 12 ``` See [Zed AI Documentation](./ai/overview.md) for additional non-visual AI settings. diff --git a/script/bundle-linux b/script/bundle-linux index e41a3d4783d5378409a568e77f74c7f9393576b5..dee60f01e6d1d0ba2624284a3e44c50b35a885c7 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -170,12 +170,7 @@ cp "assets/licenses.md" "${zed_dir}/licenses.md" # Create archive out of everything that's in the temp directory arch=$(uname -m) -target="linux-${arch}" -if [[ "$channel" == "dev" ]]; then - archive="zed-${commit}-${target}.tar.gz" -else - archive="zed-${target}.tar.gz" -fi +archive="zed-linux-${arch}.tar.gz" rm -rf "${archive}" remove_match="zed(-[a-zA-Z0-9]+)?-linux-$(uname -m)\.tar\.gz" diff --git a/script/bundle-mac b/script/bundle-mac index 901d59f1d011a0ed2b20065bd86739605a81a487..b559768348bccf4db64f1436e9eb970e1fbde014 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -70,12 +70,12 @@ target_triple=${host_line#*: } if [[ $# -gt 0 && -n "$1" ]]; then target_triple="$1" fi -remote_server_arch="" +arch_suffix="" if [[ "$target_triple" = "x86_64-apple-darwin" ]]; then - remote_server_arch="x86_64" + arch_suffix="x86_64" elif [[ "$target_triple" = "aarch64-apple-darwin" ]]; then - 
remote_server_arch="aarch64" + arch_suffix="aarch64" else echo "Unsupported architecture $target_triple" exit 1 @@ -232,7 +232,7 @@ function sign_app_binaries() { else dmg_target_directory="target/${target_triple}/${target_dir}" dmg_source_directory="${dmg_target_directory}/dmg" - dmg_file_path="${dmg_target_directory}/Zed.dmg" + dmg_file_path="${dmg_target_directory}/Zed-${arch_suffix}.dmg" xcode_bin_dir_path="$(xcode-select -p)/usr/bin" rm -rf ${dmg_source_directory} @@ -310,4 +310,4 @@ cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli" sign_app_binaries sign_binary "target/$target_triple/release/remote_server" -gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz +gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$arch_suffix.gz diff --git a/script/run-unit-evals b/script/run-unit-evals new file mode 100755 index 0000000000000000000000000000000000000000..02481e1ce9dde7d2cbde9603f663093bf7a2ee38 --- /dev/null +++ b/script/run-unit-evals @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' diff --git a/script/upload-nightly b/script/upload-nightly index 985c43936457f53d1db72e4436434aa0ee59296a..abeedc5527520965c7eb5f5714d3d757702711b0 100755 --- a/script/upload-nightly +++ b/script/upload-nightly @@ -1,75 +1,18 @@ #!/usr/bin/env bash -# Based on the template in: https://docs.digitalocean.com/reference/api/spaces-api/ bash -euo pipefail source script/lib/blob-store.sh -allowed_targets=("linux-targz" "macos" "freebsd") -is_allowed_target() { - for val in "${allowed_targets[@]}"; do - if [[ "$1" == "$val" ]]; then - return 0 - fi - done - return 1 -} - -allowed_arch=("x86_64" "aarch64") -is_allowed_arch() { - for val in "${allowed_arch[@]}"; do - if [[ "$1" == "$val" ]]; then - return 0 - fi - done - return 1 -} - 
-if is_allowed_target "$1"; then - target="$1" -else - echo "Error: Target '$1' is not allowed" - echo "Usage: $0 [${allowed_targets[*]}] {arch}" - exit 1 -fi -if is_allowed_arch "$2"; then - arch="$2" -else - echo "Error: Arch '$2' is not allowed" - echo "Usage: $0 $1 [${allowed_arch[*]}]" - exit 1 -fi -echo "Uploading nightly for target: $target $arch" - bucket_name="zed-nightly-host" -sha=$(git rev-parse HEAD) -echo ${sha} > target/latest-sha -find target -type f -name "zed-remote-server-*.gz" -print0 | while IFS= read -r -d '' file_to_upload; do +for file_to_upload in ./release-artifacts/*; do + [ -f "$file_to_upload" ] || continue upload_to_blob_store $bucket_name "$file_to_upload" "nightly/$(basename "$file_to_upload")" + upload_to_blob_store $bucket_name "$file_to_upload" "${GITHUB_SHA}/$(basename "$file_to_upload")" rm -f "$file_to_upload" done -case "$target" in - macos) - upload_to_blob_store $bucket_name "target/$arch-apple-darwin/release/Zed.dmg" "nightly/Zed-$arch.dmg" - upload_to_blob_store $bucket_name "target/latest-sha" "nightly/latest-sha" - rm -f "target/$arch-apple-darwin/release/Zed.dmg" "target/release/Zed.dmg" - rm -f "target/latest-sha" - ;; - linux-targz) - find . -type f -name "zed-*.tar.gz" -print0 | while IFS= read -r -d '' file_to_upload; do - upload_to_blob_store $bucket_name "$file_to_upload" "nightly/$(basename "$file_to_upload")" - rm -f "$file_to_upload" - done - upload_to_blob_store $bucket_name "target/latest-sha" "nightly/latest-sha-linux-targz" - rm -f "target/latest-sha" - ;; - freebsd) - echo "No freebsd client build (yet)." 
- ;; - *) - echo "Error: Unknown target '$target'" - exit 1 - ;; -esac +sha=$(git rev-parse HEAD) +echo -n ${sha} > ./release-artifacts/latest-sha +upload_to_blob_store $bucket_name "release-artifacts/latest-sha" "nightly/latest-sha" diff --git a/tooling/perf/src/main.rs b/tooling/perf/src/main.rs index 1e6ddedf11e2c5f265d3d4dd93785afbf7f565d2..243658e50807aa71c25c279dda637f4db9b64766 100644 --- a/tooling/perf/src/main.rs +++ b/tooling/perf/src/main.rs @@ -413,10 +413,26 @@ fn triage_test( } } +/// Try to find the hyperfine binary the user has installed. +fn hyp_binary() -> Option { + const HYP_PATH: &str = "hyperfine"; + const HYP_HOME: &str = "~/.cargo/bin/hyperfine"; + if Command::new(HYP_PATH).output().is_err() { + if Command::new(HYP_HOME).output().is_err() { + None + } else { + Some(Command::new(HYP_HOME)) + } + } else { + Some(Command::new(HYP_PATH)) + } +} + /// Profiles a given test with hyperfine, returning the mean and standard deviation /// for its runtime. If the test errors, returns `None` instead. fn hyp_profile(t_bin: &str, t_name: &str, iterations: NonZero) -> Option { - let mut perf_cmd = Command::new("hyperfine"); + let mut perf_cmd = hyp_binary().expect("Couldn't find the Hyperfine binary on the system"); + // Warm up the cache and print markdown output to stdout, which we parse. 
perf_cmd.args([ "--style", diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index a8472606ffd6aea48775f3fca28f9c30b2223cc5..538724bcd9648b89d303a6eff834d08ffb3bf18a 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -10,6 +10,7 @@ mod release_nightly; mod run_bundling; mod release; +mod run_agent_evals; mod run_tests; mod runners; mod steps; @@ -28,6 +29,8 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { ("run_tests.yml", run_tests::run_tests()), ("release.yml", release::release()), ("compare_perf.yml", compare_perf::compare_perf()), + ("run_unit_evals.yml", run_agent_evals::run_unit_evals()), + ("run_agent_evals.yml", run_agent_evals::run_agent_evals()), ]; fs::create_dir_all(dir) .with_context(|| format!("Failed to create directory: {}", dir.display()))?; diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index c3d86192fd0e17c268a235292cf24223622094e1..4f1166fbe0b39393d337d6cffefe1fc44fc4e12e 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -1,9 +1,11 @@ use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow}; use crate::tasks::workflows::{ - run_bundling, run_tests, runners, - steps::{self, NamedJob, dependant_job, named, release_job}, - vars, + run_bundling::{bundle_linux, bundle_mac, bundle_windows}, + run_tests, + runners::{self, Arch}, + steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job}, + vars::{self, assets}, }; pub(crate) fn release() -> Workflow { @@ -15,12 +17,12 @@ pub(crate) fn release() -> Workflow { let create_draft_release = create_draft_release(); let bundle = ReleaseBundleJobs { - linux_arm64: bundle_linux_arm64(&[&linux_tests, &check_scripts]), - linux_x86_64: bundle_linux_x86_64(&[&linux_tests, &check_scripts]), - mac_arm64: bundle_mac_arm64(&[&macos_tests, &check_scripts]), - mac_x86_64: 
bundle_mac_x86_64(&[&macos_tests, &check_scripts]), - windows_arm64: bundle_windows_arm64(&[&windows_tests, &check_scripts]), - windows_x86_64: bundle_windows_x86_64(&[&windows_tests, &check_scripts]), + linux_aarch64: bundle_linux(Arch::AARCH64, None, &[&linux_tests, &check_scripts]), + linux_x86_64: bundle_linux(Arch::X86_64, None, &[&linux_tests, &check_scripts]), + mac_aarch64: bundle_mac(Arch::AARCH64, None, &[&macos_tests, &check_scripts]), + mac_x86_64: bundle_mac(Arch::X86_64, None, &[&macos_tests, &check_scripts]), + windows_aarch64: bundle_windows(Arch::AARCH64, None, &[&windows_tests, &check_scripts]), + windows_x86_64: bundle_windows(Arch::X86_64, None, &[&windows_tests, &check_scripts]), }; let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle); @@ -31,47 +33,68 @@ pub(crate) fn release() -> Workflow { .on(Event::default().push(Push::default().tags(vec!["v*".to_string()]))) .concurrency(vars::one_workflow_per_non_main_branch()) .add_env(("CARGO_TERM_COLOR", "always")) - .add_env(("CARGO_INCREMENTAL", "0")) .add_env(("RUST_BACKTRACE", "1")) - .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) - .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT)) .add_job(macos_tests.name, macos_tests.job) .add_job(linux_tests.name, linux_tests.job) .add_job(windows_tests.name, windows_tests.job) .add_job(check_scripts.name, check_scripts.job) .add_job(create_draft_release.name, create_draft_release.job) - .add_job(bundle.linux_arm64.name, bundle.linux_arm64.job) - .add_job(bundle.linux_x86_64.name, bundle.linux_x86_64.job) - .add_job(bundle.mac_arm64.name, bundle.mac_arm64.job) - .add_job(bundle.mac_x86_64.name, bundle.mac_x86_64.job) - .add_job(bundle.windows_arm64.name, bundle.windows_arm64.job) - .add_job(bundle.windows_x86_64.name, bundle.windows_x86_64.job) + .map(|mut workflow| { + for job in bundle.into_jobs() { + workflow = workflow.add_job(job.name, job.job); + } + workflow + }) 
.add_job(upload_release_assets.name, upload_release_assets.job) .add_job(auto_release_preview.name, auto_release_preview.job) } -struct ReleaseBundleJobs { - linux_arm64: NamedJob, - linux_x86_64: NamedJob, - mac_arm64: NamedJob, - mac_x86_64: NamedJob, - windows_arm64: NamedJob, - windows_x86_64: NamedJob, +pub(crate) struct ReleaseBundleJobs { + pub linux_aarch64: NamedJob, + pub linux_x86_64: NamedJob, + pub mac_aarch64: NamedJob, + pub mac_x86_64: NamedJob, + pub windows_aarch64: NamedJob, + pub windows_x86_64: NamedJob, } -fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob { - fn create_sentry_release() -> Step { - named::uses( - "getsentry", - "action-release", - "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 - ) - .add_env(("SENTRY_ORG", "zed-dev")) - .add_env(("SENTRY_PROJECT", "zed")) - .add_env(("SENTRY_AUTH_TOKEN", "${{ secrets.SENTRY_AUTH_TOKEN }}")) - .add_with(("environment", "production")) +impl ReleaseBundleJobs { + pub fn jobs(&self) -> Vec<&NamedJob> { + vec![ + &self.linux_aarch64, + &self.linux_x86_64, + &self.mac_aarch64, + &self.mac_x86_64, + &self.windows_aarch64, + &self.windows_x86_64, + ] } + pub fn into_jobs(self) -> Vec<NamedJob> { + vec![ + self.linux_aarch64, + self.linux_x86_64, + self.mac_aarch64, + self.mac_x86_64, + self.windows_aarch64, + self.windows_x86_64, + ] + } +} + +pub(crate) fn create_sentry_release() -> Step { + named::uses( + "getsentry", + "action-release", + "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 + ) + .add_env(("SENTRY_ORG", "zed-dev")) + .add_env(("SENTRY_PROJECT", "zed")) + .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN)) + .add_with(("environment", "production")) +} + +fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob { named::job( dependant_job(deps) .runs_on(runners::LINUX_SMALL) @@ -86,84 +109,44 @@ fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob { steps::script( r#"gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false"#, ) - .add_env(("GITHUB_TOKEN",
"${{ secrets.GITHUB_TOKEN }}")), + .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)), ) .add_step(create_sentry_release()), ) } -fn upload_release_assets(deps: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs) -> NamedJob { - fn download_workflow_artifacts() -> Step { - named::uses( - "actions", - "download-artifact", - "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0 - ) - .add_with(("path", "./artifacts/")) - } +pub(crate) fn download_workflow_artifacts() -> Step { + named::uses( + "actions", + "download-artifact", + "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0 + ) + .add_with(("path", "./artifacts/")) +} - fn prep_release_artifacts(bundle: &ReleaseBundleJobs) -> Step { - let assets = [ - (&bundle.mac_x86_64.name, "zed", "Zed-x86_64.dmg"), - (&bundle.mac_arm64.name, "zed", "Zed-aarch64.dmg"), - (&bundle.windows_x86_64.name, "zed", "Zed-x86_64.exe"), - (&bundle.windows_arm64.name, "zed", "Zed-aarch64.exe"), - (&bundle.linux_arm64.name, "zed", "zed-linux-aarch64.tar.gz"), - (&bundle.linux_x86_64.name, "zed", "zed-linux-x86_64.tar.gz"), - ( - &bundle.linux_x86_64.name, - "remote-server", - "zed-remote-server-linux-x86_64.gz", - ), - ( - &bundle.linux_arm64.name, - "remote-server", - "zed-remote-server-linux-aarch64.gz", - ), - ( - &bundle.mac_x86_64.name, - "remote-server", - "zed-remote-server-macos-x86_64.gz", - ), - ( - &bundle.mac_arm64.name, - "remote-server", - "zed-remote-server-macos-aarch64.gz", - ), - ]; - - let mut script_lines = vec!["mkdir -p release-artifacts/\n".to_string()]; - for (job_name, artifact_kind, release_artifact_name) in assets { - let artifact_path = - ["${{ needs.", job_name, ".outputs.", artifact_kind, " }}"].join(""); - let mv_command = format!( - "mv ./artifacts/{artifact_path}/* release-artifacts/{release_artifact_name}" - ); - script_lines.push(mv_command) - } - - named::bash(&script_lines.join("\n")) +pub(crate) fn prep_release_artifacts() -> Step { + let mut script_lines = vec!["mkdir -p release-artifacts/\n".to_string()]; 
+ for asset in assets::all() { + let mv_command = format!("mv ./artifacts/{asset}/{asset} release-artifacts/{asset}"); + script_lines.push(mv_command) } + named::bash(&script_lines.join("\n")) +} + +fn upload_release_assets(deps: &[&NamedJob], bundle: &ReleaseBundleJobs) -> NamedJob { let mut deps = deps.to_vec(); - deps.extend([ - &bundle_jobs.linux_arm64, - &bundle_jobs.linux_x86_64, - &bundle_jobs.mac_arm64, - &bundle_jobs.mac_x86_64, - &bundle_jobs.windows_arm64, - &bundle_jobs.windows_x86_64, - ]); + deps.extend(bundle.jobs()); named::job( dependant_job(&deps) .runs_on(runners::LINUX_MEDIUM) .add_step(download_workflow_artifacts()) .add_step(steps::script("ls -lR ./artifacts")) - .add_step(prep_release_artifacts(bundle_jobs)) + .add_step(prep_release_artifacts()) .add_step( steps::script("gh release upload \"$GITHUB_REF_NAME\" --repo=zed-industries/zed release-artifacts/*") - .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")), + .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)), ), ) } @@ -177,7 +160,7 @@ fn create_draft_release() -> NamedJob { fn create_release() -> Step { named::bash("script/create-draft-release target/release-notes.md") - .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")) + .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)) } named::job( @@ -199,25 +182,3 @@ fn create_draft_release() -> NamedJob { .add_step(create_release()), ) } - -fn bundle_mac_x86_64(deps: &[&NamedJob]) -> NamedJob { - named::job(run_bundling::bundle_mac_job(runners::Arch::X86_64, deps)) -} -fn bundle_mac_arm64(deps: &[&NamedJob]) -> NamedJob { - named::job(run_bundling::bundle_mac_job(runners::Arch::ARM64, deps)) -} -fn bundle_linux_x86_64(deps: &[&NamedJob]) -> NamedJob { - named::job(run_bundling::bundle_linux_job(runners::Arch::X86_64, deps)) -} -fn bundle_linux_arm64(deps: &[&NamedJob]) -> NamedJob { - named::job(run_bundling::bundle_linux_job(runners::Arch::ARM64, deps)) -} -fn bundle_windows_x86_64(deps: &[&NamedJob]) -> NamedJob { - 
named::job(run_bundling::bundle_windows_job( - runners::Arch::X86_64, - deps, - )) -} -fn bundle_windows_arm64(deps: &[&NamedJob]) -> NamedJob { - named::job(run_bundling::bundle_windows_job(runners::Arch::ARM64, deps)) -} diff --git a/tooling/xtask/src/tasks/workflows/release_nightly.rs b/tooling/xtask/src/tasks/workflows/release_nightly.rs index 7079cdbf028c31a27eb9cc230c7c93eb67367680..f557a26f670320a452d83cbc7d48cbabf115b8df 100644 --- a/tooling/xtask/src/tasks/workflows/release_nightly.rs +++ b/tooling/xtask/src/tasks/workflows/release_nightly.rs @@ -1,46 +1,33 @@ use crate::tasks::workflows::{ nix_build::build_nix, - run_bundling::{bundle_mac, bundle_windows}, + release::{ + ReleaseBundleJobs, create_sentry_release, download_workflow_artifacts, + prep_release_artifacts, + }, + run_bundling::{bundle_linux, bundle_mac, bundle_windows}, run_tests::run_platform_tests, - runners::{Arch, Platform}, - steps::NamedJob, - vars::{mac_bundle_envs, windows_bundle_envs}, + runners::{Arch, Platform, ReleaseChannel}, + steps::{FluentBuilder, NamedJob}, }; use super::{runners, steps, steps::named, vars}; use gh_workflow::*; -use indexmap::IndexMap; /// Generates the release_nightly.yml workflow pub fn release_nightly() -> Workflow { - let env: IndexMap<_, _> = [ - ("CARGO_TERM_COLOR", "always"), - ("CARGO_INCREMENTAL", "0"), - ("RUST_BACKTRACE", "1"), - ("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED), - ("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT), - ( - "DIGITALOCEAN_SPACES_ACCESS_KEY", - vars::DIGITALOCEAN_SPACES_ACCESS_KEY, - ), - ( - "DIGITALOCEAN_SPACES_SECRET_KEY", - vars::DIGITALOCEAN_SPACES_SECRET_KEY, - ), - ] - .into_iter() - .map(|(key, value)| (key.into(), value.into())) - .collect(); - let style = check_style(); - let tests = run_platform_tests(Platform::Mac); - let windows_tests = run_platform_tests(Platform::Windows); - let bundle_mac_x86 = bundle_mac_nightly(Arch::X86_64, &[&style, &tests]); - let bundle_mac_arm = 
bundle_mac_nightly(Arch::ARM64, &[&style, &tests]); - let linux_x86 = bundle_linux_nightly(Arch::X86_64, &[&style, &tests]); - let linux_arm = bundle_linux_nightly(Arch::ARM64, &[&style, &tests]); - let windows_x86 = bundle_windows_nightly(Arch::X86_64, &[&style, &windows_tests]); - let windows_arm = bundle_windows_nightly(Arch::ARM64, &[&style, &windows_tests]); + // run only on windows as that's our fastest platform right now. + let tests = run_platform_tests(Platform::Windows); + let nightly = Some(ReleaseChannel::Nightly); + + let bundle = ReleaseBundleJobs { + linux_aarch64: bundle_linux(Arch::AARCH64, nightly, &[&style, &tests]), + linux_x86_64: bundle_linux(Arch::X86_64, nightly, &[&style, &tests]), + mac_aarch64: bundle_mac(Arch::AARCH64, nightly, &[&style, &tests]), + mac_x86_64: bundle_mac(Arch::X86_64, nightly, &[&style, &tests]), + windows_aarch64: bundle_windows(Arch::AARCH64, nightly, &[&style, &tests]), + windows_x86_64: bundle_windows(Arch::X86_64, nightly, &[&style, &tests]), + }; let nix_linux_x86 = build_nix( Platform::Linux, @@ -51,35 +38,28 @@ pub fn release_nightly() -> Workflow { ); let nix_mac_arm = build_nix( Platform::Mac, - Arch::ARM64, + Arch::AARCH64, "default", None, &[&style, &tests], ); - let update_nightly_tag = update_nightly_tag_job(&[ - &bundle_mac_x86, - &bundle_mac_arm, - &linux_x86, - &linux_arm, - &windows_x86, - &windows_arm, - ]); + let update_nightly_tag = update_nightly_tag_job(&bundle); named::workflow() .on(Event::default() // Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) .schedule([Schedule::new("0 7 * * *")]) .push(Push::default().add_tag("nightly"))) - .envs(env) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("RUST_BACKTRACE", "1")) .add_job(style.name, style.job) .add_job(tests.name, tests.job) - .add_job(windows_tests.name, windows_tests.job) - .add_job(bundle_mac_x86.name, bundle_mac_x86.job) - .add_job(bundle_mac_arm.name, bundle_mac_arm.job) - .add_job(linux_x86.name, 
linux_x86.job) - .add_job(linux_arm.name, linux_arm.job) - .add_job(windows_x86.name, windows_x86.job) - .add_job(windows_arm.name, windows_arm.job) + .map(|mut workflow| { + for job in bundle.into_jobs() { + workflow = workflow.add_job(job.name, job.job); + } + workflow + }) .add_job(nix_linux_x86.name, nix_linux_x86.job) .add_job(nix_mac_arm.name, nix_mac_arm.job) .add_job(update_nightly_tag.name, update_nightly_tag.job) @@ -112,62 +92,7 @@ fn release_job(deps: &[&NamedJob]) -> Job { } } -fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { - let platform = Platform::Mac; - NamedJob { - name: format!("bundle_mac_nightly_{arch}"), - job: release_job(deps) - .runs_on(runners::MAC_DEFAULT) - .envs(mac_bundle_envs()) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_node()) - .add_step(steps::setup_sentry()) - .add_step(steps::clear_target_dir_if_large(platform)) - .add_step(set_release_channel_to_nightly(platform)) - .add_step(bundle_mac(arch)) - .add_step(upload_zed_nightly(platform, arch)), - } -} - -fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { - let platform = Platform::Linux; - let mut job = steps::release_job(deps) - .runs_on(arch.linux_bundler()) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_sentry()) - .add_step(steps::script("./script/linux")); - - // todo(ci) can we do this on arm too? 
- if arch == Arch::X86_64 { - job = job.add_step(steps::script("./script/install-mold")); - } - job = job - .add_step(steps::clear_target_dir_if_large(platform)) - .add_step(set_release_channel_to_nightly(platform)) - .add_step(steps::script("./script/bundle-linux")) - .add_step(upload_zed_nightly(platform, arch)); - NamedJob { - name: format!("bundle_linux_nightly_{arch}"), - job, - } -} - -fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { - let platform = Platform::Windows; - NamedJob { - name: format!("bundle_windows_nightly_{arch}"), - job: steps::release_job(deps) - .runs_on(runners::WINDOWS_DEFAULT) - .envs(windows_bundle_envs()) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_sentry()) - .add_step(set_release_channel_to_nightly(platform)) - .add_step(bundle_windows(arch)) - .add_step(upload_zed_nightly(platform, arch)), - } -} - -fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob { +fn update_nightly_tag_job(bundle: &ReleaseBundleJobs) -> NamedJob { fn update_nightly_tag() -> Step { named::bash(indoc::indoc! 
{r#" if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then @@ -181,56 +106,26 @@ fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob { "#}) } - fn create_sentry_release() -> Step { - named::uses( - "getsentry", - "action-release", - "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 - ) - .add_env(("SENTRY_ORG", "zed-dev")) - .add_env(("SENTRY_PROJECT", "zed")) - .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN)) - .add_with(("environment", "production")) - } - NamedJob { name: "update_nightly_tag".to_owned(), - job: steps::release_job(deps) - .runs_on(runners::LINUX_SMALL) + job: steps::release_job(&bundle.jobs()) + .runs_on(runners::LINUX_MEDIUM) .add_step(steps::checkout_repo().add_with(("fetch-depth", 0))) + .add_step(download_workflow_artifacts()) + .add_step(steps::script("ls -lR ./artifacts")) + .add_step(prep_release_artifacts()) + .add_step( + steps::script("./script/upload-nightly") + .add_env(( + "DIGITALOCEAN_SPACES_ACCESS_KEY", + vars::DIGITALOCEAN_SPACES_ACCESS_KEY, + )) + .add_env(( + "DIGITALOCEAN_SPACES_SECRET_KEY", + vars::DIGITALOCEAN_SPACES_SECRET_KEY, + )), + ) .add_step(update_nightly_tag()) .add_step(create_sentry_release()), } } - -fn set_release_channel_to_nightly(platform: Platform) -> Step { - match platform { - Platform::Linux | Platform::Mac => named::bash(indoc::indoc! {r#" - set -eu - version=$(git rev-parse --short HEAD) - echo "Publishing version: ${version} on release channel nightly" - echo "nightly" > crates/zed/RELEASE_CHANNEL - "#}), - Platform::Windows => named::pwsh(indoc::indoc! 
{r#" - $ErrorActionPreference = "Stop" - $version = git rev-parse --short HEAD - Write-Host "Publishing version: $version on release channel nightly" - "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" - "#}) - .working_directory("${{ env.ZED_WORKSPACE }}"), - } -} - -fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step { - match platform { - Platform::Linux => named::bash(&format!("script/upload-nightly linux-targz {arch}")), - Platform::Mac => named::bash(&format!("script/upload-nightly macos {arch}")), - Platform::Windows => { - let cmd = match arch { - Arch::X86_64 => "script/upload-nightly.ps1 -Architecture x86_64", - Arch::ARM64 => "script/upload-nightly.ps1 -Architecture aarch64", - }; - named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}") - } - } -} diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs new file mode 100644 index 0000000000000000000000000000000000000000..1af09f6ca8fa0bc24c99eda7a18904b1b8886bb3 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -0,0 +1,107 @@ +use gh_workflow::{ + Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow, + WorkflowDispatch, +}; + +use crate::tasks::workflows::{ + runners::{self, Platform}, + steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config}, + vars, +}; + +pub(crate) fn run_agent_evals() -> Workflow { + let agent_evals = agent_evals(); + + named::workflow() + .on(Event::default() + .schedule([Schedule::default().cron("0 0 * * *")]) + .pull_request(PullRequest::default().add_branch("**").types([ + PullRequestType::Synchronize, + PullRequestType::Reopened, + PullRequestType::Labeled, + ])) + .workflow_dispatch(WorkflowDispatch::default())) + .concurrency(vars::one_workflow_per_non_main_branch()) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", 0)) + .add_env(("RUST_BACKTRACE", 1)) + .add_env(("ANTHROPIC_API_KEY", 
vars::ANTHROPIC_API_KEY)) + .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) + .add_env(("ZED_EVAL_TELEMETRY", 1)) + .add_job(agent_evals.name, agent_evals.job) +} + +fn agent_evals() -> NamedJob { + fn run_eval() -> Step { + named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1") + } + + named::job( + Job::default() + .cond(Expression::new(indoc::indoc!{r#" + github.repository_owner == 'zed-industries' && + (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) + "#})) + .runs_on(runners::LINUX_DEFAULT) + .timeout_minutes(60_u32) + .add_step(steps::checkout_repo()) + .add_step(steps::cache_rust_dependencies_namespace()) + .map(steps::install_linux_dependencies) + .add_step(setup_cargo_config(Platform::Linux)) + .add_step(steps::script("cargo build --package=eval")) + .add_step(run_eval()) + .add_step(steps::cleanup_cargo_config(Platform::Linux)) + ) +} + +pub(crate) fn run_unit_evals() -> Workflow { + let unit_evals = unit_evals(); + + named::workflow() + .on(Event::default() + .schedule([ + // GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. 
+ Schedule::default().cron("47 1 * * 2"), + ]) + .workflow_dispatch(WorkflowDispatch::default())) + .concurrency(vars::one_workflow_per_non_main_branch()) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", 0)) + .add_env(("RUST_BACKTRACE", 1)) + .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) + .add_job(unit_evals.name, unit_evals.job) +} + +fn unit_evals() -> NamedJob { + fn send_failure_to_slack() -> Step { + named::uses( + "slackapi", + "slack-github-action", + "b0fa283ad8fea605de13dc3f449259339835fc52", + ) + .if_condition(Expression::new("${{ failure() }}")) + .add_with(("method", "chat.postMessage")) + .add_with(("token", vars::SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN)) + .add_with(("payload", indoc::indoc!{r#" + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + "#})) + } + + named::job( + Job::default() + .runs_on(runners::LINUX_DEFAULT) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .add_step(steps::cache_rust_dependencies_namespace()) + .map(steps::install_linux_dependencies) + .add_step(steps::cargo_install_nextest(Platform::Linux)) + .add_step(steps::clear_target_dir_if_large(Platform::Linux)) + .add_step( + steps::script("./script/run-unit-evals") + .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)), + ) + .add_step(send_failure_to_slack()) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), + ) +} diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index be163b215bace76c411145d0ad04d8a9dd7788ca..f01ca4aaa26e8d3e731fdac4f1d77b4b10a39174 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -1,13 +1,24 @@ +use std::path::Path; + use crate::tasks::workflows::{ + release::ReleaseBundleJobs, + runners::{Arch, Platform, ReleaseChannel}, 
steps::{FluentBuilder, NamedJob, dependant_job, named}, - vars::{mac_bundle_envs, windows_bundle_envs}, + vars::{assets, bundle_envs}, }; -use super::{runners, steps, vars}; +use super::{runners, steps}; use gh_workflow::*; -use indexmap::IndexMap; pub fn run_bundling() -> Workflow { + let bundle = ReleaseBundleJobs { + linux_aarch64: bundle_linux(Arch::AARCH64, None, &[]), + linux_x86_64: bundle_linux(Arch::X86_64, None, &[]), + mac_aarch64: bundle_mac(Arch::AARCH64, None, &[]), + mac_x86_64: bundle_mac(Arch::X86_64, None, &[]), + windows_aarch64: bundle_windows(Arch::AARCH64, None, &[]), + windows_x86_64: bundle_windows(Arch::X86_64, None, &[]), + }; named::workflow() .on(Event::default().pull_request( PullRequest::default().types([PullRequestType::Labeled, PullRequestType::Synchronize]), @@ -19,34 +30,13 @@ pub fn run_bundling() -> Workflow { .cancel_in_progress(true), ) .add_env(("CARGO_TERM_COLOR", "always")) - .add_env(("CARGO_INCREMENTAL", "0")) .add_env(("RUST_BACKTRACE", "1")) - .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) - .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT)) - .add_job( - "bundle_mac_x86_64", - bundle_mac_job(runners::Arch::X86_64, &[]), - ) - .add_job( - "bundle_mac_arm64", - bundle_mac_job(runners::Arch::ARM64, &[]), - ) - .add_job( - "bundle_linux_x86_64", - bundle_linux_job(runners::Arch::X86_64, &[]), - ) - .add_job( - "bundle_linux_arm64", - bundle_linux_job(runners::Arch::ARM64, &[]), - ) - .add_job( - "bundle_windows_x86_64", - bundle_windows_job(runners::Arch::X86_64, &[]), - ) - .add_job( - "bundle_windows_arm64", - bundle_windows_job(runners::Arch::ARM64, &[]), - ) + .map(|mut workflow| { + for job in bundle.into_jobs() { + workflow = workflow.add_job(job.name, job.job); + } + workflow + }) } fn bundle_job(deps: &[&NamedJob]) -> Job { @@ -59,95 +49,145 @@ fn bundle_job(deps: &[&NamedJob]) -> Job { .timeout_minutes(60u32) } -pub(crate) fn bundle_mac_job(arch: runners::Arch, deps: 
&[&NamedJob]) -> Job { - use vars::GITHUB_SHA; - let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.dmg"); - let remote_server_artifact_name = format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz"); - bundle_job(deps) - .runs_on(runners::MAC_DEFAULT) - .envs(mac_bundle_envs()) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_node()) - .add_step(steps::setup_sentry()) - .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac)) - .add_step(bundle_mac(arch)) - .add_step(steps::upload_artifact( - &artifact_name, - &format!("target/{arch}-apple-darwin/release/Zed.dmg"), - )) - .add_step(steps::upload_artifact( - &remote_server_artifact_name, - &format!("target/zed-remote-server-macos-{arch}.gz"), - )) - .outputs( - [ - ("zed".to_string(), artifact_name), - ("remote-server".to_string(), remote_server_artifact_name), - ] - .into_iter() - .collect::>(), +pub(crate) fn bundle_mac( + arch: Arch, + release_channel: Option, + deps: &[&NamedJob], +) -> NamedJob { + pub fn bundle_mac(arch: Arch) -> Step { + named::bash(&format!("./script/bundle-mac {arch}-apple-darwin")) + } + let platform = Platform::Mac; + let artifact_name = match arch { + Arch::X86_64 => assets::MAC_X86_64, + Arch::AARCH64 => assets::MAC_AARCH64, + }; + let remote_server_artifact_name = match arch { + Arch::X86_64 => assets::REMOTE_SERVER_MAC_X86_64, + Arch::AARCH64 => assets::REMOTE_SERVER_MAC_AARCH64, + }; + NamedJob { + name: format!("bundle_mac_{arch}"), + job: bundle_job(deps) + .runs_on(runners::MAC_DEFAULT) + .envs(bundle_envs(platform)) + .add_step(steps::checkout_repo()) + .when_some(release_channel, |job, release_channel| { + job.add_step(set_release_channel(platform, release_channel)) + }) + .add_step(steps::setup_node()) + .add_step(steps::setup_sentry()) + .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac)) + .add_step(bundle_mac(arch)) + .add_step(upload_artifact(&format!( + "target/{arch}-apple-darwin/release/{artifact_name}" + ))) + 
.add_step(upload_artifact(&format!( + "target/{remote_server_artifact_name}" + ))), + } +} + +pub fn upload_artifact(path: &str) -> Step { + let name = Path::new(path).file_name().unwrap().to_str().unwrap(); + Step::new(format!("@actions/upload-artifact {}", name)) + .uses( + "actions", + "upload-artifact", + "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5 ) + // N.B. "name" is the name for the asset. The uploaded + // file retains its filename. + .add_with(("name", name)) + .add_with(("path", path)) + .add_with(("if-no-files-found", "error")) } -pub fn bundle_mac(arch: runners::Arch) -> Step { - named::bash(&format!("./script/bundle-mac {arch}-apple-darwin")) +pub(crate) fn bundle_linux( + arch: Arch, + release_channel: Option, + deps: &[&NamedJob], +) -> NamedJob { + let platform = Platform::Linux; + let artifact_name = match arch { + Arch::X86_64 => assets::LINUX_X86_64, + Arch::AARCH64 => assets::LINUX_AARCH64, + }; + let remote_server_artifact_name = match arch { + Arch::X86_64 => assets::REMOTE_SERVER_LINUX_X86_64, + Arch::AARCH64 => assets::REMOTE_SERVER_LINUX_AARCH64, + }; + NamedJob { + name: format!("bundle_linux_{arch}"), + job: bundle_job(deps) + .runs_on(arch.linux_bundler()) + .envs(bundle_envs(platform)) + .add_step(steps::checkout_repo()) + .when_some(release_channel, |job, release_channel| { + job.add_step(set_release_channel(platform, release_channel)) + }) + .add_step(steps::setup_sentry()) + .map(steps::install_linux_dependencies) + .add_step(steps::script("./script/bundle-linux")) + .add_step(upload_artifact(&format!("target/release/{artifact_name}"))) + .add_step(upload_artifact(&format!( + "target/{remote_server_artifact_name}" + ))), + } } -pub(crate) fn bundle_linux_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job { - let artifact_name = format!("zed-{}-{}.tar.gz", vars::GITHUB_SHA, arch.triple()); - let remote_server_artifact_name = format!( - "zed-remote-server-{}-{}.tar.gz", - vars::GITHUB_SHA, - arch.triple() - ); - 
bundle_job(deps) - .runs_on(arch.linux_bundler()) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_sentry()) - .map(steps::install_linux_dependencies) - .add_step(steps::script("./script/bundle-linux")) - .add_step(steps::upload_artifact( - &artifact_name, - "target/release/zed-*.tar.gz", - )) - .add_step(steps::upload_artifact( - &remote_server_artifact_name, - "target/zed-remote-server-*.gz", - )) - .outputs( - [ - ("zed".to_string(), artifact_name), - ("remote-server".to_string(), remote_server_artifact_name), - ] - .into_iter() - .collect::>(), - ) +pub(crate) fn bundle_windows( + arch: Arch, + release_channel: Option, + deps: &[&NamedJob], +) -> NamedJob { + let platform = Platform::Windows; + pub fn bundle_windows(arch: Arch) -> Step { + let step = match arch { + Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"), + Arch::AARCH64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"), + }; + step.working_directory("${{ env.ZED_WORKSPACE }}") + } + let artifact_name = match arch { + Arch::X86_64 => assets::WINDOWS_X86_64, + Arch::AARCH64 => assets::WINDOWS_AARCH64, + }; + NamedJob { + name: format!("bundle_windows_{arch}"), + job: bundle_job(deps) + .runs_on(runners::WINDOWS_DEFAULT) + .envs(bundle_envs(platform)) + .add_step(steps::checkout_repo()) + .when_some(release_channel, |job, release_channel| { + job.add_step(set_release_channel(platform, release_channel)) + }) + .add_step(steps::setup_sentry()) + .add_step(bundle_windows(arch)) + .add_step(upload_artifact(&format!("target/{artifact_name}"))), + } } -pub(crate) fn bundle_windows_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job { - use vars::GITHUB_SHA; - let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.exe"); - bundle_job(deps) - .runs_on(runners::WINDOWS_DEFAULT) - .envs(windows_bundle_envs()) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_sentry()) - .add_step(bundle_windows(arch)) - .add_step(steps::upload_artifact( - 
&artifact_name, - "${{ env.SETUP_PATH }}", - )) - .outputs( - [("zed".to_string(), artifact_name)] - .into_iter() - .collect::>(), - ) +fn set_release_channel(platform: Platform, release_channel: ReleaseChannel) -> Step { + match release_channel { + ReleaseChannel::Nightly => set_release_channel_to_nightly(platform), + } } -pub fn bundle_windows(arch: runners::Arch) -> Step { - let step = match arch { - runners::Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"), - runners::Arch::ARM64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"), - }; - step.working_directory("${{ env.ZED_WORKSPACE }}") +fn set_release_channel_to_nightly(platform: Platform) -> Step { + match platform { + Platform::Linux | Platform::Mac => named::bash(indoc::indoc! {r#" + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + "#}), + Platform::Windows => named::pwsh(indoc::indoc! 
{r#" + $ErrorActionPreference = "Stop" + $version = git rev-parse --short HEAD + Write-Host "Publishing version: $version on release channel nightly" + "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" + "#}) + .working_directory("${{ env.ZED_WORKSPACE }}"), + } } diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 88874754706661939490fc470c58d8a0c867c0d8..595b185f4530d9b6cce52e51e8657cfb70987721 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -65,7 +65,7 @@ pub(crate) fn run_tests() -> Workflow { )), should_build_nix.guard(build_nix( Platform::Mac, - Arch::ARM64, + Arch::AARCH64, "debug", // *don't* cache the built output Some("-zed-editor-[0-9.]*-nightly"), @@ -74,7 +74,7 @@ pub(crate) fn run_tests() -> Workflow { ]; let tests_pass = tests_pass(&jobs); - let mut workflow = named::workflow() + named::workflow() .add_event(Event::default() .push( Push::default() @@ -89,11 +89,14 @@ pub(crate) fn run_tests() -> Workflow { ) .add_env(( "CARGO_TERM_COLOR", "always" )) .add_env(( "RUST_BACKTRACE", 1 )) - .add_env(( "CARGO_INCREMENTAL", 0 )); - for job in jobs { - workflow = workflow.add_job(job.name, job.job) - } - workflow.add_job(tests_pass.name, tests_pass.job) + .add_env(( "CARGO_INCREMENTAL", 0 )) + .map(|mut workflow| { + for job in jobs { + workflow = workflow.add_job(job.name, job.job) + } + workflow + }) + .add_job(tests_pass.name, tests_pass.job) } // Generates a bash script that checks changed files against regex patterns @@ -226,6 +229,7 @@ fn check_style() -> NamedJob { release_job(&[]) .runs_on(runners::LINUX_MEDIUM) .add_step(steps::checkout_repo()) + .add_step(steps::cache_rust_dependencies_namespace()) .add_step(steps::setup_pnpm()) .add_step(steps::script("./script/prettier")) .add_step(steps::script("./script/check-todos")) @@ -273,6 +277,7 @@ fn check_dependencies() -> NamedJob { release_job(&[]) 
.runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo()) + .add_step(steps::cache_rust_dependencies_namespace()) .add_step(install_cargo_machete()) .add_step(run_cargo_machete()) .add_step(check_cargo_lock()) @@ -287,6 +292,7 @@ fn check_workspace_binaries() -> NamedJob { .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(Platform::Linux)) .map(steps::install_linux_dependencies) + .add_step(steps::cache_rust_dependencies_namespace()) .add_step(steps::script("cargo build -p collab")) .add_step(steps::script("cargo build --workspace --bins --examples")) .add_step(steps::cleanup_cargo_config(Platform::Linux)), @@ -309,6 +315,9 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { platform == Platform::Linux, steps::install_linux_dependencies, ) + .when(platform == Platform::Linux, |this| { + this.add_step(steps::cache_rust_dependencies_namespace()) + }) .add_step(steps::setup_node()) .add_step(steps::clippy(platform)) .add_step(steps::cargo_install_nextest(platform)) @@ -368,7 +377,7 @@ fn doctests() -> NamedJob { release_job(&[]) .runs_on(runners::LINUX_DEFAULT) .add_step(steps::checkout_repo()) - .add_step(steps::cache_rust_dependencies()) + .add_step(steps::cache_rust_dependencies_namespace()) .map(steps::install_linux_dependencies) .add_step(steps::setup_cargo_config(Platform::Linux)) .add_step(run_doctests()) @@ -381,6 +390,7 @@ fn check_licenses() -> NamedJob { Job::default() .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo()) + .add_step(steps::cache_rust_dependencies_namespace()) .add_step(steps::script("./script/check-licenses")) .add_step(steps::script("./script/generate-licenses")), ) @@ -420,7 +430,7 @@ fn check_docs() -> NamedJob { .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(Platform::Linux)) // todo(ci): un-inline build_docs/action.yml here - .add_step(steps::cache_rust_dependencies()) + .add_step(steps::cache_rust_dependencies_namespace()) .add_step( 
lychee_link_check("./docs/src/**/*"), // check markdown links ) diff --git a/tooling/xtask/src/tasks/workflows/runners.rs b/tooling/xtask/src/tasks/workflows/runners.rs index d001439b175789e709bd733f7660cc3200721d0a..df98826f8afb7dccb3f9e268fe427634caec8dba 100644 --- a/tooling/xtask/src/tasks/workflows/runners.rs +++ b/tooling/xtask/src/tasks/workflows/runners.rs @@ -22,30 +22,23 @@ impl Into for Runner { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Arch { X86_64, - ARM64, + AARCH64, } impl std::fmt::Display for Arch { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Arch::X86_64 => write!(f, "x86_64"), - Arch::ARM64 => write!(f, "aarch64"), + Arch::AARCH64 => write!(f, "aarch64"), } } } impl Arch { - pub fn triple(&self) -> &'static str { - match self { - Arch::X86_64 => "x86_64-unknown-linux-gnu", - Arch::ARM64 => "aarch64-unknown-linux-gnu", - } - } - pub fn linux_bundler(&self) -> Runner { match self { Arch::X86_64 => LINUX_X86_BUNDLER, - Arch::ARM64 => LINUX_ARM_BUNDLER, + Arch::AARCH64 => LINUX_ARM_BUNDLER, } } } @@ -66,3 +59,8 @@ impl std::fmt::Display for Platform { } } } + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ReleaseChannel { + Nightly, +} diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 5a6196b599ec56b68948afc55316175779877a48..ddbb81a3c54733a2cda68fecd31fc7cb09718e35 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -86,22 +86,10 @@ pub fn cleanup_cargo_config(platform: Platform) -> Step { step.if_condition(Expression::new("always()")) } -pub fn upload_artifact(name: &str, path: &str) -> Step { - Step::new(format!("@actions/upload-artifact {}", name)) - .uses( - "actions", - "upload-artifact", - "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5 - ) - .add_with(("name", name)) - .add_with(("path", path)) - .add_with(("if-no-files-found", "error")) -} - pub fn 
clear_target_dir_if_large(platform: Platform) -> Step { match platform { Platform::Windows => named::pwsh("./script/clear-target-dir-if-larger-than.ps1 250"), - Platform::Linux => named::bash("./script/clear-target-dir-if-larger-than 100"), + Platform::Linux => named::bash("./script/clear-target-dir-if-larger-than 250"), Platform::Mac => named::bash("./script/clear-target-dir-if-larger-than 300"), } } @@ -113,13 +101,8 @@ pub(crate) fn clippy(platform: Platform) -> Step { } } -pub(crate) fn cache_rust_dependencies() -> Step { - named::uses( - "swatinem", - "rust-cache", - "9d47c6ad4b02e050fd481d890b2ea34778fd09d6", // v2 - ) - .with(("save-if", "${{ github.ref == 'refs/heads/main' }}")) +pub(crate) fn cache_rust_dependencies_namespace() -> Step { + named::uses("namespacelabs", "nscloud-cache-action", "v1").add_with(("cache", "rust")) } fn setup_linux() -> Step { diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index b852e12400098c3d49f806c0010458d123ad24fa..9a7ffed5960f3b8f3f762a0977f78e17e86365e2 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -2,7 +2,7 @@ use std::cell::RefCell; use gh_workflow::{Concurrency, Env, Expression}; -use crate::tasks::workflows::steps::NamedJob; +use crate::tasks::workflows::{runners::Platform, steps::NamedJob}; macro_rules! secret { ($secret_name:ident) => { @@ -16,6 +16,7 @@ macro_rules! var { }; } +secret!(ANTHROPIC_API_KEY); secret!(APPLE_NOTARIZATION_ISSUER_ID); secret!(APPLE_NOTARIZATION_KEY); secret!(APPLE_NOTARIZATION_KEY_ID); @@ -32,34 +33,38 @@ secret!(SENTRY_AUTH_TOKEN); secret!(ZED_CLIENT_CHECKSUM_SEED); secret!(ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON); secret!(ZED_SENTRY_MINIDUMP_ENDPOINT); +secret!(SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN); // todo(ci) make these secrets too... 
var!(AZURE_SIGNING_ACCOUNT_NAME); var!(AZURE_SIGNING_CERT_PROFILE_NAME); var!(AZURE_SIGNING_ENDPOINT); -pub const GITHUB_SHA: &str = "${{ github.event.pull_request.head.sha || github.sha }}"; +pub fn bundle_envs(platform: Platform) -> Env { + let env = Env::default() + .add("CARGO_INCREMENTAL", 0) + .add("ZED_CLIENT_CHECKSUM_SEED", ZED_CLIENT_CHECKSUM_SEED) + .add("ZED_MINIDUMP_ENDPOINT", ZED_SENTRY_MINIDUMP_ENDPOINT); -pub fn mac_bundle_envs() -> Env { - Env::default() - .add("MACOS_CERTIFICATE", MACOS_CERTIFICATE) - .add("MACOS_CERTIFICATE_PASSWORD", MACOS_CERTIFICATE_PASSWORD) - .add("APPLE_NOTARIZATION_KEY", APPLE_NOTARIZATION_KEY) - .add("APPLE_NOTARIZATION_KEY_ID", APPLE_NOTARIZATION_KEY_ID) - .add("APPLE_NOTARIZATION_ISSUER_ID", APPLE_NOTARIZATION_ISSUER_ID) -} - -pub fn windows_bundle_envs() -> Env { - Env::default() - .add("AZURE_TENANT_ID", AZURE_SIGNING_TENANT_ID) - .add("AZURE_CLIENT_ID", AZURE_SIGNING_CLIENT_ID) - .add("AZURE_CLIENT_SECRET", AZURE_SIGNING_CLIENT_SECRET) - .add("ACCOUNT_NAME", AZURE_SIGNING_ACCOUNT_NAME) - .add("CERT_PROFILE_NAME", AZURE_SIGNING_CERT_PROFILE_NAME) - .add("ENDPOINT", AZURE_SIGNING_ENDPOINT) - .add("FILE_DIGEST", "SHA256") - .add("TIMESTAMP_DIGEST", "SHA256") - .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com") + match platform { + Platform::Linux => env, + Platform::Mac => env + .add("MACOS_CERTIFICATE", MACOS_CERTIFICATE) + .add("MACOS_CERTIFICATE_PASSWORD", MACOS_CERTIFICATE_PASSWORD) + .add("APPLE_NOTARIZATION_KEY", APPLE_NOTARIZATION_KEY) + .add("APPLE_NOTARIZATION_KEY_ID", APPLE_NOTARIZATION_KEY_ID) + .add("APPLE_NOTARIZATION_ISSUER_ID", APPLE_NOTARIZATION_ISSUER_ID), + Platform::Windows => env + .add("AZURE_TENANT_ID", AZURE_SIGNING_TENANT_ID) + .add("AZURE_CLIENT_ID", AZURE_SIGNING_CLIENT_ID) + .add("AZURE_CLIENT_SECRET", AZURE_SIGNING_CLIENT_SECRET) + .add("ACCOUNT_NAME", AZURE_SIGNING_ACCOUNT_NAME) + .add("CERT_PROFILE_NAME", AZURE_SIGNING_CERT_PROFILE_NAME) + .add("ENDPOINT", AZURE_SIGNING_ENDPOINT) 
+ .add("FILE_DIGEST", "SHA256") + .add("TIMESTAMP_DIGEST", "SHA256") + .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com"), + } } pub(crate) fn one_workflow_per_non_main_branch() -> Concurrency { @@ -110,3 +115,33 @@ impl PathCondition { } } } + +pub mod assets { + // NOTE: these asset names also exist in the zed.dev codebase. + pub const MAC_AARCH64: &str = "Zed-aarch64.dmg"; + pub const MAC_X86_64: &str = "Zed-x86_64.dmg"; + pub const LINUX_AARCH64: &str = "zed-linux-aarch64.tar.gz"; + pub const LINUX_X86_64: &str = "zed-linux-x86_64.tar.gz"; + pub const WINDOWS_X86_64: &str = "Zed-x86_64.exe"; + pub const WINDOWS_AARCH64: &str = "Zed-aarch64.exe"; + + pub const REMOTE_SERVER_MAC_AARCH64: &str = "zed-remote-server-macos-aarch64.gz"; + pub const REMOTE_SERVER_MAC_X86_64: &str = "zed-remote-server-macos-x86_64.gz"; + pub const REMOTE_SERVER_LINUX_AARCH64: &str = "zed-remote-server-linux-aarch64.gz"; + pub const REMOTE_SERVER_LINUX_X86_64: &str = "zed-remote-server-linux-x86_64.gz"; + + pub fn all() -> Vec<&'static str> { + vec![ + MAC_AARCH64, + MAC_X86_64, + LINUX_AARCH64, + LINUX_X86_64, + WINDOWS_X86_64, + WINDOWS_AARCH64, + REMOTE_SERVER_MAC_AARCH64, + REMOTE_SERVER_MAC_X86_64, + REMOTE_SERVER_LINUX_AARCH64, + REMOTE_SERVER_LINUX_X86_64, + ] + } +}